[med-svn] [lamarc] 01/02: New upstream version 2.1.10+dfsg

Andreas Tille tille at debian.org
Fri Dec 1 18:47:27 UTC 2017


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository lamarc.

commit 9cddc6e12343be9584325b92ebeb3ca9a98eac2d
Author: Andreas Tille <tille at debian.org>
Date:   Fri Dec 1 19:46:45 2017 +0100

    New upstream version 2.1.10+dfsg
---
 Makefile.am                                        | 1320 +++++
 config/conf.h.in                                   |  103 +
 config/local_build.h                               |  131 +
 configure.ac                                       |  508 ++
 doc/cross-notes.txt                                |  203 +
 doc/html/batch_converter/README.txt                |   12 +
 doc/html/batch_converter/chrom1.mig                |   14 +
 doc/html/batch_converter/chrom1_lamarc.html        |   71 +
 doc/html/batch_converter/chrom1_lamarc.xml         |   64 +
 doc/html/batch_converter/chrom2.mig                |   24 +
 doc/html/batch_converter/chrom2_lamarc.html        |   92 +
 doc/html/batch_converter/chrom2_lamarc.xml         |   85 +
 doc/html/batch_converter/chrom3_lamarc.html        |   89 +
 doc/html/batch_converter/chrom3_lamarc.xml         |   82 +
 doc/html/batch_converter/chrom3_phase_cmd.html     |   36 +
 doc/html/batch_converter/chrom3_phase_cmd.xml      |   29 +
 doc/html/batch_converter/chrom3microsat.mig        |    8 +
 doc/html/batch_converter/chrom3snp.mig             |   14 +
 .../batch_converter/exported-lamarc-input.html     |  230 +
 doc/html/batch_converter/exported-lamarc-input.xml |  223 +
 doc/html/batch_converter/images/CombinedPanels.png |  Bin 0 -> 67336 bytes
 doc/html/batch_converter/images/DataFilesTab.png   |  Bin 0 -> 25485 bytes
 .../images/DataPartitionsDivTab.png                |  Bin 0 -> 88376 bytes
 .../images/DataPartitionsMigTab.png                |  Bin 0 -> 48960 bytes
 doc/html/batch_converter/images/DebugLogTab.png    |  Bin 0 -> 23383 bytes
 .../batch_converter/images/DivMigMatrixTab.png     |  Bin 0 -> 72849 bytes
 doc/html/batch_converter/images/DivergeOff.png     |  Bin 0 -> 64697 bytes
 doc/html/batch_converter/images/EditMigration.png  |  Bin 0 -> 32193 bytes
 .../batch_converter/images/EditPanelCorrection.png |  Bin 0 -> 20924 bytes
 doc/html/batch_converter/images/FirstParent.png    |  Bin 0 -> 19278 bytes
 .../images/FirstParent2Children.png                |  Bin 0 -> 19682 bytes
 .../batch_converter/images/FullParentsImage.png    |  Bin 0 -> 66175 bytes
 .../batch_converter/images/InterumParentImage.png  |  Bin 0 -> 65907 bytes
 .../images/MigrationOnlyMatrixTab.png              |  Bin 0 -> 41108 bytes
 .../batch_converter/images/PanelCorrectionOn.png   |  Bin 0 -> 60165 bytes
 doc/html/batch_converter/images/SecondParent.png   |  Bin 0 -> 16307 bytes
 .../lam_conv_chrom1_export_file_selection.png      |  Bin 0 -> 20452 bytes
 .../images/lam_conv_chrom1_export_warn_1.png       |  Bin 0 -> 7972 bytes
 .../images/lam_conv_chrom1_input.png               |  Bin 0 -> 29752 bytes
 .../images/lam_conv_chrom1_segment_panel.png       |  Bin 0 -> 35863 bytes
 .../lam_conv_chrom2_export_warn_first_position.png |  Bin 0 -> 9068 bytes
 .../images/lam_conv_chrom2_export_warn_length.png  |  Bin 0 -> 7481 bytes
 .../lam_conv_chrom2_export_warn_locations.png      |  Bin 0 -> 14089 bytes
 .../images/lam_conv_chrom2_export_warn_map.png     |  Bin 0 -> 7510 bytes
 .../images/lam_conv_chrom2_input_snp.png           |  Bin 0 -> 31351 bytes
 .../images/lam_conv_chrom2_segment1.png            |  Bin 0 -> 38966 bytes
 .../lam_conv_chrom2_segment2_done_locations.png    |  Bin 0 -> 8755 bytes
 .../images/lam_conv_chrom3_error_map_position.png  |  Bin 0 -> 7615 bytes
 .../lam_conv_chrom3_error_phase_file_needed.png    |  Bin 0 -> 12513 bytes
 .../images/lam_conv_chrom3_input.png               |  Bin 0 -> 36784 bytes
 .../images/lam_conv_chrom3_region_panel.png        |  Bin 0 -> 18646 bytes
 .../images/lam_conv_chrom3_region_table.png        |  Bin 0 -> 8181 bytes
 .../images/lam_conv_chrom3_segment_snp.png         |  Bin 0 -> 34967 bytes
 .../images/lam_conv_export_file_mac_expanded.png   |  Bin 0 -> 80420 bytes
 .../images/lam_conv_export_file_mac_minimal.png    |  Bin 0 -> 38428 bytes
 doc/html/batch_converter/sample-conv-cmd.html      |  351 ++
 doc/html/batch_converter/sample-conv-cmd.xml       |  343 ++
 doc/html/bayes.html                                |  484 ++
 doc/html/bayes_howto.html                          |  334 ++
 doc/html/changes.html                              |  497 ++
 doc/html/comparing_curvefiles.sxc                  |  Bin 0 -> 366596 bytes
 doc/html/comparing_curvefiles.xls                  |  Bin 0 -> 1071104 bytes
 doc/html/compiling.html                            |  466 ++
 doc/html/converter.html                            |  441 ++
 doc/html/converter_cmd.html                        |  564 +++
 doc/html/curve-smoothing.html                      |  113 +
 doc/html/data_models.html                          |  228 +
 doc/html/data_required.html                        |  135 +
 doc/html/divergence.html                           |  113 +
 doc/html/forces.html                               |  394 ++
 doc/html/gamma.html                                |  169 +
 doc/html/genetic_map.html                          |  385 ++
 doc/html/genotype.html                             |  165 +
 doc/html/glossary.html                             |  430 ++
 doc/html/growthmenu.html                           |   39 +
 doc/html/images/LamarcAnalysisScreen.png           |  Bin 0 -> 14274 bytes
 doc/html/images/LamarcDataScreen.png               |  Bin 0 -> 17264 bytes
 doc/html/images/LamarcIOScreen.png                 |  Bin 0 -> 21204 bytes
 doc/html/images/LamarcMainScreen.png               |  Bin 0 -> 20674 bytes
 doc/html/images/LamarcOverviewScreen.png           |  Bin 0 -> 14497 bytes
 doc/html/images/LamarcSearchScreen.png             |  Bin 0 -> 16564 bytes
 doc/html/images/browser-gtk.gif                    |  Bin 0 -> 17520 bytes
 doc/html/images/correlated1.gif                    |  Bin 0 -> 3378 bytes
 doc/html/images/correlated2.gif                    |  Bin 0 -> 3350 bytes
 doc/html/images/datatab-2-osx.gif                  |  Bin 0 -> 78134 bytes
 doc/html/images/datatab-gtk.gif                    |  Bin 0 -> 38990 bytes
 doc/html/images/datatab-help-gtk.gif               |  Bin 0 -> 26802 bytes
 doc/html/images/gui_lam_conv.gif                   |  Bin 0 -> 326 bytes
 doc/html/images/lam_conv.gif                       |  Bin 0 -> 258 bytes
 doc/html/images/lamarc-128.png                     |  Bin 0 -> 1741 bytes
 doc/html/images/lamarc-256.png                     |  Bin 0 -> 3218 bytes
 doc/html/images/lamarc.gif                         |  Bin 0 -> 1359 bytes
 doc/html/images/loci-gtk.png                       |  Bin 0 -> 41295 bytes
 doc/html/images/loci2-gtk.png                      |  Bin 0 -> 22944 bytes
 doc/html/images/membershiptab-1-gtk.gif            |  Bin 0 -> 70450 bytes
 doc/html/images/membershiptab-2-gtk.gif            |  Bin 0 -> 66782 bytes
 doc/html/images/membershiptab-help-1-gtk.gif       |  Bin 0 -> 11441 bytes
 doc/html/images/membershiptab-help-2-gtk.gif       |  Bin 0 -> 72340 bytes
 doc/html/images/partitions-gtk.png                 |  Bin 0 -> 40015 bytes
 doc/html/images/partitions2-gtk.png                |  Bin 0 -> 43551 bytes
 doc/html/images/populations-gtk.png                |  Bin 0 -> 39694 bytes
 doc/html/images/regiontab-1-gtk.gif                |  Bin 0 -> 28495 bytes
 doc/html/images/regiontab-2-gtk.gif                |  Bin 0 -> 29367 bytes
 doc/html/images/startup-gtk.gif                    |  Bin 0 -> 15396 bytes
 doc/html/images/startup-gtk.png                    |  Bin 0 -> 23582 bytes
 doc/html/images/startup-win.gif                    |  Bin 0 -> 36195 bytes
 doc/html/images/tracer_trend.png                   |  Bin 0 -> 77568 bytes
 doc/html/images/uncorrelated.gif                   |  Bin 0 -> 3432 bytes
 doc/html/images/variably_correlated.gif            |  Bin 0 -> 4314 bytes
 doc/html/index.html                                |   97 +
 doc/html/insumfile.2reg3rep.html                   | 3327 +++++++++++++
 doc/html/insumfile.2reg3rep.xml                    | 3320 +++++++++++++
 doc/html/insumfile.3rep.html                       | 1607 ++++++
 doc/html/insumfile.3rep.xml                        | 1600 ++++++
 doc/html/limitations.html                          |   97 +
 doc/html/mapping.html                              |  466 ++
 doc/html/menu.html                                 | 1179 +++++
 doc/html/messages.html                             | 1123 +++++
 doc/html/migration_matrix.html                     |  150 +
 doc/html/output.html                               |  375 ++
 doc/html/outsumfile.2reg3rep.html                  | 3341 +++++++++++++
 doc/html/outsumfile.2reg3rep.xml                   | 3334 +++++++++++++
 doc/html/outsumfile.3rep.html                      | 1621 +++++++
 doc/html/outsumfile.3rep.xml                       | 1614 ++++++
 doc/html/overview.html                             |  130 +
 doc/html/panels.html                               |  163 +
 doc/html/parallel.html                             |  298 ++
 doc/html/parameters.html                           |  182 +
 doc/html/regions.html                              |   95 +
 doc/html/search.html                               |  375 ++
 doc/html/tracer.html                               |  132 +
 doc/html/trait_mapping/README.txt                  |   13 +
 doc/html/trait_mapping/lamarc-trait-input.html     |  164 +
 doc/html/trait_mapping/lamarc-trait-input.xml      |  157 +
 doc/html/trait_mapping/mapfile_funny-nose.txt      | 1009 ++++
 doc/html/trait_mapping/outfile.txt                 |  376 ++
 doc/html/trait_mapping/traitCmd.html               |  129 +
 doc/html/trait_mapping/traitCmd.xml                |  122 +
 doc/html/trait_mapping/traitCmd.xml.txt            |  122 +
 doc/html/trait_mapping/traitData.mig               |   19 +
 doc/html/troubleshooting.html                      |  575 +++
 doc/html/tutorial.html                             |  603 +++
 doc/html/tutorial2.html                            |  277 ++
 doc/html/upcoming.html                             |   65 +
 doc/html/viral_data.html                           |  231 +
 doc/html/xmlinput.html                             | 1166 +++++
 doc/licenses/boost.txt                             |   23 +
 doc/licenses/gpl.txt                               |  339 ++
 doc/licenses/lamarc.txt                            |   14 +
 doc/licenses/lgpl.txt                              |  517 ++
 doc/licenses/mingw.txt                             |   23 +
 doc/licenses/wx.txt                                |   53 +
 doc/licenses/wxdoc.txt                             |   60 +
 doc/licenses/zlib.txt                              |   21 +
 doc/testfiles/infile.2pop                          |  786 +++
 doc/testfiles/infile.baselocus                     |  158 +
 doc/testfiles/infile.bayes                         |  130 +
 doc/testfiles/infile.coalgrowmig                   |  394 ++
 doc/testfiles/infile.coalmigmsat                   | 5125 ++++++++++++++++++++
 doc/testfiles/infile.coalmigrep                    | 1702 +++++++
 doc/testfiles/infile.coalrec                       |  380 ++
 doc/testfiles/infile.coalregrep                    |  886 ++++
 doc/testfiles/infile.divergence                    |  430 ++
 doc/testfiles/infile.growmigheat                   |  393 ++
 doc/testfiles/infile.multicat                      |  129 +
 doc/testfiles/infile.multilocus                    |  345 ++
 doc/testfiles/infile.quick                         |  129 +
 doc/testfiles/infile.regheat                       |  195 +
 doc/testfiles/infile_gamma1.xml                    | 2417 +++++++++
 doc/testfiles/infile_gamma25.xml                   | 2416 +++++++++
 doc/testfiles/sample_infile.xml                    |  369 ++
 doc/testfiles/sample_outfile.txt                   |  317 ++
 doc/testfiles/sample_outsumfile.xml                |  576 +++
 doc/testfiles/sample_tracefile_coal_1.txt          |  301 ++
 doc/testfiles/v2.0.infiles/infile.2pop             |  760 +++
 doc/testfiles/v2.0.infiles/infile.baselocus        |  154 +
 doc/testfiles/v2.0.infiles/infile.bayes            |  100 +
 doc/testfiles/v2.0.infiles/infile.coalgrowmig      |  386 ++
 doc/testfiles/v2.0.infiles/infile.coalmigmsat      | 5113 +++++++++++++++++++
 doc/testfiles/v2.0.infiles/infile.coalmigrep       | 1631 +++++++
 doc/testfiles/v2.0.infiles/infile.coalrec          |  365 ++
 doc/testfiles/v2.0.infiles/infile.coalregrep       |  819 ++++
 doc/testfiles/v2.0.infiles/infile.growmigheat      |  366 ++
 doc/testfiles/v2.0.infiles/infile.multicat         |  112 +
 doc/testfiles/v2.0.infiles/infile.multilocus       |  287 ++
 doc/testfiles/v2.0.infiles/infile.quick            |   98 +
 doc/testfiles/v2.0.infiles/infile.regheat          |  146 +
 doc/testfiles/ward_short.phy                       |   73 +
 doc/wx-notes.txt                                   |  207 +
 doc/wx-osx-notes.txt                               |   57 +
 doc/xmltags                                        |   53 +
 resources/Info.plist.in                            |   36 +
 resources/command.in                               |   20 +
 resources/empty16.bmp                              |  Bin 0 -> 1146 bytes
 resources/empty16.xpm                              |   20 +
 resources/excl16.bmp                               |  Bin 0 -> 822 bytes
 resources/excl16.xpm                               |   21 +
 resources/giraffe32.bmp                            |  Bin 0 -> 3126 bytes
 resources/giraffe32.xpm                            |   48 +
 resources/lam_conv.icns                            |  Bin 0 -> 918 bytes
 resources/lam_conv.ico                             |  Bin 0 -> 3262 bytes
 resources/lam_conv_rc.rc                           |    3 +
 resources/lamarc.command                           |   24 +
 resources/lamarc.icns                              |  Bin 0 -> 884 bytes
 resources/lamarc.ico                               |  Bin 0 -> 2238 bytes
 resources/lamarc.xsd                               |  347 ++
 resources/lamarc_rc.rc                             |    1 +
 src/BUGS                                           |  453 ++
 src/bayeslike/bayesanalyzer_1d.cpp                 |  808 +++
 src/bayeslike/bayesanalyzer_1d.h                   |  138 +
 src/bayeslike/bayescurve.cpp                       |  514 ++
 src/bayeslike/bayescurve.h                         |   80 +
 src/bayeslike/bayesparamlike_1d.cpp                |  167 +
 src/bayeslike/bayesparamlike_1d.h                  |   52 +
 src/control/chainmanager.cpp                       | 2207 +++++++++
 src/control/chainmanager.h                         |  139 +
 src/control/chainout.cpp                           |   63 +
 src/control/chainout.h                             |  120 +
 src/control/chainpack.cpp                          |  544 +++
 src/control/chainpack.h                            |  128 +
 src/control/chainparam.cpp                         |  378 ++
 src/control/chainparam.h                           |  133 +
 src/control/constants.cpp                          |  112 +
 src/control/constants.h                            |  287 ++
 src/control/defaults.cpp                           |  368 ++
 src/control/defaults.h                             |  355 ++
 src/control/definitions.h                          |   79 +
 src/control/dynatracer.cpp                         |  592 +++
 src/control/dynatracer.h                           |  366 ++
 src/control/errhandling.cpp                        |  290 ++
 src/control/errhandling.h                          |  390 ++
 src/control/lamarc.cpp                             |  815 ++++
 src/control/lamarc.h                               |   43 +
 src/control/regiongammainfo.cpp                    |  146 +
 src/control/regiongammainfo.h                      |   72 +
 src/control/registry.cpp                           | 1122 +++++
 src/control/registry.h                             |  204 +
 src/control/sumfilehandler.cpp                     |  868 ++++
 src/control/sumfilehandler.h                       |  107 +
 src/control/types.h                                |   69 +
 src/control/userparam.cpp                          |  397 ++
 src/control/userparam.h                            |  220 +
 src/control/xmlsum_strings.cpp                     |  159 +
 src/control/xmlsum_strings.h                       |  160 +
 src/convErr/gc_cmdfile_err.cpp                     |  131 +
 src/convErr/gc_cmdfile_err.h                       |  113 +
 src/convErr/gc_data_missing_err.cpp                |   35 +
 src/convErr/gc_data_missing_err.h                  |   43 +
 src/convErr/gc_errhandling.cpp                     |  144 +
 src/convErr/gc_errhandling.h                       |   82 +
 src/convErr/gc_individual_err.cpp                  |   57 +
 src/convErr/gc_individual_err.h                    |   73 +
 src/convErr/gc_infile_err.cpp                      |  253 +
 src/convErr/gc_infile_err.h                        |  170 +
 src/convErr/gc_locus_err.cpp                       |  152 +
 src/convErr/gc_locus_err.h                         |  147 +
 src/convErr/gc_map_err.cpp                         |   42 +
 src/convErr/gc_map_err.h                           |   46 +
 src/convErr/gc_phase_err.cpp                       |  154 +
 src/convErr/gc_phase_err.h                         |  143 +
 src/convErr/gc_structures_err.cpp                  |  258 +
 src/convErr/gc_structures_err.h                    |  244 +
 src/convErr/gc_trait_err.cpp                       |   90 +
 src/convErr/gc_trait_err.h                         |   91 +
 src/convModel/cmdfileschema.cpp                    |  134 +
 src/convModel/cmdfileschema.h                      |   40 +
 src/convModel/gc_creation_info.cpp                 |  129 +
 src/convModel/gc_creation_info.h                   |   61 +
 src/convModel/gc_datastore.cpp                     |  661 +++
 src/convModel/gc_datastore.h                       |  257 +
 src/convModel/gc_datastore_export.cpp              | 1684 +++++++
 src/convModel/gc_datastore_files.cpp               |  616 +++
 src/convModel/gc_datastore_readcmd.cpp             | 1114 +++++
 src/convModel/gc_datastore_writebatch.cpp          |  539 ++
 src/convModel/gc_default.cpp                       |   30 +
 src/convModel/gc_default.h                         |   39 +
 src/convModel/gc_dictionary.cpp                    |  107 +
 src/convModel/gc_dictionary.h                      |   38 +
 src/convModel/gc_exportable.cpp                    |  133 +
 src/convModel/gc_exportable.h                      |   58 +
 src/convModel/gc_file.cpp                          |  272 ++
 src/convModel/gc_file.h                            |   77 +
 src/convModel/gc_file_info.cpp                     |  215 +
 src/convModel/gc_file_info.h                       |   77 +
 src/convModel/gc_id_set.cpp                        |   25 +
 src/convModel/gc_id_set.h                          |   26 +
 src/convModel/gc_individual.cpp                    |  198 +
 src/convModel/gc_individual.h                      |   80 +
 src/convModel/gc_mapfile.cpp                       |  123 +
 src/convModel/gc_mapfile.h                         |   66 +
 src/convModel/gc_migration.cpp                     |  252 +
 src/convModel/gc_migration.h                       |   85 +
 src/convModel/gc_panel.cpp                         |  174 +
 src/convModel/gc_panel.h                           |   72 +
 src/convModel/gc_parent.cpp                        |  268 +
 src/convModel/gc_parent.h                          |   84 +
 src/convModel/gc_phase.cpp                         |  158 +
 src/convModel/gc_phase.h                           |   51 +
 src/convModel/gc_phase_info.cpp                    |  584 +++
 src/convModel/gc_phase_info.h                      |  150 +
 src/convModel/gc_phenotype.cpp                     |  154 +
 src/convModel/gc_phenotype.h                       |   77 +
 src/convModel/gc_set_util.cpp                      |   78 +
 src/convModel/gc_set_util.h                        |   39 +
 src/convModel/gc_structure_maps.cpp                |  413 ++
 src/convModel/gc_structure_maps.h                  |  198 +
 src/convModel/gc_structures.cpp                    | 3084 ++++++++++++
 src/convModel/gc_structures.h                      |  331 ++
 src/convModel/gc_trait.cpp                         |  130 +
 src/convModel/gc_trait.h                           |   63 +
 src/convModel/gc_trait_allele.cpp                  |   66 +
 src/convModel/gc_trait_allele.h                    |   43 +
 src/convModel/gc_types.cpp                         |  107 +
 src/convModel/gc_types.h                           |  132 +
 src/convParse/gc_genotype_resolution.cpp           |  117 +
 src/convParse/gc_genotype_resolution.h             |   58 +
 src/convParse/gc_loci_match.cpp                    |  165 +
 src/convParse/gc_loci_match.h                      |   59 +
 src/convParse/gc_locus.cpp                         |  678 +++
 src/convParse/gc_locus.h                           |  130 +
 src/convParse/gc_migrate.cpp                       |  386 ++
 src/convParse/gc_migrate.h                         |   61 +
 src/convParse/gc_parse.cpp                         |  413 ++
 src/convParse/gc_parse.h                           |  111 +
 src/convParse/gc_parse_block.cpp                   |  166 +
 src/convParse/gc_parse_block.h                     |   73 +
 src/convParse/gc_parse_locus.cpp                   |   93 +
 src/convParse/gc_parse_locus.h                     |   52 +
 src/convParse/gc_parse_pop.cpp                     |   51 +
 src/convParse/gc_parse_pop.h                       |   37 +
 src/convParse/gc_parse_sample.cpp                  |  121 +
 src/convParse/gc_parse_sample.h                    |   54 +
 src/convParse/gc_parser.cpp                        |  368 ++
 src/convParse/gc_parser.h                          |   70 +
 src/convParse/gc_phylip.cpp                        |  190 +
 src/convParse/gc_phylip.h                          |   46 +
 src/convParse/gc_pop_match.cpp                     |  159 +
 src/convParse/gc_pop_match.h                       |   56 +
 src/convParse/gc_population.cpp                    |  120 +
 src/convParse/gc_population.h                      |   53 +
 src/convParse/gc_region.cpp                        |  308 ++
 src/convParse/gc_region.h                          |  102 +
 src/convParse/gc_sequential_data.cpp               |  187 +
 src/convParse/gc_sequential_data.h                 |   78 +
 src/convParse/tixml_util.cpp                       |  178 +
 src/convParse/tixml_util.h                         |   36 +
 src/convStrings/cnv_strings.cpp                    |  124 +
 src/convStrings/cnv_strings.h                      |  127 +
 src/convStrings/gc_strings.h                       |  550 +++
 src/convStrings/gc_strings_cmdfile.cpp             |   41 +
 src/convStrings/gc_strings_cmdfile.h               |   53 +
 src/convStrings/gc_strings_creation.cpp            |   18 +
 src/convStrings/gc_strings_creation.h              |   26 +
 src/convStrings/gc_strings_data.cpp                |   17 +
 src/convStrings/gc_strings_data.h                  |   30 +
 src/convStrings/gc_strings_err.cpp                 |   99 +
 src/convStrings/gc_strings_individual.cpp          |   20 +
 src/convStrings/gc_strings_individual.h            |   35 +
 src/convStrings/gc_strings_infile.cpp              |   43 +
 src/convStrings/gc_strings_infile.h                |   63 +
 src/convStrings/gc_strings_io.cpp                  |   19 +
 src/convStrings/gc_strings_io.h                    |   27 +
 src/convStrings/gc_strings_locus.cpp               |   41 +
 src/convStrings/gc_strings_locus.h                 |   46 +
 src/convStrings/gc_strings_map.cpp                 |   21 +
 src/convStrings/gc_strings_map.h                   |  108 +
 src/convStrings/gc_strings_mig.cpp                 |   25 +
 src/convStrings/gc_strings_mig.h                   |   33 +
 src/convStrings/gc_strings_parse.cpp               |   20 +
 src/convStrings/gc_strings_parse.h                 |   32 +
 src/convStrings/gc_strings_parse_locus.cpp         |   17 +
 src/convStrings/gc_strings_parse_locus.h           |   25 +
 src/convStrings/gc_strings_phase.cpp               |   38 +
 src/convStrings/gc_strings_phase.h                 |   50 +
 src/convStrings/gc_strings_pop.cpp                 |   16 +
 src/convStrings/gc_strings_pop.h                   |   24 +
 src/convStrings/gc_strings_region.cpp              |   24 +
 src/convStrings/gc_strings_region.h                |   36 +
 src/convStrings/gc_strings_str_a_m.cpp             |  228 +
 src/convStrings/gc_strings_str_n_z.cpp             |  210 +
 src/convStrings/gc_strings_structures.cpp          |   56 +
 src/convStrings/gc_strings_structures.h            |   69 +
 src/convStrings/gc_strings_trait.cpp               |   27 +
 src/convStrings/gc_strings_trait.h                 |   39 +
 src/convUtil/gc_file_util.cpp                      |  111 +
 src/convUtil/gc_file_util.h                        |   57 +
 src/conversion/ConverterIf.cpp                     |   61 +
 src/conversion/ConverterUI.cpp                     |  831 ++++
 src/conversion/ConverterUIMain.cpp                 |  441 ++
 src/conversion/Converter_ConverterIf.h             |   55 +
 src/conversion/Converter_ConverterUI.h             |  118 +
 src/conversion/Converter_DataSourceException.h     |  149 +
 src/conversion/Converter_DataSourceIf.h            |   33 +
 src/conversion/Converter_HapConverter.h            |   68 +
 src/conversion/Converter_IndividualDS.h            |  125 +
 src/conversion/Converter_LamarcDS.h                |  206 +
 src/conversion/Converter_MigrateConverter.h        |   88 +
 src/conversion/Converter_ModelDS.h                 |  116 +
 src/conversion/Converter_ParserUtil.h              |   61 +
 src/conversion/Converter_PhylipConverter.h         |   56 +
 src/conversion/Converter_PopulationDS.h            |  149 +
 src/conversion/Converter_RegionDS.h                |  164 +
 src/conversion/Converter_Sequence.h                |   79 +
 src/conversion/Converter_SpaceConverter.h          |   54 +
 src/conversion/Converter_SpacingDS.h               |   75 +
 src/conversion/Converter_UserFileUtil.h            |   36 +
 src/conversion/Converter_XmlParserUtil.h           |   57 +
 src/conversion/Converter_types.h                   |   48 +
 src/conversion/DataSourceException.cpp             |   59 +
 src/conversion/DataSourceIf.cpp                    |   31 +
 src/conversion/Documentation                       |  133 +
 src/conversion/HapConverter.cpp                    |  314 ++
 src/conversion/IndividualDS.cpp                    |  234 +
 src/conversion/LamarcDS.cpp                        |  646 +++
 src/conversion/Migrate1.txt                        |   88 +
 src/conversion/MigrateConverter.cpp                |  679 +++
 src/conversion/ModelDS.cpp                         |  264 +
 src/conversion/ParserUtil.cpp                      |  353 ++
 src/conversion/PhylipConverter.cpp                 |  315 ++
 src/conversion/PopulationDS.cpp                    |  427 ++
 src/conversion/RegionDS.cpp                        |  473 ++
 src/conversion/Sample7.txt                         |  144 +
 src/conversion/Sequence.cpp                        |  201 +
 src/conversion/SpaceConverter.cpp                  |  165 +
 src/conversion/SpacingDS.cpp                       |  187 +
 src/conversion/UserFileUtil.cpp                    |   26 +
 src/conversion/XmlParserUtil.cpp                   |  276 ++
 src/conversion/nomenuglobals.cpp                   |   36 +
 src/conversion/nomenuglobals.h                     |   48 +
 src/conversion/testfiles/fin-hel1                  |    8 +
 src/conversion/testfiles/fin-hel1r                 |    9 +
 src/conversion/testfiles/fin-hel2                  |    7 +
 src/conversion/testfiles/fin-hel2r                 |    9 +
 src/conversion/testfiles/fin-hel3                  |    7 +
 src/conversion/testfiles/fin-hel3r                 |    9 +
 src/conversion/testfiles/fin-kar1                  |   11 +
 src/conversion/testfiles/fin-kar1r                 |   13 +
 src/conversion/testfiles/fin-kar2                  |   11 +
 src/conversion/testfiles/fin-kar2r                 |   13 +
 src/conversion/testfiles/fin-kar3                  |   11 +
 src/conversion/testfiles/fin-kar3r                 |   13 +
 src/conversion/testfiles/helsinkimap               |    1 +
 src/conversion/testfiles/helsinkiphase             |    1 +
 src/conversion/testfiles/kareliaphase              |    1 +
 src/conversion/testfiles/msatr                     |   49 +
 src/conversion/testfiles/phyout                    |  404 ++
 src/datalike/calculators.cpp                       |  452 ++
 src/datalike/calculators.h                         |   63 +
 src/datalike/cellmanager.cpp                       |   62 +
 src/datalike/cellmanager.h                         |   59 +
 src/datalike/datapack.cpp                          |  722 +++
 src/datalike/datapack.h                            |  152 +
 src/datalike/datatype.cpp                          |  707 +++
 src/datalike/datatype.h                            |  228 +
 src/datalike/dlcalc.cpp                            | 1263 +++++
 src/datalike/dlcalc.h                              |  213 +
 src/datalike/dlcell.cpp                            |  697 +++
 src/datalike/dlcell.h                              |  314 ++
 src/datalike/dlmodel.cpp                           | 3634 ++++++++++++++
 src/datalike/dlmodel.h                             |  671 +++
 src/datalike/funcMax.cpp                           |  164 +
 src/datalike/funcMax.h                             |   78 +
 src/datalike/haplotypes.cpp                        |  359 ++
 src/datalike/haplotypes.h                          |   64 +
 src/datalike/locus.cpp                             | 1285 +++++
 src/datalike/locus.h                               |  226 +
 src/datalike/locuscell.cpp                         |  112 +
 src/datalike/locuscell.h                           |   62 +
 src/datalike/phenotypes.cpp                        |  203 +
 src/datalike/phenotypes.h                          |   64 +
 src/datalike/region.cpp                            | 1299 +++++
 src/datalike/region.h                              |  224 +
 src/datalike/tipdata.cpp                           |  159 +
 src/datalike/tipdata.h                             |   80 +
 src/force/epoch.cpp                                |   26 +
 src/force/epoch.h                                  |   39 +
 src/force/event.cpp                                | 1182 +++++
 src/force/event.h                                  |  348 ++
 src/force/force.cpp                                | 1940 ++++++++
 src/force/force.h                                  |  778 +++
 src/force/forceparam.cpp                           |  749 +++
 src/force/forceparam.h                             |  171 +
 src/force/forcesummary.cpp                         | 1094 +++++
 src/force/forcesummary.h                           |  217 +
 src/force/paramstat.cpp                            |  232 +
 src/force/paramstat.h                              |  256 +
 src/force/priorreport.cpp                          |   69 +
 src/force/priorreport.h                            |   51 +
 src/force/stair.cpp                                |   71 +
 src/force/stair.h                                  |   49 +
 src/force/timemanager.cpp                          | 2194 +++++++++
 src/force/timemanager.h                            |  291 ++
 src/force/timesize.cpp                             |  115 +
 src/force/timesize.h                               |   81 +
 src/guiconv/batchconverter.cpp                     |  101 +
 src/guiconv/batchconverter.h                       |   41 +
 src/guiconv/gc_assigntab.cpp                       |  753 +++
 src/guiconv/gc_assigntab.h                         |  160 +
 src/guiconv/gc_block_dialogs.cpp                   |  442 ++
 src/guiconv/gc_block_dialogs.h                     |  139 +
 src/guiconv/gc_cmdline.cpp                         |  132 +
 src/guiconv/gc_cmdline.h                           |   41 +
 src/guiconv/gc_color.cpp                           |   35 +
 src/guiconv/gc_color.h                             |   26 +
 src/guiconv/gc_data.cpp                            |  629 +++
 src/guiconv/gc_data.h                              |   81 +
 src/guiconv/gc_data_display.cpp                    |  103 +
 src/guiconv/gc_data_display.h                      |   38 +
 src/guiconv/gc_dialog.cpp                          |  681 +++
 src/guiconv/gc_dialog.h                            |  293 ++
 src/guiconv/gc_event_ids.h                         |   89 +
 src/guiconv/gc_event_publisher.cpp                 |   90 +
 src/guiconv/gc_event_publisher.h                   |   74 +
 src/guiconv/gc_export_dialogs.cpp                  |   91 +
 src/guiconv/gc_export_dialogs.h                    |   52 +
 src/guiconv/gc_file_dialogs.cpp                    |  783 +++
 src/guiconv/gc_file_dialogs.h                      |  181 +
 src/guiconv/gc_file_list.cpp                       |  135 +
 src/guiconv/gc_file_list.h                         |   66 +
 src/guiconv/gc_frame.cpp                           |  354 ++
 src/guiconv/gc_frame.h                             |   77 +
 src/guiconv/gc_layout.cpp                          |   27 +
 src/guiconv/gc_layout.h                            |   36 +
 src/guiconv/gc_locitab.cpp                         |  100 +
 src/guiconv/gc_locitab.h                           |   46 +
 src/guiconv/gc_locus_dialogs.cpp                   |  960 ++++
 src/guiconv/gc_locus_dialogs.h                     |  288 ++
 src/guiconv/gc_logic.cpp                           |  131 +
 src/guiconv/gc_logic.h                             |   51 +
 src/guiconv/gc_matrix_display.cpp                  |   32 +
 src/guiconv/gc_matrix_display.h                    |   27 +
 src/guiconv/gc_menu_actors.cpp                     |  150 +
 src/guiconv/gc_menu_actors.h                       |   96 +
 src/guiconv/gc_migration_dialogs.cpp               |  478 ++
 src/guiconv/gc_migration_dialogs.h                 |  154 +
 src/guiconv/gc_migtab.cpp                          |  455 ++
 src/guiconv/gc_migtab.h                            |   84 +
 src/guiconv/gc_panel_dialogs.cpp                   |  176 +
 src/guiconv/gc_panel_dialogs.h                     |   94 +
 src/guiconv/gc_parent_dialogs.cpp                  |  618 +++
 src/guiconv/gc_parent_dialogs.h                    |  157 +
 src/guiconv/gc_poptab.cpp                          |   81 +
 src/guiconv/gc_poptab.h                            |   46 +
 src/guiconv/gc_population_dialogs.cpp              |  285 ++
 src/guiconv/gc_population_dialogs.h                |  118 +
 src/guiconv/gc_quantum.cpp                         |  103 +
 src/guiconv/gc_quantum.h                           |   71 +
 src/guiconv/gc_region_dialogs.cpp                  |  418 ++
 src/guiconv/gc_region_dialogs.h                    |  202 +
 src/guiconv/gc_regiontab.cpp                       |   93 +
 src/guiconv/gc_regiontab.h                         |   46 +
 src/guiconv/gc_trait_dialogs.cpp                   |   39 +
 src/guiconv/gc_trait_dialogs.h                     |   21 +
 src/guiconv/gc_unit_dialogs.cpp                    |   34 +
 src/guiconv/gc_unit_dialogs.h                      |   21 +
 src/guiconv/guiconverter.cpp                       |  163 +
 src/guiconv/guiconverter.h                         |   48 +
 src/guiutil/gc_clickpanel.cpp                      |  291 ++
 src/guiutil/gc_clickpanel.h                        |   88 +
 src/guiutil/gc_gridpanel.cpp                       |  297 ++
 src/guiutil/gc_gridpanel.h                         |  113 +
 src/guiutil/gc_text_ctrl.cpp                       |   52 +
 src/guiutil/gc_text_ctrl.h                         |   46 +
 src/guiutil/gc_validators.cpp                      |   66 +
 src/guiutil/gc_validators.h                        |   54 +
 src/lamarcmenus/coalmenus.cpp                      |   98 +
 src/lamarcmenus/coalmenus.h                        |   67 +
 src/lamarcmenus/constraintmenus.cpp                |  418 ++
 src/lamarcmenus/constraintmenus.h                  |  163 +
 src/lamarcmenus/datafilenamedialog.cpp             |  102 +
 src/lamarcmenus/datafilenamedialog.h               |   44 +
 src/lamarcmenus/datamodelmenu.cpp                  |  788 +++
 src/lamarcmenus/datamodelmenu.h                    |  327 ++
 src/lamarcmenus/diseasemenus.cpp                   |  111 +
 src/lamarcmenus/diseasemenus.h                     |   70 +
 src/lamarcmenus/divmenus.cpp                       |   81 +
 src/lamarcmenus/divmenus.h                         |   51 +
 src/lamarcmenus/divmigmenus.cpp                    |  116 +
 src/lamarcmenus/divmigmenus.h                      |   65 +
 src/lamarcmenus/forcesmenus.cpp                    |  186 +
 src/lamarcmenus/forcesmenus.h                      |   66 +
 src/lamarcmenus/growthmenus.cpp                    |  123 +
 src/lamarcmenus/growthmenus.h                      |   74 +
 src/lamarcmenus/lamarc_strings.cpp                 |  181 +
 src/lamarcmenus/lamarc_strings.h                   |  107 +
 src/lamarcmenus/lamarcheaderdialog.cpp             |  146 +
 src/lamarcmenus/lamarcheaderdialog.h               |   28 +
 src/lamarcmenus/lamarcmenu.cpp                     |   47 +
 src/lamarcmenus/lamarcmenu.h                       |   27 +
 src/lamarcmenus/lamarcmenuitems.cpp                |  541 +++
 src/lamarcmenus/lamarcmenuitems.h                  |  296 ++
 src/lamarcmenus/logselectmenus.cpp                 |   52 +
 src/lamarcmenus/logselectmenus.h                   |   47 +
 src/lamarcmenus/migmenus.cpp                       |  108 +
 src/lamarcmenus/migmenus.h                         |   65 +
 src/lamarcmenus/nomenufilereaddialog.cpp           |   50 +
 src/lamarcmenus/nomenufilereaddialog.h             |   32 +
 src/lamarcmenus/outfilemenus.cpp                   |   43 +
 src/lamarcmenus/outfilemenus.h                     |   46 +
 src/lamarcmenus/overviewmenus.cpp                  |  782 +++
 src/lamarcmenus/overviewmenus.h                    |  322 ++
 src/lamarcmenus/popsizemenu.cpp                    |   57 +
 src/lamarcmenus/popsizemenu.h                      |   50 +
 src/lamarcmenus/priormenus.cpp                     |  201 +
 src/lamarcmenus/priormenus.h                       |  135 +
 src/lamarcmenus/profilemenus.cpp                   |  135 +
 src/lamarcmenus/profilemenus.h                     |   94 +
 src/lamarcmenus/recmenus.cpp                       |   68 +
 src/lamarcmenus/recmenus.h                         |   55 +
 src/lamarcmenus/regiongammamenus.cpp               |   52 +
 src/lamarcmenus/regiongammamenus.h                 |   47 +
 src/lamarcmenus/traitmodelmenu.cpp                 |  287 ++
 src/lamarcmenus/traitmodelmenu.h                   |  145 +
 src/lamarcmenus/treesummenus.cpp                   |   92 +
 src/lamarcmenus/treesummenus.h                     |   87 +
 src/menu/dialog.cpp                                |   22 +
 src/menu/dialog.h                                  |   25 +
 src/menu/dialognoinput.cpp                         |   78 +
 src/menu/dialognoinput.h                           |   53 +
 src/menu/dialogrepeat.cpp                          |   51 +
 src/menu/dialogrepeat.h                            |   38 +
 src/menu/display.cpp                               |  456 ++
 src/menu/display.h                                 |   94 +
 src/menu/matrixitem.cpp                            |  148 +
 src/menu/matrixitem.h                              |   75 +
 src/menu/menu_strings.cpp                          |   36 +
 src/menu/menu_strings.h                            |   46 +
 src/menu/menudefs.h                                |   18 +
 src/menu/menuerror.h                               |   29 +
 src/menu/menuinteraction.cpp                       |   22 +
 src/menu/menuinteraction.h                         |   28 +
 src/menu/menuitem.cpp                              |  142 +
 src/menu/menuitem.h                                |  163 +
 src/menu/menutypedefs.h                            |   29 +
 src/menu/newmenu.cpp                               |  211 +
 src/menu/newmenu.h                                 |   62 +
 src/menu/newmenuitems.cpp                          |  294 ++
 src/menu/newmenuitems.h                            |  228 +
 src/menu/setmenuitem.cpp                           |  199 +
 src/menu/setmenuitem.h                             |  217 +
 src/menu/togglemenuitem.cpp                        |   19 +
 src/menu/togglemenuitem.h                          |   24 +
 src/menu/twodtable.cpp                             |  173 +
 src/menu/twodtable.h                               |   42 +
 src/postlike/analyzer.cpp                          |   67 +
 src/postlike/analyzer.h                            |  118 +
 src/postlike/derivatives.cpp                       |  405 ++
 src/postlike/likelihood.cpp                        | 1454 ++++++
 src/postlike/likelihood.h                          |  315 ++
 src/postlike/maximizer.cpp                         | 2143 ++++++++
 src/postlike/maximizer.h                           |  200 +
 src/postlike/maximizer_strings.cpp                 |   47 +
 src/postlike/maximizer_strings.h                   |   51 +
 src/postlike/plforces.cpp                          | 3012 ++++++++++++
 src/postlike/plforces.h                            |  721 +++
 src/postlike/plotstat.cpp                          |   45 +
 src/postlike/plotstat.h                            |   77 +
 src/postlike/profile.cpp                           |  821 ++++
 src/report/curvefiles.cpp                          |  273 ++
 src/report/curvefiles.h                            |   41 +
 src/report/outputfile.cpp                          |  161 +
 src/report/outputfile.h                            |   90 +
 src/report/reportpage.cpp                          | 3017 ++++++++++++
 src/report/reportpage.h                            |  306 ++
 src/report/runreport.cpp                           |  727 +++
 src/report/runreport.h                             |  136 +
 src/report/spreadsheet.cpp                         |  240 +
 src/report/spreadsheet.h                           |   55 +
 src/report/xml_report.cpp                          |  646 +++
 src/report/xml_report.h                            |   62 +
 src/tools/mathx.cpp                                | 1740 +++++++
 src/tools/mathx.h                                  |  151 +
 src/tools/random.cpp                               |  160 +
 src/tools/random.h                                 |   50 +
 src/tools/rangex.cpp                               |  734 +++
 src/tools/rangex.h                                 |  124 +
 src/tools/stringx.cpp                              | 2206 +++++++++
 src/tools/stringx.h                                |  293 ++
 src/tools/timex.cpp                                |   69 +
 src/tools/timex.h                                  |   37 +
 src/tools/tools.h                                  |   22 +
 src/tools/vector_constants.cpp                     |   68 +
 src/tools/vector_constants.h                       |   38 +
 src/tools/vectorx.cpp                              |   62 +
 src/tools/vectorx.h                                |  217 +
 src/tree/argtree.cpp                               |  528 ++
 src/tree/argtree.h                                 |  100 +
 src/tree/arranger.cpp                              | 1440 ++++++
 src/tree/arranger.h                                |  413 ++
 src/tree/arranger_types.cpp                        |   31 +
 src/tree/arranger_types.h                          |   38 +
 src/tree/arrangervec.cpp                           |  313 ++
 src/tree/arrangervec.h                             |   89 +
 src/tree/branch.cpp                                | 2295 +++++++++
 src/tree/branch.h                                  |  682 +++
 src/tree/branchbuffer.cpp                          |  413 ++
 src/tree/branchbuffer.h                            |  115 +
 src/tree/branchtag.cpp                             |  112 +
 src/tree/branchtag.h                               |   62 +
 src/tree/chain.cpp                                 |  684 +++
 src/tree/chain.h                                   |  143 +
 src/tree/chainstate.cpp                            |  238 +
 src/tree/chainstate.h                              |  102 +
 src/tree/collector.cpp                             |  467 ++
 src/tree/collector.h                               |  138 +
 src/tree/collmanager.cpp                           |  451 ++
 src/tree/collmanager.h                             |   98 +
 src/tree/fc_status.cpp                             |  359 ++
 src/tree/fc_status.h                               |  186 +
 src/tree/individual.cpp                            |  431 ++
 src/tree/individual.h                              |  104 +
 src/tree/intervaldata.cpp                          |  208 +
 src/tree/intervaldata.h                            |  148 +
 src/tree/newick.cpp                                |  437 ++
 src/tree/newick.h                                  |  126 +
 src/tree/parameter.cpp                             |  678 +++
 src/tree/parameter.h                               |  257 +
 src/tree/partition.cpp                             |   16 +
 src/tree/partition.h                               |  108 +
 src/tree/prior.cpp                                 |  157 +
 src/tree/prior.h                                   |   67 +
 src/tree/range.cpp                                 | 1289 +++++
 src/tree/range.h                                   |  536 ++
 src/tree/rectree.cpp                               | 1168 +++++
 src/tree/sticksum.h                                |   55 +
 src/tree/summary.cpp                               |  707 +++
 src/tree/summary.h                                 |  259 +
 src/tree/tempident.cpp                             |  183 +
 src/tree/tempident.h                               |   87 +
 src/tree/timelist.cpp                              | 1621 +++++++
 src/tree/timelist.h                                |  187 +
 src/tree/tree.cpp                                  | 1565 ++++++
 src/tree/tree.h                                    |  362 ++
 src/tree/treesum.cpp                               |  930 ++++
 src/tree/treesum.h                                 |  182 +
 src/ui_interface/chainparam_interface.cpp          |  538 ++
 src/ui_interface/chainparam_interface.h            |  260 +
 src/ui_interface/constraint_interface.cpp          |  206 +
 src/ui_interface/constraint_interface.h            |   91 +
 src/ui_interface/data_interface.cpp                |  218 +
 src/ui_interface/data_interface.h                  |  118 +
 src/ui_interface/datamodel_interface.cpp           |  735 +++
 src/ui_interface/datamodel_interface.h             |  331 ++
 src/ui_interface/force_interface.cpp               | 1538 ++++++
 src/ui_interface/force_interface.h                 |  595 +++
 src/ui_interface/front_end_warnings.cpp            |   40 +
 src/ui_interface/front_end_warnings.h              |   33 +
 src/ui_interface/prior_interface.cpp               |  303 ++
 src/ui_interface/prior_interface.h                 |  110 +
 src/ui_interface/profile_interface.cpp             |  315 ++
 src/ui_interface/profile_interface.h               |  130 +
 src/ui_interface/setget.h                          |  617 +++
 src/ui_interface/setgetmachine.cpp                 |  385 ++
 src/ui_interface/setgetmachine.h                   |  105 +
 src/ui_interface/traitmodel_interface.cpp          |  286 ++
 src/ui_interface/traitmodel_interface.h            |  105 +
 src/ui_interface/ui_constants.cpp                  |   22 +
 src/ui_interface/ui_constants.h                    |   31 +
 src/ui_interface/ui_id.cpp                         |  147 +
 src/ui_interface/ui_id.h                           |   53 +
 src/ui_interface/ui_interface.cpp                  |  346 ++
 src/ui_interface/ui_interface.h                    |   97 +
 src/ui_interface/ui_regid.cpp                      |   79 +
 src/ui_interface/ui_regid.h                        |   51 +
 src/ui_interface/ui_strings.cpp                    |  248 +
 src/ui_interface/ui_strings.h                      |  253 +
 src/ui_interface/ui_warnings.cpp                   |   20 +
 src/ui_interface/ui_warnings.h                     |   28 +
 src/ui_interface/userparam_interface.cpp           |  664 +++
 src/ui_interface/userparam_interface.h             |  292 ++
 src/ui_util/undoredochain.cpp                      |  151 +
 src/ui_util/undoredochain.h                        |   78 +
 src/ui_vars/report_strings.cpp                     |   42 +
 src/ui_vars/report_strings.h                       |   50 +
 src/ui_vars/ui_vars.cpp                            |  117 +
 src/ui_vars/ui_vars.h                              |   64 +
 src/ui_vars/ui_vars_chainparams.cpp                |  545 +++
 src/ui_vars/ui_vars_chainparams.h                  |  124 +
 src/ui_vars/ui_vars_component.cpp                  |   42 +
 src/ui_vars/ui_vars_component.h                    |   39 +
 src/ui_vars/ui_vars_datamodel.cpp                  | 1102 +++++
 src/ui_vars/ui_vars_datamodel.h                    |  230 +
 src/ui_vars/ui_vars_datapackplus.cpp               |  629 +++
 src/ui_vars/ui_vars_datapackplus.h                 |  114 +
 src/ui_vars/ui_vars_forces.cpp                     | 2975 ++++++++++++
 src/ui_vars/ui_vars_forces.h                       |  605 +++
 src/ui_vars/ui_vars_prior.cpp                      |  217 +
 src/ui_vars/ui_vars_prior.h                        |   76 +
 src/ui_vars/ui_vars_traitmodels.cpp                |  373 ++
 src/ui_vars/ui_vars_traitmodels.h                  |  101 +
 src/ui_vars/ui_vars_userparams.cpp                 |  560 +++
 src/ui_vars/ui_vars_userparams.h                   |  164 +
 src/xml/lamarc_input_schema.h                      |  245 +
 src/xml/lamarcschema.cpp                           |  328 ++
 src/xml/parsetreeschema.cpp                        |  429 ++
 src/xml/parsetreeschema.h                          |  141 +
 src/xml/parsetreetodata.cpp                        | 1780 +++++++
 src/xml/parsetreetodata.h                          |  102 +
 src/xml/parsetreetosettings.cpp                    | 1484 ++++++
 src/xml/parsetreetosettings.h                      |   93 +
 src/xml/parsetreewalker.cpp                        |  155 +
 src/xml/parsetreewalker.h                          |   56 +
 src/xml/tixml_base.cpp                             |  166 +
 src/xml/tixml_base.h                               |   52 +
 src/xml/toxml.cpp                                  |  332 ++
 src/xml/toxml.h                                    |  106 +
 src/xml/xml.cpp                                    |  295 ++
 src/xml/xml.h                                      |   78 +
 src/xml/xml_strings.cpp                            |  357 ++
 src/xml/xml_strings.h                              |  341 ++
 810 files changed, 227393 insertions(+)

diff --git a/Makefile.am b/Makefile.am
new file mode 100644
index 0000000..4b19939
--- /dev/null
+++ b/Makefile.am
@@ -0,0 +1,1320 @@
+## Process this file with automake to produce Makefile.in
+
+lamarc_srcdir=$(top_srcdir)/src
+
+SUFFIXES        =       .rc
+
+AM_CXXFLAGS			= -D@LAMARC_CODE_OSTYPE@
+AM_CFLAGS			=
+AM_LDFLAGS			=
+
+if LAMARC_APP_MSW
+AM_CXXFLAGS			+= -static-libgcc
+AM_LDFLAGS			+= -static
+endif
+
+optflags                        = -O3 -funroll-loops
+debugflags                      = -ggdb
+## ############################################################
+## programs to build
+## ############################################################
+
+bin_PROGRAMS = lamarc
+
+EXTRA_PROGRAMS  =       old_lam_conv
+
+if CONVERTER
+if GUI
+bin_PROGRAMS += lam_conv
+else
+bin_PROGRAMS += batch_lam_conv
+endif
+endif
+
+if LAMARC_APP_OSX
+bin_PROGRAMS += lamarc.app
+if CONVERTER
+if GUI
+bin_PROGRAMS += lam_conv.app
+endif
+endif
+endif
+
+
+
+## ############################################################
+## to cross compile to Windows -- needs automation -- EWFIX
+## apparently we need both the generic and the specific rules
+## here for automake to work. Something must be broken.
+## the general rule allows automake to deduce that it can do
+## something to make objects from .rc files but it cannot
+## actually apply them successfully
+## ############################################################
+
+.rc.o:
+	@LAMARC_HOST_PREFIX@windres -i $< -o $@ -I $(top_srcdir)/resources
+
+lamarc_rc.o     :       $(top_srcdir)/resources/lamarc_rc.rc
+	@LAMARC_HOST_PREFIX@windres -i $< -o $@ -I $(top_srcdir)/resources
+
+lam_conv_rc.o   :       $(top_srcdir)/resources/lam_conv_rc.rc
+	@LAMARC_HOST_PREFIX@windres -i $< -o $@ -I $(top_srcdir)/resources
+
+## ############################################################
+## libraries to build
+## ############################################################
+
+## extra libraries to link in. We start with just tinyxml, but
+## we have a bunch of conditionals below which use "+=" to add
+## to this list
+LDADD				=
+
+noinst_LIBRARIES		= libtinyxml.a
+
+libtinyxml_a_CXXFLAGS	= -DTIXML_USE_STL
+
+libtinyxml_a_SOURCES	= \
+			tinyxml/tinyxml.h \
+			tinyxml/tinyxml.cpp \
+			tinyxml/tinyxmlerror.cpp \
+			tinyxml/tinyxmlparser.cpp \
+			tinyxml/tinystr.h \
+			tinyxml/tinystr.cpp
+
+## ############################################################
+## setting up compiler flags
+## ############################################################
+
+lamarc_includes         = \
+			-I $(builddir)/config \
+			-I $(top_srcdir)/config \
+			-I $(lamarc_srcdir)/bayeslike \
+			-I $(lamarc_srcdir)/control \
+			-I $(lamarc_srcdir)/conversion \
+			-I $(lamarc_srcdir)/convErr \
+			-I $(lamarc_srcdir)/convModel \
+			-I $(lamarc_srcdir)/convParse \
+			-I $(lamarc_srcdir)/convStrings \
+			-I $(lamarc_srcdir)/convUtil \
+			-I $(lamarc_srcdir)/datalike \
+			-I $(lamarc_srcdir)/force \
+			-I $(lamarc_srcdir)/guiconv \
+			-I $(lamarc_srcdir)/guiutil \
+			-I $(lamarc_srcdir)/lamarcmenus \
+			-I $(lamarc_srcdir)/menu \
+			-I $(lamarc_srcdir)/postlike \
+			-I $(lamarc_srcdir)/report \
+			-I $(lamarc_srcdir)/tools \
+			-I $(lamarc_srcdir)/tree \
+			-I $(lamarc_srcdir)/ui_interface \
+			-I $(lamarc_srcdir)/ui_util \
+			-I $(lamarc_srcdir)/ui_vars \
+			-I $(lamarc_srcdir)/xml \
+			-I $(top_srcdir)/boost \
+			-I $(top_srcdir)/resources \
+			-I $(top_srcdir)/tinyxml
+
+
+## include resource file if needed
+lamarc_resources                =
+lam_conv_resources              =
+batch_lam_conv_resources        =
+if LAMARC_APP_MSW
+lamarc_resources                +=      resources/lamarc_rc.rc
+lam_conv_resources              +=      resources/lam_conv_rc.rc
+batch_lam_conv_resources        +=      resources/lam_conv_rc.rc
+endif
+
+
+## flags to set under different options to ./configure
+
+if NODATA
+AM_CXXFLAGS += -DSTATIONARIES
+AM_CXXFLAGS += -DNOPROGRESSBAR
+endif
+
+if DENOVO
+AM_CXXFLAGS += -DDENOVO
+endif
+
+if DEBUG
+if USERCXXFLAGS
+# no other flags if user set
+else
+AM_CXXFLAGS += $(debugflags)
+endif
+else
+AM_CXXFLAGS     += -DNDEBUG
+if USERCXXFLAGS
+# no other flags if user set
+else
+AM_CXXFLAGS += $(optflags)
+endif
+endif
+
+if DMALLOC
+LDADD           += -ldmallocxx
+AM_CXXFLAGS     += -DDMALLOC_FUNC_CHECK
+else
+endif
+
+if EFENCE
+LDADD           += -lefence
+else
+endif
+
+if GCOV
+AM_CXXFLAGS += -fprofile-arcs -ftest-coverage
+else
+endif
+
+if JSIM
+AM_CXXFLAGS += -DJSIM
+else
+endif
+
+if PROFILE
+AM_CXXFLAGS += -pg
+else
+endif
+
+if NOPROGRESS
+AM_CXXFLAGS += -DNOPROGRESSBAR
+endif
+
+if TREETRACK
+AM_CXXFLAGS += -DTREETRACK
+else
+endif
+
+if UNIVB
+AM_CXXFLAGS += -arch ppc -arch i386
+endif
+
+lamarc_CXXFLAGS			= $(AM_CXXFLAGS) -Wall -Wextra -Wno-unused $(lamarc_includes) -DTIXML_USE_STL
+old_lam_conv_CXXFLAGS	= $(AM_CXXFLAGS) $(lamarc_includes)
+lam_conv_CXXFLAGS		= $(AM_CXXFLAGS) -Wall -Wextra -Wno-unused $(lamarc_includes) @WX_CXXFLAGS@ -DTIXML_USE_STL
+batch_lam_conv_CXXFLAGS	= $(AM_CXXFLAGS) -Wall -Wextra -Wno-unused $(lamarc_includes) @WX_CXXFLAGS@ -DTIXML_USE_STL
+
+if ARRCHECK
+AM_CXXFLAGS += -DHAPARRANGERTEST
+endif
+
+## ############################################################
+## detailing program sources
+## ############################################################
+
+## tell Mac where the /Developer/Tools are
+MACDEVTOOLS = /Developer/Tools
+
+## install-sh shouldn't be here, but there's a bug with AC_CONFIG_AUX_DIR
+## that keeps the configure script from finding this in the
+## config directory
+scripts_to_dist = install-sh
+
+py_parallel	= scripts/pyParallel/README \
+			scripts/pyParallel/divide_data.py \
+			scripts/pyParallel/combine_regions.py \
+			scripts/pyParallel/combine_replicates.py \
+			scripts/pyParallel/parallelCommon.py
+
+lam_conv_icons  = resources/lam_conv_rc.rc \
+		resources/lam_conv.ico \
+		resources/lam_conv.icns \
+		resources/empty16.bmp \
+		resources/excl16.bmp \
+		resources/giraffe32.bmp
+
+lamarc_icons    = resources/lamarc_rc.rc \
+		resources/lamarc.ico \
+		resources/lamarc.icns
+
+EXTRA_lamarc_SOURCES = $(lamarc_icons) \
+		       config/local_build.h
+
+bayeslike_sources   = \
+		    src/bayeslike/bayesanalyzer_1d.h \
+		    src/bayeslike/bayesanalyzer_1d.cpp \
+		    src/bayeslike/bayescurve.h \
+		    src/bayeslike/bayescurve.cpp \
+		    src/bayeslike/bayesparamlike_1d.h \
+		    src/bayeslike/bayesparamlike_1d.cpp
+
+control_sources     = \
+		    src/control/chainmanager.h \
+		    src/control/chainmanager.cpp \
+		    src/control/chainout.h \
+		    src/control/chainout.cpp \
+		    src/control/chainpack.h \
+		    src/control/chainpack.cpp \
+		    src/control/chainparam.h \
+		    src/control/chainparam.cpp \
+		    src/control/constants.h \
+		    src/control/constants.cpp \
+		    src/control/defaults.h \
+		    src/control/defaults.cpp \
+		    src/control/definitions.h \
+		    src/control/dynatracer.cpp \
+		    src/control/dynatracer.h \
+		    src/control/errhandling.h \
+		    src/control/errhandling.cpp \
+		    src/control/lamarc.h \
+		    src/control/lamarc.cpp \
+		    src/control/regiongammainfo.h \
+		    src/control/regiongammainfo.cpp \
+		    src/control/registry.h \
+		    src/control/registry.cpp \
+		    src/control/sumfilehandler.h \
+		    src/control/sumfilehandler.cpp \
+		    src/control/types.h \
+		    src/control/userparam.h \
+		    src/control/userparam.cpp \
+		    src/control/xmlsum_strings.h \
+		    src/control/xmlsum_strings.cpp
+
+datalike_sources    = \
+		    src/datalike/calculators.h \
+		    src/datalike/calculators.cpp \
+		    src/datalike/datapack.h \
+		    src/datalike/datapack.cpp \
+		    src/datalike/datatype.h \
+		    src/datalike/datatype.cpp \
+		    src/datalike/dlcalc.h \
+		    src/datalike/dlcalc.cpp \
+		    src/datalike/dlcell.h \
+		    src/datalike/dlcell.cpp \
+		    src/datalike/cellmanager.h \
+		    src/datalike/cellmanager.cpp \
+		    src/datalike/dlmodel.h \
+		    src/datalike/dlmodel.cpp \
+		    src/datalike/funcMax.h \
+		    src/datalike/funcMax.cpp \
+		    src/datalike/haplotypes.h \
+		    src/datalike/haplotypes.cpp \
+		    src/datalike/locus.h \
+		    src/datalike/locus.cpp \
+		    src/datalike/locuscell.h \
+		    src/datalike/locuscell.cpp \
+		    src/datalike/phenotypes.h \
+		    src/datalike/phenotypes.cpp \
+		    src/datalike/region.h \
+		    src/datalike/region.cpp \
+		    src/datalike/tipdata.h \
+		    src/datalike/tipdata.cpp
+
+force_sources       = \
+                    src/force/epoch.h \
+                    src/force/epoch.cpp \
+		    src/force/event.h \
+		    src/force/event.cpp \
+		    src/force/force.h \
+		    src/force/force.cpp \
+		    src/force/forceparam.h \
+		    src/force/forceparam.cpp \
+		    src/force/forcesummary.h \
+		    src/force/forcesummary.cpp \
+		    src/force/priorreport.h \
+		    src/force/priorreport.cpp \
+		    src/force/timemanager.h \
+		    src/force/timemanager.cpp \
+		    src/force/stair.h \
+		    src/force/stair.cpp \
+                    src/force/paramstat.h \
+                    src/force/paramstat.cpp
+
+lamarcmenus_sources = \
+		    src/lamarcmenus/coalmenus.h \
+		    src/lamarcmenus/coalmenus.cpp \
+		    src/lamarcmenus/constraintmenus.h \
+		    src/lamarcmenus/constraintmenus.cpp \
+		    src/lamarcmenus/datafilenamedialog.h \
+		    src/lamarcmenus/datafilenamedialog.cpp \
+		    src/lamarcmenus/datamodelmenu.h \
+		    src/lamarcmenus/datamodelmenu.cpp \
+		    src/lamarcmenus/diseasemenus.cpp \
+		    src/lamarcmenus/diseasemenus.h \
+		    src/lamarcmenus/divmenus.cpp \
+		    src/lamarcmenus/divmenus.h \
+		    src/lamarcmenus/divmigmenus.cpp \
+		    src/lamarcmenus/divmigmenus.h \
+		    src/lamarcmenus/forcesmenus.cpp \
+		    src/lamarcmenus/forcesmenus.h \
+		    src/lamarcmenus/growthmenus.cpp \
+		    src/lamarcmenus/growthmenus.h \
+		    src/lamarcmenus/lamarc_strings.cpp \
+		    src/lamarcmenus/lamarc_strings.h \
+		    src/lamarcmenus/lamarcheaderdialog.cpp \
+		    src/lamarcmenus/lamarcheaderdialog.h \
+		    src/lamarcmenus/lamarcmenu.cpp \
+		    src/lamarcmenus/lamarcmenu.h \
+		    src/lamarcmenus/lamarcmenuitems.cpp \
+		    src/lamarcmenus/lamarcmenuitems.h \
+		    src/lamarcmenus/logselectmenus.cpp \
+		    src/lamarcmenus/logselectmenus.h \
+		    src/lamarcmenus/migmenus.cpp \
+		    src/lamarcmenus/migmenus.h \
+		    src/lamarcmenus/nomenufilereaddialog.cpp \
+		    src/lamarcmenus/nomenufilereaddialog.h \
+		    src/lamarcmenus/outfilemenus.cpp \
+		    src/lamarcmenus/outfilemenus.h \
+		    src/lamarcmenus/overviewmenus.cpp \
+		    src/lamarcmenus/overviewmenus.h \
+		    src/lamarcmenus/popsizemenu.cpp \
+		    src/lamarcmenus/popsizemenu.h \
+		    src/lamarcmenus/priormenus.cpp \
+		    src/lamarcmenus/priormenus.h \
+		    src/lamarcmenus/profilemenus.cpp \
+		    src/lamarcmenus/profilemenus.h \
+		    src/lamarcmenus/recmenus.cpp \
+		    src/lamarcmenus/recmenus.h \
+		    src/lamarcmenus/regiongammamenus.cpp \
+		    src/lamarcmenus/regiongammamenus.h \
+		    src/lamarcmenus/traitmodelmenu.cpp \
+		    src/lamarcmenus/traitmodelmenu.h \
+		    src/lamarcmenus/treesummenus.cpp \
+		    src/lamarcmenus/treesummenus.h
+
+menu_sources        = \
+		    src/menu/dialog.h \
+		    src/menu/dialog.cpp \
+		    src/menu/dialognoinput.h \
+		    src/menu/dialognoinput.cpp \
+		    src/menu/dialogrepeat.h \
+		    src/menu/dialogrepeat.cpp \
+		    src/menu/display.h \
+		    src/menu/display.cpp \
+		    src/menu/matrixitem.h \
+		    src/menu/matrixitem.cpp \
+		    src/menu/menu_strings.h \
+		    src/menu/menu_strings.cpp \
+		    src/menu/menudefs.h \
+		    src/menu/menuerror.h \
+		    src/menu/menuinteraction.h \
+		    src/menu/menuinteraction.cpp \
+		    src/menu/menuitem.h \
+		    src/menu/menuitem.cpp \
+		    src/menu/menutypedefs.h \
+		    src/menu/newmenu.h \
+		    src/menu/newmenu.cpp \
+		    src/menu/newmenuitems.h \
+		    src/menu/newmenuitems.cpp \
+		    src/menu/setmenuitem.h \
+		    src/menu/setmenuitem.cpp \
+		    src/menu/togglemenuitem.h \
+		    src/menu/togglemenuitem.cpp \
+		    src/menu/twodtable.h \
+		    src/menu/twodtable.cpp
+
+postlike_sources    = \
+		    src/postlike/analyzer.h \
+		    src/postlike/analyzer.cpp \
+		    src/postlike/derivatives.cpp \
+		    src/postlike/likelihood.h \
+		    src/postlike/likelihood.cpp \
+		    src/postlike/maximizer.h \
+		    src/postlike/maximizer.cpp \
+		    src/postlike/maximizer_strings.h \
+		    src/postlike/maximizer_strings.cpp \
+		    src/postlike/plforces.h \
+		    src/postlike/plforces.cpp \
+		    src/postlike/plotstat.h \
+		    src/postlike/plotstat.cpp \
+		    src/postlike/profile.cpp
+
+report_sources  =   \
+		    src/report/curvefiles.h \
+		    src/report/curvefiles.cpp \
+		    src/report/outputfile.h \
+		    src/report/outputfile.cpp \
+		    src/report/reportpage.h \
+		    src/report/reportpage.cpp \
+		    src/report/runreport.h \
+		    src/report/runreport.cpp \
+		    src/report/spreadsheet.h \
+		    src/report/spreadsheet.cpp \
+		    src/report/xml_report.h \
+		    src/report/xml_report.cpp
+
+tools_sources   =   \
+		    src/tools/mathx.h \
+		    src/tools/mathx.cpp \
+		    src/tools/random.h \
+		    src/tools/random.cpp \
+		    src/tools/rangex.h \
+		    src/tools/rangex.cpp \
+		    src/tools/stringx.h \
+		    src/tools/stringx.cpp \
+		    src/tools/timex.h \
+		    src/tools/timex.cpp \
+		    src/tools/tools.h \
+		    src/tools/vector_constants.h \
+		    src/tools/vector_constants.cpp \
+		    src/tools/vectorx.h \
+		    src/tools/vectorx.cpp
+
+tree_sources    =   \
+		    src/tree/argtree.h \
+		    src/tree/argtree.cpp \
+		    src/tree/arranger.h \
+		    src/tree/arranger.cpp \
+		    src/tree/arranger_types.h \
+		    src/tree/arranger_types.cpp \
+		    src/tree/arrangervec.h \
+		    src/tree/arrangervec.cpp \
+		    src/tree/branch.h \
+		    src/tree/branch.cpp \
+		    src/tree/branchbuffer.h \
+		    src/tree/branchbuffer.cpp \
+		    src/tree/branchtag.h \
+		    src/tree/branchtag.cpp \
+		    src/tree/chain.h \
+		    src/tree/chain.cpp \
+		    src/tree/chainstate.h \
+		    src/tree/chainstate.cpp \
+		    src/tree/collector.h \
+		    src/tree/collector.cpp \
+		    src/tree/collmanager.h \
+		    src/tree/collmanager.cpp \
+		    src/tree/fc_status.h \
+		    src/tree/fc_status.cpp \
+		    src/tree/individual.h \
+		    src/tree/individual.cpp \
+		    src/tree/intervaldata.h \
+		    src/tree/intervaldata.cpp \
+		    src/tree/newick.h \
+		    src/tree/newick.cpp \
+		    src/tree/parameter.h \
+		    src/tree/parameter.cpp \
+		    src/tree/partition.h \
+		    src/tree/partition.cpp \
+		    src/tree/prior.h \
+		    src/tree/prior.cpp \
+		    src/tree/range.h \
+		    src/tree/range.cpp \
+		    src/tree/rectree.cpp \
+		    src/tree/summary.h \
+		    src/tree/summary.cpp \
+		    src/tree/tempident.h \
+		    src/tree/tempident.cpp \
+		    src/tree/timelist.h \
+		    src/tree/timelist.cpp \
+		    src/tree/tree.h \
+		    src/tree/tree.cpp \
+		    src/tree/treesum.h \
+		    src/tree/treesum.cpp
+
+ui_interface_sources    = \
+		    src/ui_interface/chainparam_interface.cpp \
+		    src/ui_interface/chainparam_interface.h \
+		    src/ui_interface/constraint_interface.cpp \
+		    src/ui_interface/constraint_interface.h \
+		    src/ui_interface/data_interface.cpp \
+		    src/ui_interface/data_interface.h \
+		    src/ui_interface/datamodel_interface.cpp \
+		    src/ui_interface/datamodel_interface.h \
+		    src/ui_interface/force_interface.cpp \
+		    src/ui_interface/force_interface.h \
+		    src/ui_interface/front_end_warnings.cpp \
+		    src/ui_interface/front_end_warnings.h \
+		    src/ui_interface/prior_interface.cpp \
+		    src/ui_interface/prior_interface.h \
+		    src/ui_interface/profile_interface.cpp \
+		    src/ui_interface/profile_interface.h \
+		    src/ui_interface/setget.h \
+		    src/ui_interface/setgetmachine.cpp \
+		    src/ui_interface/setgetmachine.h \
+		    src/ui_interface/traitmodel_interface.cpp \
+		    src/ui_interface/traitmodel_interface.h \
+		    src/ui_interface/ui_constants.cpp \
+		    src/ui_interface/ui_constants.h \
+		    src/ui_interface/ui_id.cpp \
+		    src/ui_interface/ui_id.h \
+		    src/ui_interface/ui_interface.cpp \
+		    src/ui_interface/ui_interface.h \
+		    src/ui_interface/ui_regid.cpp \
+		    src/ui_interface/ui_regid.h \
+		    src/ui_interface/ui_strings.cpp \
+		    src/ui_interface/ui_strings.h \
+		    src/ui_interface/ui_warnings.h \
+		    src/ui_interface/ui_warnings.cpp \
+		    src/ui_interface/userparam_interface.cpp \
+		    src/ui_interface/userparam_interface.h
+
+ui_util_sources =   \
+		    src/ui_util/undoredochain.h \
+		    src/ui_util/undoredochain.cpp
+
+ui_vars_sources =   \
+		    src/ui_vars/report_strings.h \
+		    src/ui_vars/report_strings.cpp \
+		    src/ui_vars/ui_vars.h \
+		    src/ui_vars/ui_vars.cpp \
+		    src/ui_vars/ui_vars_chainparams.h \
+		    src/ui_vars/ui_vars_chainparams.cpp \
+		    src/ui_vars/ui_vars_component.h \
+		    src/ui_vars/ui_vars_component.cpp \
+		    src/ui_vars/ui_vars_datamodel.h \
+		    src/ui_vars/ui_vars_datamodel.cpp \
+		    src/ui_vars/ui_vars_datapackplus.cpp \
+		    src/ui_vars/ui_vars_datapackplus.h \
+		    src/ui_vars/ui_vars_forces.cpp \
+		    src/ui_vars/ui_vars_forces.h \
+		    src/ui_vars/ui_vars_prior.cpp \
+		    src/ui_vars/ui_vars_prior.h \
+		    src/ui_vars/ui_vars_traitmodels.cpp \
+		    src/ui_vars/ui_vars_traitmodels.h \
+		    src/ui_vars/ui_vars_userparams.cpp \
+		    src/ui_vars/ui_vars_userparams.h
+
+xml_core_sources =  \
+		    src/xml/parsetreeschema.h \
+		    src/xml/parsetreeschema.cpp \
+		    src/xml/tixml_base.h \
+		    src/xml/tixml_base.cpp \
+		    src/xml/xml.h \
+		    src/xml/xml.cpp \
+		    src/xml/xml_strings.h \
+		    src/xml/xml_strings.cpp
+
+xml_support_sources = \
+		    src/control/constants.h \
+		    src/control/constants.cpp \
+		    src/control/defaults.h \
+		    src/control/defaults.cpp \
+		    src/tools/stringx.h \
+		    src/tools/stringx.cpp \
+		    src/force/paramstat.h \
+		    src/force/paramstat.cpp \
+		    src/ui_interface/ui_constants.h \
+		    src/ui_interface/ui_constants.cpp \
+		    src/ui_interface/ui_id.h \
+		    src/ui_interface/ui_id.cpp \
+		    src/ui_interface/ui_strings.h \
+		    src/ui_interface/ui_strings.cpp
+
+xml_sources     =   \
+		    src/xml/lamarcschema.cpp \
+		    src/xml/parsetreetodata.h \
+		    src/xml/parsetreetodata.cpp \
+		    src/xml/parsetreetosettings.h \
+		    src/xml/parsetreetosettings.cpp \
+		    src/xml/parsetreewalker.h \
+		    src/xml/parsetreewalker.cpp \
+		    src/xml/toxml.h \
+		    src/xml/toxml.cpp \
+		    $(xml_core_sources)
+
+tinyxml_sources =   \
+		    tinyxml/tinyxml.h \
+		    tinyxml/tinyxml.cpp \
+		    tinyxml/tinyxmlerror.cpp \
+		    tinyxml/tinyxmlparser.cpp \
+		    tinyxml/tinystr.h \
+		    tinyxml/tinystr.cpp
+
+config_sources  =   \
+		    config/conf.h
+
+lamarcgcov_sources = $(bayeslike_sources) \
+		    $(control_sources) \
+		    $(datalike_sources) \
+		    $(force_sources) \
+		    $(lamarcmenus_sources) \
+		    $(menu_sources) \
+		    $(postlike_sources) \
+		    $(report_sources) \
+		    $(tools_sources) \
+		    $(tree_sources) \
+		    $(ui_interface_sources) \
+		    $(ui_util_sources) \
+		    $(ui_vars_sources) \
+		    $(xml_sources)
+
+lamarc_SOURCES  =   $(lamarcgcov_sources) \
+					$(lamarc_resources) \
+					$(config_sources)
+
+lamarc_LDADD	= libtinyxml.a
+
+old_lam_conv_core       = \
+			src/conversion/ConverterIf.cpp \
+			src/conversion/Converter_ConverterIf.h \
+			src/conversion/Converter_ConverterUI.h \
+			src/conversion/Converter_DataSourceException.h \
+			src/conversion/Converter_DataSourceIf.h \
+			src/conversion/Converter_HapConverter.h \
+			src/conversion/Converter_IndividualDS.h \
+			src/conversion/Converter_LamarcDS.h \
+			src/conversion/Converter_MigrateConverter.h \
+			src/conversion/Converter_ModelDS.h \
+			src/conversion/Converter_ParserUtil.h \
+			src/conversion/Converter_PhylipConverter.h \
+			src/conversion/Converter_PopulationDS.h \
+			src/conversion/Converter_RegionDS.h \
+			src/conversion/Converter_Sequence.h \
+			src/conversion/Converter_SpaceConverter.h \
+			src/conversion/Converter_SpacingDS.h \
+			src/conversion/Converter_UserFileUtil.h \
+			src/conversion/Converter_XmlParserUtil.h \
+			src/conversion/Converter_types.h \
+			src/conversion/ConverterUI.cpp \
+			src/conversion/DataSourceException.cpp \
+			src/conversion/DataSourceIf.cpp \
+			src/conversion/HapConverter.cpp \
+			src/conversion/IndividualDS.cpp \
+			src/conversion/LamarcDS.cpp \
+			src/conversion/MigrateConverter.cpp \
+			src/conversion/ModelDS.cpp \
+			src/conversion/ParserUtil.cpp \
+			src/conversion/PhylipConverter.cpp \
+			src/conversion/PopulationDS.cpp \
+			src/conversion/RegionDS.cpp \
+			src/conversion/Sequence.cpp \
+			src/conversion/SpaceConverter.cpp \
+			src/conversion/SpacingDS.cpp \
+			src/conversion/UserFileUtil.cpp \
+			src/conversion/XmlParserUtil.cpp \
+			src/conversion/nomenuglobals.h \
+			src/conversion/nomenuglobals.cpp \
+			src/control/constants.h \
+			src/control/constants.cpp \
+			src/control/defaults.h \
+			src/control/defaults.cpp \
+			src/control/errhandling.h \
+			src/control/errhandling.cpp \
+			src/tools/random.h \
+			src/tools/random.cpp \
+			src/tools/stringx.h \
+			src/tools/stringx.cpp \
+			src/ui_interface/ui_constants.h \
+			src/ui_interface/ui_constants.cpp \
+			src/ui_interface/ui_id.h \
+			src/ui_interface/ui_id.cpp \
+			src/ui_interface/ui_strings.h \
+			src/ui_interface/ui_strings.cpp \
+			src/xml/xml_strings.h \
+			src/xml/xml_strings.cpp
+
+old_lam_conv_SOURCES    = $(old_lam_conv_core) \
+			$(old_lam_conv_resources) \
+			src/conversion/ConverterUIMain.cpp
+
+EXTRA_old_lam_conv_SOURCES = $(old_lam_conv_icons)
+
+converter_err_sources   = \
+			src/control/errhandling.h \
+			src/control/errhandling.cpp \
+			src/convErr/gc_errhandling.h \
+			src/convErr/gc_errhandling.cpp \
+			\
+			src/convErr/gc_cmdfile_err.h \
+			src/convErr/gc_cmdfile_err.cpp \
+			src/convErr/gc_data_missing_err.h \
+			src/convErr/gc_data_missing_err.cpp \
+			src/convErr/gc_individual_err.h \
+			src/convErr/gc_individual_err.cpp \
+			src/convErr/gc_infile_err.h \
+			src/convErr/gc_infile_err.cpp \
+			src/convErr/gc_locus_err.h \
+			src/convErr/gc_locus_err.cpp \
+			src/convErr/gc_map_err.h \
+			src/convErr/gc_map_err.cpp \
+			src/convErr/gc_phase_err.h \
+			src/convErr/gc_phase_err.cpp \
+			src/convErr/gc_structures_err.h \
+			src/convErr/gc_structures_err.cpp \
+			src/convErr/gc_trait_err.h \
+			src/convErr/gc_trait_err.cpp
+
+converter_model_sources = \
+			src/guiconv/gc_cmdline.h \
+			src/guiconv/gc_cmdline.cpp \
+			src/guiconv/gc_data.h \
+			src/guiconv/gc_data.cpp \
+			src/convModel/gc_dictionary.h \
+			src/convModel/gc_dictionary.cpp \
+			src/ui_interface/front_end_warnings.cpp \
+			src/ui_interface/front_end_warnings.h \
+			src/convModel/cmdfileschema.h \
+			src/convModel/cmdfileschema.cpp \
+			src/convModel/gc_datastore.h \
+			src/convModel/gc_datastore.cpp \
+			src/convModel/gc_datastore_export.cpp \
+			src/convModel/gc_datastore_files.cpp \
+			src/convModel/gc_datastore_readcmd.cpp \
+			src/convModel/gc_datastore_writebatch.cpp \
+			src/convModel/gc_default.h \
+			src/convModel/gc_default.cpp \
+			src/convModel/gc_exportable.h \
+			src/convModel/gc_exportable.cpp \
+			src/convModel/gc_file.h \
+			src/convModel/gc_file.cpp \
+			src/convModel/gc_file_info.h \
+			src/convModel/gc_file_info.cpp \
+			src/convModel/gc_migration.h \
+			src/convModel/gc_migration.cpp \
+			src/convModel/gc_panel.h \
+			src/convModel/gc_panel.cpp \
+			src/convModel/gc_parent.h \
+			src/convModel/gc_parent.cpp \
+			src/convModel/gc_phase.h \
+			src/convModel/gc_phase.cpp \
+			src/convModel/gc_phase_info.h \
+			src/convModel/gc_phase_info.cpp \
+			src/convModel/gc_phenotype.h \
+			src/convModel/gc_phenotype.cpp \
+			src/convModel/gc_set_util.h \
+			src/convModel/gc_set_util.cpp \
+			src/convModel/gc_structure_maps.h \
+			src/convModel/gc_structure_maps.cpp \
+			src/convModel/gc_structures.h \
+			src/convModel/gc_structures.cpp \
+			src/convModel/gc_trait.h \
+			src/convModel/gc_trait.cpp \
+			src/convModel/gc_trait_allele.h \
+			src/convModel/gc_trait_allele.cpp \
+			src/convModel/gc_types.h \
+			src/convModel/gc_types.cpp \
+			src/guiconv/gc_quantum.h \
+			src/guiconv/gc_quantum.cpp
+
+
+converter_parse_sources = \
+			src/convParse/gc_genotype_resolution.h \
+			src/convParse/gc_genotype_resolution.cpp \
+			src/convModel/gc_creation_info.h \
+			src/convModel/gc_creation_info.cpp \
+			src/convModel/gc_individual.h \
+			src/convModel/gc_individual.cpp \
+			src/convParse/gc_loci_match.h \
+			src/convParse/gc_loci_match.cpp \
+			src/convParse/gc_locus.h \
+			src/convParse/gc_locus.cpp \
+			src/convParse/gc_migrate.h \
+			src/convParse/gc_migrate.cpp \
+			src/convParse/gc_parse.h \
+			src/convParse/gc_parse.cpp \
+			src/convParse/gc_parse_block.h \
+			src/convParse/gc_parse_block.cpp \
+			src/convParse/gc_parse_locus.h \
+			src/convParse/gc_parse_locus.cpp \
+			src/convParse/gc_parse_pop.h \
+			src/convParse/gc_parse_pop.cpp \
+			src/convParse/gc_parse_sample.h \
+			src/convParse/gc_parse_sample.cpp \
+			src/convParse/gc_parser.h \
+			src/convParse/gc_parser.cpp \
+			src/convParse/gc_phylip.h \
+			src/convParse/gc_phylip.cpp \
+			src/convParse/gc_pop_match.h \
+			src/convParse/gc_pop_match.cpp \
+			src/convParse/gc_population.h \
+			src/convParse/gc_population.cpp \
+			src/convParse/gc_region.h \
+			src/convParse/gc_region.cpp \
+			src/convParse/gc_sequential_data.h \
+			src/convParse/gc_sequential_data.cpp \
+			src/convParse/tixml_util.h \
+			src/convParse/tixml_util.cpp \
+			$(xml_core_sources) \
+			$(xml_support_sources)
+
+converter_string_sources = \
+			src/convStrings/cnv_strings.h \
+			src/convStrings/cnv_strings.cpp \
+			src/convStrings/gc_strings.h \
+			src/convStrings/gc_strings_cmdfile.h \
+			src/convStrings/gc_strings_cmdfile.cpp \
+			src/convStrings/gc_strings_creation.h \
+			src/convStrings/gc_strings_creation.cpp \
+			src/convStrings/gc_strings_data.h \
+			src/convStrings/gc_strings_data.cpp \
+			src/convStrings/gc_strings_err.cpp \
+			src/convStrings/gc_strings_individual.h \
+			src/convStrings/gc_strings_individual.cpp \
+			src/convStrings/gc_strings_infile.h \
+			src/convStrings/gc_strings_infile.cpp \
+			src/convStrings/gc_strings_io.h \
+			src/convStrings/gc_strings_io.cpp \
+			src/convStrings/gc_strings_locus.h \
+			src/convStrings/gc_strings_locus.cpp \
+			src/convStrings/gc_strings_map.h \
+			src/convStrings/gc_strings_map.cpp \
+			src/convStrings/gc_strings_mig.h \
+			src/convStrings/gc_strings_mig.cpp \
+			src/convStrings/gc_strings_parse.h \
+			src/convStrings/gc_strings_parse.cpp \
+			src/convStrings/gc_strings_parse_locus.h \
+			src/convStrings/gc_strings_parse_locus.cpp \
+			src/convStrings/gc_strings_phase.h \
+			src/convStrings/gc_strings_phase.cpp \
+			src/convStrings/gc_strings_pop.h \
+			src/convStrings/gc_strings_pop.cpp \
+			src/convStrings/gc_strings_region.h \
+			src/convStrings/gc_strings_region.cpp \
+			src/convStrings/gc_strings_str_a_m.cpp \
+			src/convStrings/gc_strings_str_n_z.cpp \
+			src/convStrings/gc_strings_structures.h \
+			src/convStrings/gc_strings_structures.cpp \
+			src/convStrings/gc_strings_trait.h \
+			src/convStrings/gc_strings_trait.cpp
+
+converter_util_sources = \
+			src/convUtil/gc_file_util.h \
+			src/convUtil/gc_file_util.cpp
+
+gui_util_sources =      \
+			src/guiutil/gc_clickpanel.h \
+			src/guiutil/gc_clickpanel.cpp \
+			src/guiutil/gc_gridpanel.h \
+			src/guiutil/gc_gridpanel.cpp \
+			src/guiutil/gc_text_ctrl.h \
+			src/guiutil/gc_text_ctrl.cpp \
+			src/guiutil/gc_validators.h \
+			src/guiutil/gc_validators.cpp
+
+old_gui_conv_sources =  \
+			src/guiconv/gc_export_dialogs.h \
+			src/guiconv/gc_export_dialogs.cpp \
+			src/guiconv/gc_unit_dialogs.h \
+			src/guiconv/gc_unit_dialogs.cpp
+
+gui_conv_sources =      \
+			resources/empty16.xpm \
+			resources/excl16.xpm \
+			resources/giraffe32.xpm \
+			src/guiconv/gc_assigntab.h \
+			src/guiconv/gc_assigntab.cpp \
+			src/guiconv/gc_block_dialogs.h \
+			src/guiconv/gc_block_dialogs.cpp \
+			src/guiconv/gc_color.h \
+			src/guiconv/gc_color.cpp \
+			src/guiconv/gc_data_display.h \
+			src/guiconv/gc_data_display.cpp \
+			src/guiconv/gc_dialog.h \
+			src/guiconv/gc_dialog.cpp \
+			src/guiconv/gc_event_ids.h \
+			src/guiconv/gc_event_publisher.h \
+			src/guiconv/gc_event_publisher.cpp \
+			src/guiconv/gc_file_dialogs.h \
+			src/guiconv/gc_file_dialogs.cpp \
+			src/guiconv/gc_file_list.h \
+			src/guiconv/gc_file_list.cpp \
+			src/guiconv/gc_frame.h \
+			src/guiconv/gc_frame.cpp \
+			src/guiconv/gc_layout.h \
+			src/guiconv/gc_layout.cpp \
+			src/guiconv/gc_locus_dialogs.h \
+			src/guiconv/gc_locus_dialogs.cpp \
+			src/guiconv/gc_logic.h \
+			src/guiconv/gc_logic.cpp \
+			src/guiconv/gc_matrix_display.h \
+			src/guiconv/gc_matrix_display.cpp \
+			src/guiconv/gc_menu_actors.h \
+			src/guiconv/gc_menu_actors.cpp \
+			src/guiconv/gc_migration_dialogs.h \
+			src/guiconv/gc_migration_dialogs.cpp \
+			src/guiconv/gc_migtab.h \
+			src/guiconv/gc_migtab.cpp \
+			src/guiconv/gc_panel_dialogs.h \
+			src/guiconv/gc_panel_dialogs.cpp \
+			src/guiconv/gc_parent_dialogs.h \
+			src/guiconv/gc_parent_dialogs.cpp \
+			src/guiconv/gc_population_dialogs.h \
+			src/guiconv/gc_population_dialogs.cpp \
+			src/guiconv/gc_region_dialogs.h \
+			src/guiconv/gc_region_dialogs.cpp \
+			src/guiconv/gc_trait_dialogs.h \
+			src/guiconv/gc_trait_dialogs.cpp \
+			src/guiconv/guiconverter.h \
+			src/guiconv/guiconverter.cpp
+
+lam_conv_core           = \
+			$(gui_util_sources) \
+			$(gui_conv_sources)
+
+lam_conv_SOURCES        = \
+			$(converter_err_sources) \
+			$(converter_model_sources) \
+			$(converter_parse_sources) \
+			$(converter_string_sources) \
+			$(converter_util_sources) \
+			$(lam_conv_core) \
+			$(lam_conv_resources)
+
+lam_conv_LDADD = @WX_LIBS@ -lX11 libtinyxml.a
+
+EXTRA_lam_conv_SOURCES  = $(tinyxml_sources) $(lam_conv_icons)
+
+batch_lam_conv_core     = \
+			src/guiconv/batchconverter.h \
+			src/guiconv/batchconverter.cpp
+
+batch_lam_conv_testable = \
+			$(batch_lam_conv_core) \
+			$(converter_err_sources) \
+			$(converter_model_sources) \
+			$(converter_parse_sources) \
+			$(converter_string_sources) \
+			$(converter_util_sources)
+
+batch_lam_conv_SOURCES	=	\
+			$(batch_lam_conv_testable)
+
+batch_lam_conv_LDADD = @WX_LIBS@ -lX11 libtinyxml.a
+
+EXTRA_batch_lam_conv_SOURCES = $(tinyxml_sources)
+
+## ############################################################
+## To make OSX executables for wxWindows stuff
+## ############################################################
+lam_conv_app_SOURCES=
+
+lam_conv.app:   lam_conv
+	@rm -rf lam_conv.app
+	$(MACDEVTOOLS)/Rez -d __DARWIN__ -t APPL Carbon.r -o lam_conv
+	$(MACDEVTOOLS)/SetFile -a C lam_conv
+	-mkdir -p lam_conv.app/Contents/MacOS
+	-mkdir -p lam_conv.app/Contents/Resources
+	sed -e "s/IDENTIFIER/lam_conv/" \
+	-e "s/COMMAND/lam_conv/" \
+	-e "s/EXECUTABLE/lam_conv/" \
+	-e "s/ICONNAME/lam_conv/" \
+	-e "s/VERSION/@VERSION@/" \
+	$(top_srcdir)/resources/Info.plist.in > lam_conv.app/Contents/Info.plist
+	echo -n "APPL????" >lam_conv.app/Contents/PkgInfo
+	cp -f lam_conv lam_conv.app/Contents/MacOS/lam_conv
+	cp -f $(top_srcdir)/resources/lam_conv.icns lam_conv.app/Contents/Resources/lam_conv.icns
+
+lamarc_app_SOURCES=
+
+lamarc.app:     lamarc
+	@rm -rf lamarc.app
+	$(MACDEVTOOLS)/Rez -d __DARWIN__ -t APPL Carbon.r -o lamarc
+	$(MACDEVTOOLS)/SetFile -a C lamarc
+	-mkdir -p lamarc.app/Contents/MacOS
+	-mkdir -p lamarc.app/Contents/Resources
+	sed -e "s/IDENTIFIER/lamarc/" \
+	-e "s/COMMAND/lamarc.command/" \
+	-e "s/EXECUTABLE/lamarc/" \
+	-e "s/ICONNAME/lamarc/" \
+	-e "s/VERSION/@VERSION@/" \
+	$(top_srcdir)/resources/Info.plist.in > lamarc.app/Contents/Info.plist
+	cp -f $(top_srcdir)/resources/lamarc.command lamarc.app/Contents/MacOS/lamarc.command
+	chmod ug+x lamarc.app/Contents/MacOS/lamarc.command
+	echo -n "APPL????" >lamarc.app/Contents/PkgInfo
+	ln -f lamarc lamarc.app/Contents/MacOS/lamarc
+	cp -f $(top_srcdir)/resources/lamarc.icns lamarc.app/Contents/Resources/lamarc.icns
+
+
+## ############################################################
+## VC++ project files
+## ############################################################
+## vcprojdir = $(top_srcdir)/projects/win32
+##vcprojscriptdir = $(top_srcdir)/projects/win32
+##vcprojdir = $(top_srcdir)/projects/win32
+
+##$(vcprojdir)/lamarc.vcproj :  $(vcprojdir)/make_vcproj.pl Makefile.am
+##      $(vcprojscriptdir)/make_vcproj.pl lamarc $(lamarc_SOURCES) $(lamarc_includes) $(lamarc_icons) > $(vcprojdir)/lamarc.vcproj
+
+##$(vcprojdir)/lam_conv.vcproj :        $(vcprojdir)/make_vcproj.pl Makefile.am
+##      $(vcprojscriptdir)/make_vcproj.pl lam_conv $(lam_conv_SOURCES) $(lamarc_includes) $(lam_conv_icons) > $(vcprojdir)/lam_conv.vcproj
+
+##$(vcprojdir)/batch_lam_conv.vcproj :  $(vcprojdir)/make_vcproj.pl Makefile.am
+##      $(vcprojscriptdir)/make_vcproj.pl batch_lam_conv $(batch_lam_conv_SOURCES) $(lamarc_includes) $(batch_lam_conv_icons) > $(vcprojdir)/batch_lam_conv.vcproj
+
+
+## ############################################################
+## html documentation
+## ############################################################
+
+htmldir = $(prefix)/html/lamarc
+
+html_DATA = \
+			doc/html/bayes.html \
+			doc/html/bayes_howto.html \
+			doc/html/changes.html \
+			doc/html/comparing_curvefiles.sxc \
+			doc/html/comparing_curvefiles.xls \
+			doc/html/compiling.html \
+			doc/html/converter.html \
+			doc/html/converter_cmd.html \
+			doc/html/curve-smoothing.html \
+			doc/html/data_models.html \
+			doc/html/data_required.html \
+			doc/html/divergence.html \
+			doc/html/forces.html \
+			doc/html/gamma.html \
+			doc/html/genetic_map.html \
+			doc/html/genotype.html \
+			doc/html/glossary.html \
+			doc/html/growthmenu.html \
+			doc/html/index.html \
+			doc/html/insumfile.2reg3rep.html \
+			doc/html/insumfile.2reg3rep.xml \
+			doc/html/insumfile.3rep.html \
+			doc/html/insumfile.3rep.xml \
+			doc/html/limitations.html \
+			doc/html/mapping.html \
+			doc/html/menu.html \
+			doc/html/messages.html \
+			doc/html/migration_matrix.html \
+			doc/html/output.html \
+			doc/html/outsumfile.2reg3rep.html \
+			doc/html/outsumfile.2reg3rep.xml \
+			doc/html/outsumfile.3rep.html \
+			doc/html/outsumfile.3rep.xml \
+			doc/html/overview.html \
+			doc/html/panels.html \
+			doc/html/parallel.html \
+			doc/html/parameters.html \
+			doc/html/regions.html \
+			doc/html/search.html \
+			doc/html/tracer.html \
+			doc/html/troubleshooting.html \
+			doc/html/tutorial.html \
+			doc/html/tutorial2.html \
+			doc/html/upcoming.html \
+			doc/html/viral_data.html \
+			doc/html/xmlinput.html
+
+htmlimgdir = $(prefix)/html/lamarc/images
+
+htmlimg_DATA = \
+			doc/html/images/LamarcAnalysisScreen.png \
+			doc/html/images/LamarcDataScreen.png \
+			doc/html/images/LamarcIOScreen.png \
+			doc/html/images/LamarcMainScreen.png \
+			doc/html/images/LamarcOverviewScreen.png \
+			doc/html/images/LamarcSearchScreen.png \
+			doc/html/images/browser-gtk.gif \
+			doc/html/images/correlated1.gif \
+			doc/html/images/correlated2.gif \
+			doc/html/images/gui_lam_conv.gif \
+			doc/html/images/lam_conv.gif \
+			doc/html/images/lamarc.gif \
+			doc/html/images/loci-gtk.png \
+			doc/html/images/loci2-gtk.png \
+			doc/html/images/partitions-gtk.png \
+			doc/html/images/partitions2-gtk.png \
+			doc/html/images/populations-gtk.png \
+			doc/html/images/startup-gtk.png \
+			doc/html/images/tracer_trend.png \
+			doc/html/images/uncorrelated.gif \
+			doc/html/images/variably_correlated.gif
+
+htmlbconvdir = $(prefix)/html/lamarc/batch_converter
+
+htmlbconv_DATA = \
+			doc/html/batch_converter/README.txt \
+			doc/html/batch_converter/chrom1_lamarc.html \
+			doc/html/batch_converter/chrom1_lamarc.xml \
+			doc/html/batch_converter/chrom1.mig \
+			doc/html/batch_converter/chrom2_lamarc.xml \
+			doc/html/batch_converter/chrom2.mig \
+			doc/html/batch_converter/chrom3_lamarc.xml \
+			doc/html/batch_converter/chrom3_phase_cmd.xml \
+			doc/html/batch_converter/chrom3microsat.mig \
+			doc/html/batch_converter/chrom3snp.mig \
+			doc/html/batch_converter/exported-lamarc-input.xml \
+			doc/html/batch_converter/sample-conv-cmd.html \
+			doc/html/batch_converter/sample-conv-cmd.xml
+
+htmlbconvimgdir = $(prefix)/html/lamarc/batch_converter/images
+
+htmlbconvimg_DATA = \
+			doc/html/batch_converter/images/CombinedPanels.png \
+			doc/html/batch_converter/images/DataFilesTab.png \
+			doc/html/batch_converter/images/DataPartitionsMigTab.png \
+			doc/html/batch_converter/images/DebugLogTab.png \
+			doc/html/batch_converter/images/DivergeOff.png \
+			doc/html/batch_converter/images/DivMigMatrixTab.png \
+			doc/html/batch_converter/images/EditMigration.png \
+			doc/html/batch_converter/images/EditPanelCorrection.png \
+			doc/html/batch_converter/images/FirstParent.png \
+			doc/html/batch_converter/images/FirstParent2Children.png \
+			doc/html/batch_converter/images/FullParentsImage.png \
+			doc/html/batch_converter/images/InterumParentImage.png \
+			doc/html/batch_converter/images/MigrationOnlyMatrixTab.png \
+			doc/html/batch_converter/images/PanelCorrectionOn.png \
+			doc/html/batch_converter/images/SecondParent.png \
+			doc/html/batch_converter/images/lam_conv_chrom1_export_file_selection.png \
+			doc/html/batch_converter/images/lam_conv_chrom1_export_warn_1.png \
+			doc/html/batch_converter/images/lam_conv_chrom1_input.png \
+			doc/html/batch_converter/images/lam_conv_chrom1_segment_panel.png \
+			doc/html/batch_converter/images/lam_conv_chrom2_segment1.png \
+			doc/html/batch_converter/images/lam_conv_chrom3_error_map_position.png \
+			doc/html/batch_converter/images/lam_conv_chrom3_error_phase_file_needed.png \
+			doc/html/batch_converter/images/lam_conv_chrom3_input.png \
+			doc/html/batch_converter/images/lam_conv_chrom3_region_panel.png \
+			doc/html/batch_converter/images/lam_conv_chrom3_region_table.png \
+			doc/html/batch_converter/images/lam_conv_chrom3_segment_snp.png \
+			doc/html/batch_converter/images/lam_conv_export_file_mac_minimal.png \
+			doc/html/batch_converter/images/lam_conv_export_file_mac_expanded.png
+
+
+htmltraitdir = $(prefix)/html/lamarc/trait_mapping
+
+htmltrait_DATA  = \
+			doc/html/trait_mapping/README.txt \
+			doc/html/trait_mapping/lamarc-trait-input.html \
+			doc/html/trait_mapping/lamarc-trait-input.xml \
+			doc/html/trait_mapping/mapfile_funny-nose.txt \
+			doc/html/trait_mapping/outfile.txt \
+			doc/html/trait_mapping/traitCmd.html \
+			doc/html/trait_mapping/traitCmd.xml \
+			doc/html/trait_mapping/traitCmd.xml.txt \
+			doc/html/trait_mapping/traitData.mig
+
+license_info = \
+			COPYING \
+			doc/licenses/boost.txt \
+			doc/licenses/gpl.txt \
+			doc/licenses/lamarc.txt \
+			doc/licenses/lgpl.txt \
+			doc/licenses/wx.txt \
+			doc/licenses/wxdoc.txt \
+			doc/licenses/zlib.txt
+
+wx_curr         = \
+			wxWidgets/wxWidgets-2.8.12.tar.gz
+
+wx_stuff        = \
+			wxWidgets/wxWidgets-2.8.10.tar.gz \
+			wxWidgets/wxWidgets-2.8.8.tar.gz \
+			$(wx_curr)
+
+
+
+EXTRA_DIST                      = \
+			Makefile.am \
+			configure.ac \
+			boost \
+			tinyxml \
+			$(license_info) \
+			$(wx_curr) \
+			$(scripts_to_dist) \
+			scripts/pyParallel \
+			$(html_DATA) $(htmlimg_DATA) $(htmlbconv_DATA)\
+			$(htmlbconvimg_DATA) \
+			$(htmltrait_DATA) \
+			doc/testfiles/sample_infile.xml \
+			doc/testfiles/sample_outfile.txt \
+			doc/testfiles/sample_outsumfile.xml \
+			CONTENTS \
+			HISTORY \
+			INSTALL \
+			README \
+			$(top_srcdir)/resources/Info.plist.in \
+			$(top_srcdir)/resources/lamarc.command
+
+# these used to be in EXTRA_DIST, but that mechanism is broken and not
+# testable; they are kept here, commented out, in case I decide to fix it
+#                       $(vcprojdir)/batch_lam_conv.vcproj
+#                       $(vcprojdir)/lam_conv.vcproj
+#                       $(vcprojdir)/lamarc.vcproj
+
+dist-hook:
+	rm -rf `find $(distdir)/boost -name CVS`
+	rm -rf `find $(distdir)/tinyxml -name CVS`
+
+## ############################################################
+## support for sanity checks
+## ############################################################
+
+filelist :
+			echo $(lamarcgcov_sources)
+
+batchfilelist   :
+			echo $(batch_lam_conv_testable)
+
+guifilelist :
+			echo $(lam_conv_core)
+
+resourcelist    :
+			echo $(lamarc_resources)
+
+.PHONY :        filelist batchfilelist guifilelist resourcelist
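+
+## Illustrative usage of the sanity-check targets above (example only):
+##     make -s filelist | tr ' ' '\n' | grep -c '\.cpp$'
+## prints how many .cpp files are compiled into the lamarc binary.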
+
+
+## ############################################################
+## targets for building website
+## ############################################################
+
+
+
+webskeleton     :
+			tar xfvz $(top_srcdir)/tools/apache-forrest-0.7.tar.gz
+			rm -f -r webBuildDir
+			mkdir webBuildDir
+			cd webBuildDir ; \
+			../apache-forrest-0.7/bin/forrest seed
+			cp $(top_srcdir)/website/apacheInput/skinconf.xml webBuildDir/src/documentation
+			rm -f -r webBuildDir/src/documentation/content
+			mkdir webBuildDir/src/documentation/content
+			cp -r $(top_srcdir)/website/apacheInput/xdocs webBuildDir/src/documentation/content/
+			cd webBuildDir ; \
+			../apache-forrest-0.7/bin/forrest
+			cd webBuildDir/build ; \
+			mv site lamarc ; \
+			tar cfvz ../../lamarcweb-$(VERSION).tar.gz lamarc
+
+
+## ############################################################
+## adds this to the "all" target so that when the user types "make",
+## this message appears at the bottom of the output
+## ############################################################
+all-local:
+	@echo "-----------------------------------------------------"
+	@echo "Lamarc $(VERSION)"
+	@echo "Please read the documentation in ./doc/html/"
+	@echo "This Makefile will produce executables lam_conv and lamarc"
+	@echo ""
+	@echo "If you have problems, please send an email to"
+	@echo " lamarc at gs.washington.edu and include any compile warnings,"
+	@echo " error messages, and information on any other issues you ran into."
+	@echo "[Mary Kuhner, Jon Yamato, Peter Beerli]"
+	@echo "----------------------------------------------------"
+
+if CONVERTER
+if GUI
+batch_lam_conv:
+	@echo "-----------------------------------------------------"
+	@echo "batch_lam_conv cannot be compiled because you have"
+	@echo "configured with --enable-gui"
+	@echo "-----------------------------------------------------"
+
+else
+lam_conv:
+	@echo "-----------------------------------------------------"
+	@echo "lam_conv cannot be compiled because you have"
+	@echo "configured with --disable-gui"
+	@echo "-----------------------------------------------------"
+
+endif
+else
+batch_lam_conv:
+	@echo "-----------------------------------------------------"
+	@echo "batch_lam_conv cannot be compiled because you have"
+	@echo "configured with --disable-converter"
+	@echo "-----------------------------------------------------"
+
+lam_conv:
+	@echo "-----------------------------------------------------"
+	@echo "lam_conv cannot be compiled because you have"
+	@echo "configured with --disable-converter"
+	@echo "-----------------------------------------------------"
+
+endif
+
+
+
+
+distclean-local:
+	rm -f $(vcprojdir)/*.vcproj projects/macos/*.xml
+
+clean-local:
+	rm -f *.gcov *.gcno *.gcda
+	rm -f -r *.app
+
diff --git a/config/conf.h.in b/config/conf.h.in
new file mode 100644
index 0000000..ea1e6d5
--- /dev/null
+++ b/config/conf.h.in
@@ -0,0 +1,103 @@
+/* config/conf.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Define to 1 if you have the <float.h> header file. */
+#undef HAVE_FLOAT_H
+
+/* Define to 1 if you have the `getcwd' function. */
+#undef HAVE_GETCWD
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#undef HAVE_INTTYPES_H
+
+/* Define to 1 if you have the <limits.h> header file. */
+#undef HAVE_LIMITS_H
+
+/* Define to 1 if you have the <memory.h> header file. */
+#undef HAVE_MEMORY_H
+
+/* Define to 1 if you have the `pow' function. */
+#undef HAVE_POW
+
+/* Define to 1 if you have the `sqrt' function. */
+#undef HAVE_SQRT
+
+/* Define to 1 if stdbool.h conforms to C99. */
+#undef HAVE_STDBOOL_H
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#undef HAVE_STDINT_H
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#undef HAVE_STDLIB_H
+
+/* Define to 1 if you have the `strchr' function. */
+#undef HAVE_STRCHR
+
+/* Define to 1 if you have the `strftime' function. */
+#undef HAVE_STRFTIME
+
+/* Define to 1 if you have the <strings.h> header file. */
+#undef HAVE_STRINGS_H
+
+/* Define to 1 if you have the <string.h> header file. */
+#undef HAVE_STRING_H
+
+/* Define to 1 if you have the `strpbrk' function. */
+#undef HAVE_STRPBRK
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#undef HAVE_SYS_STAT_H
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#undef HAVE_SYS_TYPES_H
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#undef HAVE_UNISTD_H
+
+/* Define to 1 if the system has the type `_Bool'. */
+#undef HAVE__BOOL
+
+/* Name of package */
+#undef PACKAGE
+
+/* Define to the address where bug reports for this package should be sent. */
+#undef PACKAGE_BUGREPORT
+
+/* Define to the full name of this package. */
+#undef PACKAGE_NAME
+
+/* Define to the full name and version of this package. */
+#undef PACKAGE_STRING
+
+/* Define to the one symbol short name of this package. */
+#undef PACKAGE_TARNAME
+
+/* Define to the home page for this package. */
+#undef PACKAGE_URL
+
+/* Define to the version of this package. */
+#undef PACKAGE_VERSION
+
+/* The release date of the program */
+#undef RELEASE_DATE
+
+/* Define to 1 if you have the ANSI C header files. */
+#undef STDC_HEADERS
+
+/* Define to 1 if your <sys/time.h> declares `struct tm'. */
+#undef TM_IN_SYS_TIME
+
+/* Version number of package */
+#undef VERSION
+
+/* Define to empty if `const' does not conform to ANSI C. */
+#undef const
+
+/* Define to `__inline__' or `__inline' if that's what the C compiler
+   calls it, or to nothing if 'inline' is not supported under any name.  */
+#ifndef __cplusplus
+#undef inline
+#endif
+
+/* Define to `unsigned int' if <sys/types.h> does not define. */
+#undef size_t
diff --git a/config/local_build.h b/config/local_build.h
new file mode 100644
index 0000000..2b2a76c
--- /dev/null
+++ b/config/local_build.h
@@ -0,0 +1,131 @@
+// $Id: local_build.h,v 1.20 2012/06/30 01:32:25 bobgian Exp $
+
+/*
+ * ~/lamarc/config/local_build.h
+ *
+ *  Regular (non-DENOVO) arrangers with Data, Final-Coalescence ON, Non-Dynameter version
+ *
+ *  Created on: Nov 25, 2009
+ *      Author: bobgian
+ */
+
+#ifndef LOCAL_BUILD_H
+#define LOCAL_BUILD_H
+
+//------------------------------------------------------------------------------------
+//
+// This file is intended to be customized for individual builds (DEBUG vs not, FC ON vs OFF, Dynameter ON vs OFF, etc)
+// and placed into each build directory in which a customized build is done (actually, in the "config" subdirectory of the
+// build directory).  THIS one, the version stored in the CVS repository, is intended as the default for PRODUCTION builds.
+//
+// The copy in the "config" subdirectory in which a build is running (a DEBUG or test build, say) takes precedence over any
+// other copy (THIS copy, in the default PRODUCTION "config" subdirectory, for example).  The reason is that the "make" process
+// INCLUDE directory precedence order prioritizes the CURRENT directory's "config" subdirectory over any other directory
+// containing a file of same name.
+//
+// Note that this file does NOT define DEBUG or NDEBUG.  Either add it to a customized version or let the CONFIGURE script do it.
+// That way, this file can work both for DEBUG and Non-DEBUG builds.
+//
+//------------------------------------------------------------------------------------
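+
+// Illustrative example (hypothetical build-tree path, not shipped): a debug
+// build tree could carry its own customized copy, e.g.
+//     mydebugbuild/config/local_build.h    (which might add ENABLE_REGION_DUMP)
+// and, because -I $(builddir)/config precedes -I $(top_srcdir)/config in
+// Makefile.am, that copy is found before this default one.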
+
+//------------------------------------------------------------------------------------
+// Implementation for Biglink optimization and associated debugging printout.
+
+// When this is defined, the Biglink optimization runs natively (that is, the optimization takes effect).
+// If NOT defined, the Littlelink system runs natively.
+#define RUN_BIGLINKS
+
+// When this is defined, the Biglink optimization runs natively but it EMULATES the Littlelink functionality.
+// That is, all Biglink functionality is enabled, but the Biglink Map defines each Littlelink as its own Biglink.
+// The intended effect should be exactly the same (modulo possible floating-point roundoff) as the native
+// Littlelink implementation.  This is for testing purposes.
+// #define EMULATE_LITTLELINKS
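+//
+// In summary (restating the comments above), the three configurations are:
+//     RUN_BIGLINKS defined, EMULATE_LITTLELINKS not defined -> Biglink optimization active (the default here)
+//     RUN_BIGLINKS and EMULATE_LITTLELINKS both defined     -> Biglinks emulating Littlelinks, for testing
+//     RUN_BIGLINKS not defined                              -> native Littlelink system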
+
+#ifndef NDEBUG // In Debug mode, the following compile flags all may-or-may-not take effect.
+//
+// When defined, information is printed about Region data as parsed for the Biglink Map construction.
+// This works with either Biglink (including Littlelink Emulation) or Littlelink systems enabled.
+// #define ENABLE_REGION_DUMP
+//
+// When defined, information is printed about construction and contents of the Biglink Map.
+// This works only when the Biglink (including Littlelink Emulation) systems are enabled.
+// #define ENABLE_BIGLINKMAP_DUMP
+//
+#endif // NDEBUG
+
+//------------------------------------------------------------------------------------
+// For testing of Final Coalescence optimization.
+
+#define FINAL_COALESCENCE_ON          true
+
+//------------------------------------------------------------------------------------
+// Data versus Stationaries, Denovo testing.
+// Make sure this file is consistent with the Makefile either by never defining these or by #undef-ing them.
+
+// Test for stationary distributions.
+// #define STATIONARIES
+
+// Use DENOVO arrangers, ignoring input data.
+// #define DENOVO
+
+// Stationaries with denovo arranger in ALL cases.
+// If not defined, run denovo arranger only in Bayesian case.
+// #define ALL_ARRANGERS_DENOVO
+
+//------------------------------------------------------------------------------------
+// Dynameter
+
+// NB: DYNAMETER_LEVEL is NOT defined.  The presence or absence of the pre-processor symbol DYNAMETER_LEVEL determines whether
+// the tracing/metering facility is activated.  If the symbol IS defined, various symbols and macros defined in header file
+// "dynatracer.h" expand so as to activate (compile in) the metering tool.  Also, the actual value of the symbol (an integer
+// greater than zero) determines the set of metering/tracing features that are activated (higher numbers activate more features).
+// If DYNAMETER_LEVEL is NOT defined (as is the case here), these other symbols are NOT defined and the macros expand into whitespace,
+// making the apparent function calls (actually calls to these macros) "disappear" from the source files during compilation.
+//
+// Activate basic Dynameter functionality (metering of function call times and statistics printed at end) by defining value as 1.
+// #define DYNAMETER_LEVEL               1
+//
+// Add to above the accumulation of tree-search statistics (printed at end) by defining value as 2.
+// #define DYNAMETER_LEVEL               2
+//
+// Add to above the dynamic function-call tracing (printout continuously while running) by defining value as 3.
+// #define DYNAMETER_LEVEL               3
+//
+#ifdef DYNAMETER_LEVEL
+// If the Dynameter is in use (above symbol defined), then the symbol DYNAMETER_FILE_INDEX must be defined before this file is
+// included with an integer value starting from 0 and incrementing by 1 for each file that includes calls to StartDynameter().
+// DYNAMETER_FILE_INDEX's value is an index into a compile-time SWITCH statement held in "dynacount.h", whose output provides
+// the starting value for the Dynameter array's index values, incremented automatically by the macro __COUNTER__ at each call
+// of StartDynameter() within that file.
+#include "dynacount.h"
+// Each file using the Dynameter will, by virtue of declaring such intent by defining DYNAMETER_LEVEL in this file, be guaranteed
+// to have the environment for such use set up automatically.  The file "dynacount.h" (in directory "~/lamarc/config") is generated
+// by the script "~/lamarc/config/dynacounter.bash".
+#endif // DYNAMETER_LEVEL
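+
+// Illustrative sketch of opting in (hypothetical values, not enabled in this
+// default copy): a customized local_build.h would uncomment, e.g.,
+//     #define DYNAMETER_LEVEL      2   // timings plus tree-search statistics
+// and each source file calling StartDynameter() would define, before
+// including this header,
+//     #define DYNAMETER_FILE_INDEX 3   // consecutive index, unique per file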
+
+//------------------------------------------------------------------------------------
+// LAMARC_QA tests -- here are flags you can turn on (by removing the leading comment
+// characters and space) to do special-purpose Quality Assurance testing.
+//
+// Turn off aliasing -- this makes the code run much more slowly, but allows you
+// to check that you haven't screwed up aliasing.
+// #define LAMARC_QA_NOALIAS
+//
+// To generate single denovo trees and retrieve their parameter values, uncomment both of these.
+// #define LAMARC_QA_SINGLE_DENOVOS
+// #define LAMARC_QA_SINGLE_DENOVOS_COUNT 10000
+//
+// To dump out GraphML trees.  Not released to the general public.  If you are
+// using this, please be aware that it is not tested well enough to be trusted.
+// #define LAMARC_QA_TREE_DUMP
+//------------------------------------------------------------------------------------
+
+//------------------------------------------------------------------------------------
+// LAMARC_NEW_FEATURE -- here are flags you can turn on (by removing the leading comment
+// characters and space) to turn on a new feature that is not yet released.
+// #define LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+//------------------------------------------------------------------------------------
+
+#endif // LOCAL_BUILD_H
+
+//____________________________________________________________________________________
diff --git a/configure.ac b/configure.ac
new file mode 100644
index 0000000..29de069
--- /dev/null
+++ b/configure.ac
@@ -0,0 +1,508 @@
+#                                               -*- Autoconf -*-
+# Process this file with autoconf to produce a configure script.
+
+AC_PREREQ(2.59)
+AC_INIT(lamarc, 2.1.8, lamarc at u.washington.edu, lamarc)
+AM_MAINTAINER_MODE
+AC_CONFIG_AUX_DIR([config])
+
+AC_DEFINE(RELEASE_DATE, ["29 May 2012"], [The release date of the program])
+
+wxRequiredMajorVersion=2
+wxRequiredMinorVersion=8
+wxRequiredPatchVersion=3
+wxVersionRequired=${wxRequiredMajorVersion}.${wxRequiredMinorVersion}.${wxRequiredPatchVersion}
+
+wxProvidedMajorVersion=2
+wxProvidedMinorVersion=8
+wxProvidedPatchVersion=12
+wxVersionProvided=${wxProvidedMajorVersion}.${wxProvidedMinorVersion}.${wxProvidedPatchVersion}
+
+# setting up OS and CPU variables
+AC_CANONICAL_BUILD
+AC_CANONICAL_HOST
+AC_CANONICAL_TARGET
+
+AM_INIT_AUTOMAKE([foreign])
+AC_CONFIG_SRCDIR([src/control/lamarc.cpp])
+AC_CONFIG_HEADER([config/conf.h])
+
+# define conditionals/arguments ***************************
+
+
+
+# use --disable-data for generating stationaries
+AC_ARG_ENABLE(arrcheck,     [  --enable-arrcheck       LAMARC: produce output to check arrangers],[lamARRCHECK=$enableval],[lamARRCHECK=no])
+AC_ARG_ENABLE(buildwx,      [  --enable-buildwx        LAMARC: do a local build of the wxWidgets library],[lamBUILDWX=$enableval],[lamBUILDWX=no])
+AC_ARG_ENABLE(converter,    [  --disable-converter     LAMARC: don't build data conversion program(s)],[lamCONV=$enableval],[lamCONV=yes])
+AC_ARG_ENABLE(data,         [  --disable-data          LAMARC: ignore data file],[lamDATA_READ=$enableval],[lamDATA_READ=yes])
+AC_ARG_ENABLE(denovo,       [  --enable-denovo         LAMARC: all trees generated de-novo],[lamDENOVO=$enableval],[lamDENOVO=no])
+AC_ARG_ENABLE(debug,        [  --enable-debug          Compile with debug flags],[lamDEBUG=$enableval],[lamDEBUG=no])
+AC_ARG_ENABLE(dmalloc,      [  --enable-dmalloc        LAMARC: Link with dmalloc library],[lamDMALLOC=$enableval],[lamDMALLOC=no])
+AC_ARG_ENABLE(efence,       [  --enable-efence         LAMARC: Link with electric fence],[lamEFENCE=$enableval],[lamEFENCE=no])
+AC_ARG_ENABLE(gcov,         [  --enable-gcov           LAMARC: Compile with gcov flags],[lamGCOV=$enableval],[lamGCOV=no])
+AC_ARG_ENABLE(gui,          [  --disable-gui           LAMARC: don't create lam_conv with graphic UI capability],[lamGUI=$enableval],[lamGUI=yes])
+AC_ARG_ENABLE(jsim,         [  --enable-jsim           LAMARC: Compile with Jon's simulation flags],[lamJSIM=$enableval],[lamJSIM=no])
+AC_ARG_ENABLE(profile,      [  --enable-profile        LAMARC: compile with profiling flags],[lamPROFILE=$enableval],[lamPROFILE=no])
+AC_ARG_ENABLE(progress,     [  --disable-progress      LAMARC: turn off progress bar],[lamPROGRESS=$enableval],[lamPROGRESS=yes])
+AC_ARG_ENABLE(shared,       [  --disable-shared        LAMARC: don't use shared libs],[lamSHARED=$enableval],[lamSHARED=no])
+AC_ARG_ENABLE(treetrack,    [  --enable-treetrack      LAMARC: track datalikelihoods of sampled trees],[lamTREETRACK=$enableval],[lamTREETRACK=no])
+AC_ARG_ENABLE(universal_binary, [  --enable-universal_binary      LAMARC: build universal binary],[lamUNIVB=$enableval],[lamUNIVB=no])
+AC_ARG_WITH(wx-config,      [  --with-wx-config=FILE   LAMARC: use existing wxWindows installation],[lamWX_CONFIG=$withval])
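+
+# Illustrative invocations (examples only; the exact set of targets built
+# depends on the conditionals defined later in this script):
+#   ./configure                                   # lamarc plus the GUI converter lam_conv
+#   ./configure --disable-gui                     # batch_lam_conv instead of lam_conv
+#   ./configure --enable-debug --enable-buildwx   # debug build that also compiles the bundled wxWidgets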
+
+
+# check that we have info on the GUI toolkit and OS, guessing it from
+# $target_os where possible, and complain if we cannot
+AC_SUBST(LAMARC_CODE_OSTYPE)
+case "$target_os" in
+    darwin*)
+        if test "X${lamCONV}" = "Xyes" ; then
+            if test "X${lamGUI}" = "Xyes" ; then
+                if test "X${GUI_TOOLKIT}" = "X" ; then
+                    GUI_TOOLKIT=osx_cocoa
+                fi
+            fi
+        fi
+        if test "X${LAMARC_CODE_OSTYPE}" = "X" ; then
+            LAMARC_CODE_OSTYPE=LAMARC_COMPILE_MACOSX
+        fi
+        ;;
+    freebsd*|linux*)
+        if test "X${lamCONV}" = "Xyes" ; then
+            if test "X${lamGUI}" = "Xyes" ; then
+                if test "X${GUI_TOOLKIT}" = "X" ; then
+                    GUI_TOOLKIT=gtk2
+                fi
+            fi
+        fi
+        if test "X${LAMARC_CODE_OSTYPE}" = "X" ; then
+            LAMARC_CODE_OSTYPE=LAMARC_COMPILE_LINUX
+        fi
+        ;;
+    *mingw32*)
+        if test "X${lamCONV}" = "Xyes" ; then
+            if test "X${lamGUI}" = "Xyes" ; then
+                if test "X${GUI_TOOLKIT}" = "X" ; then
+                    GUI_TOOLKIT=msw
+                fi
+            fi
+        fi
+        if test "X${LAMARC_CODE_OSTYPE}" = "X" ; then
+            LAMARC_CODE_OSTYPE=LAMARC_COMPILE_MSWINDOWS
+        fi
+        ;;
+    *)
+        if test "X${LAMARC_CODE_OSTYPE}" = "X" ; then
+	    GUI_TOOLKIT=osx_cocoa
+            LAMARC_CODE_OSTYPE=LAMARC_COMPILE_MACOSX
+            echo "*************************************************************"
+            echo "DO NOT CHECK THIS configure.ac in, it has GUI_TOOLKIT and"
+            echo "LAMARC_CODE_OSTYPE hardwired for 10.7 mac"
+            echo "*************************************************************"
+        fi
+        ;;
+esac
+
+if test "X${LAMARC_CODE_OSTYPE}" = "X" ; then
+    echo "An internal error occurred. We thought it was impossible not"
+    echo "to have LAMARC_CODE_OSTYPE set. Please notify the lamarc"
+    echo "team at lamarc at u.washington.edu and try setting the environment"
+    echo "variable LAMARC_CODE_OSTYPE as described in the \"Compiling"
+    echo "Lamarc\" portion of the documentation."
+    AC_MSG_ERROR([cannot proceed without LAMARC_CODE_OSTYPE])
+fi
+
+if test "X${lamCONV}" = "Xyes" ; then
+    if test "X${lamGUI}" = "Xyes" ; then
+        if test "X${GUI_TOOLKIT}" = "X" ; then
+            echo "*************************************************************"
+            echo "You have enabled compilation of the converter GUI but the"
+            echo "configure script was not able to guess the correct GUI"
+            echo "toolkit to use.  See the section entitled Compiling"
+            echo "Lamarc in the Lamarc documentation for more information."
+            echo "*************************************************************"
+            AC_MSG_ERROR([cannot proceed without GUI_TOOLKIT])
+        fi
+    else
+        if test "X${GUI_TOOLKIT}" != "X" ; then
+            if test "X${GUI_TOOLKIT}" != "Xbase" ; then
+                AC_MSG_WARN([changing GUI_TOOLKIT ${GUI_TOOLKIT} to base because GUI is disabled])
+            fi
+        fi
+        GUI_TOOLKIT=base
+    fi
+fi
+
+
+AC_SUBST(LAMARC_HOST_PREFIX)
+if test "X${host_alias}" = "X" ; then
+    LAMARC_HOST_PREFIX=""
+else
+    LAMARC_HOST_PREFIX="${host_alias}-"
+fi
+
+# conditionals used in Makefile.am to specify different
+# resources and/or compilation products for different
+# platforms
+AM_CONDITIONAL(LAMARC_APP_OSX, test $LAMARC_CODE_OSTYPE = LAMARC_COMPILE_MACOSX)
+AM_CONDITIONAL(LAMARC_APP_MSW, test $LAMARC_CODE_OSTYPE = LAMARC_COMPILE_MSWINDOWS)
+
+# now a few functions for checking the wxWidgets stuff
+
+build_wx()
+{
+    echo "*******************************************"
+    echo "* starting wxWidgets build                *"
+    echo "*******************************************"
+
+    # build wx options
+    if test "X${lamGUI}" = "Xyes" ; then
+        guiOption="--with-${GUI_TOOLKIT}"
+    else
+        guiOption="--disable-gui"
+    fi
+
+    sharedOption="--disable-shared"
+
+    debugOption="--disable-debug"
+    if test "X${lamDEBUG}" = "Xyes" ; then
+        debugOption="--enable-debug"
+    fi
+
+    crossOptions=
+    if test "$host_os" != "$build_os" ; then
+        crossOptions="--disable-threads --host=${host_alias}"
+    fi
+
+    # this is not optimal -- but it is the easiest way to handle the fact
+    # that the builtin libtiff is needed on Debian while we compile
+    # releases on Red Hat
+    tiffOptions=
+    if test "$LAMARC_CODE_OSTYPE" = "LAMARC_COMPILE_LINUX" ; then
+        tiffOptions="--with-libtiff=builtin"
+    fi
+
+    universalBinaryOptions=
+    if test "X${lamUNIVB}" = "Xyes" ; then
+        universalBinaryOptions="--enable-universal_binary --disable-dependency-tracking"
+    fi
+
+
+    wxConfigOptions="${guiOption} ${sharedOption} ${debugOption} ${crossOptions} ${tiffOptions} ${universalBinaryOptions}"
+
+    thisDir=`pwd`;
+    configScriptDir=`dirname $0`
+    topOfPackage=`(cd $configScriptDir 2> /dev/null && pwd ;)`
+    wxTarFile=${topOfPackage}/wxWidgets/wxWidgets-${wxVersionProvided}.tar.gz
+    wxUnpackedDir=wxWidgets-${wxVersionProvided}
+
+    # unpack tar file
+    if test ! -d ${wxUnpackedDir} ; then
+        echo "untar-ing wxWidgets distribution ${wxTarFile}"
+        tar xfvz ${wxTarFile}
+    else
+        echo "${wxUnpackedDir} already exists. Skipping main un-tar step"
+    fi
+
+    if test ! -d ${wxUnpackedDir} ; then
+        AC_MSG_ERROR([Unsuccessful trying to un-tar ${wxTarFile}])
+    else
+        cd ${wxUnpackedDir}
+    fi
+
+    # untar patches over tar file
+    for file in ${topOfPackage}/wxWidgets/wxWidgets-${wxVersionProvided}-Patch*.tar.gz
+    do
+        tar xfvz ${file}
+    done
+
+    # build wxWidgets
+    ./configure ${wxConfigOptions}
+    make
+    cd ${thisDir}
+    RETURN_STRING=${thisDir}/${wxUnpackedDir}/wx-config
+}
+
+get_wx_config_script()
+{
+    if test "X${lamWX_CONFIG}" = "Xyes" ; then
+        # whoops, user used --with-wx-config without an argument
+        # let's assume it is going to be on their path
+        AC_MSG_WARN([no argument given to --with-wx-config])
+        AC_MSG_WARN([Assuming "wx-config" is on your \$PATH])
+        WXCONFIG="wx-config"
+    else
+        if test "X${lamWX_CONFIG}" = "X" ; then
+            # whoops, user didn't give --with-wx-config
+            # let's assume wx-config is going to be on their path
+            AC_MSG_WARN([No option --with-wx-config given])
+            AC_MSG_WARN([Assuming "wx-config" is on your \$PATH])
+            WXCONFIG="wx-config"
+        else
+            # take the value of wx-config script the user gave us
+            WXCONFIG=$lamWX_CONFIG
+        fi
+    fi
+
+    AC_PATH_PROG(WXCONFIG2, $WXCONFIG, "")
+    if test "X${WXCONFIG2}" = "X" ; then
+        AC_MSG_WARN([Failure of above check for $WXCONFIG might have been a bug])
+        AC_MSG_WARN([Searching for it using AC CHECK FILE macro])
+        AC_CHECK_FILE($WXCONFIG, WXCONFIG2=$WXCONFIG, WXCONFIG2="")
+    fi
+
+    AC_MSG_CHECKING([wxWidgets version])
+    if test "X${WXCONFIG2}" = "X" ; then
+        AC_MSG_RESULT([not found])
+        wxVersion=""
+    else
+        if wxVersion=`$WXCONFIG2 --version`; then
+            AC_MSG_RESULT([$wxVersion])
+        else
+            AC_MSG_RESULT([not found])
+        fi
+    fi
+
+    whatIsWxProb=""
+    isVersionBad=""
+    if test "X${wxVersion}" = "X" ; then
+        whatIsWxProb="Could not find wxWidgets config script"
+        isVersionBad="yes"
+    else
+        whatIsWxProb="Your wxWidgets version is ${wxVersion}"
+        wxVersionMajor=`echo ${wxVersion} | awk -F. '{ print $1 }' - `
+        wxVersionMinor=`echo ${wxVersion} | awk -F. '{ print $2 }' - `
+        wxVersionPatch=`echo ${wxVersion} | awk -F. '{ print $3 }' - `
+
+        if test "$wxVersionMajor" -lt "$wxRequiredMajorVersion" ; then
+            isVersionBad="yes"
+        else
+            if test "$wxVersionMajor" -eq "$wxRequiredMajorVersion" ; then
+                if test "$wxVersionMinor" -lt "$wxRequiredMinorVersion" ; then
+                    isVersionBad="yes"
+                else
+                    if test "$wxVersionMinor" -eq "$wxRequiredMinorVersion" ; then
+                        if test "$wxVersionPatch" -lt "$wxRequiredPatchVersion" ; then
+                            isVersionBad="yes"
+                        fi
+                    fi
+                fi
+            fi
+        fi
+    fi
+
+    if test "${isVersionBad}" ; then
+        echo "*******************************************"
+        echo "wxWidgets version ${wxVersionRequired} or greater is required"
+        echo "to build lamarc data converters, but the configure script"
+        echo "found version ${wxVersion}."
+        echo ""
+        echo "If you don't want to install your own version of wxWidgets,"
+        echo "you can try using the configure switch --enable-buildwx"
+        echo ""
+        echo "See the section entitled Compiling Lamarc in the"
+        echo "Lamarc documentation for more information."
+        echo "*******************************************"
+        AC_MSG_ERROR([needed wxWidgets version ${wxVersionRequired} or greater])
+    fi
+
+    RETURN_STRING=${WXCONFIG2}
+}
+
+
+if test "X${lamBUILDWX}" = "Xyes" ; then
+    if test "X${lamWX_CONFIG}" != "X" ; then
+        AC_MSG_ERROR([configure switches --with-wx-config and --enable-buildwx mutually exclusive])
+    fi
+    build_wx
+    lamWX_CONFIG=${RETURN_STRING}
+fi
+
+
+
+WXCONFIG3=""
+if test "X${lamCONV}" = "Xyes" ; then
+    get_wx_config_script
+    WXCONFIG3=${RETURN_STRING}
+    if test "$host_os" != "$build_os" ; then
+        WXCONFIG3="${WXCONFIG3} --host=${host_alias}"
+    fi
+
+    if test "X${lamDEBUG}" = "Xyes" ; then
+        WXCONFIG3="${WXCONFIG3} --debug=yes"
+    else
+        WXCONFIG3="${WXCONFIG3} --debug=no"
+    fi
+
+    # we may have many toolkits installed so specify
+    # if we are GUI-less GUI_TOOLKIT should already have
+    # been set to "base"
+    WXCONFIG3="${WXCONFIG3} --toolkit=${GUI_TOOLKIT}"
+
+    # check that the wx-config we found will provide us anything
+    wxListResult=`${WXCONFIG3} --list`
+    wxOK=`echo ${wxListResult} | awk '{ if (/will be used/) {print 1} ; if (/Specification best match/) {print 1} }' - `
+    if test "X${wxOK}" != "X1" ; then
+        echo "*************************************************************"
+        echo " Lamarc configure script failed because it could not find the "
+        echo " correct wxWidgets library. You might check your path, the "
+        echo " value of variable GUI_TOOLKIT, or try adding the configure"
+        echo " option --enable-buildwx. For more details see the section"
+        echo " entitled \"Compiling Lamarc\" in the lamarc documentation."
+        echo ""
+        echo " Here is the error message from wx-config, in case that helps:"
+        echo ""
+        echo "  " ${wxListResult}
+        echo ""
+        echo "*************************************************************"
+        AC_MSG_ERROR([cannot proceed without suitable wxWidgets library])
+    fi
+
+
+    AC_SUBST(WX_CXXFLAGS, [`$WXCONFIG3 --cxxflags`])
+    AC_SUBST(WX_LIBS, [`$WXCONFIG3 --libs`])
+fi
+
+# Checks for programs. -- tests and setting of cxxflags are to
+# keep AC_PROG_CXX from setting unfortunate default values
+if test -n "${CXXFLAGS}"; then
+    user_set_cxxflags=yes
+fi
+AC_PROG_CXX
+if test X$user_set_cxxflags != Xyes; then
+    CXXFLAGS=" "
+fi
+
+# so we can build tinyxml as a library
+AC_PROG_RANLIB
+
+# Checks for libraries.
+
+# Checks for header files.
+AC_HEADER_STDC
+AC_CHECK_HEADERS([float.h limits.h stdlib.h string.h unistd.h])
+
+# Checks for typedefs, structures, and compiler characteristics.
+AC_HEADER_STDBOOL
+AC_C_CONST
+AC_C_INLINE
+AC_TYPE_SIZE_T
+AC_STRUCT_TM
+
+# Checks for library functions.
+AC_FUNC_ERROR_AT_LINE
+AC_FUNC_MEMCMP
+AC_FUNC_STRFTIME
+AC_CHECK_FUNCS([getcwd pow sqrt strchr strpbrk])
+
+AM_CONDITIONAL(ARRCHECK,    test x$lamARRCHECK = xyes)
+AM_CONDITIONAL(CONVERTER,   test x$lamCONV = xyes)
+AM_CONDITIONAL(DEBUG,       test x$lamDEBUG = xyes)
+AM_CONDITIONAL(DENOVO,      test x$lamDENOVO = xyes)
+AM_CONDITIONAL(DMALLOC,     test x$lamDMALLOC = xyes)
+AM_CONDITIONAL(EFENCE,      test x$lamEFENCE = xyes)
+AM_CONDITIONAL(GCOV,        test x$lamGCOV = xyes)
+AM_CONDITIONAL(GUI,         test x$lamGUI = xyes)
+AM_CONDITIONAL(JSIM,        test x$lamJSIM = xyes)
+AM_CONDITIONAL(NODATA,      test x$lamDATA_READ != xyes)
+AM_CONDITIONAL(NOPROGRESS,  test x$lamPROGRESS != xyes)
+AM_CONDITIONAL(PROFILE,     test x$lamPROFILE = xyes)
+AM_CONDITIONAL(TREETRACK,   test x$lamTREETRACK = xyes)
+AM_CONDITIONAL(UNIVB,       test x$lamUNIVB = xyes)
+AM_CONDITIONAL(USERCXXFLAGS,    test x$user_set_cxxflags = xyes)
+
+AC_CONFIG_FILES([Makefile])
+AC_OUTPUT
+
+echo "*************************************************************"
+echo " Settings for lamarc build. For information on how to change"
+echo " try executing \"configure --help\" and reading the lamarc"
+echo " documentation section entitled \"Compiling Lamarc\"."
+echo "*************************************************************"
+echo ""
+echo "Options that are almost always on:"
+echo "  Compile lam_conv, the lamarc data converter?   ${lamCONV}"
+if test "X${lamCONV}" = "Xyes" ; then
+echo "  Compile lam_conv as a GUI?                     ${lamGUI}"
+else
+echo "  Compile lam_conv as a GUI?                     --"
+fi
+echo "  Compile LAMARC to read data?                   ${lamDATA_READ}"
+echo "  Compile LAMARC to display progress bar?        ${lamPROGRESS}"
+echo "Options that are almost always off:"
+echo "  Compile LAMARC to check arrangers?             ${lamARRCHECK}"
+echo "  Compile LAMARC to generate all trees denovo?   ${lamDENOVO}"
+echo "  Compile LAMARC to use dmalloc?                 ${lamDMALLOC}"
+echo "  Compile LAMARC to use electric fence?          ${lamEFENCE}"
+echo "  Compile LAMARC to use gcov?                    ${lamGCOV}"
+echo "  Compile LAMARC to use Jon's simulation code?   ${lamJSIM}"
+echo "  Compile LAMARC to generate profile data?       ${lamPROFILE}"
+echo "  Track data likelihoods of sampled trees?       ${lamTREETRACK}"
+echo "  Attempt to compile universal binary (MAC)?     ${lamUNIVB}"
+echo ""
+echo "Commonly toggled configure options:"
+echo "  Compile LAMARC in debug mode?                  ${lamDEBUG}"
+echo ""
+if test "X${lamCONV}" = "Xyes" ; then
+echo "wxWidgets version                                ${wxVersion}"
+else
+echo "wxWidgets version                                --"
+fi
+if test "X${lamCONV}" = "Xyes" ; then
+echo "gui toolkit                                      ${GUI_TOOLKIT}"
+else
+echo "gui toolkit                                      --"
+fi
+echo "LAMARC_CODE_OSTYPE                               ${LAMARC_CODE_OSTYPE}"
+echo "*************************************************************"
+
+case "${LAMARC_CODE_OSTYPE}" in
+    LAMARC_COMPILE_MACOSX)
+        ;;
+    LAMARC_COMPILE_LINUX)
+        ;;
+    LAMARC_COMPILE_MSWINDOWS)
+        ;;
+    *)
+        echo "The lamarc compile script failed to recognize"
+        echo "${LAMARC_CODE_OSTYPE} as a legal value for"
+        echo "LAMARC_CODE_OSTYPE. It is likely that you will have"
+        echo "trouble compiling lamarc."
+        echo ""
+        echo "We recommend that you see the section entitled \"Compiling"
+        echo "Lamarc\" in the lamarc documentation for alternative solutions."
+        echo "*************************************************************"
+        ;;
+esac
+
+if test "X${lamCONV}" = "Xyes" ; then
+    case "${GUI_TOOLKIT}" in
+        gtk2)
+            ;;
+        mac)
+            ;;
+        msw)
+            ;;
+        base)
+            ;;
+        *)
+            echo "The lamarc compile script did not recognize your GUI toolkit,"
+            echo "\"$GUI_TOOLKIT\", as one we have tested. It is very likely"
+            echo "that the lamarc file converter, lam_conv, will not work."
+            echo ""
+            echo "We recommend that you see the section entitled \"Compiling"
+            echo "Lamarc\" in the lamarc documentation for alternative solutions."
+            echo "*************************************************************"
+            ;;
+    esac
+fi
+
+
+if test "X$user_set_cxxflags" = "Xyes"; then
+    echo "************************************************************* "
+    echo "You have set your own value of CXXFLAGS. Did you remember to"
+    echo "use a matching argument of --enable-debug or --disable-debug?"
+    echo "If not, you may have trouble running the programs. "
+    echo "************************************************************* "
+fi
+
+echo "Configure script finished successfully."
diff --git a/doc/cross-notes.txt b/doc/cross-notes.txt
new file mode 100644
index 0000000..fbde09b
--- /dev/null
+++ b/doc/cross-notes.txt
@@ -0,0 +1,203 @@
+########################################################################
+# HOW TO INSTALL A CROSS-COMPILING (LINUX -> MS) G++ COMPILER 
+########################################################################
+This file is supposed to live in 
+
+    sanity at lamarc.gs.washington.edu:cross-tools/cross-notes.txt
+
+#########################################
+# Where to put it
+
+I installed the cross-compilation tools in the "sanity" user account on
+lamarc. You should be able to read and use them just by following the path
+
+    /net/gs/vol1/home/sanity
+
+If you want to make changes, you'll need the IT folks to give you sudo
+access. Once that is done, you can log in this way:
+
+    sudo -H -s -u sanity
+
+
+#########################################
+# Getting the packages
+
+I used the link below as my starting point, though I've also built 
+cross-compilers before, and can tell you that the correct thing to do
+will likely change by the time you next wish to update the compilers
+
+    http://www.blogcompiler.com/2010/07/11/compile-for-windows-on-linux/
+
+I went to the mingw-w64 sourceforge page here:
+
+    http://sourceforge.net/projects/mingw-w64/files/
+
+and from there followed each of
+
+    * Toolchains targeting Win64 > Personal Builds
+    * Toolchains targeting Win32 > Personal Builds
+
+ending up with the following two tar files:
+
+    mingw-w32-bin_x86_64-linux_20110510_sezero.tar.gz
+    mingw-w64-bin_x86_64-linux_20110510_sezero.tar.gz
+
+Note that the part that says 'x86_64-linux' refers to the machine
+you will run the cross-compiler on. I chose lamarc.gs.washington.edu
+
+I untar'd the packages and installed them under
+
+    /net/gs/vol1/home/sanity/cross-tools
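+
+In case it helps, here is a minimal sketch of that unpacking step. The
+"/path/to/" pieces are placeholders for wherever you downloaded the tar
+files, and you may need to rename or arrange the unpacked trees so they
+match the cross_win32/cross_win64 directories used in the steps below:
+
+    # hypothetical sketch -- adjust paths to your download location
+    cd /net/gs/vol1/home/sanity/cross-tools
+    tar xfvz /path/to/mingw-w32-bin_x86_64-linux_20110510_sezero.tar.gz
+    tar xfvz /path/to/mingw-w64-bin_x86_64-linux_20110510_sezero.tar.gz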
+
+#########################################
+# Using the compilers -- 64 bit lamarc.exe
+
+I recommend that before you try to build lam_conv.exe (the windows
+version of the converter gui), you first build the plain lamarc.exe
+executable. This is because lam_conv.exe requires compiling wxWidgets,
+which is typically more complicated than building lamarc.exe
+
+Here are the steps I used to cross-compile a lamarc-only (no converter)
+64-bit version of lamarc.exe on lamarc.gs.washington.edu
+
+    # Step 1:
+    # create a directory and check a new lamarc distribution out in it
+    mkdir myBuildDir
+    cd myBuildDir
+    export CVS_RSH=ssh
+    export CVSROOT=/local/cvs
+    cvs co -P lamarc
+
+    # Step 2:
+    # invoke modules to get a more modern default g++ compiler and
+    # library. this is interesting -- we're not going to use the
+    # resulting compiler for our code, but our compiler needs 
+    # access to some of its libraries
+    . /etc/profile.d/modules.sh
+    module load modules modules-init modules-gs
+    module load gmp mpfr/2.4.1 gcc
+
+    # Step 3:
+    # get the desired cross-compiler on our $PATH
+    export CROSS_HOME=/net/gs/vol1/home/sanity/cross-tools/cross_win64
+    export HOST_TYPE=x86_64-w64-mingw32
+    export PATH=$CROSS_HOME/bin:$CROSS_HOME/$HOST_TYPE/bin/:$PATH
+
+    # Step 4:
+    # unfortunately, in order to make the modules command make everything
+    # 'just work' for the typical user, several environment variables
+    # are set which hose the cross-compiling process. these steps
+    # undo that
+    unset CC
+    unset CPP
+    unset CXX
+    unset CPPFLAGS
+    unset LDFLAGS
+
+    # Step 5:
+    # make sure the autotools-produced Makefiles are up-to-date
+    # I used automake and autoconf
+    # If you update to new versions, you may need to change these
+    # directions as well as the Makefile.am and configure.ac files
+    cd lamarc
+    aclocal
+    autoconf
+    automake
+
+    # Step 6:
+    # configure and make the executable
+    mkdir rel64
+    cd rel64
+    ../configure --disable-converter --host=$HOST_TYPE 
+    make lamarc.exe
+
+#########################################
+# Using the compilers -- 32 bit lamarc.exe
+
+To build a 32-bit windows executable, do as above, but replace steps 3
+and 6 with these:
+
+    # Step 3 for win-32
+    export CROSS_HOME=/net/gs/vol1/home/sanity/cross-tools/cross_win32
+    export HOST_TYPE=i686-w64-mingw32
+    export PATH=$CROSS_HOME/bin:$CROSS_HOME/$HOST_TYPE/bin/:$PATH
+
+    # Step 6 for win-32
+    mkdir rel32
+    cd rel32
+    ../configure --disable-converter --host=$HOST_TYPE 
+    make lamarc.exe
+
+
+#########################################
+# building wxWidgets-enabled lam_conv.exe
+
+Once you have the above working, you can try to cross-compile
+lam_conv.exe. First make sure you have cross-compiled versions
+of the wxWidgets library. See here:
+
+    sanity at lamarc.gs.washington.edu:wx-libs/wx-notes.txt
+
+Assuming things are cool there, here are the instructions for
+a 64-bit lam_conv.exe on lamarc.gs.washington.edu
+
+    # modules as above
+    . /etc/profile.d/modules.sh
+    module load modules modules-init modules-gs
+    module load gmp mpfr/2.4.1 gcc
+
+    # get the desired cross-compiler on our $PATH
+    export CROSS_HOME=/net/gs/vol1/home/sanity/cross-tools/cross_win64
+    export HOST_TYPE=x86_64-w64-mingw32
+    export PATH=$CROSS_HOME/bin:$CROSS_HOME/$HOST_TYPE/bin/:$PATH
+
+    # remove unfortunate modules side-effects
+    unset CC
+    unset CPP
+    unset CXX
+    unset CPPFLAGS
+    unset LDFLAGS
+
+    # configure for compilation with wxWidgets cross-compiled library
+    cd <path to lamarc checkout>
+
+    export WX_LIB=/net/gs/vol1/home/sanity/wx-libs/
+    mkdir wxRel64
+    cd wxRel64
+    ../configure --host=$HOST_TYPE \
+        --with-wx-config=/net/gs/vol1/home/sanity/wx-libs/wxMSW/bin/wx-config
+    make lam_conv.exe
+
+
+# and now the same for 32 bits
+
+    # modules as above
+    . /etc/profile.d/modules.sh
+    module load modules modules-init modules-gs
+    module load gmp mpfr/2.4.1 gcc
+
+    # get the desired cross-compiler on our $PATH
+    export CROSS_HOME=/net/gs/vol1/home/sanity/cross-tools/cross_win32
+    export HOST_TYPE=i686-w64-mingw32
+    export PATH=$CROSS_HOME/bin:$CROSS_HOME/$HOST_TYPE/bin/:$PATH
+
+    # remove unfortunate modules side-effects
+    unset CC
+    unset CPP
+    unset CXX
+    unset CPPFLAGS
+    unset LDFLAGS
+
+    # configure for compilation with wxWidgets cross-compiled library
+    cd <path to lamarc checkout>
+
+    export WX_LIB=/net/gs/vol1/home/sanity/wx-libs/
+    mkdir wxRel32
+    cd wxRel32
+    ../configure --host=$HOST_TYPE \
+        --with-wx-config=/net/gs/vol1/home/sanity/wx-libs/wxMSW/bin/wx-config
+    make lam_conv.exe
+
+
diff --git a/doc/html/batch_converter/README.txt b/doc/html/batch_converter/README.txt
new file mode 100644
index 0000000..b4318c8
--- /dev/null
+++ b/doc/html/batch_converter/README.txt
@@ -0,0 +1,12 @@
+Copy 'lam_conv' (or lam_conv.exe, depending on your system) into this 
+directory, then run:
+
+    ./lam_conv -b -c sample-conv-cmd.xml
+
+This data isn't meant to be runnable with Lamarc. However, if you read the
+comments in sample-conv-cmd.xml you should be able to produce your own
+lamarc input file.  From there, you should be able to run lamarc on the 
+produced file.
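+
+For example, assuming the lamarc executable is available in the current
+directory and your export produced a file named my_lamarc_input.xml (both
+names here are placeholders), the run would look roughly like:
+
+    ./lamarc my_lamarc_input.xml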
+
+You can also omit the '-b' option to pull the data into the converter, and
+further manipulate it with the GUI.
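+
+For instance, to load the same data into the converter GUI instead of
+converting it in batch mode:
+
+    ./lam_conv -c sample-conv-cmd.xml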
diff --git a/doc/html/batch_converter/chrom1.mig b/doc/html/batch_converter/chrom1.mig
new file mode 100644
index 0000000..c5ad74e
--- /dev/null
+++ b/doc/html/batch_converter/chrom1.mig
@@ -0,0 +1,14 @@
+   2 1  Example: chromosome 1 with single dna segment
+9
+6    North
+n_ind0_a  ccccccAcc
+n_ind0_b  TcccccAcc
+n_ind1_a  ccccccTcc
+n_ind1_b  TcccccTcc
+n_ind2_a  ccccccGcc
+n_ind2_b  TcccccGcc
+4    South
+s_ind0_a  cTccTcccc
+s_ind0_b  ccccTcccc
+s_ind1_a  cTccccccc
+s_ind1_b  ccccccccc
diff --git a/doc/html/batch_converter/chrom1_lamarc.html b/doc/html/batch_converter/chrom1_lamarc.html
new file mode 100644
index 0000000..8c920f7
--- /dev/null
+++ b/doc/html/batch_converter/chrom1_lamarc.html
@@ -0,0 +1,71 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+&lt?xml version="1.0" ?&gt
+&ltlamarc&gt
+    &ltdata&gt
+        &ltregion name="from chrom1.mig"&gt
+            &ltspacing&gt
+                &ltblock name="segment 1 of chrom1.mig" /&gt
+            &lt/spacing&gt
+            &ltpopulation name="North"&gt
+                &ltindividual name="n_ind0_a"&gt
+                    &ltsample name="n_ind0_a_0"&gt
+                        &ltdatablock type="DNA"&gt ccccccAcc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind0_b"&gt
+                    &ltsample name="n_ind0_b_0"&gt
+                        &ltdatablock type="DNA"&gt TcccccAcc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind1_a"&gt
+                    &ltsample name="n_ind1_a_0"&gt
+                        &ltdatablock type="DNA"&gt ccccccTcc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind1_b"&gt
+                    &ltsample name="n_ind1_b_0"&gt
+                        &ltdatablock type="DNA"&gt TcccccTcc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind2_a"&gt
+                    &ltsample name="n_ind2_a_0"&gt
+                        &ltdatablock type="DNA"&gt ccccccGcc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind2_b"&gt
+                    &ltsample name="n_ind2_b_0"&gt
+                        &ltdatablock type="DNA"&gt TcccccGcc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+            &ltpopulation name="South"&gt
+                &ltindividual name="s_ind0_a"&gt
+                    &ltsample name="s_ind0_a_0"&gt
+                        &ltdatablock type="DNA"&gt cTccTcccc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s_ind0_b"&gt
+                    &ltsample name="s_ind0_b_0"&gt
+                        &ltdatablock type="DNA"&gt ccccTcccc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s_ind1_a"&gt
+                    &ltsample name="s_ind1_a_0"&gt
+                        &ltdatablock type="DNA"&gt cTccccccc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s_ind1_b"&gt
+                    &ltsample name="s_ind1_b_0"&gt
+                        &ltdatablock type="DNA"&gt ccccccccc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+        &lt/region&gt
+    &lt/data&gt
+&lt/lamarc&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/batch_converter/chrom1_lamarc.xml b/doc/html/batch_converter/chrom1_lamarc.xml
new file mode 100644
index 0000000..b736b48
--- /dev/null
+++ b/doc/html/batch_converter/chrom1_lamarc.xml
@@ -0,0 +1,64 @@
+<?xml version="1.0" ?>
+<lamarc>
+    <data>
+        <region name="from chrom1.mig">
+            <spacing>
+                <block name="segment 1 of chrom1.mig" />
+            </spacing>
+            <population name="North">
+                <individual name="n_ind0_a">
+                    <sample name="n_ind0_a_0">
+                        <datablock type="DNA"> ccccccAcc </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind0_b">
+                    <sample name="n_ind0_b_0">
+                        <datablock type="DNA"> TcccccAcc </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind1_a">
+                    <sample name="n_ind1_a_0">
+                        <datablock type="DNA"> ccccccTcc </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind1_b">
+                    <sample name="n_ind1_b_0">
+                        <datablock type="DNA"> TcccccTcc </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind2_a">
+                    <sample name="n_ind2_a_0">
+                        <datablock type="DNA"> ccccccGcc </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind2_b">
+                    <sample name="n_ind2_b_0">
+                        <datablock type="DNA"> TcccccGcc </datablock>
+                    </sample>
+                </individual>
+            </population>
+            <population name="South">
+                <individual name="s_ind0_a">
+                    <sample name="s_ind0_a_0">
+                        <datablock type="DNA"> cTccTcccc </datablock>
+                    </sample>
+                </individual>
+                <individual name="s_ind0_b">
+                    <sample name="s_ind0_b_0">
+                        <datablock type="DNA"> ccccTcccc </datablock>
+                    </sample>
+                </individual>
+                <individual name="s_ind1_a">
+                    <sample name="s_ind1_a_0">
+                        <datablock type="DNA"> cTccccccc </datablock>
+                    </sample>
+                </individual>
+                <individual name="s_ind1_b">
+                    <sample name="s_ind1_b_0">
+                        <datablock type="DNA"> ccccccccc </datablock>
+                    </sample>
+                </individual>
+            </population>
+        </region>
+    </data>
+</lamarc>
diff --git a/doc/html/batch_converter/chrom2.mig b/doc/html/batch_converter/chrom2.mig
new file mode 100644
index 0000000..f8021cd
--- /dev/null
+++ b/doc/html/batch_converter/chrom2.mig
@@ -0,0 +1,24 @@
+   2 2  Example: 2 chromosomes with 2 loci each
+5 7
+6    North
+n_ind0_a  Gaaaa
+n_ind0_b  aGaaa
+n_ind1_a  aaGaa
+n_ind1_b  aaaGa
+n_ind2_a  aaaaG
+n_ind2_b  Taaaa
+n_ind0_a  Caaaaaa
+n_ind0_b  aCaaaaa
+n_ind1_a  aaCaaaa
+n_ind1_b  aaaCaaa
+n_ind2_a  aaaaCaa
+n_ind2_b  aaaaaCa
+4    South
+s_ind0_a  Taaaa
+s_ind0_b  aTaaa
+s_ind1_a  aaTaa
+s_ind1_b  aaaTa
+s_ind0_a  Gaaaaaa
+s_ind0_b  aGaaaaa
+s_ind1_a  aaGaaaa
+s_ind1_b  aaaGaaa
diff --git a/doc/html/batch_converter/chrom2_lamarc.html b/doc/html/batch_converter/chrom2_lamarc.html
new file mode 100644
index 0000000..83bae56
--- /dev/null
+++ b/doc/html/batch_converter/chrom2_lamarc.html
@@ -0,0 +1,92 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+&lt?xml version="1.0" ?&gt
+&ltlamarc&gt
+    &ltdata&gt
+        &ltregion name="chrom2"&gt
+            &ltspacing&gt
+                &ltblock name="chrom2-segment1"&gt
+                    &ltmap-position&gt 1000 &lt/map-position&gt
+                    &ltlength&gt 500 &lt/length&gt
+                    &ltoffset&gt -4 &lt/offset&gt
+                    &ltlocations&gt 2 88 125 173 443 &lt/locations&gt
+                &lt/block&gt
+                &ltblock name="chrom2-segment2"&gt
+                    &ltmap-position&gt 5000 &lt/map-position&gt
+                    &ltlength&gt 250 &lt/length&gt
+                    &ltoffset&gt -4 &lt/offset&gt
+                    &ltlocations&gt 13 19 35 77 102 112 204 &lt/locations&gt
+                &lt/block&gt
+            &lt/spacing&gt
+            &ltpopulation name="North"&gt
+                &ltindividual name="n_ind0_a"&gt
+                    &ltsample name="n_ind0_a_0"&gt
+                        &ltdatablock type="SNP"&gt Gaaaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt Caaaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind0_b"&gt
+                    &ltsample name="n_ind0_b_0"&gt
+                        &ltdatablock type="SNP"&gt aGaaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aCaaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind1_a"&gt
+                    &ltsample name="n_ind1_a_0"&gt
+                        &ltdatablock type="SNP"&gt aaGaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaCaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind1_b"&gt
+                    &ltsample name="n_ind1_b_0"&gt
+                        &ltdatablock type="SNP"&gt aaaGa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaaCaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind2_a"&gt
+                    &ltsample name="n_ind2_a_0"&gt
+                        &ltdatablock type="SNP"&gt aaaaG &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaaaCaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind2_b"&gt
+                    &ltsample name="n_ind2_b_0"&gt
+                        &ltdatablock type="SNP"&gt Taaaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaaaaCa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+            &ltpopulation name="South"&gt
+                &ltindividual name="s_ind0_a"&gt
+                    &ltsample name="s_ind0_a_0"&gt
+                        &ltdatablock type="SNP"&gt Taaaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt Gaaaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s_ind0_b"&gt
+                    &ltsample name="s_ind0_b_0"&gt
+                        &ltdatablock type="SNP"&gt aTaaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aGaaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s_ind1_a"&gt
+                    &ltsample name="s_ind1_a_0"&gt
+                        &ltdatablock type="SNP"&gt aaTaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaGaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s_ind1_b"&gt
+                    &ltsample name="s_ind1_b_0"&gt
+                        &ltdatablock type="SNP"&gt aaaTa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaaGaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+        &lt/region&gt
+    &lt/data&gt
+&lt/lamarc&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/batch_converter/chrom2_lamarc.xml b/doc/html/batch_converter/chrom2_lamarc.xml
new file mode 100644
index 0000000..74ea494
--- /dev/null
+++ b/doc/html/batch_converter/chrom2_lamarc.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0" ?>
+<lamarc>
+    <data>
+        <region name="chrom2">
+            <spacing>
+                <block name="chrom2-segment1">
+                    <map-position> 1000 </map-position>
+                    <length> 500 </length>
+                    <offset> -4 </offset>
+                    <locations> 2 88 125 173 443 </locations>
+                </block>
+                <block name="chrom2-segment2">
+                    <map-position> 5000 </map-position>
+                    <length> 250 </length>
+                    <offset> -4 </offset>
+                    <locations> 13 19 35 77 102 112 204 </locations>
+                </block>
+            </spacing>
+            <population name="North">
+                <individual name="n_ind0_a">
+                    <sample name="n_ind0_a_0">
+                        <datablock type="SNP"> Gaaaa </datablock>
+                        <datablock type="SNP"> Caaaaaa </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind0_b">
+                    <sample name="n_ind0_b_0">
+                        <datablock type="SNP"> aGaaa </datablock>
+                        <datablock type="SNP"> aCaaaaa </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind1_a">
+                    <sample name="n_ind1_a_0">
+                        <datablock type="SNP"> aaGaa </datablock>
+                        <datablock type="SNP"> aaCaaaa </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind1_b">
+                    <sample name="n_ind1_b_0">
+                        <datablock type="SNP"> aaaGa </datablock>
+                        <datablock type="SNP"> aaaCaaa </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind2_a">
+                    <sample name="n_ind2_a_0">
+                        <datablock type="SNP"> aaaaG </datablock>
+                        <datablock type="SNP"> aaaaCaa </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind2_b">
+                    <sample name="n_ind2_b_0">
+                        <datablock type="SNP"> Taaaa </datablock>
+                        <datablock type="SNP"> aaaaaCa </datablock>
+                    </sample>
+                </individual>
+            </population>
+            <population name="South">
+                <individual name="s_ind0_a">
+                    <sample name="s_ind0_a_0">
+                        <datablock type="SNP"> Taaaa </datablock>
+                        <datablock type="SNP"> Gaaaaaa </datablock>
+                    </sample>
+                </individual>
+                <individual name="s_ind0_b">
+                    <sample name="s_ind0_b_0">
+                        <datablock type="SNP"> aTaaa </datablock>
+                        <datablock type="SNP"> aGaaaaa </datablock>
+                    </sample>
+                </individual>
+                <individual name="s_ind1_a">
+                    <sample name="s_ind1_a_0">
+                        <datablock type="SNP"> aaTaa </datablock>
+                        <datablock type="SNP"> aaGaaaa </datablock>
+                    </sample>
+                </individual>
+                <individual name="s_ind1_b">
+                    <sample name="s_ind1_b_0">
+                        <datablock type="SNP"> aaaTa </datablock>
+                        <datablock type="SNP"> aaaGaaa </datablock>
+                    </sample>
+                </individual>
+            </population>
+        </region>
+    </data>
+</lamarc>
diff --git a/doc/html/batch_converter/chrom3_lamarc.html b/doc/html/batch_converter/chrom3_lamarc.html
new file mode 100644
index 0000000..8457471
--- /dev/null
+++ b/doc/html/batch_converter/chrom3_lamarc.html
@@ -0,0 +1,89 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+&lt?xml version="1.0" ?&gt
+&ltlamarc&gt
+    &ltdata&gt
+        &ltregion name="from chrom3microsat.mig"&gt
+            &ltspacing&gt
+                &ltblock name="segment 1 of chrom3microsat.mig"&gt
+                    &ltmap-position&gt 500 &lt/map-position&gt
+                &lt/block&gt
+                &ltblock name="segment 1 of chrom3snp.mig"&gt
+                    &ltmap-position&gt 501 &lt/map-position&gt
+                    &ltlength&gt 100 &lt/length&gt
+                    &ltoffset&gt 1 &lt/offset&gt
+                    &ltlocations&gt 23 &lt/locations&gt
+                &lt/block&gt
+            &lt/spacing&gt
+            &ltpopulation name="North"&gt
+                &ltindividual name="n_ind0"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="n_ind0_a"&gt
+                        &ltdatablock type="Microsat"&gt  18 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt T &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind0_b"&gt
+                        &ltdatablock type="Microsat"&gt  18 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind1"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="n_ind1_a"&gt
+                        &ltdatablock type="Microsat"&gt  18 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind1_b"&gt
+                        &ltdatablock type="Microsat"&gt  19 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind2"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="n_ind2_a"&gt
+                        &ltdatablock type="Microsat"&gt  18 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt A &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind2_b"&gt
+                        &ltdatablock type="Microsat"&gt  20 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+            &ltpopulation name="South"&gt
+                &ltindividual name="s_ind0"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s_ind0_a"&gt
+                        &ltdatablock type="Microsat"&gt  19 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt G &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="s_ind0_b"&gt
+                        &ltdatablock type="Microsat"&gt  20 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s_ind1"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s_ind1_a"&gt
+                        &ltdatablock type="Microsat"&gt  19 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="s_ind1_b"&gt
+                        &ltdatablock type="Microsat"&gt  21 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+        &lt/region&gt
+    &lt/data&gt
+&lt/lamarc&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/batch_converter/chrom3_lamarc.xml b/doc/html/batch_converter/chrom3_lamarc.xml
new file mode 100644
index 0000000..ab03904
--- /dev/null
+++ b/doc/html/batch_converter/chrom3_lamarc.xml
@@ -0,0 +1,82 @@
+<?xml version="1.0" ?>
+<lamarc>
+    <data>
+        <region name="from chrom3microsat.mig">
+            <spacing>
+                <block name="segment 1 of chrom3microsat.mig">
+                    <map-position> 500 </map-position>
+                </block>
+                <block name="segment 1 of chrom3snp.mig">
+                    <map-position> 501 </map-position>
+                    <length> 100 </length>
+                    <offset> 1 </offset>
+                    <locations> 23 </locations>
+                </block>
+            </spacing>
+            <population name="North">
+                <individual name="n_ind0">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="n_ind0_a">
+                        <datablock type="Microsat">  18 </datablock>
+                        <datablock type="SNP"> T </datablock>
+                    </sample>
+                    <sample name="n_ind0_b">
+                        <datablock type="Microsat">  18 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind1">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="n_ind1_a">
+                        <datablock type="Microsat">  18 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                    <sample name="n_ind1_b">
+                        <datablock type="Microsat">  19 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind2">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="n_ind2_a">
+                        <datablock type="Microsat">  18 </datablock>
+                        <datablock type="SNP"> A </datablock>
+                    </sample>
+                    <sample name="n_ind2_b">
+                        <datablock type="Microsat">  20 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                </individual>
+            </population>
+            <population name="South">
+                <individual name="s_ind0">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="s_ind0_a">
+                        <datablock type="Microsat">  19 </datablock>
+                        <datablock type="SNP"> G </datablock>
+                    </sample>
+                    <sample name="s_ind0_b">
+                        <datablock type="Microsat">  20 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                </individual>
+                <individual name="s_ind1">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="s_ind1_a">
+                        <datablock type="Microsat">  19 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                    <sample name="s_ind1_b">
+                        <datablock type="Microsat">  21 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                </individual>
+            </population>
+        </region>
+    </data>
+</lamarc>
diff --git a/doc/html/batch_converter/chrom3_phase_cmd.html b/doc/html/batch_converter/chrom3_phase_cmd.html
new file mode 100644
index 0000000..b83eb79
--- /dev/null
+++ b/doc/html/batch_converter/chrom3_phase_cmd.html
@@ -0,0 +1,36 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+&ltlamarc-converter-cmd&gt
+    &ltindividuals&gt
+        &ltindividual&gt
+            &ltname&gtn_ind0&lt/name&gt
+            &ltsample&gt&ltname&gtn_ind0_a&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gtn_ind0_b&lt/name&gt&lt/sample&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gtn_ind1&lt/name&gt
+            &ltsample&gt&ltname&gtn_ind1_a&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gtn_ind1_b&lt/name&gt&lt/sample&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gtn_ind2&lt/name&gt
+            &ltsample&gt&ltname&gtn_ind2_a&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gtn_ind2_b&lt/name&gt&lt/sample&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gts_ind0&lt/name&gt
+            &ltsample&gt&ltname&gts_ind0_a&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts_ind0_b&lt/name&gt&lt/sample&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gts_ind1&lt/name&gt
+            &ltsample&gt&ltname&gts_ind1_a&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts_ind1_b&lt/name&gt&lt/sample&gt
+        &lt/individual&gt
+    &lt/individuals&gt
+&lt/lamarc-converter-cmd&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/batch_converter/chrom3_phase_cmd.xml b/doc/html/batch_converter/chrom3_phase_cmd.xml
new file mode 100644
index 0000000..e3bbb4e
--- /dev/null
+++ b/doc/html/batch_converter/chrom3_phase_cmd.xml
@@ -0,0 +1,29 @@
+<lamarc-converter-cmd>
+    <individuals>
+        <individual>
+            <name>n_ind0</name>
+            <sample><name>n_ind0_a</name></sample>
+            <sample><name>n_ind0_b</name></sample>
+        </individual>
+        <individual>
+            <name>n_ind1</name>
+            <sample><name>n_ind1_a</name></sample>
+            <sample><name>n_ind1_b</name></sample>
+        </individual>
+        <individual>
+            <name>n_ind2</name>
+            <sample><name>n_ind2_a</name></sample>
+            <sample><name>n_ind2_b</name></sample>
+        </individual>
+        <individual>
+            <name>s_ind0</name>
+            <sample><name>s_ind0_a</name></sample>
+            <sample><name>s_ind0_b</name></sample>
+        </individual>
+        <individual>
+            <name>s_ind1</name>
+            <sample><name>s_ind1_a</name></sample>
+            <sample><name>s_ind1_b</name></sample>
+        </individual>
+    </individuals>
+</lamarc-converter-cmd>
diff --git a/doc/html/batch_converter/chrom3microsat.mig b/doc/html/batch_converter/chrom3microsat.mig
new file mode 100644
index 0000000..0ea0848
--- /dev/null
+++ b/doc/html/batch_converter/chrom3microsat.mig
@@ -0,0 +1,8 @@
+ 2 1 . Example: diploid microsat data from two populations
+3    North
+n_ind0    18.18
+n_ind1    18.19
+n_ind2    18.20
+2    South
+s_ind0    19.20
+s_ind1    19.21
diff --git a/doc/html/batch_converter/chrom3snp.mig b/doc/html/batch_converter/chrom3snp.mig
new file mode 100644
index 0000000..41888fe
--- /dev/null
+++ b/doc/html/batch_converter/chrom3snp.mig
@@ -0,0 +1,14 @@
+   2 1  Example: 2 chromosomes with 2 loci each and phase information
+1
+6    North
+n_ind0_a  T
+n_ind0_b  c
+n_ind1_a  c
+n_ind1_b  c
+n_ind2_a  A
+n_ind2_b  c
+4    South
+s_ind0_a  G
+s_ind0_b  c
+s_ind1_a  c
+s_ind1_b  c
diff --git a/doc/html/batch_converter/exported-lamarc-input.html b/doc/html/batch_converter/exported-lamarc-input.html
new file mode 100644
index 0000000..5898f7b
--- /dev/null
+++ b/doc/html/batch_converter/exported-lamarc-input.html
@@ -0,0 +1,230 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+&lt?xml version="1.0" ?&gt
+&lt!--Example output for 3 chromosomes, some with multiple segments--&gt
+&ltlamarc version="2.1"&gt
+    &ltformat&gt
+        &ltconvert-output-to-eliminate-zero&gttrue&lt/convert-output-to-eliminate-zero&gt
+    &lt/format&gt
+    &ltdata&gt
+        &ltregion name="chrom1"&gt
+            &lteffective-popsize&gt1.000000&lt/effective-popsize&gt
+            &ltspacing&gt
+                &ltblock name="chrom1-segment" /&gt
+            &lt/spacing&gt
+            &ltpopulation name="North"&gt
+                &ltindividual name="n_ind0"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="n_ind0_a"&gt
+                        &ltdatablock type="DNA"&gt ccccccAcc &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind0_b"&gt
+                        &ltdatablock type="DNA"&gt TcccccAcc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind1"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="n_ind1_a"&gt
+                        &ltdatablock type="DNA"&gt ccccccTcc &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind1_b"&gt
+                        &ltdatablock type="DNA"&gt TcccccTcc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind2"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="n_ind2_a"&gt
+                        &ltdatablock type="DNA"&gt ccccccGcc &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind2_b"&gt
+                        &ltdatablock type="DNA"&gt TcccccGcc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+            &ltpopulation name="South"&gt
+                &ltindividual name="s_ind0"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s_ind0_a"&gt
+                        &ltdatablock type="DNA"&gt cTccTcccc &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="s_ind0_b"&gt
+                        &ltdatablock type="DNA"&gt ccccTcccc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s_ind1"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s_ind1_a"&gt
+                        &ltdatablock type="DNA"&gt cTccccccc &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="s_ind1_b"&gt
+                        &ltdatablock type="DNA"&gt ccccccccc &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+        &lt/region&gt
+        &ltregion name="chrom2"&gt
+            &lteffective-popsize&gt1.000000&lt/effective-popsize&gt
+            &ltspacing&gt
+                &ltblock name="chrom2-segment1"&gt
+                    &ltmap-position&gt 1000 &lt/map-position&gt
+                    &ltlength&gt 500 &lt/length&gt
+                    &ltoffset&gt -4 &lt/offset&gt
+                    &ltlocations&gt 2 88 125 173 443 &lt/locations&gt
+                &lt/block&gt
+                &ltblock name="chrom2-segment2"&gt
+                    &ltmap-position&gt 5000 &lt/map-position&gt
+                    &ltlength&gt 250 &lt/length&gt
+                    &ltoffset&gt -4 &lt/offset&gt
+                    &ltlocations&gt 13 19 35 77 102 112 204 &lt/locations&gt
+                &lt/block&gt
+            &lt/spacing&gt
+            &ltpopulation name="North"&gt
+                &ltindividual name="n_ind0"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt13 19&lt/phase&gt
+                    &ltsample name="n_ind0_a"&gt
+                        &ltdatablock type="SNP"&gt Gaaaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt Caaaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind0_b"&gt
+                        &ltdatablock type="SNP"&gt aGaaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aCaaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind1"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="n_ind1_a"&gt
+                        &ltdatablock type="SNP"&gt aaGaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaCaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind1_b"&gt
+                        &ltdatablock type="SNP"&gt aaaGa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaaCaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind2"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="n_ind2_a"&gt
+                        &ltdatablock type="SNP"&gt aaaaG &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaaaCaa &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind2_b"&gt
+                        &ltdatablock type="SNP"&gt Taaaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaaaaCa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+            &ltpopulation name="South"&gt
+                &ltindividual name="s_ind0"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s_ind0_a"&gt
+                        &ltdatablock type="SNP"&gt Taaaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt Gaaaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="s_ind0_b"&gt
+                        &ltdatablock type="SNP"&gt aTaaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aGaaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s_ind1"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s_ind1_a"&gt
+                        &ltdatablock type="SNP"&gt aaTaa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaGaaaa &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="s_ind1_b"&gt
+                        &ltdatablock type="SNP"&gt aaaTa &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt aaaGaaa &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+        &lt/region&gt
+        &ltregion name="chrom3"&gt
+            &ltspacing&gt
+                &ltblock name="chrom3-micro"&gt
+                    &ltmap-position&gt 500 &lt/map-position&gt
+                    &ltoffset&gt 1 &lt/offset&gt
+                &lt/block&gt
+                &ltblock name="chrom3-snp"&gt
+                    &ltmap-position&gt 501 &lt/map-position&gt
+                    &ltlength&gt 100 &lt/length&gt
+                    &ltoffset&gt 1 &lt/offset&gt
+                    &ltlocations&gt 23 &lt/locations&gt
+                &lt/block&gt
+            &lt/spacing&gt
+            &ltpopulation name="North"&gt
+                &ltindividual name="n_ind0"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="n_ind0_a"&gt
+                        &ltdatablock type="Microsat"&gt  18 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt T &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind0_b"&gt
+                        &ltdatablock type="Microsat"&gt  18 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind1"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="n_ind1_a"&gt
+                        &ltdatablock type="Microsat"&gt  18 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind1_b"&gt
+                        &ltdatablock type="Microsat"&gt  19 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="n_ind2"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="n_ind2_a"&gt
+                        &ltdatablock type="Microsat"&gt  18 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt A &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="n_ind2_b"&gt
+                        &ltdatablock type="Microsat"&gt  20 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+            &ltpopulation name="South"&gt
+                &ltindividual name="s_ind0"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s_ind0_a"&gt
+                        &ltdatablock type="Microsat"&gt  19 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt G &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="s_ind0_b"&gt
+                        &ltdatablock type="Microsat"&gt  20 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s_ind1"&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s_ind1_a"&gt
+                        &ltdatablock type="Microsat"&gt  19 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                    &ltsample name="s_ind1_b"&gt
+                        &ltdatablock type="Microsat"&gt  21 &lt/datablock&gt
+                        &ltdatablock type="SNP"&gt c &lt/datablock&gt
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+        &lt/region&gt
+    &lt/data&gt
+&lt/lamarc&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/batch_converter/exported-lamarc-input.xml b/doc/html/batch_converter/exported-lamarc-input.xml
new file mode 100644
index 0000000..f87c33a
--- /dev/null
+++ b/doc/html/batch_converter/exported-lamarc-input.xml
@@ -0,0 +1,223 @@
+<?xml version="1.0" ?>
+<!--Example output for 3 chromosomes, some with multiple segments-->
+<lamarc version="2.1">
+    <format>
+        <convert-output-to-eliminate-zero>true</convert-output-to-eliminate-zero>
+    </format>
+    <data>
+        <region name="chrom1">
+            <effective-popsize>1.000000</effective-popsize>
+            <spacing>
+                <block name="chrom1-segment" />
+            </spacing>
+            <population name="North">
+                <individual name="n_ind0">
+                    <phase type="unknown"></phase>
+                    <sample name="n_ind0_a">
+                        <datablock type="DNA"> ccccccAcc </datablock>
+                    </sample>
+                    <sample name="n_ind0_b">
+                        <datablock type="DNA"> TcccccAcc </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind1">
+                    <phase type="unknown"></phase>
+                    <sample name="n_ind1_a">
+                        <datablock type="DNA"> ccccccTcc </datablock>
+                    </sample>
+                    <sample name="n_ind1_b">
+                        <datablock type="DNA"> TcccccTcc </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind2">
+                    <phase type="unknown"></phase>
+                    <sample name="n_ind2_a">
+                        <datablock type="DNA"> ccccccGcc </datablock>
+                    </sample>
+                    <sample name="n_ind2_b">
+                        <datablock type="DNA"> TcccccGcc </datablock>
+                    </sample>
+                </individual>
+            </population>
+            <population name="South">
+                <individual name="s_ind0">
+                    <phase type="unknown"></phase>
+                    <sample name="s_ind0_a">
+                        <datablock type="DNA"> cTccTcccc </datablock>
+                    </sample>
+                    <sample name="s_ind0_b">
+                        <datablock type="DNA"> ccccTcccc </datablock>
+                    </sample>
+                </individual>
+                <individual name="s_ind1">
+                    <phase type="unknown"></phase>
+                    <sample name="s_ind1_a">
+                        <datablock type="DNA"> cTccccccc </datablock>
+                    </sample>
+                    <sample name="s_ind1_b">
+                        <datablock type="DNA"> ccccccccc </datablock>
+                    </sample>
+                </individual>
+            </population>
+        </region>
+        <region name="chrom2">
+            <effective-popsize>1.000000</effective-popsize>
+            <spacing>
+                <block name="chrom2-segment1">
+                    <map-position> 1000 </map-position>
+                    <length> 500 </length>
+                    <offset> -4 </offset>
+                    <locations> 2 88 125 173 443 </locations>
+                </block>
+                <block name="chrom2-segment2">
+                    <map-position> 5000 </map-position>
+                    <length> 250 </length>
+                    <offset> -4 </offset>
+                    <locations> 13 19 35 77 102 112 204 </locations>
+                </block>
+            </spacing>
+            <population name="North">
+                <individual name="n_ind0">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown">13 19</phase>
+                    <sample name="n_ind0_a">
+                        <datablock type="SNP"> Gaaaa </datablock>
+                        <datablock type="SNP"> Caaaaaa </datablock>
+                    </sample>
+                    <sample name="n_ind0_b">
+                        <datablock type="SNP"> aGaaa </datablock>
+                        <datablock type="SNP"> aCaaaaa </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind1">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="n_ind1_a">
+                        <datablock type="SNP"> aaGaa </datablock>
+                        <datablock type="SNP"> aaCaaaa </datablock>
+                    </sample>
+                    <sample name="n_ind1_b">
+                        <datablock type="SNP"> aaaGa </datablock>
+                        <datablock type="SNP"> aaaCaaa </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind2">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="n_ind2_a">
+                        <datablock type="SNP"> aaaaG </datablock>
+                        <datablock type="SNP"> aaaaCaa </datablock>
+                    </sample>
+                    <sample name="n_ind2_b">
+                        <datablock type="SNP"> Taaaa </datablock>
+                        <datablock type="SNP"> aaaaaCa </datablock>
+                    </sample>
+                </individual>
+            </population>
+            <population name="South">
+                <individual name="s_ind0">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="s_ind0_a">
+                        <datablock type="SNP"> Taaaa </datablock>
+                        <datablock type="SNP"> Gaaaaaa </datablock>
+                    </sample>
+                    <sample name="s_ind0_b">
+                        <datablock type="SNP"> aTaaa </datablock>
+                        <datablock type="SNP"> aGaaaaa </datablock>
+                    </sample>
+                </individual>
+                <individual name="s_ind1">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="s_ind1_a">
+                        <datablock type="SNP"> aaTaa </datablock>
+                        <datablock type="SNP"> aaGaaaa </datablock>
+                    </sample>
+                    <sample name="s_ind1_b">
+                        <datablock type="SNP"> aaaTa </datablock>
+                        <datablock type="SNP"> aaaGaaa </datablock>
+                    </sample>
+                </individual>
+            </population>
+        </region>
+        <region name="chrom3">
+            <spacing>
+                <block name="chrom3-micro">
+                    <map-position> 500 </map-position>
+                    <offset> 1 </offset>
+                </block>
+                <block name="chrom3-snp">
+                    <map-position> 501 </map-position>
+                    <length> 100 </length>
+                    <offset> 1 </offset>
+                    <locations> 23 </locations>
+                </block>
+            </spacing>
+            <population name="North">
+                <individual name="n_ind0">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="n_ind0_a">
+                        <datablock type="Microsat">  18 </datablock>
+                        <datablock type="SNP"> T </datablock>
+                    </sample>
+                    <sample name="n_ind0_b">
+                        <datablock type="Microsat">  18 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind1">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="n_ind1_a">
+                        <datablock type="Microsat">  18 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                    <sample name="n_ind1_b">
+                        <datablock type="Microsat">  19 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                </individual>
+                <individual name="n_ind2">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="n_ind2_a">
+                        <datablock type="Microsat">  18 </datablock>
+                        <datablock type="SNP"> A </datablock>
+                    </sample>
+                    <sample name="n_ind2_b">
+                        <datablock type="Microsat">  20 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                </individual>
+            </population>
+            <population name="South">
+                <individual name="s_ind0">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="s_ind0_a">
+                        <datablock type="Microsat">  19 </datablock>
+                        <datablock type="SNP"> G </datablock>
+                    </sample>
+                    <sample name="s_ind0_b">
+                        <datablock type="Microsat">  20 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                </individual>
+                <individual name="s_ind1">
+                    <phase type="unknown"></phase>
+                    <phase type="unknown"></phase>
+                    <sample name="s_ind1_a">
+                        <datablock type="Microsat">  19 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                    <sample name="s_ind1_b">
+                        <datablock type="Microsat">  21 </datablock>
+                        <datablock type="SNP"> c </datablock>
+                    </sample>
+                </individual>
+            </population>
+        </region>
+    </data>
+</lamarc>
diff --git a/doc/html/batch_converter/images/CombinedPanels.png b/doc/html/batch_converter/images/CombinedPanels.png
new file mode 100644
index 0000000..1bb66c7
Binary files /dev/null and b/doc/html/batch_converter/images/CombinedPanels.png differ
diff --git a/doc/html/batch_converter/images/DataFilesTab.png b/doc/html/batch_converter/images/DataFilesTab.png
new file mode 100644
index 0000000..dada782
Binary files /dev/null and b/doc/html/batch_converter/images/DataFilesTab.png differ
diff --git a/doc/html/batch_converter/images/DataPartitionsDivTab.png b/doc/html/batch_converter/images/DataPartitionsDivTab.png
new file mode 100644
index 0000000..237a7b9
Binary files /dev/null and b/doc/html/batch_converter/images/DataPartitionsDivTab.png differ
diff --git a/doc/html/batch_converter/images/DataPartitionsMigTab.png b/doc/html/batch_converter/images/DataPartitionsMigTab.png
new file mode 100644
index 0000000..f42e08a
Binary files /dev/null and b/doc/html/batch_converter/images/DataPartitionsMigTab.png differ
diff --git a/doc/html/batch_converter/images/DebugLogTab.png b/doc/html/batch_converter/images/DebugLogTab.png
new file mode 100644
index 0000000..c07fc07
Binary files /dev/null and b/doc/html/batch_converter/images/DebugLogTab.png differ
diff --git a/doc/html/batch_converter/images/DivMigMatrixTab.png b/doc/html/batch_converter/images/DivMigMatrixTab.png
new file mode 100644
index 0000000..1693188
Binary files /dev/null and b/doc/html/batch_converter/images/DivMigMatrixTab.png differ
diff --git a/doc/html/batch_converter/images/DivergeOff.png b/doc/html/batch_converter/images/DivergeOff.png
new file mode 100644
index 0000000..5bc1b24
Binary files /dev/null and b/doc/html/batch_converter/images/DivergeOff.png differ
diff --git a/doc/html/batch_converter/images/EditMigration.png b/doc/html/batch_converter/images/EditMigration.png
new file mode 100644
index 0000000..5e42667
Binary files /dev/null and b/doc/html/batch_converter/images/EditMigration.png differ
diff --git a/doc/html/batch_converter/images/EditPanelCorrection.png b/doc/html/batch_converter/images/EditPanelCorrection.png
new file mode 100644
index 0000000..50274de
Binary files /dev/null and b/doc/html/batch_converter/images/EditPanelCorrection.png differ
diff --git a/doc/html/batch_converter/images/FirstParent.png b/doc/html/batch_converter/images/FirstParent.png
new file mode 100644
index 0000000..499fffe
Binary files /dev/null and b/doc/html/batch_converter/images/FirstParent.png differ
diff --git a/doc/html/batch_converter/images/FirstParent2Children.png b/doc/html/batch_converter/images/FirstParent2Children.png
new file mode 100644
index 0000000..5ae2f9e
Binary files /dev/null and b/doc/html/batch_converter/images/FirstParent2Children.png differ
diff --git a/doc/html/batch_converter/images/FullParentsImage.png b/doc/html/batch_converter/images/FullParentsImage.png
new file mode 100644
index 0000000..f81e379
Binary files /dev/null and b/doc/html/batch_converter/images/FullParentsImage.png differ
diff --git a/doc/html/batch_converter/images/InterumParentImage.png b/doc/html/batch_converter/images/InterumParentImage.png
new file mode 100644
index 0000000..3489722
Binary files /dev/null and b/doc/html/batch_converter/images/InterumParentImage.png differ
diff --git a/doc/html/batch_converter/images/MigrationOnlyMatrixTab.png b/doc/html/batch_converter/images/MigrationOnlyMatrixTab.png
new file mode 100644
index 0000000..5220177
Binary files /dev/null and b/doc/html/batch_converter/images/MigrationOnlyMatrixTab.png differ
diff --git a/doc/html/batch_converter/images/PanelCorrectionOn.png b/doc/html/batch_converter/images/PanelCorrectionOn.png
new file mode 100644
index 0000000..2fc004c
Binary files /dev/null and b/doc/html/batch_converter/images/PanelCorrectionOn.png differ
diff --git a/doc/html/batch_converter/images/SecondParent.png b/doc/html/batch_converter/images/SecondParent.png
new file mode 100644
index 0000000..b3a3ba8
Binary files /dev/null and b/doc/html/batch_converter/images/SecondParent.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom1_export_file_selection.png b/doc/html/batch_converter/images/lam_conv_chrom1_export_file_selection.png
new file mode 100644
index 0000000..ee6477f
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom1_export_file_selection.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom1_export_warn_1.png b/doc/html/batch_converter/images/lam_conv_chrom1_export_warn_1.png
new file mode 100644
index 0000000..62e8c65
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom1_export_warn_1.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom1_input.png b/doc/html/batch_converter/images/lam_conv_chrom1_input.png
new file mode 100644
index 0000000..d95f92a
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom1_input.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom1_segment_panel.png b/doc/html/batch_converter/images/lam_conv_chrom1_segment_panel.png
new file mode 100644
index 0000000..e40d005
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom1_segment_panel.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_first_position.png b/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_first_position.png
new file mode 100644
index 0000000..ce67192
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_first_position.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_length.png b/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_length.png
new file mode 100644
index 0000000..fe27c96
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_length.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_locations.png b/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_locations.png
new file mode 100644
index 0000000..67dd9fc
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_locations.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_map.png b/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_map.png
new file mode 100644
index 0000000..7e35113
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom2_export_warn_map.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom2_input_snp.png b/doc/html/batch_converter/images/lam_conv_chrom2_input_snp.png
new file mode 100644
index 0000000..22a8e26
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom2_input_snp.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom2_segment1.png b/doc/html/batch_converter/images/lam_conv_chrom2_segment1.png
new file mode 100644
index 0000000..a5f0564
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom2_segment1.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom2_segment2_done_locations.png b/doc/html/batch_converter/images/lam_conv_chrom2_segment2_done_locations.png
new file mode 100644
index 0000000..d1bc217
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom2_segment2_done_locations.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom3_error_map_position.png b/doc/html/batch_converter/images/lam_conv_chrom3_error_map_position.png
new file mode 100644
index 0000000..6df765b
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom3_error_map_position.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom3_error_phase_file_needed.png b/doc/html/batch_converter/images/lam_conv_chrom3_error_phase_file_needed.png
new file mode 100644
index 0000000..e9d4d60
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom3_error_phase_file_needed.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom3_input.png b/doc/html/batch_converter/images/lam_conv_chrom3_input.png
new file mode 100644
index 0000000..216284b
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom3_input.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom3_region_panel.png b/doc/html/batch_converter/images/lam_conv_chrom3_region_panel.png
new file mode 100644
index 0000000..db67c09
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom3_region_panel.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom3_region_table.png b/doc/html/batch_converter/images/lam_conv_chrom3_region_table.png
new file mode 100644
index 0000000..446d5ae
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom3_region_table.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_chrom3_segment_snp.png b/doc/html/batch_converter/images/lam_conv_chrom3_segment_snp.png
new file mode 100644
index 0000000..2bf155e
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_chrom3_segment_snp.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_export_file_mac_expanded.png b/doc/html/batch_converter/images/lam_conv_export_file_mac_expanded.png
new file mode 100644
index 0000000..bb2c31d
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_export_file_mac_expanded.png differ
diff --git a/doc/html/batch_converter/images/lam_conv_export_file_mac_minimal.png b/doc/html/batch_converter/images/lam_conv_export_file_mac_minimal.png
new file mode 100644
index 0000000..8eac5ff
Binary files /dev/null and b/doc/html/batch_converter/images/lam_conv_export_file_mac_minimal.png differ
diff --git a/doc/html/batch_converter/sample-conv-cmd.html b/doc/html/batch_converter/sample-conv-cmd.html
new file mode 100644
index 0000000..9316dbb
--- /dev/null
+++ b/doc/html/batch_converter/sample-conv-cmd.html
@@ -0,0 +1,351 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+
+&lt!--
+    Place this command file and all input files in the same directory
+    along with executable lam_conv and execute like this to
+    automatically convert
+
+        ./lam_conv -b -c lamarc-converter-commands.xml
+
+    Or like this to explore in the GUI
+
+        ./lam_conv -c lamarc-converter-commands.xml
+--&gt
+
+&ltlamarc-converter-cmd&gt
+
+    &lt!--
+         You can specify the lamarc input file you will produce
+         here. If not present, it defaults to infile.xml
+    --&gt
+    &ltoutfile&gtlamarc-input.xml&lt/outfile&gt
+
+    &lt!--
+        The comment below will be at the top of the outfile produced.
+        This is a useful way to distinguish different lamarc infiles
+    --&gt
+    &ltlamarc-header-comment&gtExample output for 3 chromosomes, some with multiple segments&lt/lamarc-header-comment&gt
+
+    &lt!-- ********************************************************* --&gt
+    &lt!--
+        The &ltregions&gt section is where you specify both the type of
+        data you have and its relative location (and therefore
+        likeliness to be co-inherited).
+    --&gt
+    &ltregions&gt
+
+        &lt!--
+            Each region contains a specification of data types and
+            relative locations of data which are "close enough" to
+            each other to be modeled as co-inherited.
+
+            As a rule of thumb, data samples should be in the same
+            region if:
+                (a) they are within 1/1000 of a centimorgan, or
+                (b) they are within 1 centimorgan and you plan
+                    to estimate recombination.
+        --&gt
+        &ltregion&gt
+
+            &lt!--
+                all region, segment, and population names must be unique
+            --&gt
+            &ltname&gtchrom1&lt/name&gt
+
+            &lt!--
+                 The effective population size defaults to 1. You can
+                 probably ignore it unless you're working with
+                 sex chromosomes or mixing mtDNA with chromosomal data
+            --&gt
+            &lteffective-popsize&gt1&lt/effective-popsize&gt
+
+            &lt!--
+                Within a region, different segments will occur where
+                    (a) data types are different,
+                    (b) mutation rates are different, or
+                    (c) the samples are separated by unsampled stretches
+                        of the genome
+            --&gt
+            &ltsegments&gt
+                &lt!--
+                    The region for chrom 1 is not terribly interesting.
+                    It contains only a single stretch of DNA data,
+                    the easiest and simplest to model in lamarc.
+
+                    Allowed datatypes are "snp" "dna" "microsat" and "kallele"
+                --&gt
+                &ltsegment datatype="dna"&gt
+                    &ltname&gtchrom1-segment&lt/name&gt
+
+                    &lt!--
+                        For DNA data, the number of markers is the number
+                        of sites in the data.
+                    --&gt
+                    &ltmarkers&gt9&lt/markers&gt
+                &lt/segment&gt
+            &lt/segments&gt
+        &lt/region&gt
+
+        &ltregion&gt
+            &lt!--
+                Region "chrom2" models two sets of snp data on the
+                same chromosome, separated by other unknown data.
+            --&gt
+
+            &ltname&gtchrom2&lt/name&gt
+            &lteffective-popsize&gt1&lt/effective-popsize&gt
+
+            &ltsegments&gt
+
+                &lt!--
+                    A SNP segment requires that we provide more information
+                    in order to model it correctly.
+                --&gt
+                &ltsegment datatype="snp"&gt
+
+                    &ltname&gtchrom2-segment1&lt/name&gt
+
+                    &lt!--
+                        For SNP data, the number of markers is the number
+                        of SNP sites in the data.
+                    --&gt
+                    &ltmarkers&gt5&lt/markers&gt
+
+                    &lt!--
+                        Using a region-wide scale, the position of this 
+                        segment within region chrom2. Lamarc needs this
+                        information to model recombination events occurring
+                        between segments.
+                    --&gt
+                    &ltmap-position&gt1000&lt/map-position&gt
+
+                    &lt!--
+                        where you started scanning for SNPs, assuming
+                        "1" in segment co-ordinates is identical to 
+                        &ltmap-position&gt in region co-ordinates
+                    --&gt
+                    &ltfirst-position-scanned&gt-5&lt/first-position-scanned&gt
+
+                    &lt!--
+                        total data length (in nucleotides) scanned, starting
+                        at &ltfirst-position-scanned&gt
+                    --&gt
+                    &ltlength&gt500&lt/length&gt
+
+                    &lt!-- 
+                        relative locations of snp markers using
+                        segment coordinates.
+                    --&gt
+                    &ltlocations&gt 2 88 125 173 443 &lt/locations&gt
+
+                &lt/segment&gt
+
+                &ltsegment datatype="snp"&gt
+                    &ltname&gtchrom2-segment2&lt/name&gt
+                    &ltmarkers&gt7&lt/markers&gt
+                    &ltmap-position&gt5000&lt/map-position&gt
+                    &ltfirst-position-scanned&gt-5&lt/first-position-scanned&gt
+                    &ltlength&gt250&lt/length&gt
+                    &ltlocations&gt 13 19 35 77 102 112 204&lt/locations&gt
+                &lt/segment&gt
+            &lt/segments&gt
+        &lt/region&gt
+
+        &ltregion&gt
+            &lt!--
+                Here we have a microsat next to a SNP.  The SNP was found
+                in a 100-base region at the 23rd site after the microsat
+            --&gt
+
+            &ltname&gtchrom3&lt/name&gt
+            &ltsegments&gt
+                &ltsegment datatype="microsat"&gt
+                    &ltname&gtchrom3-micro&lt/name&gt
+                    &ltmarkers&gt1&lt/markers&gt
+                    &ltmap-position&gt500&lt/map-position&gt
+                    &ltfirst-position-scanned&gt1&lt/first-position-scanned&gt
+                &lt/segment&gt
+                &ltsegment datatype="snp"&gt
+                    &ltname&gtchrom3-snp&lt/name&gt
+                    &ltmarkers&gt1&lt/markers&gt
+                    &ltmap-position&gt501&lt/map-position&gt
+                    &ltlength&gt100&lt/length&gt
+                    &ltlocations&gt 23 &lt/locations&gt
+                    &ltfirst-position-scanned&gt1&lt/first-position-scanned&gt
+                &lt/segment&gt
+            &lt/segments&gt
+        &lt/region&gt
+    &lt/regions&gt
+
+    &lt!-- ********************************************************* --&gt
+    &lt!--
+        If you want to make sure your populations have nice names,
+        here is the place to do it.
+    --&gt
+    &ltpopulations&gt
+        &ltpopulation&gtNorth&lt/population&gt
+        &ltpopulation&gtSouth&lt/population&gt
+    &lt/populations&gt
+
+    &lt!-- ********************************************************* --&gt
+    &lt!--
+        You may need to include the &ltindividuals&gt tag if you:
+            (a) have samples which include unresolved haplotypes, or
+            (b) you are combining both allelic and nucleotide 
+                segments in a single region, or
+            (c) you are doing trait mapping
+    --&gt
+
+    &ltindividuals&gt
+        &ltindividual&gt
+            &lt!-- 
+                if you have specified diploid (or higher ploidy) data
+                in a migrate microsat or kallele file, your individual
+                names are probably the sequence name labels from that file
+            --&gt
+            &ltname&gtn_ind0&lt/name&gt
+
+            &lt!-- 
+                if you have dna or snp data from a phylip or migrate
+                file, your sample names are probably the sequence
+                name labels from that file
+            --&gt
+            &ltsample&gt&ltname&gtn_ind0_a&lt/name&gt&lt/sample&gt
+
+            &ltsample&gt&ltname&gtn_ind0_b&lt/name&gt&lt/sample&gt
+
+            &lt!--
+                use the &ltphase&gt tag to indicate when you don't know
+                which haploid (or greater ploidy) sample has which
+                marker. The scale here is the same as the 'locations'
+                tag, i.e. relative to the numbering system in the
+                segment in question.  The first valid position is the
+                first-position-scanned value, and positions can be up to
+                the length of the segment higher than that.
+                
+                The specification below indicates that for
+                this individual, we're not sure which of the two
+                haplotypes we should assign the first and second
+                data sample values to.
+            --&gt
+            &ltphase&gt
+                &ltsegment-name&gtchrom2-segment2&lt/segment-name&gt
+                &ltunresolved-markers&gt 13 19 &lt/unresolved-markers&gt
+            &lt/phase&gt
+
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gtn_ind1&lt/name&gt
+            &ltsample&gt&ltname&gtn_ind1_a&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gtn_ind1_b&lt/name&gt&lt/sample&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gtn_ind2&lt/name&gt
+            &ltsample&gt&ltname&gtn_ind2_a&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gtn_ind2_b&lt/name&gt&lt/sample&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gts_ind0&lt/name&gt
+            &ltsample&gt&ltname&gts_ind0_a&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts_ind0_b&lt/name&gt&lt/sample&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gts_ind1&lt/name&gt
+            &ltsample&gt&ltname&gts_ind1_a&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts_ind1_b&lt/name&gt&lt/sample&gt
+        &lt/individual&gt
+    &lt/individuals&gt
+
+    &lt!-- ********************************************************* --&gt
+    &lt!--
+        Use the &ltinfiles&gt tag to tell the converter how your data
+        corresponds to the &ltregion&gt and &ltsegment&gt elements
+    --&gt
+    &ltinfiles&gt
+
+        &lt!--
+            All attributes given for the &ltinfile&gt tag are required.
+            The legal values are given below
+
+            format              : "migrate", "phylip"
+            datatype            : "dna", "snp", "kallele", "microsat"
+            sequence-alignment  : "sequential" or "interleaved"
+        --&gt
+        &ltinfile format="migrate" datatype="dna" sequence-alignment="sequential"&gt
+
+            &lt!--
+                File name is relative to the directory the converter
+                was invoked from
+            --&gt
+            &ltname&gtchrom1.mig&lt/name&gt
+
+            &lt!--
+                The &ltpopulation-matching&gt tag tells the converter how
+                to assign data samples to populations.
+
+                legal types are:
+                    "single"    : assign all data to the single population
+                                  whose name is enclosed within this tag
+                    "byList"    : a list of population names appears, enclosed
+                                  in &ltpopulation-name&gt tags. Assign populations
+                                  in the file to the named populations in order
+                    "byName"    : use the name in the comment of the infile
+            --&gt
+            &ltpopulation-matching type="byName"/&gt
+
+            &lt!--
+                The &ltsegments-matching&gt tag tells the converter how
+                to assign data samples to segments.
+
+                legal types are:
+                    "single"    : assign all data to the single segment
+                                  whose name is enclosed within this tag
+                    "byList"    : a list of segment names appears, enclosed
+                                  in &ltsegment-name&gt tags. Assign segments
+                                  in the file to the named segments in order
+            --&gt
+            &ltsegments-matching type="byList"&gt
+                &lt!-- assigned to segments from input file in the order given here --&gt
+                &ltsegment-name&gtchrom1-segment&lt/segment-name&gt
+            &lt/segments-matching&gt
+
+        &lt/infile&gt
+
+        &ltinfile format="migrate" datatype="snp" sequence-alignment="sequential"&gt
+            &ltname&gtchrom2.mig&lt/name&gt
+            &ltpopulation-matching type="byName"/&gt
+            &ltsegments-matching type="byList"&gt
+                &ltsegment-name&gtchrom2-segment1&lt/segment-name&gt
+                &ltsegment-name&gtchrom2-segment2&lt/segment-name&gt
+            &lt/segments-matching&gt
+        &lt/infile&gt
+
+        &lt!--
+            note that while both segments in chrom2 could be specified
+            in a single file, the segments of chrom3 are in different
+            files since they have different data types.
+        --&gt
+        &ltinfile format="migrate" datatype="snp" sequence-alignment="sequential"&gt
+            &ltname&gtchrom3snp.mig&lt/name&gt
+            &ltpopulation-matching type="byName"/&gt
+            &ltsegments-matching type="byList"&gt
+                &ltsegment-name&gtchrom3-snp&lt/segment-name&gt
+            &lt/segments-matching&gt
+        &lt/infile&gt
+
+        &ltinfile format="migrate" datatype="microsat" sequence-alignment="sequential"&gt
+            &ltname&gtchrom3microsat.mig&lt/name&gt
+            &ltpopulation-matching type="byName"/&gt
+            &ltsegments-matching type="byList"&gt
+                &ltsegment-name&gtchrom3-micro&lt/segment-name&gt
+            &lt/segments-matching&gt
+        &lt/infile&gt
+    &lt/infiles&gt
+
+&lt/lamarc-converter-cmd&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/batch_converter/sample-conv-cmd.xml b/doc/html/batch_converter/sample-conv-cmd.xml
new file mode 100644
index 0000000..e11fb2a
--- /dev/null
+++ b/doc/html/batch_converter/sample-conv-cmd.xml
@@ -0,0 +1,343 @@
+<!--
+    Place this command file and all input files in the same directory
+    along with executable lam_conv and execute like this to
+    automatically convert
+
+        ./lam_conv -b -c lamarc-converter-commands.xml
+
+    Or like this to explore in the GUI
+
+        ./lam_conv -c lamarc-converter-commands.xml
+-->
+
+<lamarc-converter-cmd>
+
+    <!--
+         You can specify the lamarc input file you will produce
+         here. If not present, it defaults to infile.xml
+    -->
+    <outfile>lamarc-input.xml</outfile>
+
+    <!--
+        The comment below will be at the top of the outfile produced.
+        This is a useful way to distinguish different lamarc infiles
+    -->
+    <lamarc-header-comment>Example output for 3 chromosomes, some with multiple segments</lamarc-header-comment>
+
+    <!-- ********************************************************* -->
+    <!--
+        The <regions> section is where you specify both the type of
+        data you have and its relative location (and therefore
+        likeliness to be co-inherited).
+    -->
+    <regions>
+
+        <!--
+            Each region contains a specification of data types and
+            relative locations of data which are "close enough" to
+            each other to be modeled as co-inherited.
+
+            As a rule of thumb, data samples should be in the same
+            region if:
+                (a) they are within 1/1000 of a centimorgan, or
+                (b) they are within 1 centimorgan and you plan
+                    to estimate recombination.
+        -->
+        <region>
+
+            <!--
+                all region, segment, and population names must be unique
+            -->
+            <name>chrom1</name>
+
+            <!--
+                 The effective population size defaults to 1. You can
+                 probably ignore it unless you're working with
+                 sex chromosomes or mixing mtDNA with chromosomal data
+            -->
+            <effective-popsize>1</effective-popsize>
+
+            <!--
+                Within a region, different segments will occur where
+                    (a) data types are different,
+                    (b) mutation rates are different, or
+                    (c) the samples are separated by unsampled stretches
+                        of the genome
+            -->
+            <segments>
+                <!--
+                    The region for chrom 1 is not terribly interesting.
+                    It contains only a single stretch of DNA data,
+                    the easiest and simplest to model in lamarc.
+
+                    Allowed datatypes are "snp" "dna" "microsat" and "kallele"
+                -->
+                <segment datatype="dna">
+                    <name>chrom1-segment</name>
+
+                    <!--
+                        For DNA data, the number of markers is the number
+                        of sites in the data.
+                    -->
+                    <markers>9</markers>
+                </segment>
+            </segments>
+        </region>
+
+        <region>
+            <!--
+                Region "chrom2" models two sets of snp data on the
+                same chromosome, separated by other unknown data.
+            -->
+
+            <name>chrom2</name>
+            <effective-popsize>1</effective-popsize>
+
+            <segments>
+
+                <!--
+                    A SNP segment requires that we provide more information
+                    in order to model it correctly.
+                -->
+                <segment datatype="snp">
+
+                    <name>chrom2-segment1</name>
+
+                    <!--
+                        For SNP data, the number of markers is the number
+                        of SNP sites in the data.
+                    -->
+                    <markers>5</markers>
+
+                    <!--
+                        Using a region-wide scale, the position of this 
+                        segment within region chrom2. Lamarc needs this
+                        information to model recombination events occurring
+                        between segments.
+                    -->
+                    <map-position>1000</map-position>
+
+                    <!--
+                        where you started scanning for SNPs, assuming
+                        "1" in segment co-ordinates is identical to 
+                        <map-position> in region co-ordinates
+                    -->
+                    <first-position-scanned>-5</first-position-scanned>
+
+                    <!--
+                        total data length (in nucleotides) scanned, starting
+                        at <first-position-scanned>
+                    -->
+                    <length>500</length>
+
+                    <!-- 
+                        relative locations of snp markers using
+                        segment coordinates.
+                    -->
+                    <locations> 2 88 125 173 443 </locations>
+
+                </segment>
+
+                <segment datatype="snp">
+                    <name>chrom2-segment2</name>
+                    <markers>7</markers>
+                    <map-position>5000</map-position>
+                    <first-position-scanned>-5</first-position-scanned>
+                    <length>250</length>
+                    <locations> 13 19 35 77 102 112 204</locations>
+                </segment>
+            </segments>
+        </region>
+
+        <region>
+            <!--
+                Here we have a microsat next to a SNP.  The SNP was found
+                in a 100-base region at the 23rd site after the microsat
+            -->
+
+            <name>chrom3</name>
+            <segments>
+                <segment datatype="microsat">
+                    <name>chrom3-micro</name>
+                    <markers>1</markers>
+                    <map-position>500</map-position>
+                    <first-position-scanned>1</first-position-scanned>
+                </segment>
+                <segment datatype="snp">
+                    <name>chrom3-snp</name>
+                    <markers>1</markers>
+                    <map-position>501</map-position>
+                    <length>100</length>
+                    <locations> 23 </locations>
+                    <first-position-scanned>1</first-position-scanned>
+                </segment>
+            </segments>
+        </region>
+    </regions>
+
+    <!-- ********************************************************* -->
+    <!--
+        If you want to make sure your populations have nice names,
+        here is the place to do it.
+    -->
+    <populations>
+        <population>North</population>
+        <population>South</population>
+    </populations>
+
+    <!-- ********************************************************* -->
+    <!--
+        You may need to include the <individuals> tag if you:
+            (a) have samples which include unresolved haplotypes, or
+            (b) you are combining both allelic and nucleotide 
+                segments in a single region, or
+            (c) you are doing trait mapping
+    -->
+
+    <individuals>
+        <individual>
+            <!-- 
+                if you have specified diploid (or higher ploidy) data
+                in a migrate microsat or kallele file, your individual
+                names are probably the sequence name labels from that file
+            -->
+            <name>n_ind0</name>
+
+            <!-- 
+                if you have dna or snp data from a phylip or migrate
+                file, your sample names are probably the sequence
+                name labels from that file
+            -->
+            <sample><name>n_ind0_a</name></sample>
+
+            <sample><name>n_ind0_b</name></sample>
+
+            <!--
+                use the <phase> tag to indicate when you don't know
+                which haploid (or greater ploidy) sample has which
+                marker. The scale here is the same as the 'locations'
+                tag, i.e. relative to the numbering system in the
+                segment in question.  The first valid position is the
+                first-position-scanned value, and positions can be up to
+                the length of the segment higher than that.
+                
+                The specification below indicates that for
+                this individual, we're not sure which of the two
+                haplotypes we should assign the first and second
+                data sample values to.
+            -->
+            <phase>
+                <segment-name>chrom2-segment2</segment-name>
+                <unresolved-markers> 13 19 </unresolved-markers>
+            </phase>
+
+        </individual>
+        <individual>
+            <name>n_ind1</name>
+            <sample><name>n_ind1_a</name></sample>
+            <sample><name>n_ind1_b</name></sample>
+        </individual>
+        <individual>
+            <name>n_ind2</name>
+            <sample><name>n_ind2_a</name></sample>
+            <sample><name>n_ind2_b</name></sample>
+        </individual>
+        <individual>
+            <name>s_ind0</name>
+            <sample><name>s_ind0_a</name></sample>
+            <sample><name>s_ind0_b</name></sample>
+        </individual>
+        <individual>
+            <name>s_ind1</name>
+            <sample><name>s_ind1_a</name></sample>
+            <sample><name>s_ind1_b</name></sample>
+        </individual>
+    </individuals>
+
+    <!-- ********************************************************* -->
+    <!--
+        Use the <infiles> tag to tell the converter how your data
+        corresponds to the <region> and <segment> elements
+    -->
+    <infiles>
+
+        <!--
+            All attributes given for the <infile> tag are required.
+            The legal values are given below
+
+            format              : "migrate", "phylip"
+            datatype            : "dna", "snp", "kallele", "microsat"
+            sequence-alignment  : "sequential" or "interleaved"
+        -->
+        <infile format="migrate" datatype="dna" sequence-alignment="sequential">
+
+            <!--
+                File name is relative to the directory the converter
+                was invoked from
+            -->
+            <name>chrom1.mig</name>
+
+            <!--
+                The <population-matching> tag tells the converter how
+                to assign data samples to populations.
+
+                legal types are:
+                    "single"    : assign all data to the single population
+                                  whose name is enclosed within this tag
+                    "byList"    : a list of population names appears, enclosed
+                                  in <population-name> tags. Assign populations
+                                  in the file to the named populations in order
+                    "byName"    : use the name in the comment of the infile
+            -->
+            <population-matching type="byName"/>
+
+            <!--
+                The <segments-matching> tag tells the converter how
+                to assign data samples to segments.
+
+                legal types are:
+                    "single"    : assign all data to the single segment
+                                  whose name is enclosed within this tag
+                    "byList"    : a list of segment names appears, enclosed
+                                  in <segment-name> tags. Assign segments
+                                  in the file to the named segments in order
+            -->
+            <segments-matching type="byList">
+                <!-- assigned to segments from input file in the order given here -->
+                <segment-name>chrom1-segment</segment-name>
+            </segments-matching>
+
+        </infile>
+
+        <infile format="migrate" datatype="snp" sequence-alignment="sequential">
+            <name>chrom2.mig</name>
+            <population-matching type="byName"/>
+            <segments-matching type="byList">
+                <segment-name>chrom2-segment1</segment-name>
+                <segment-name>chrom2-segment2</segment-name>
+            </segments-matching>
+        </infile>
+
+        <!--
+            note that while both segments in chrom2 could be specified
+            in a single file, the segments of chrom3 are in different
+            files since they have different data types.
+        -->
+        <infile format="migrate" datatype="snp" sequence-alignment="sequential">
+            <name>chrom3snp.mig</name>
+            <population-matching type="byName"/>
+            <segments-matching type="byList">
+                <segment-name>chrom3-snp</segment-name>
+            </segments-matching>
+        </infile>
+
+        <infile format="migrate" datatype="microsat" sequence-alignment="sequential">
+            <name>chrom3microsat.mig</name>
+            <population-matching type="byName"/>
+            <segments-matching type="byList">
+                <segment-name>chrom3-micro</segment-name>
+            </segments-matching>
+        </infile>
+    </infiles>
+
+</lamarc-converter-cmd>
diff --git a/doc/html/bayes.html b/doc/html/bayes.html
new file mode 100644
index 0000000..85e62d7
--- /dev/null
+++ b/doc/html/bayes.html
@@ -0,0 +1,484 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Bayesian Tutorial</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Metropolis-Hastings, Markov chain Monte Carlo
+ simulation, migration rate, effective population size, recombination rate,
+ maximum likelihood -->
+
+
+(<A HREF="output.html">Previous</A> | <A HREF="index.html">Contents</A> | <A
+HREF="tracer.html">Next</A>)
+
+<H2>Bayesian-LAMARC tutorial:  Why?</H2>
+
+<P>This tutorial is designed to be read from beginning to end, but if you
+like you can jump straight to:</P>
+
+<UL>
+<LI><A HREF="bayes.html#intro">Introduction to Bayesian analysis</A></LI>
+<LI><A HREF="bayes.html#approach">The Bayesian approach in LAMARC</A></LI>
+<LI><A HREF="bayes.html#results">Results of a Bayesian run</A></LI>
+<LI><A HREF="bayes.html#curvefiles">Curvefile uses</A></LI>
+<LI><A HREF="bayes.html#drawbacks">Curvefile drawbacks</A></LI>
+<LI><A HREF="bayes.html#surprises">Curvefile surprises</A></LI>
+<LI><A HREF="bayes.html#compare">Comparing Bayes-LAMARC to Likelihood-LAMARC</A></LI>
+<LI><A HREF="bayes.html#priors">Details about Bayesian priors</A></LI>
+<LI><A HREF="bayes.html#final">Final thoughts</A></LI>
+</UL>
+
+
+<h3><A NAME="intro">What's this about?  What's a Bayesian analysis?</h3>
+
+<P>Better minds than mine have devoted themselves to creating whole web
+pages explaining what a Bayesian approach is, and why it's important.  See,
+for example, Eliezer Yudkowsky's <A
+HREF="http://yudkowsky.net/rational/bayes/">An Intuitive Explanation of
+Bayes' Theorem</A>.  In brief, the advantages of a Bayesian approach
+include:
+
+<UL>
+<LI> Any information you have about the parameters before you start the
+program can be incorporated into the answer
+<LI> The answers are in terms of probability instead of likelihood, and
+the answers therefore have credibility intervals instead of support
+intervals.
+<LI> For LAMARC in particular, the limitations of reliance on 'driving
+values' for the likelihood run are no longer an issue because the parameters
+vary during the search through tree-space.
+</ul>
+
+The biggest disadvantage of a Bayesian approach is:
+
+<ul>
+<LI> You <i>must</i> include information about the parameters before you
+start the analysis, regardless of your state of ignorance, and that
+information is incorporated into the answer, sometimes in non-obvious ways.
+</ul>
+</P>
+
+<h3><A NAME="approach">How is a Bayesian approach used in LAMARC?</h3>
+
+<P>In Likelihood-LAMARC, each chain is a search through tree-space with a
+single set of 'driving values'.  In Bayesian LAMARC, this search through
+tree-space is augmented by a search through parameter space as defined by
+the priors.  This search serves by proxy as the 'driving values' for the
+parallel search through tree-space.
+</P>
+
+<P>In practical terms, a Likelihood-LAMARC search looks like:
+<OL>
+<LI>Generate a tree using a set of driving values.
+<LI>Come up with a new tree using that same set of driving values and the
+old tree.
+<LI>Compare the two trees, and usually keep the one with the better
+likelihood.
+<LI>Save the tree you liked better.
+<LI>Go to step 2 until you're done.
+<LI>Analyze the trees you've collected.
+<LI>Make a new set of driving values based on your trees, and go back
+to step 2 again for a new chain.
+</ol>
+
+(The 'usually' is so that the search can sometimes move 'downhill' in hopes
+of finding a better peak in a different area.)
+</P>
+<P>
+A Bayesian-LAMARC search looks like:
+
+<OL>
+<LI>Generate a tree using a set of driving values.
+<LI>Randomly choose whether to go to step 3 or step 7.
+</P>
+<P>
+<LI>Select a new point in parameter space.
+<LI>Compare the two sets of parameters, and usually keep the one with the
+better likelihood.
+<LI>Save the set of parameters you liked better, and replace the old set of
+driving values with those new ones.
+<LI>Go to step 2 unless you're done, then go to step 10.
+</P>
+<P>
+<LI>Come up with a new tree using the current set of driving values and the
+old tree.
+<LI>Compare the two trees, and usually keep the one with the better
+likelihood.
+<LI>Go to step 2 unless you're done, then go to step 10.
+</P>
+<P>
+<LI>Analyze the parameters you've collected.
+</P>
+</ol>
+
+Steps 1 and 7-9 are exactly the same as those in Likelihood-LAMARC, except
+that the trees are not saved for subsequent analysis, and instead the
+<b>parameters</b> are saved and analyzed.
+</P>
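+
+<P>To make the alternation concrete, here is a minimal conceptual sketch of
+the sampler described above, written in Python.  It is not LAMARC's actual
+implementation: <i>propose_tree</i>, <i>propose_params</i>,
+<i>log_likelihood</i> and <i>log_prior</i> are hypothetical placeholders,
+and symmetric proposal distributions are assumed so that the simple
+Metropolis acceptance rule applies.</P>
+
+<pre>
+import math
+import random
+
+def metropolis_accept(log_new, log_old):
+    """Accept uphill moves always, downhill moves with probability exp(difference)."""
+    return math.log(random.random()) < (log_new - log_old)
+
+def bayesian_sampler(tree, params, n_steps, propose_tree, propose_params,
+                     log_likelihood, log_prior):
+    """Alternate updates of the genealogy and of the driving parameters."""
+    saved_params = []
+    for _ in range(n_steps):
+        if random.choice([True, False]):
+            # Parameter move (steps 3-5): the tree is held fixed.
+            new_params = propose_params(params)
+            if metropolis_accept(
+                    log_likelihood(tree, new_params) + log_prior(new_params),
+                    log_likelihood(tree, params) + log_prior(params)):
+                params = new_params
+        else:
+            # Tree move (steps 7-8): the current parameters act as driving values.
+            new_tree = propose_tree(tree)
+            if metropolis_accept(log_likelihood(new_tree, params),
+                                 log_likelihood(tree, params)):
+                tree = new_tree
+        saved_params.append(params)    # step 10 analyzes these saved samples
+    return saved_params
+</pre>
+
+<P>The parameter values saved this way are what the curves and credibility
+intervals described below summarize.</P>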
+
+<P>Just as Likelihood-LAMARC samples trees proportionally to their
+likelihood, Bayesian-LAMARC samples sets of parameters proportionally to their
+probability.  For more detail, and a comparison of simulated data in both a
+Likelihood and Bayesian setting, see:</P>
+
+<P><A HREF="http://www.genetics.org/cgi/content/abstract/175/1/155">Kuhner,
+M. K. and L. P. Smith, 2007  <i>Comparing Likelihood and Bayesian Coalescent
+Estimation of Population Parameters</i> Genetics 175: 155-165.</a></P>
+
+
+<h3>That was more than I really wanted to know.</h3>
+
+<P>You asked!</P>
+
+<h3><A NAME="results">What I meant to say is, methodology aside, what are
+the results of a Bayesian run and what do they mean?</h3>
+
+<P> In the current version of LAMARC, each parameter is analyzed separately,
+and a probability density function is created for each.  (A probability
+density function is a curve where the area under the curve is one.)  The
+highest point on these curves is given as the point estimate for that
+parameter.  The area under the curve is used to report credibility intervals
+in the output file--the 0.005 percentile is the point below which 0.5% of
+the area of the curve falls (and above which the remaining 99.5% lies);
+the reverse is true for the 0.995 percentile.  The combination of the
+0.005 and 0.995 percentiles gives you your 99% credibility
+interval.  (Credibility intervals are like confidence intervals in that
+they tell you where the truth is most likely to lie, but are used in a
+Bayesian context.  They are sometimes called the 'posterior probability
+interval'.)
+</P>
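+
+<P>As a worked illustration of how those percentiles relate to the area
+under the curve, the following sketch computes a 99% credibility interval
+from a tabulated density.  It mimics the calculation conceptually; LAMARC
+reports its intervals from its own internal routines:</P>
+
+<pre>
+import numpy as np
+
+def credibility_interval(values, density, mass=0.99):
+    # Accumulate area under the tabulated curve (trapezoid rule),
+    # normalize to 1, then read off the 0.005 and 0.995 quantiles.
+    values, density = np.asarray(values), np.asarray(density)
+    areas = 0.5 * (density[1:] + density[:-1]) * np.diff(values)
+    cdf = np.concatenate([[0.0], np.cumsum(areas)])
+    cdf /= cdf[-1]
+    lo = np.interp((1 - mass) / 2, cdf, values)      # 0.5% of the area below
+    hi = np.interp(1 - (1 - mass) / 2, cdf, values)  # 0.5% of the area above
+    return lo, hi
+
+# Toy curve: a normal-shaped density on a grid of Theta values.
+theta = np.linspace(0.0, 0.02, 201)
+dens = np.exp(-0.5 * ((theta - 0.01) / 0.002) ** 2)
+print(credibility_interval(theta, dens))   # roughly (0.005, 0.015)
+</pre>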
+
+<P> <A NAME="LnLpictures"> Probably of more interest are the curvefiles themselves, produced by
+LAMARC as output files.  Each parameter has a curvefile for each
+chromosomal region in your data set, plus one curvefile for the
+overall estimate (the product of the individual curves).  They are
+named [prefix]_[region]_[parameter].txt, where [prefix] is 'curvefile' by
+default (this can be changed in the menu or the XML), [region] is 'overall'
+or 'reg#' with # the number of the region in question, and [parameter] is
+the short name of the parameter, like 'Theta1' for the theta for the first
+population, or 'M32' for the migration rate from the third population into
+the second population. </P>
+
+<P> Each curvefile contains information about the curve at the top,
+followed by a 2-column tab-delimited list of the parameter values and point
+likelihoods that define the probability density function.  It can be
+imported into a spreadsheet program or Mathematica to create an actual
+graph.
+</P>
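+
+<P>If you prefer a script to a spreadsheet, a curvefile can be parsed with
+a few lines of Python.  The sketch below simply skips any line that is not
+two tab-separated numbers (the exact header layout may vary between LAMARC
+versions, and the filename is only an example of the naming pattern
+described above):</P>
+
+<pre>
+def read_curvefile(path):
+    # Keep only the two-column (parameter value, density) pairs.
+    points = []
+    with open(path) as f:
+        for line in f:
+            fields = line.split('\t')
+            try:
+                points.append((float(fields[0]), float(fields[1])))
+            except (ValueError, IndexError):
+                continue          # header or blank line
+    return points
+
+# The point estimate (MPE) for this region is the value with the highest density.
+curve = read_curvefile('curvefile_reg1_Theta1.txt')
+best_value, best_density = max(curve, key=lambda p: p[1])
+</pre>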
+
+<h3><A NAME="curvefiles">So, what do these curvefiles tell me?</h3>
+
+<P> Each curvefile is a combination of the likelihood and the prior (the
+posterior density is proportional to their product): if both inputs are
+accurate, the result tells you the relative
+probability that the parameter is any particular value.  Unlike the
+likelihood version, this overview is complete and continuous.
+
+<P> Another advantage of the curvefiles is that you can check them to see
+if they're unimodal.  Our research to date indicates that the true
+probability density function should be unimodal, with three exceptions:  an
+insufficiently long run of LAMARC, insufficient sampling of the data, and
+unusual correlations between parameters.  The first gives rise to multiple
+peaks in the curvefiles that come from single regions, which will not be
+conserved from one region to the next.  The second gives rise to multiple
+peaks in the curvefiles that come from the overall estimate over regions. 
+The third gives rise to the <b>same</b> multimodal peaks across all
+regions. The various possibilities, then, are:
+
+<UL>
+
+  <LI> <b>You have multiple peaks in both the individual regions curvefiles
+  and the overall-region curvefiles</b>:  If these multiple peaks are
+  different across the different regions, LAMARC was not run long enough. 
+  It is probably not possible to tell at this point if the data were also
+  undersampled.  If these multiple peaks are conserved across regions and
+  show up in the overall-region curvefiles too, it is possible you have
+  unusually-correlated parameters--see below for a discussion on this.
+
+  <LI><b>You have multiple peaks in the overall region curvefiles, but not
+  in the individual regions curvefiles</b>:  LAMARC was probably run long
+  enough, but the data coverage is insufficient.  Your options are either
+  to collect new data at a different region, or report the multiple peaks
+  and their likely cause in your analysis.
+
+  <LI><b>You have multiple peaks in the individual regions curvefiles, but
+  not in the overall-region curvefiles</b>:  LAMARC was probably not run
+  long enough for any one region, but good coverage of the data seems to
+  have overcome this insufficiency.
+
+  <LI><b>You have no multiple peaks</b>:  LAMARC was run for long enough,
+  and your data coverage was good.  Congratulations!
+
+</UL>
+</P>
+
+
+<h3><A NAME="drawbacks">So what won't these curvefiles tell me?</h3>
+
+<P> The first thing they won't tell you is anything about any correlation
+between your parameters.  For example, the known correlation between Theta
+and growth would not be seen in the curvefiles (or anywhere else in a
+Bayesian-LAMARC run).  This is not a shortcoming of the Bayesian method
+<i>per se</i>, but rather a shortcoming of LAMARC as currently written: 
+each parameter's walk through parameter space is analyzed without regard to
+how the other parameters are moving at the same time.  So, say that two
+parameters are jointly exploring the following parameter space, illustrated
+as a contour map.  We'll start by observing what happens in the uncorrelated
+case:</P>
+
+<P>
+<center><IMG SRC="images/uncorrelated.gif" ALT="Graph of two uncorrelated
+parameters"></center>
+</P>
+
+<P> In the Bayesian run, this contour map fills up with sampled points in
+proportion to its height.  So, the peak area fills up with a lot of points,
+and as you go down, the density of sampled points goes down, too. A
+two-dimensional analysis of these points could reproduce the contour
+map.  But in a one-dimensional analysis (as in the current version of
+LAMARC), each curvefile is the result of projecting each point onto one of
+the axes, then smoothing the density of resulting points to get a
+probability density function (seen outside each axis).
+</P>
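+
+<P>The projection step is easy to mimic: sample points from a toy joint
+distribution, keep one coordinate, and smooth.  The sketch below uses a
+histogram as a crude stand-in for LAMARC's curve-smoothing:</P>
+
+<pre>
+import numpy as np
+
+rng = np.random.default_rng(1)
+
+# Toy stand-in for the joint posterior: two uncorrelated parameters.
+samples = rng.multivariate_normal(mean=[0.0, 0.0],
+                                  cov=[[1.0, 0.0], [0.0, 1.0]],
+                                  size=5000)
+
+# Project onto the X axis and smooth to get the one-dimensional density.
+marginal_x, bin_edges = np.histogram(samples[:, 0], bins=50, density=True)
+</pre>
+
+<P>Changing both off-diagonal entries of the covariance matrix to, say,
+0.8 (or -0.8) leaves marginal_x essentially unchanged, which is exactly
+the limitation discussed below.</P>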
+
+<P>Now let's look at some other examples, where the two parameters are
+either correlated (on the left) or inversely-correlated (on the right).  The
+contour map of parameter space might then look like:
+</P>
+
+<P>
+<center>
+<IMG src="images/correlated1.gif" alt="Graph of two
+correlated parameters">
+<IMG src="images/correlated2.gif" alt="Graph of two
+inversely correlated parameters">
+</center>
+</P>
+
+<P> Unfortunately, a one-dimensional analysis cannot distinguish between
+these two cases, nor between them and the uncorrelated case:  the
+densities projected onto the axes are the same in all three cases.
+</P>
+
+<P>The problem gets even knottier when the two parameters have some sort of
+non-symmetrical correlation.  Consider a correlation whose contour map looks
+like this: </P>
+
+<center>
+<IMG src="images/variably_correlated.gif" alt="Graph of two variably
+correlated parameters">
+</center>
+
+<P> In this example, there is a single maximum on the contour map, but its
+oddly-distorted shape causes there to be multiple peaks in its projection
+onto the X axis.  Given a sufficiently-long 'flat' section of the curve, the
+secondary peak could even end up higher than the one corresponding to the
+peak of the contour plot.
+</P>
+
+<P> This is, it should be noted, not an <b>incorrect</b> assessment of the
+data, but it is <b>limited</b>.  In other words, if the parameter plotted
+on the Y axis in the above example is truly equally likely to be anywhere
+in the lower half of its allowed range, (which it would be, if the prior on
+that parameter was accurate), and all those Y values correspond to a single
+X value, there will indeed be a peak at that X value, which may even
+overshadow the peak seen in the contour plot.  Conversely, an analysis that
+simply reported the peak of the contour plot would miss the fact that there
+was a different X value which might be a better choice, were you only
+considering possible values of X. </p>
+
+<P> The take-home lesson?  The point estimates of individual parameters you
+get in a Bayesian run are only guaranteed to be accurate if considered
+independently from one another.  And, of course, if the priors are accurate.
+</P>
+
+<h3><A NAME="surprises">Is there any other feature of these curvefiles that
+might surprise me if I didn't know the secret?</h3>
+
+<P> Funny you should ask!  As a matter of fact, there's one biggie:  Our
+curve-smoothing algorithm does not take into account the prior boundaries. 
+This means that if your prior ranges from 0 to 100, you might end up with a
+curve that goes negative, even if negative values are absurd for your data. 
+This does not mean that LAMARC actually used any negative values in calculating
+the trees, but rather that the curve-smoothing algorithm has taken some of
+the density at or around zero and smoothed it beyond the boundary.  If
+you're desperately curious about our curve-smoothing algorithm, look <A
+HREF="curve-smoothing.html">here</A>.
+</P>
+
+<P> If you get this behavior, there are two ways to interpret it.  One is
+simply to assign all density outside the boundary in question to the
+boundary itself.  In other words, if 3% of your curve falls below zero,
+simply claim that the lower 3% of your probability density curve is
+<b>at</b> zero.  Another possibility (especially if you have accepted the
+default priors) is that your priors are wrong.  If, in the same 0 to 100
+example, you have a lot of density smoothed beyond 100, and the peak of the
+curve is at 99.5, it's fairly clear that your data are pushing the search as
+high as you are allowing it to go.  It might be informative to see where it
+would take the search when you allow it to go higher still.
+</P>
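+
+<P>The first interpretation is easy to apply to a tabulated curve: discard
+the points outside the boundary and note how much of the total area they
+carried.  A minimal sketch (a reading aid for the output, not something
+LAMARC itself does):</P>
+
+<pre>
+import numpy as np
+
+def area(values, density):
+    # Trapezoid-rule area under a tabulated curve.
+    if len(values) < 2:
+        return 0.0
+    return float(np.sum(0.5 * (density[1:] + density[:-1]) * np.diff(values)))
+
+def clip_to_boundary(values, density, lower=0.0):
+    values, density = np.asarray(values, float), np.asarray(density, float)
+    inside = values >= lower
+    spilled = 1.0 - area(values[inside], density[inside]) / area(values, density)
+    # Treat the spilled fraction as probability mass sitting at the boundary.
+    return values[inside], density[inside], spilled
+</pre>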
+
+
+<h3><A NAME="compare">So, why should I run Bayesian LAMARC vs. Likelihood
+LAMARC?</h3>
+
+<P> There are two advantages we have found in a Bayesian run vs. a
+likelihood run in LAMARC.  In some difficult cases, our experience indicates that
+Bayes-LAMARC does a better job of searching tree space than does Likelihood-
+LAMARC.  We attribute this to Likelihood-LAMARC's reliance on driving
+values:  its search is akin to looking under a lamp-post because that's
+where the light is.  The method of changing the driving values from chain
+to chain allows us (to extend the analogy) to move the lamp-post from time
+to time, and given a sufficiently long run, the search would find all the
+good trees.  But Bayes-LAMARC's methodology searches tree space at the same
+time that it searches parameter space, meaning that the driving values are
+constantly changing.  The boundaries of that search are immobile--they're
+the boundaries of the priors--but if your priors are correct, the search
+does a better job of covering the allowable space. </P>
+
+<P> The second advantage of a Bayesian run vs. a likelihood run is in a
+sense a subset of the first advantage:  a Bayesian run is better at
+determining whether your data support parameter values at or near zero.  In
+a likelihood run, a driving value of zero makes it nearly impossible to
+search tree space with a non-zero value.  Conversely, having a non-zero
+driving value makes it nearly impossible to search tree space where the
+value is zero.  One observed failure mode in Likelihood LAMARC is when it
+estimates a parameter after one chain to be nearly zero, then estimates
+that same parameter to be high after the next chain, near-zero again the
+next, and continues to ping-pong back and forth throughout the run.  Our
+current theory is that this behavior stems from having single driving
+values.  Our Bayesian runs have not shown this same behavior, and instead
+settle on a near-zero estimate, complete with seemingly appropriate
+confidence intervals. </P>
+
+<P> Finally, if you wish to analyze a case with modern populations diverging
+from ancestral populations, you must use a Bayesian analysis as likelihood
+is not supported in cases with divergence.  For the curious, the reason for
+this is that a likelihood run would prefer to keep the population splitting
+times constant throughout a chain and then consider what they suggest about
+other possible split times, but it turns out that trees inferred with one
+split time are often not useful in deciding about other potential split
+times.  A Bayesian analysis avoids this problem as it allows split times
+to vary throughout the chain.</P>
+
+<h3><A NAME="priors">So tell me about these priors, then.  They're flat,
+right?</h3>
+
+<P> Yes, the current version of LAMARC only has flat priors.  A prior is
+supposed to represent your knowledge of the potential answers before you
+start, and if you don't know anything about what the potential answer might
+be beyond 'it'll be larger than X and less than Y', assigning all values
+between X and Y the same probability is a way to express that.
+</P>
+
+<P> Even then, you have to decide what kind of <b>density</b> your priors
+should have between those boundaries.  LAMARC currently gives you two
+options:  a linear prior, and a logarithmic prior (with natural logarithms,
+not base-10 logarithms).  This choice can affect the confidence intervals
+of the reported parameters, and will definitely affect the search through
+parameter space.  </P>
+
+<P> Your choice should be motivated by how you believe the parameter varies.
+For example, if you believe that the best value for theta will be around
+0.01, and that the 95% confidence interval will be from 0.1 to 0.001, you
+should use a logarithmic prior for theta.  If you believe that the best
+value for a migration rate will be around 100, and that the 95% confidence
+interval will be from 50 to 150, you should use a linear prior for
+migration.
+</P>
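+
+<P>The practical difference between the two shapes is how they spread the
+search across the allowed range.  A small illustration (not LAMARC code;
+LAMARC draws its own proposals internally):</P>
+
+<pre>
+import numpy as np
+
+rng = np.random.default_rng(2)
+
+def draw_linear(lo, hi, n):      # flat on the parameter itself
+    return rng.uniform(lo, hi, n)
+
+def draw_log(lo, hi, n):         # flat on ln(parameter); lo must be above zero
+    return np.exp(rng.uniform(np.log(lo), np.log(hi), n))
+
+# On the range (0.001, 0.1), a log prior puts about half its draws below
+# 0.01 (the geometric midpoint); a linear prior puts only about 9% there.
+print(np.mean(draw_log(0.001, 0.1, 100000) < 0.01))     # ~0.5
+print(np.mean(draw_linear(0.001, 0.1, 100000) < 0.01))  # ~0.09
+</pre>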
+
+<P> In the end, if you run LAMARC long enough and you have enough data, 
+it won't matter which type of prior you use because the data will overwhelm it.  
+But your search will
+have to be much longer if you use the wrong type.  If you get it right
+beforehand, it will spend an equal amount of time searching the 1st to 50th
+percentiles as it will the 50th to 99th percentiles, meaning that both
+limits will have the same degree of precision.  In addition, your search
+will have been maximally efficient. </P>
+
+<P> Also remember that any flat areas of your curvefiles should raise a red
+flag.  For example, let's go back to our graph of variably-correlated
+parameters: </P>
+
+<center>
+<IMG src="images/variably_correlated.gif" alt="Graph of two variably
+correlated parameters">
+</center>
+
+<P>One possible cause of the curve on the left (with the long flat section)
+is a logarithmic prior for that parameter with a lower bound so low that it
+included a wide swath of values, all of which had the same effect on the
+data as they would if the parameter value was zero.  For example, if you
+are estimating recombination rate, there will be a particular value for
+that rate that predicts a single recombination event in the entire history
+of your population.  All potential recombination rates below that value will
+therefore predict the same thing:  zero recombination events.  Conversely,
+any tree with zero recombination events will happily accept any value for
+recombination below that cut-off.
+</P>
+
+<P> The upshot of all this is that if you get a curvefile with a flat
+section, you must realize that section only includes the information you put
+into the prior--in other words, the same amount of information you had
+before you ran LAMARC at all.  What makes this example particularly worrying
+is not that recombination has a long flat section--that's fairly easy to
+explain--but that due to a correlation between recombination and some other
+parameter, that other parameter now has a secondary peak.  And as we said
+before:  if your prior is right, then this secondary peak is also right. 
+But if the prior was intended to represent more or less complete lack of
+information, the program gleaned information from it anyway.
+</P>
+
+<P> With some amount of trepidation, we have put in default priors for each
+force, though we strongly encourage users to make deliberate choices about
+the priors for their particular variables, instead of blindly accepting the
+given defaults.  For reference, the default priors are:
+<UL>
+<LI>Theta:  Logarithmic, 0.00001 - 10.0
+<LI>Migration:  Logarithmic, 0.01 - 1000.0
+<LI>Recombination:  Logarithmic, 0.00001 - 10.0
+<LI>Growth:  Linear, -500.0 - 1000.0
+</UL>
+
+<h3><A NAME="final">Uh, suddenly I'm not so sure about doing a Bayesian run
+any more.</h3>
+
+<P> You are wise to be cautious, for it is always critically important to
+examine your assumptions before undertaking any scientific endeavor, and
+the priors represent a large set of assumptions that can affect your
+results in sometimes surprising ways.  It might help to realize that your
+priors are part of the model for population history you're using--a model
+that already includes a variety of assumptions and simplifications.  The
+difference, of course, is that the assumptions and simplifications of the
+coalescent model have been hashed out in the literature over the course of
+several years, and you're going to have to defend the prior you put on your
+data for the first time. </P>
+
+<P> On the plus side, the more data you collect and the longer you run
+LAMARC, the less your choice of priors will affect the results.  If you do a
+sufficient analysis, and understand that any part of the resulting curves
+that are flat came from your assumptions and not from your data, you will be
+OK.
+</P>
+
+<h3>Well, OK, I'll give it a shot.  What do I do?</h3>
+
+<P> Onward, then, to the <A HREF="bayes_howto.html">Bayesian tutorial</a>.
+</P>
+
+(<A HREF="output.html">Previous</A> | <A HREF="index.html">Contents</A> | <A
+HREF="tracer.html">Next</A>)
+
+<!--
+//$Id: bayes.html,v 1.14 2012/05/16 17:14:01 mkkuhner Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/bayes_howto.html b/doc/html/bayes_howto.html
new file mode 100644
index 0000000..85dc56d
--- /dev/null
+++ b/doc/html/bayes_howto.html
@@ -0,0 +1,334 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Bayesian Tutorial</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Metropolis-Hastings, Markov chain Monte Carlo
+ simulation, migration rate, effective population size, recombination rate,
+ maximum likelihood -->
+
+
+(<A HREF="tutorial.html">Previous</A> | <A HREF="index.html">Contents</A> |
+<A HREF="tutorial2.html">Next</a>)
+
+<H2>Bayesian-LAMARC tutorial:  How?</H2>
+<P>This tutorial is designed to be read from beginning to end, but if you
+like you can jump straight to:</P>
+
+<UL>
+<LI><A HREF="bayes_howto.html#what">What do I do?</A></LI>
+<LI><A HREF="bayes_howto.html#priors"> I've set my priors.  Am I done?</A></LI>
+<LI><A HREF="bayes_howto.html#initchain">Initial chain is finished</A></LI>
+<LI><A HREF="bayes_howto.html#finalchain">Final chain is finished, what are the files about?</A></LI>
+<LI><A HREF="bayes_howto.html#curve">What's a curve.file?</A></LI>
+<LI><A HREF="bayes_howto.html#nowwhat">What does it all mean?</A></LI>
+</UL>
+
+<h3><A NAME="what">I'm ready to go!  What do I do?</h3>
+
+<P>First off, you should have read the <A HREF="tutorial.html">basic tutorial</A> at least to the
+point where it links to this document (though it is wise to read the whole thing, as many of the concepts are the same).  You should also read our
+tutorial on <A HREF="bayes.html">why you might want to use a Bayesian
+analysis</a>.
+</P>
+
+<p>Now do the following:</p>
+<P>Step 1:  Use the converter to get a LAMARC infile from your data.</p>
+<p>Step 2:  run LAMARC and tell it about that infile.  </p>
+
+<p>For the purposes of illustration, I'm going to assume that you have at least two genetic regions in your data set, coming from at least two populations.</P>
+
+<P>Now we're going to use the menu.  To change from a likelihood run to a
+Bayesian run, first select the 'Search Strategy menu' (S), and toggle the
+'Perform Bayesian or Likelihood analysis' option (P).  If you're
+experimenting and just want to see a Bayesian run, that's all you'd need to
+do--you could hit '.' now and run LAMARC.  But let's explore some of the
+other options available to us in a Bayesian run.
+</P>
+
+<P>First, you'll notice that you now have a new menu option available to you
+entitled, 'Bayesian Priors Menu' (B).  Select that, and you'll get a
+sub-menu listing the different active forces, and a summary of what the
+priors look like for each of them.  By default, the priors for all forces
+but growth are logarithmic, so you'll see something like:
+</P>
+
+<pre>
+Bayesian Priors Menu
+                                                                             
+  T  Bayesian priors for Theta                             (all logarithmic)
+  M  Bayesian priors for Migration Rate                    (all logarithmic)
+     ----------
+     <Return> = Go Up | . = Run | Q = Quit
+</pre>
+
+<P> Hit 'T' and it'll take you to a list of all the priors for the thetas,
+including a default.  You can then edit the default with 'D', or edit one
+particular prior by selecting that prior's number.  This will take you to a
+menu like the following:
+</P>
+
+<pre>
+Bayesian Priors Menu for Theta for Population 1
+    Priors may be either linear or logarithmic.  The lower bound for
+     logarithmic priors must be above zero, in addition to any other
+     constraints an evolutionary force might have.
+                                                                             
+  D  Use the default prior for this force                                Yes
+  S  Shape of the prior                                                  log
+  U  Upper bound of the prior                                             10
+  L  Lower bound of the prior                                          1e-05
+     ----------
+     <Return> = Go Up | . = Run | Q = Quit
+</pre>
+
+<P> As you can see, you have two ways to change the prior--you can change
+the boundaries, and you can change its shape (or density).  The 'S' option
+will toggle the shape between logarithmic and linear, and you can set the
+upper and lower bounds with the 'U' and 'L' options.  This is your
+opportunity to input what you already know about the parameters you wish to
+estimate.  It's obviously important to get the units right, so be sure to
+read the <A HREF="forces.html">forces</a> section of the manual, and figure
+out any differences between how you typically think about your parameters
+and how LAMARC uses those parameters.  (One typical 'gotcha' is that LAMARC
+always uses per-site estimates, but some researchers use per-locus
+estimates.)</p>
+
+<h3><A NAME="priors">OK, I've set my priors.  Am I done?</h3>
+
+<P> Not quite--the standard search strategy is not really appropriate for a
+Bayesian run, so we're going to change it.  Go to the top menu by hitting
+'Return' a few times, then select 'S' ('Search Strategy Menu'), then S
+again ('Sampling strategy (chains and replicates)').  Once here, change the
+number of initial chains to 1 and final chains to 1 (options 1 and 5). 
+Finally, change the number of replicates ('R') to 3.  (In a production run,
+you're probably better off changing the 'Final number of samples' to 30,000
+instead of changing the number of replicates to 3, but we want a bit more
+feedback for this sample run, which replicates will give us.)
+</P>
+
+<h3> <b>Now</b> am I done?</h3>
+
+<P> Yes!  One quick thing, however: from the main menu, select S (Search
+Strategy Menu), then R (the Rearrangers Menu).  Here's where you can change
+the various rearrangers, including (now) the Bayesian rearranger.  This
+controls how much relative time is spent sampling new parameters (the
+Bayesian rearranger) vs. sampling new trees (all the other rearrangers). 
+In a run where you are trying to estimate many parameters (say, in a system
+with several populations), this menu is where you could increase the time
+spent resampling from those parameters.</p>
+
+<P>But for now, we'll leave it as it is, with the Bayesian rearranger set
+to the same relative frequency as the Topology rearranger.  Hit Run ('.')
+and I'll walk you through the output. </P>
+
+<h3> <A NAME="initchain">OK, this 'Initial chain 1' thing is finished.</h3>
+
+<P>This probably looks something like:
+</P>
+<pre>
+14:33:01  Initial chain   1:  [====================]         1000 steps
+14:35:47  Predicted end of chains for this region:  Fri Apr 22 08:32:28
+          2005
+ 
+14:35:47  Accepted    17% | Point Likelihood 2.09739689 | Data lnL -3268.84119
+Trees discarded due to too many events:        2
+Trees discarded due to too small population sizes:        0
+Trees discarded due to an infinitesimal data likelihood:        0
+Trees discarded due to extremely long branch lengths:        0
+Bayes-Arranger accepted            80/421 proposals
+Tree-Arranger accepted             81/468 proposals
+Tree-Size-Arranger accepted         9/111 proposals
+ 
+Number of unique sampled values for each parameter:
+    9: Theta for population number 1
+    4: Theta for population number 2
+   20: Migration rate into population number 1 from population number 2
+   31: Migration rate into population number 2 from population number 1
+ 
+  Class                  Theta
+  population number 1   0.002060
+  population number 2   0.009636
+   
+  Population                     Mig
+  population number 1     --------  184.1942
+  population number 2     48.03229  --------
+   
+ 
+14:35:47  Final chain     1:  [|                   ]          325
+</pre>
+
+<P> Much of this is the same as in the <A HREF="tutorial.html#screen_output">basic
+tutorial</a>, but let's revisit all the pieces anyway.
+</p>
+<dl>
+<P><dt><b>Initial chain 1</b></dt>
+  <dd>For this Bayesian run, we set up a single initial chain and a single
+  final chain.  The initial chain is not used in the final estimation of
+  parameters, but serves (along with 'burn-in', or the discarded samples for
+  each chain) to get the estimates away from their starting values and the
+  trees away from the initial tree.  It also gives you a rough idea of how the
+  run is going.</dd>
+</P><P>
+<dt><b>Predicted end of chains</b></dt>
+  <dd>LAMARC's estimate for how long it will take LAMARC to get through all of
+  the replicates for this genetic region.</dd>
+
+</P><P>
+<dt><b>Accepted 17%</b></dt>
+  <dd>The total acceptance rate for all the various arrangers.  While this can
+  be helpful (acceptance rates should normally fall in the 5-50% range, and
+  typically reside around 10% or so), it is usually more helpful to examine
+  the acceptance rates for the individual arrangers.</dd>
+
+</P><P>
+<DT><b>Point Likelihood 2.09739689</b></dt>
+  <dd> An average of each parameter's posterior point likelihood at its
+  maximum probability.  Not all that useful on its own, but it can be
+  compared to other Point Likelihoods for other chains or regions.  The
+  higher this number, the thinner (on average) the confidence intervals,
+  while the lower this number, the wider the confidence intervals.</dd>
+</dt>
+
+</P><P>
+<dt><b>Data lnL -3268.84119</b></dt>
+  <dd> The data log likelihood.  This number will probably be very
+  negative, should increase for the first few chains, then level off for
+  the last few chains.  It's the probability of the last tree in the chain
+  given your data, which is a measure of how well the tree
+  fits your data.  Since a large data set is highly unlikely to
+  be produced by <b>any</b> given tree, the low values in 
+  themselves are not a problem; but they should not decrease 
+  significantly as LAMARC's search continues.<br><br>
+  
+  One other thing you should note is that if you add more
+  sites or more individuals to your data set, this number will go
+  <b>down</b>.  A larger data set is intrinsically more unlikely
+  (requires us to posit more events to explain it) than a smaller
+  one.  So a tremendously negative data log likelihood is not a
+  symptom of impending doom, just a sign of a big, juicy data set.</dd>
+
+</P><P>
+<dt><b>Trees discarded due to...</b>
+  <dd>Sometimes, LAMARC will discard trees because the trees themselves are
+   inherently too tricky to deal with.  Almost always, these trees would
+   also be rejected from having too low a likelihood, so you shouldn't worry
+   about this too much unless one of these numbers gets very high (say,
+   larger than 5% of the total number of proposed trees).  If that happens,
+   your starting parameters might be too extreme, or you might be calling
+   two populations different when they are actually genetically identical
+   (rejections due to 'too many events' can have this cause).  Generally,
+   though, it's just LAMARC being efficient.<br><br>
+   If absolutely no trees are rejected, you'll see the message "No trees
+   discarded due to limit violations." which means you're fine.
+  
+</P><P>
+<dt><b>Arranger accepted</b></dt>
+
+  <dd>This is a more detailed breakdown of the 'Accepted 17%', above. 
+  The three arrangers on by default in a Bayesian LAMARC run are the Bayes
+  arranger, which picks a new value for one of your parameters from that
+  parameter's prior; the Tree-Arranger, which breaks a branch of a tree and
+  then re-attaches it; and the Tree-Size-Arranger, which preserves the
+  topology of the tree but picks new sizes for some or all of the
+  branches.  The Bayes-Arranger and Tree-Arranger are absolutely required
+  for a Bayesian run, since the first samples the parameters and the second
+  samples the trees.  The Tree-Size-Arranger is more of a helper function,
+  which is why (by default) it only searches 1/5 as much as the
+  Tree-Arranger.  <br><br>
+
+  These numbers can vary fairly widely, but each should normally fall in the
+  5-50% range, with 10% being typical.  </dd>
+
+</P><P>
+<dt><b>Number of unique sampled values for each parameter:</b></dt>
+  <dd> Here, we see the results of the Bayesian acceptance rate.  Each
+  parameter is listed, together with the total number of unique points
+  collected for it (each point may have been sampled multiple times, either
+  because proposed new values for that parameter were rejected, or because
+  that parameter had no new proposed values between sampling steps).  For
+  the initial chain, it's not important that these numbers be very high, but
+  for the final chain it will be vital.  The more data points you have, the
+  better resolution of your peaks you will get, but in general, you'll need
+  at least 100 unique data points to get an okay curve, and probably
+  thousands to get a good one.<br><br>
+  It is important to note that these values are <b>not</b> 'Effective Sample
+  Size' values (ESS).  LAMARC does not calculate ESS, but provides an output
+  file that can be used with the program Tracer to calculate ESS.</dd>
+
+</P><P>
+<dt><b>Theta</b> and <b>Mig</b>
+  <dd>These are the parameters LAMARC is trying to estimate.  You will be
+  estimating one theta value for every population in your data, and two
+  migration rates for every pair of populations in your data.  In this case,
+  with two populations, that means a theta for each (0.002060 for population
+  1 and 0.009636 for population 2), and two migration rates (184.1942 for
+  the rate from pop2 to pop1, and 48.03229 for the rate from pop1 to pop2). 
+  These are the peaks of the posterior likelihood curves for your
+  parameters.  As we are in the Initial chain, this doesn't mean much, but
+  these same estimates from the Final chain will be reported as LAMARC's 
+  estimates of your parameters.</dd>
+
+</P>
+</dl>
+
+<h3> <A NAME="finalchain">OK, it finished 'Final Chain 1', then told me it wrote to a bunch of
+files.  What are they?</h3>
+
+<P>The main output file will match the output file from a likelihood run;
+more information can be found in <A HREF="tutorial.html#output">this
+section</a> of the main tutorial, and still more information can be found in
+the <A HREF="output.html">Output Files</a> section of the main manual.  The
+principal difference between a Bayesian output file and a likelihood output
+file is that Bayesian parameter estimates are known as Most Probable
+Estimates (MPEs) instead of Maximum Likelihood Estimates (MLEs).  This is
+because a Bayesian run produces probability density functions from which we
+read off the peak, instead of calculating likelihoods.</p>
+
+<P>This difference also shows up in the profile tables of the outfile, where
+the Point Probabilities of the parameters are reported instead of the Log
+Likelihoods.  Again, this is due to the type of analysis being done.  Point
+probabilities are the absolute values of the probability density function
+for that parameter value.  They can be used to
+compare their magnitudes at different parameter values, but are meaningless
+outside of that context.
+</P>
+
+<P>Finally, the parameters are profiled without regard to other parameters
+in the run.  As noted <A HREF="bayes.html#drawbacks">earlier</a>, this can
+mask any correlations that might legitimately exist in your data, but the
+profiles are otherwise accurate.</p>
+
+<h3><A NAME="curve">There are also these 'curve files'.  What do I do with them?</h3>
+
+<P>Curve files, as you no doubt <A HREF="bayes.html#results">remember</a>,
+are the full detailed output from a Bayesian run, and you should definitely
+look at them.  The easiest way to do this is to import the file into a
+spreadsheet program like Excel (the numbers are tab-delimited), highlight
+the two columns of data, and select 'make a graph of this' (you'll want an
+X-Y Scatter Plot type graph).  You can then simply look at the resulting
+curve to see if it's lumpy or nicely unimodal.
+
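+<P>If you would rather script it, the same graph can be produced with a
+few lines of Python using matplotlib (the filename is just an example of
+the naming pattern described in <A HREF="bayes.html#results">the Bayesian
+FAQ</a>):</P>
+
+<pre>
+import matplotlib.pyplot as plt
+
+xs, ys = [], []
+with open('curvefile_reg1_Theta1.txt') as f:
+    for line in f:
+        fields = line.split('\t')
+        try:
+            x, y = float(fields[0]), float(fields[1])
+        except (ValueError, IndexError):
+            continue                  # skip header or blank lines
+        xs.append(x)
+        ys.append(y)
+
+plt.plot(xs, ys, marker='.')
+plt.xlabel('parameter value')
+plt.ylabel('posterior density')
+plt.show()
+</pre>
+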
+<h3><A NAME="nowwhat">So what does it all mean?</h3>
+
+<P>An excellent question, and one that we attempt to answer in some detail in <A
+HREF="tutorial2.html">Analyzing the Rest of Your Data</a>.</P>
+
+
+<P> 
+(<A HREF="tutorial.html">Previous</A> | <A HREF="index.html">Contents</A> | <A
+HREF="tutorial2.html">Next</A>)
+</P>
+<!--
+//$Id: bayes_howto.html,v 1.17 2011/06/23 21:00:36 jmcgill Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/changes.html b/doc/html/changes.html
new file mode 100644
index 0000000..34ca455
--- /dev/null
+++ b/doc/html/changes.html
@@ -0,0 +1,497 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Changes</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF"> 
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration
+rate, effective population size, recombination rate, maximum likelihood -->
+
+<P>(<A HREF="overview.html">Previous</A> | <A HREF="index.html">Contents</A>
+| <A HREF="upcoming.html">Next</A>)</P>
+
+<H2> Changes between LAMARC version 2.1.8 and 2.1.6 </h2>
+
+<p><b>BUG FIX: corrected major migration bug</b>
+A bug in data summarization for migration parameters reversed the
+direction of migration in some senses but not others.
+All runs modeling migration parameters not held constant 
+should be re-run.
+</p>
+
+<p><b>BUG FIX: improved handling of extreme negative growth</b>
+A bug in tree generation under extreme negative growth resulted
+in trees with default branch length being accepted.
+If you have seen results (especially in Likelihood runs) where growth
+rates oscillated regularly from negative to positive, that was
+likely this bug.
+All runs modeling growth should be re-run unless they were
+Bayesian runs with exclusively non-negative priors.
+</p>
+
+<p><b>New feature: Divergence:</b>
+LAMARC now models multiple populations 
+<a href="divergence.html">diverging from common ancestors</a>.
+</p>
+
+<p><b>New feature: SNP panel corrections:</b>
+LAMARC can now correct for the loss of recent variation induced
+by using <a href="panels.html">SNP panel chips</a>.
+</p>
+
+<p><b>Enhancement: faster convergence in Likelihood runs:</b>
+The method for checking convergence in Likelihood runs has been improved, 
+yielding faster convergence.
+</p>
+
+
+<H2> Changes between LAMARC version 2.1.6 and 2.1.5 </h2>
+
+<P><b>BUG FIX: corrected bug in data likelihood in presence of recombination</b>
+A bug in the data likelihood calculation for recombinant trees caused
+increasingly inaccurate data likelihoods as the number of non-recombinant
+sub trees grew. This led to LAMARC preferring trees with fewer recombinations.
+Recombinant analyses run with LAMARC versions 2.1.2 through 2.1.5 should
+be re-run.
+</P>
+
+<P><b>BUG FIX: corrected random number generator</b>
+Corrected a bug in which random values which were supposed to be in the
+open interval (0,1) were sometimes returning 0 or 1. The bug was rare,
+resulting in unexplained crashes every 4 billion or so random number draws.
+There is no need to re-run analyses which did not crash.
+We also updated our random number generation to use the
+Boost Mersenne twister (boost::mt19937).
+</P>
+
+<P><b>BUG FIX: data uncertainty model with SNP data (beta test)</b>
+Data likelihood for the invariant base pairs in SNP data now
+incorporates the per-base error rate.
+Analyses using SNP data with the per-base error rate model should
+be re-run.
+</P>
+
+<H2> Changes between LAMARC version 2.1.5 and 2.1.4 </h2>
+
+<P><b>data uncertainty model (beta test)</b>
+</P>
+
+<P><b>improved output reports</b>
+</P>
+
+<H2> Changes between LAMARC version 2.1.4 and 2.1.3 </h2>
+
+<P><b>compiles with g++ 4.3.3 on Linux</b>
+This release updates the code base to compile with g++ 4.3.3.
+Earlier compilers should still work.</P>
+
+<P><b>minor user experience improvements</b>
+Several error messages from the converter have been improved.
+Converter can now read input files missing the end-of-line
+character.
+Lamarc now records the random seed used. This is useful
+when debugging problems.
+
+<H2> Changes between LAMARC version 2.1.3 and 2.1.2b </h2>
+
+<P><b>Bug fix for haplotype rearranging code</b>
+This release fixes a bug introduced into the code used to guess 
+haplotype resolution.
+Analyses using this feature 
+in LAMARC versions 2.1.2 and 2.1.2b 
+should be re-run.
+Additional improvements include:
+<ul>
+<li>Tracer output now renders step counts as integers instead of reals.
+<li>Mapping output now writable to its own file.
+<li>Upgraded default wxWidgets installation to 2.8.8.
+</ul>
+</P>
+
+<H2> Changes between LAMARC version 2.1.2b and 2.1.2 </h2>
+
+<P><b>Minor changes affecting user experience only</b>
+Removed requirement for user to "press enter to quit" in
+batch mode. Restored missing icons to MSW distribution.
+</P>
+
+<H2> Changes between LAMARC version 2.1.2 and 2.1.1 </h2>
+
+<H3> Additions</H3>
+
+<P><b>Limitations for Recombination relaxed.</b>  LAMARC is now 'final
+coalescent' aware, meaning that individual sites that have coalesced no
+longer induce recombination events.  This means that recombination can be
+estimated for much longer distances--version 2.1.1 had problems when
+theta * r * sequence length (or 4NCl) was any higher than about 5.  2.1.2
+can now handle values of 4NCl up to ~100.  In humans, this translates to
+about .2 centimorgans, or 200 kilobases.  We're working on expanding this
+even further--if you have a data set that needs longer recombination
+lengths, let us know (we're particularly interested in people using LAMARC
+over long distances for trait mapping).</P>
+
+<P><b>Tracer output</b> now includes the probabilities of the sequence data
+on the current genealogy in Bayesian runs as well as in Likelihood runs.</P>
+
+<h3>Corrections</h3>
+
+<P><b>Unknown microsatellite data</b> are now recognized by the
+converter.</P>
+
+<P><b>Maximization</b> has been made somewhat more efficient in some cases,
+particularly in runs that estimate variable mutation rates over regions
+drawn from an unknown gamma distribution.</P>
+
+
+<H2> Changes between LAMARC version 2.1.1 and 2.1. </h2>
+
+<h3>Corrections</h3>
+
+<P><b>Bayesian Tracer files</b> now omit parameters set to be 'invalid'.</P>
+
+<P><b>Bayesian analyses</b> now handle runs with too few unique sampled
+parameter values a bit more robustly, and warn a bit more sternly.</P>
+
+
+<H2> Changes between LAMARC version 2.1 and 2.0.3. </H2>
+
+<H3> Additions</H3>
+
+<P><b>Trait mapping</b>.  Trait data (such as disease status) can be mapped,
+modelling trait changes as K-Allele data, with arbitrary models for
+penetrance.  Mapping can be performed using two approaches, one that
+includes the trait data when rearranging trees, and one that analyzes the
+trees after they are produced and collects the likelihoods of the data being
+produced at each site.  (See the <A HREF="mapping.html">mapping
+documentation</a> for more information.)</P>
+
+<P><b>Multiple data types within a linked genomic region.</b>  LAMARC is now
+able to correctly analyze a genomic region which contains, for example,
+several microsatellite markers and a stretch of single-copy DNA.  The
+researcher will need to provide the expected relative mutation rate of each
+type of data.</P>
+
+<P><b>GUI Converter</b>.  The GUI file conversion utility included with this
+release has been significantly updated, replacing the beta version
+originally released with version 2.0.</P>
+
+<P><b>Batch versions</b>.  LAMARC could previously be compiled in such a way
+as to produce a 'batch' version.  Now, that capability has been extended to
+the normal version:  If you execute lamarc with a '-b' (or '--batch')
+command line option, it will run through and produce output without further
+interaction from the user.  The converter may be run the same way, also with
+a '-b' command-line flag (see the <A HREF="converter.html">converter
+documentation</a>).</P>
+
+<P><b>Input file setting from the command line</b>.  You may now specify a
+LAMARC input file to use at the command line with a command like "lamarc
+new_infile.xml".  This is particularly helpful with the '-b' option, above,
+as it means you can use a different input file than the default
+'infile.xml'.</P>
+
+<h3>Corrections</h3>
+
+<P><b>Bayesian runs with different effective population sizes</b> for
+different regions were producing erroneous output due to a bug in tree
+rearrangement.  (This would most easily manifest as abnormally low
+acceptance rates for theta values in regions with an effective population
+size other than 1.0.)  This has been fixed.</P>
+
+<h3> Incompatibilities</h3>
+
+<P><b>Phase tags in XML input file.</B>  As part of enabling multiple
+data segments of different types within a linked genomic region, we
+have changed the numbering system used to indicate which sites in an
+individual are phase-unknown to match the numbering system used for
+other purposes.  As a result, any previous XML input files which explicitly
+listed the phase-unknown sites for each individual (rather than
+indicating that no sites were phase-known or phase-unknown) will need
+to be hand-edited to use the <A HREF="xmlinput.html#phase">new scheme</a>.  
+LAMARC will generally be able to detect this problem and issue an error
+message.  If you find you have this problem with an old infile, you can
+either recreate the lamarc input with the converter, or edit the infile. 
+If you have all phase-unknown data, the simplest method is to change the
+default (and now incorrect) tags:</P>
+
+<pre>
+     <phase type="unknown">
+     0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 [...] 
+     </phase>
+</pre>
+
+<P>To:</P>
+
+<pre>
+     <phase type="known"> </phase>
+</pre>
+
+<P>The latter will set all of your markers to phase unknown.</P>
+
+
+<br>
+<H2> Changes between LAMARC version 2.0.2 and 2.0.3. </H2>
+
+<P><em>Note:  version 2.0.3 was not released to the general public, but was
+made available for the 2006 <A
+HREF="http://www.molecularevolution.org/">Workshop on Molecular
+Evolution</a> at Woods Hole.</em></p> 
+
+<H3> Additions</H3>
+
+<P> <B> Gamma rates among regions.</B>  Mutation rates can now vary among
+genomic regions according to a gamma distribution; the program will
+attempt to estimate the gamma shape parameter.  This is particularly
+useful for collections of microsatellite regions where little is known
+about the relative mutation rates of each region.  See the document <A
+HREF="gamma.html">"Combining data with different mutation rates"</a> for
+more information.</P>
+
+<P><B> Tracer compatibility.</B>  LAMARC now automatically writes
+files that can be read by the Tracer utility of Drummond and Rambaut.
+In a Bayesian run Tracer can monitor convergence of the parameter
+estimates.  In a likelihood run it can monitor the data likelihood
+of the genealogies.  In both cases, it is useful in determining
+whether the program has been run long enough.  See <A
+HREF="tracer.html">the "Using Tracer with LAMARC" documentation</a> for more information.</P>
+
+<P><B> Newick tree.</B>  LAMARC can now write out the tree of highest
+data likelihood it finds for each region, as a Newick format tree, in cases 
+which do not have migration or recombination.</P>
+
+<H3> Corrections </H3>
+
+<P>Several limitations of the <b>Stepwise and Mixed-KS microsatellite 
+models</b> have been relaxed, allowing runs with widely divergent
+microsatellite counts to run to completion instead of halting partway
+through the program run.  (If an analysis using an earlier version of LAMARC
+ran to completion, it was not affected by this problem; affected runs
+would crash.)</P>
+
+<P>The mixing parameter of the <b>Mixed-KS model</b> could previously be set
+to adjust during the run, but did not actually do so.  Now it does.</P>
+
+<br>
+<H2> Changes between LAMARC version 2.0 and 2.0.2 </H2>
+
+<H3> Additions</H3>
+
+<P><B> Summary file reading and writing </B> can now be used with Bayesian
+as well as likelihood analysis. </P>
+
+<P><B> Maximizer fine-tuning </B> allows likelihood maximization and profiling
+to succeed in some cases where they previously would have failed. </P>
+
+<H3> Corrections </H3>
+
+<P><B> Bayesian multi-region analysis was incorrect </B> in the previous version;
+probability curves representing multiple regions were added rather than
+multiplied.  The resulting MPEs are correct but the confidence limits are 
+unnecessarily wide.  All Bayesian runs with multiple unlinked genomic regions
+should be redone. </P>
+
+<P><B> Likelihood multi-replicate analysis was incorrect </B> in the previous
+version.  Neither MLEs nor confidence limits are accurate.  All likelihood
+runs done using replication from version 2.0 (not from earlier versions) should
+be redone. </P>
+
+<br>
+<H2> Changes between LAMARC version 1.2 and 2.0 </H2>
+
+<H3>Additions</H3>
+
+<P><B> Bayesian analysis.</B>  Lamarc can now make a Bayesian estimation of
+population parameters as an alternative to the original maximum-likelihood
+estimation.  Linear and logarithmic priors with user-specified upper and
+lower bounds are available.  Users are strongly encouraged to set appropriate
+priors.  In our limited experience, the results of Bayesian analysis are
+quite similar to those of likelihood analysis, but the Bayesian approach
+may be superior for estimating parameter values near zero.</P>
+
+<P>As an adjunct to Bayesian analysis, we offer a new genealogy-search
+strategy which reconsiders only branch lengths.  This may allow the
+search to more rapidly react to newly proposed values of Theta.  It can
+also be used in likelihood-based analysis.  It is currently enabled by
+default for all lamarc runs, so attempts to exactly replicate previous
+results will first need to disable this <A HREF="menu.html#rearrangers">strategy</A>.</P>
+
+<P><B>Parameter constraints.</B>  Individual population parameters (such
+as migration or growth rates) may now be constrained to a user-specified
+value, or groups of them may be constrained to be equal.  We especially recommend
+use of constraints to reduce the number of parameters in cases with 
+many subpopulations.</P>
+
+<P><B>Different N<sub>e</sub> and mu among genetic regions.</B>  
+It is now possible to set the relative N<sub>e</sub> (effective 
+population size) and relative mu (neutral
+mutation rate) of each genetic region independently, allowing a correct
+joint estimate over unlike regions such as autosomal and sex-chromosome
+samples or DNA and microsatellite samples.</P>
+
+<P><B>New data types and models.</B>  Data with multiple alleles among
+which no particular relationship is implied, such as electrophoretic
+alleles, can now be coded as "K-Allele data" and analyzed via a K-Allele
+model.  This model is also available for microsatellites as an 
+alternative to the stepwise mutation models; furthermore, a mixed
+model which attempts to optimize the ratio of stepwise changes and
+K-allele changes can be used for microsatellite data.</P>
+
+<P><B> Graphical user interface for file conversion utility.</B>  File
+conversion from PHYLIP or MIGRATE format files can now be done using
+a GUI interface which is significantly easier than the text-based
+form.  The text-based converter is still available.</P>
+
+<H3> Improvements and bug-fixes </H3>
+
+<P><B>Multiple replicates</B> are now correctly implemented using the method of
+Geyer.</P>
+
+<P><B>Inference on phase-unknown DNA or SNP data</B> had a serious bug in version 1.2
+which is fixed in this version.  Previous analyses of this type should be
+repeated as the bug did not cause a crash, but led to inaccurate results.</P>
+
+<P><B> Effectiveness of the maximizer</B> has been greatly improved; it finds
+correct maxima in a much larger proportion of cases.  (The maximizer is the 
+part of the program that searches the n-dimensional likelihood surface for 
+the maximum height, which is the maximum likelihood.)</P>
+
+<br>
+<H2> Changes between LAMARC version 1.1 and 1.2 </H2>
+
+<H3> Change in XML file format </H3>
+
+<P> Older XML files which use the following tag will need to be modified.  The
+tag <map_position> (with an underscore) has become <map-position>
+(with a dash) for consistency with the other tags. </P>
+
+<H3> Removal</H3>
+
+<P><B> Multiple replicates.</B>  Regrettably, we have disabled the ability to
+do multiple replicates of each chain as an accuracy improving measure.  This
+had not been implemented correctly; it produced approximately correct maximum
+likelihood estimates but too-narrow confidence intervals.  We will re-enable
+this feature as soon as we have correct algorithms for combining results over
+replicates.  (This feature has been corrected and re-enabled in version 2.0.)</P>
+
+<P>Previous multiple-replicate runs may well have too-narrow confidence intervals and
+should be redone.  We regret this problem.  </P>
+
+<H3>Additions </H3>
+
+<P><B> Growth.</B>  The program can now estimate an exponential growth rate
+for a single population or for several subpopulations.  This duplicates the
+functionality of FLUCTUATE except that LAMARC can estimate growth in the
+presence of recombination and/or migration as well.</P>
+
+<P><B>General Time-Reversible mutational model.  </B>  For DNA, RNA or
+SNP data, the program can now use a fully-specified form of the GTR mutational
+model.  It is not able to optimize the parameters of this model, but
+other tools such as PAUP*/Modeltest can be used to develop an optimal model 
+to be applied by LAMARC.</P>
+
+
+<P><B>Adaptive heating.</B>  When using the MC^3 or "heated chains"
+strategy to improve searching, the program can now adjust the temperatures
+of the heated chains automatically in an attempt to improve efficiency,
+rather than relying on user-specified fixed temperatures.</P>
+
+<P><B> Menu revision.</B>  The menu has been extensively revised and now
+has the capacity to undo multiple changes.  In addition, a few options on
+the menu have been moved to theoretically more reasonable spots. </P>
+
+<P><B> Saving menu options.</B> The program automatically writes a 
+file, "menusettings_infile.xml", which contains the user's original infile updated
+with the results of all changes made via the menu.  This greatly
+simplifies re-running a complicated case.</P>
+
+<P><B> Saving sampled genealogies. </B>  The program is now capable
+of writing a file containing summaries of its sampled genealogies,
+and can read that file back in and resume a run.  This is useful
+in recovering a run that has crashed, and can also be used to do
+more complex analyses of the same genealogies.  For example, you
+may wish to do a quick run with no profiling in order to find the
+best run parameters, and then re-analyze those genealogies with
+profiling if they are satisfactory.</P>
+
+<P><B>No-menu option.</B>  The program can now be compiled in a no-menu 
+form which takes all of its input from the XML infile.  This is
+useful in designing large simulation studies and other batch runs. </P>
+
+<P> <b>Output</b>  The tables of data have been transposed so that what
+used to be displayed in rows is now displayed in columns.  This puts all
+modified values of the same parameter in the same column, which should make
+it easier to follow the changes.</P>
+
+<H3> Corrections </H3>
+
+<P> <B>File converter.</B>  When input data was presented in Phylip
+"interleaved" format it was truncated in the file converter.  Also,
+if multiple input sequences had the same Phylip-truncated name, the
+converter would silently discard the duplicates.</P>
+
+<P> <B> Maximizer accuracy </B>  The maximization routines which generated
+the maximum likelihood estimates, confidence limits and profiles for
+population parameters were sometimes unsuccessful in finding the true
+maxima, leading to incorrect estimates and inconsistent profiles.
+While we cannot guarantee that the new routines will succeed in all
+cases, they are greatly improved and also provide more feedback when
+they fail.</P>
+
+<br>
+<H2> Changes between LAMARC version 1.0 and 1.1 </H2>
+
+<H3>Additions </H3>
+
+<P><B>Microsatellite data.</B>  Both a stepwise mutational model
+and a Brownian model are provided.  Variable rates at different
+microsatellite regions can be accommodated with a Felsenstein-Churchill
+Hidden Markov model. Warning:  the stepwise model is very slow, 
+and so has not been as thoroughly tested as the others.</P>
+
+<P><B>SNP data.</B>  We implement the "reconstituted DNA" model of
+<A HREF="http://www.genetics.org/cgi/content/abstract/156/1/439">Kuhner et
+al. 2000.</a>  The user must provide map information showing the
+location of the SNPs relative to each other in order to estimate
+recombination rate; unmapped SNPs are usable for population size
+and migration rate estimation only.  </P>
+
+<P><B>Genotypic data.</B>  The program can now use data for which the 
+haplotypes are unknown.  It searches among many different haplotype
+resolutions.  Be sure to use heating if you use this
+option, as otherwise the search tends to become stuck. </P>
+
+<H3> Corrections </H3>
+
+<P><B>Speed.</B>  The version 1.0 release still contained some debugging
+code which slowed it down substantially (all likelihoods were
+calculated twice).  Version 1.1 should be quite a bit faster.</P>
+
+<P><B>Lost data in converter.</B>  The file conversion program silently lost
+the last nucleotide of each sequence.  This will have had a slight
+effect on the results.  If your sequences are very short you may
+wish to re-run previous analyses.</P>
+
+<P><B>Incorrect converter output.</B>  Using the file converter for multiple
+population cases produced defective LAMARC input files which could
+not run successfully.</P>
+
+<P><B>File converter flexibility.</B>  The file converter is now able
+to deal with a much wider array of input data.</P>
+
+<P>(<A HREF="overview.html">Previous</A> | <A HREF="index.html">Contents</A>
+| <A HREF="upcoming.html">Next</A>)</P>
+
+<!--
+//$Id: changes.html,v 1.52 2012/05/25 23:28:10 ewalkup Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/comparing_curvefiles.sxc b/doc/html/comparing_curvefiles.sxc
new file mode 100644
index 0000000..1a5dd33
Binary files /dev/null and b/doc/html/comparing_curvefiles.sxc differ
diff --git a/doc/html/comparing_curvefiles.xls b/doc/html/comparing_curvefiles.xls
new file mode 100644
index 0000000..cae91df
Binary files /dev/null and b/doc/html/comparing_curvefiles.xls differ
diff --git a/doc/html/compiling.html b/doc/html/compiling.html
new file mode 100644
index 0000000..bcf5cd2
--- /dev/null
+++ b/doc/html/compiling.html
@@ -0,0 +1,466 @@
+<!-- header fragment for html documentation -->
+<!--$Id: compiling.html,v 1.36 2012/05/25 23:28:10 ewalkup Exp $ -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Compiling LAMARC</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+<P>(<A HREF="mapping.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="converter.html">Next</A>)</P>
+<H2>Compiling LAMARC</H2>
+
+<P> We provide executables of LAMARC for many systems, but if you cannot use
+these, or wish to compile the program yourself, we provide source code as well.
+This article discusses our experience compiling LAMARC.  We welcome
+your comments on the use of other compilers and computing environments.</P>
+
+<P> LAMARC is written in C++.  We tried to avoid advanced or new features of
+the language, but very old compilers will probably not work.  We
+recommend the use of GNU g++ on any machine which supports it; this is 
+the compiler we used to develop LAMARC, and it works well on most systems.</P>
+
+<h3>Compilation and Testing of Released Executables</h3>
+
+<P> Here is a list of machine/compiler combinations we have tried, with
+comments on our success or lack of it.</P>
+<TABLE BORDER="3" CELLSPACING="1" CELLPADDING="1">
+<TR>
+<TD width=13%><b>OS Family</b></TD>
+<TD          ><b>Compiler Used</b></TD>
+<TD width=13%><b>Compiled On</b></TD>
+<TD width=13%><b>Tested On</b></TD>
+<TD width=39%><b>Notes</b></TD>
+</TR>
+
+<TR>
+<TD rowspan=4>Linux</TD>       
+<TD>GNU g++ 4.5.1</TD>
+    <!-- lamarc bleeding edge -->
+    <TD colspan=2>red hat<br>2.6.18-128.1.1.el5<br>x86_64</TD>
+    <TD rowspan=4>
+        <!-- add info on cluster and special systems -->
+        Our code should compile on any Linux- or Unix-based system.
+        If you are using a compiler other than g++, the GNU C++
+        compiler, you may need to <a href="#advanced">give our configure script 
+        and makefiles some extra help</a>
+        <br>
+        <br>
+        Previous releases have successfully compiled on 32 bit red hat
+        machines, but we did not have one at our disposal for this
+        release.
+    </TD>
+    </TR>
+<TR><TD>GNU g++ 4.3.3<br><em>(released 64-bit executables)</em></TD>
+    <!-- lamarc modules -->
+    <TD colspan=2>red hat<br>2.6.18-128.1.1.el5<br>x86_64</TD>
+</TR>
+<TR><TD>GNU g++ 4.1.2</TD>
+    <!-- lamarc old -->
+    <TD colspan=2>red hat<br>2.6.18-128.1.1.el5<br>x86_64</TD>
+</TR>
+<TR><TD>GNU g++ 4.3.2</TD>
+    <!-- kingman -->
+    <TD colspan=2>debian<br>2.6.18-4-k7<br>32 bit i686</TD></TR>
+
+<TR>
+<TD rowspan=2>Windows</TD>
+    <TD>MinGW cross compile<br><br>
+        GNU g++ 3.4.2<br>
+        MinGW runtime 3.7 <br>
+        w32 API 3.2 <br>
+        <em>(released 32-bit lam_conv executable)</em>
+        </TD>
+    <TD>red hat<br>2.6.18-128.1.1.el5<br>x86_64</TD>
+    <!-- check the windows version -->
+    <TD rowspan=2>64-bit<br>Windows 7 Professional</TD>
+    <TD rowspan=2>
+        We create our Windows executables by cross compiling from 
+        Linux and/or Windows using a Mingw-w64 toolchain
+        of g++.
+</TR>
+<TR><TD>MinGW64 cross compile<br><br>
+        GNU g++ 4.5.1<br>
+        i686 Cygwin 1.7.7<br>
+        <em>(released 64-bit lamarc executable)</em>
+        </TD>
+    <TD>64-bit<br>Windows 7 Professional</TD>
+</TR>
+
+
+<TR>
+<TD rowspan=4>MacOS X</TD>
+    <TD rowspan=2>GNU g++ 4.0.1<br></TD>           
+    <TD rowspan=2>32 bit intel-mac<br>
+        OS X 10.4.11<br>
+        Darwin Kernel 8.11.1</TD>
+    <TD>32 bit intel-mac<br>
+        OS X 10.4.11<br>
+        Darwin Kernel 8.11.1</TD>
+    <TD rowspan=4>
+        As of Lamarc version 2.0, we have stopped supporting the Mac OS 9 platform.
+        <br>
+        <br>
+        We no longer distribute 32-bit executables for the Mac
+        but are still able to compile them.  If you have a 32-bit 
+        machine, and cannot compile your
+        own executables, please contact us at
+        <A HREF="mailto:lamarc at u.washington.edu">lamarc at u.washington.edu</A>
+        and we'll see if we can get you an executable that works for you.
+        <br>
+        <br>
+        The distributed wxWidgets code will not compile with g++ 4.0.0.
+        <br>
+        <br>
+        <tt>lam_conv</tt>, the lamarc file converter, does not compile on
+        Mac OS X 10.6 in its current form. However, <tt>lamarc</tt> will.
+        We recommend you use a copy of <tt>lam_conv</tt> provided in the
+        .dmg file and compile <tt>lamarc</tt> to run on your
+        newer Mac. The <tt>lamarc</tt> in the .dmg should also run on
+        your machine, but a natively compiled version may be faster.
+    </TD>
+    </TR>
+<TR><TD>32 bit powerpc<br>
+        OS X 10.4.11<br>
+        Darwin Kernel 8.11.0</TD></TR>
+<TR><TD rowspan=2>GNU g++ 4.0.1<br><em>(released 64-bit executables)</em></TD>
+    <!-- elizabeth's mac -->
+    <TD rowspan=2>64 bit intel-mac<br>
+                  OS X 10.5.8<br>
+                  Kernel 9.8.0</TD>
+    <TD>64 bit intel-mac<br>
+                  OS X 10.5.8<br>
+                  Kernel 9.8.0</TD></TR>
+<TR><TD>64 bit intel-mac<br>
+                  OS X 10.6.2<br>
+                  Kernel 10.2.0</TD></TR>
+</TABLE>
+
+<h3><a name="basic">Basic Instructions for Compiling and Installing LAMARC on *-nix Systems</a></h3>
+
+<p>
+Here are basic instructions for compiling on Linux/Unix/Mac OSX
+systems.
+</p>
+
+
+<p>
+Begin by untarring the distribution and creating a sub-directory
+to compile in. This will keep your object files 
+from cluttering up a listing of the top level of the distribution.
+
+    <pre>
+    tar xfvz lamarc-2.1.8-src.tar.gz
+    cd lamarc-2.1.8
+    mkdir release
+    cd release
+    </pre>
+</p>
+
+
+<p>
+The configure script queries your system and produces a
+Makefile tailored to your computing environment. Try invoking it
+like this:
+    <pre>
+    ../configure
+    </pre>
+or, if that doesn't work, like this:
+    <pre>
+    sh ../configure
+    </pre>
+</p>
+
+<p>
+The configure script will probably complain that you don't have 
+<a href="http://www.wxwidgets.org">wxWidgets</a> installed.
+(wxWidgets is a free, open source toolkit for GUI applications.
+Through it, we provide native look-and-feel Linux, Mac OS X, and 
+Windows GUI interfaces to our file converter using a single code
+base.)
+If you already have wxWidgets, you can invoke <tt>configure</tt> like this:
+
+    <pre>
+    sh ../configure --with-wx-config=/path/to/wx-config
+    </pre>
+
+If you don't have wxWidgets, or your current version is too old,
+the following invocation should build
+a wxWidgets distribution for you. (Be warned that this may take
+some time.)
+    <pre>
+    ../configure --enable-buildwx
+    </pre>
+</p>
+
+<p>If none of these variations for invoking the configure
+script worked for you, skip ahead to
+<a href="#advanced">Advanced Configuration Options</a>.
+</p>
+
+<p>
+You are now ready to make lamarc. (It is recommended that you use
+the GNU make utility.  On some systems it may be called gmake.)
+
+    <pre>
+    make
+    </pre>
+
+This will produce executables lamarc and lam_conv if you're running
+on a Unix or Linux system. If you're running under Mac OS X, you
+should get clickable executables.
+</p>
+
+<p>
+To install the executables and html documentation (you may
+need sysadmin privileges to do this, and installation is not required
+to use the program) type either
+
+    <pre>
+    make install
+    </pre>
+
+or
+
+    <pre>
+    make install-strip
+    </pre>
+</p>
+
+<p>
+Either of these should install the executables in /usr/local/bin and
+the documentation under /usr/local/html/lamarc. Executables installed
+either way have the same compute time performance. The install-strip
+versions are smaller but provide no debugging information, should
+you run into problems.
+</p>
+
+
+
+<h3><a name="advanced">Advanced Configuration Options</a></h3>
+
+<p>
+For most users,
+the configure script provided with the distribution should work
+as <a href="#basic">described above</a>. 
+However, there are several situations in which you may wish
+to change the behavior of the configure script. These include
+when:
+<ul>
+<li>the configure script declares it <a href="#ostype">cannot proceed without 
+LAMARC_CODE_OSTYPE</a>,</li>
+<li>the configure script declares it <a href="#gui">cannot proceed without 
+GUI_TOOLKIT</a>, or</li>
+<li>you wish to <a href="#special">use a non-default compiler, or override
+variables such as CXXFLAGS and LDFLAGS</a>.</li>
+</ul>
+</p>
+
+<p>
+If these techniques are not adequate to solve your problems, you
+may wish to:
+<ul>
+<li>compile a <a href="#batchOnly">batch only version of the lamarc converter</a>,</li>
+<li>default to an <a href="#ancientConverter">ancient version of the lamarc converter</a>, or</li>
+<li><a href="#selfEdit">edit the configure script and Makefile yourself</a>.</li>
+</ul>
+</p>
+
+<h4><a name="ostype">Setting LAMARC_CODE_OSTYPE</a></h4>
+
+<p>
+While most of the lamarc code is not dependent on the user's operating system,
+there are a few minor differences in I/O handling and standard library headers.
+These differences require the setting of the variable <b>LAMARC_CODE_OSTYPE</b>
+so that appropriate code can be exercised for each operating system.
+</p>
+<p>
+Under normal circumstances, the configure script should be able to
+guess the correct value for LAMARC_CODE_OSTYPE. If it cannot, you
+may set the value by including an assignment to it when you
+invoke the configure script.
+For example:
+    <pre>
+    ../configure LAMARC_CODE_OSTYPE=LAMARC_COMPILE_MACOSX
+    make
+    </pre>
+The assignment to LAMARC_CODE_OSTYPE should come after any other arguments
+to the configure script, for example:
+    <pre>
+    ../configure --enable-debug LAMARC_CODE_OSTYPE=LAMARC_COMPILE_LINUX
+    make
+    </pre>
+Legal values for LAMARC_CODE_OSTYPE are:
+<ul>
+<li>LAMARC_COMPILE_LINUX</li>
+<li>LAMARC_COMPILE_MACOSX</li>
+<li>LAMARC_COMPILE_WINDOWS</li>
+</ul>
+</p>
+
+
+<h4><a name="gui">Setting GUI_TOOLKIT</a></h4>
+
+<p>
+Under normal circumstances, the configure script should be able to
+guess the correct value for GUI_TOOLKIT. If it cannot, or you wish
+to use a different toolkit, you should set the value for GUI_TOOLKIT.
+As with <a href="#ostype">setting LAMARC_CODE_OSTYPE</a>, the assignment
+is made at the end of the invocation of configure.
+For example:
+    <pre>
+    ../configure GUI_TOOLKIT=gtk2
+    make
+    </pre>
+The assignment to GUI_TOOLKIT should come after any other arguments
+to the configure script, and is most likely to be needed when
+you are also setting LAMARC_CODE_OSTYPE. For example:
+    <pre>
+    ../configure --enable-debug LAMARC_CODE_OSTYPE=LAMARC_COMPILE_MACOSX GUI_TOOLKIT=mac
+    make
+    </pre>
+</p>
+
+<p>
+Tested values for GUI_TOOLKIT are:
+<ul>
+<li>gtk2 -- tested on linux systems</li>
+<li>mac -- tested on OS X systems</li>
+<li>msw -- tested on a Windows system</li>
+</ul>
+It is possible that other toolkit values supported by wxWidgets work, 
+including x11, Motif, and OS/2. If you wish to build for one of
+these toolkits, we suggest you start with wxWidgets'
+<a href="http://wiki.wxwidgets.org/Supported_Platforms">
+information on supported platforms
+(http://www.wxwidgets.org/docs/platform.htm)</a>.
+</p>
+
+<h4><a name="special">Specifying non-standard compiler and/or build flags</a></h4>
+
+<p>
+The configure script is designed to allow you to
+specify a compiler or to pass options to the compiler
+or preprocessor. For example, if you are testing out
+a new C++ compiler you might invoke it like this:
+
+    <pre>
+    ../configure CXX=/path/to/my/compiler CXXFLAGS="-I /path/to/my/include/files"
+    make
+    </pre>
+</p>
+
+<p>
+For more information on options and environment 
+variables that affect the configure script, type
+
+    <pre>
+    ../configure --help
+    </pre>
+</p>
+
+<h4><a name="batchOnly">Compiling a batch only version of the converter</a></h4>
+
+<p>
+If you cannot successfully build for any GUI_TOOLKIT,
+or if you wish to run the lamarc file converter as
+part of a batch process, you can build a batch only
+version of the converter with the following commands
+(add CXX and CXXFLAGS, etc at the end if you need them):
+
+    <pre>
+    ../configure --disable-gui --enable-buildwx
+    make
+    </pre>
+</p>
+
+<h4><a name="ancientConverter">Compiling a wxWidgets-less converter</a></h4>
+
+<p>
+We recommend the following step only as a <b>last resort</b>.
+If you are going to attempt it, we recommend that you
+email us at
+<A HREF="mailto:lamarc at u.washington.edu">lamarc at u.washington.edu</A>.
+to make sure there isn't a better solution.
+</p>
+
+<p>
+If you are unable to build wxWidgets at all (and therefore do not 
+have access to the converter) you may instead configure and make as 
+follows:
+
+    <pre>
+    ../configure --disable-converter
+    make old_lam_conv
+    make
+    </pre>
+
+You must explicitly run "make old_lam_conv" --
+the old converter will not otherwise be built.
+Please be warned that <b>old_lam_conv</b> is <b>no longer maintained</b>
+and will not have all features of the new converter.
+</p>
+
+<h4><a name="selfEdit">Editing the configure script and Makefile yourself</a></h4>
+
+<p>
+If none of the above techniques allows you to build the lamarc
+programs, you have one further option -- edit the configure
+script and Makefiles yourself.
+</p>
+
+<p>
+The easiest way to do this is to edit them the same way we do,
+using autoconf and automake.
+These tools are available at the following locations:
+   <ul>
+   <li><a href="http://www.gnu.org/software/autoconf/">http://www.gnu.org/software/autoconf/</a>
+   <li><a href="http://www.gnu.org/software/automake/">http://www.gnu.org/software/automake/</a>
+   </ul>
+</p>
+
+<p>
+You may also wish to read additional documentation
+available at
+</p>
+   <ul>
+   <li><a href="http://sources.redhat.com/autobook/">http://sources.redhat.com/autobook/</a>
+   <li><a href="http://seul.org/docs/autotut/">http://seul.org/docs/autotut/</a>
+   </ul>
+
+<p>
+Make any edits you need to Makefile.am or configure.ac
+and then issue the following series of commands
+</p>
+
+    <pre>
+    aclocal
+    autoconf
+    autoheader
+    automake --add-missing
+    </pre>
+
+<p>
+You may then proceed to configure and make as in the section
+<a href="#basic">Basic Instructions...</a> above.
+</p>
+
+
+<P>(<A HREF="mapping.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="converter.html">Next</A>)</P>
+
+</BODY>
+</HTML>
diff --git a/doc/html/converter.html b/doc/html/converter.html
new file mode 100644
index 0000000..51e732f
--- /dev/null
+++ b/doc/html/converter.html
@@ -0,0 +1,441 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Data file conversion</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+<P>(<A HREF="compiling.html">Back</A> | <A HREF="index.html">Contents</A>
+| <A HREF="genetic_map.html">Next</A>)</P>
+<H2>LAMARC File Converter</H2>
+
+<p>
+The converter uses a GUI, but can be used non-interactively in 
+<a href="#batch-mode">batch mode</a>
+with command-line arguments and a supplementary 
+<a href="converter_cmd.html">converter command file</a> containing
+information about your data.</P>
+
+<P>Below you will find some basic issues to keep in mind when using the
+converter, followed by instructions on how to use the converter in both
+<a href="#batch-mode">batch</a>
+and <a href="#gui-mode">GUI</a> modes.
+</P>
+
+<P><B>Important:</B>  If you wish to 
+<a href="mapping.html">map trait alleles</a>,
+you will need to write a <a href="converter_cmd.html">converter
+command file</a> and <a href="#batch-mode">run the converter in 
+batch mode</a>.  The capacity to read
+and output trait data has not yet been added to the GUI. </P>
+
+
+<h3>LAMARC File Converter Overview</h3>
+<P> Topics covered in this article:</P>
+
+<UL>
+<LI><A HREF="converter.html#gettingReady">Getting Data Ready for Conversion </A></LI>
+<LI> <A HREF="converter.html#running">Running the converter</a></LI>
+<LI> <A HREF="converter.html#simple-example">A Simple Example</a></LI>
+</UL>
+
+<H3><A NAME="gettingReady">Getting Your Data Ready for Conversion</A></H3>
+
+<p>
+There are several things you must do
+before you can produce a lamarc input file that will run successfully: 
+<ul>
+<li> <a href="converter.html#goodData">Start with good data</a>,</li>
+<li> <a href="converter.html#converterCanRead">Use file formats the converter can read</a>,</li>
+<li> <a href="converter.html#interleavingStatus">Know how your data files are interleaved</a>, </li>
+<li> <a href="converter.html#handleHaploidMsatsRight">Verify haploid data is correctly represented</a>, and</li>
+<li> <a href="converter.html#spatialProperties">Be able to model linkage properties and relative mutation rates of your data.</a></li>
+</ul>
+</p>
+
+<h4><a name="goodData">Start with good data</a></h4>
+
+The quality of a LAMARC analysis is dependent on the quality of
+data that is input to it. Of particular import are:
+<ul>
+<li>Collecting data from different portions of the organism's genome,</li>
+<li>Collecting an appropriate number of samples (more is not always better), and</li>
+<li>Ensuring random sample selection (duplicates and invariant sequences are meaningful -- don't throw them out) </li>
+</ul>
+
+The section
+<a href="data_required.html">Suitable data for LAMARC</a>
+gives more information on this topic.
+
+<h4><a name="converterCanRead">Use file formats the converter can read</a></h4>
+
+
+<p> The converter can convert PHYLIP, RECOMBINE and MIGRATE files to the
+format used by the LAMARC program. With a tiny amount of hand-editing it can
+also convert COALESCE and FLUCTUATE files.
+</p>
+
+<p>
+If you have a COALESCE or FLUCTUATE file, you will need to edit it
+slightly in a text editor first.  
+<ul>
+<li>If the file has only one chromosomal
+region in it, delete the first line (which will say "1").  Then  treat it as
+a PHYLIP file.</li>
+<li> If the file has multiple chromosomal regions in it, delete the first line
+(which will give the number of regions) and divide the file into several
+different files, one per region.  Then treat them as PHYLIP files.</li>
+</ul>
+</P>
+
+<P> If your data file is not in any of these formats, check to see if
+the software which produced it has an option to write PHYLIP files.
+For example, PAUP* can convert many types of files to the PHYLIP format.</P>
+
+<P> Currently the converter can handle DNA, RNA, and SNP sequence files
+in PHYLIP or MIGRATE format, and microsatellite and K-Allele files in 
+MIGRATE format.</P>
+
+<h4><a name="interleavingStatus">Know how your data files are interleaved</a></h4>
+
+<P>
+PHYLIP, MIGRATE and LAMARC's predecessor programs
+(COALESCE and RECOMBINE) store nucleotide
+sequence data in either interleaved or sequential form.  Unfortunately,
+these formats don't always contain enough internal evidence for the computer
+to guess correctly whether the data are interleaved or not, so you may have
+to provide this information.  (The problem is that strings like "CAT" are
+both legal sequence names and legal DNA.)</P>
+
+<p><ul>
+<li> The data section of
+an interleaved file will have the first line of sequence 1, then
+the first line of sequence 2, etc, followed by the second line of
+all sequences, and so forth.  This format is often produced
+by alignment algorithms.</li>
+<li> A sequential file will have all of sequence 1, then all of sequence
+2, etc.  This format is often produced by database programs.</li>
+</ul>
+A simple example showing both interleaved and sequential arrangement
+in a Phylip input file is given in the table below.
+</p>
+
+<center>
+<table border="2">
+<tr>
+<th>Phylip example file with interleaved sequences</th>
+<th>Phylip example file with sequential sequences</th>
+</tr>
+<tr>
+<td>
+<pre>
+3 10
+cat       acttg
+dog       acttg
+pigeon    acttg
+gtGca
+gtGcT
+gAtca
+</pre>
+</td>
+<td>
+<pre>
+3 10
+cat       acttg
+gtGca
+dog       acttg
+gtGcT
+pigeon    acttg
+gAtca
+</pre>
+</td>
+</tr>
+</table>
+</center>
+
+<P> The formats should be relatively easy to distinguish if you look at them
+yourself. While the converter will try to figure it out algorithmically,
+in some cases it will give up, and you must tell it which format is present.</P>
+
+<h4><a name="handleHaploidMsatsRight">Verify haploid data is correctly represented</a></h4>
+
+<P> In the absence of a defined delimiter character,
+the MIGRATE file format for microsatellite or electrophoretic
+data assumes that you have collected two alleles per marker per individual.
+If this is not the case (perhaps your organism is haploid, or you
+collected your data in a way that produces only one allele per marker
+per individual) you can make a MIGRATE file with one, rather than two,
+entries.  However, you <b>must</b> specify the delimiter character
+(even though you will not be using it!) at the top of your MIGRATE
+file.  If you don't, your single allele may be interpreted as two
+alleles (i.e. a microsatellite of "27" may turn into "2" and "7").
+Give an explicit delimiter to avoid this problem.</P>
+
+<p>
+If you have diploid (or more) data and some of your data has unresolved phase,
+you may need to include <a href="converter_cmd.html#phase">phase resolution</a>
+information via a <a href="converter_cmd.html">converter command file</a>.
+</p>
+
+<h4><a name="spatialProperties">
+Be able to model linkage properties and relative mutation rates of your data</a></h4>
+
+<p>
+Previous LAMARC releases allowed combined analyses of data samples
+from different regions of an organism's genome only when these
+regions were either on separate chromosomes, or far enough separated
+on a single chromosome that each
+data sample was completely unlinked to the others.
+It was also not possible to explicitly represent known variations
+in relative mutation rate within a data sample.
+</p>
+
+<p>
+As of LAMARC 2.1, we have relaxed this restriction, allowing you to
+mix and match different data types even when they are linked.  So, for
+example, the increasingly-popular data type of 
+<a href="genetic_map.html#microsat-snp">microsatellite next to a SNP</a>
+may now be modeled in LAMARC and will be analyzed appropriately.</P>
+
+<p>If you have any of the following types of data, 
+<ul>
+<li>Samples with known differences in relative mutation rates, such as a
+sequence containing introns and exons,</li>
+<li>Overlapping data, such as fully sequenced DNA within a broader
+collection of SNPs, or </li>
+<li>Samples with different data types in close proximity, such as a 
+microsatellite next to a SNP,</li>
+</ul>
+you will want to read the section entitled
+<a href="genetic_map.html">Modeling Linkage Properties and Relative
+Mutation Rates of Your Data</a> before attempting to create a
+LAMARC infile that contains all your data.
+It is recommended that you first continue reading this section and
+make a test LAMARC run using one of your data samples to familiarize
+yourself with the converter and the LAMARC program itself.
+</p>
+
+<p>
+If you have a more straightforward data set such as 
+a single DNA sequence or a set of unlinked microsatellites,
+reading and following this section should be sufficient to
+get you up to speed with the converter.
+</p>
+
+
+<h3> <A NAME="running">Running the converter</a></h3>
+
+<h4>Incompatibilities with earlier versions</h4>
+
+<P> The majority of LAMARC 2.0 and earlier input files should work
+unmodified in version 2.1, with the sole exception of those 1.1.1 files with
+the "<map_position>" tag, which must be changed to
+"<map-position>" (see the <A HREF="changes.html">changes</A>
+documentation).</P>
+
+<h4>Settings not handled by the converter</h4>
+
+<P> The file conversion process creates a lamarc input file with just the
+data, so when it is read in by LAMARC, defaults will be used for all
+parameter estimations.  To get LAMARC to estimate what you want it to
+estimate, use the <A HREF="menu.html">LAMARC menu</a>, or <A
+HREF="xmlinput.html">edit the XML itself</a>
+after you have produced your LAMARC infile.
+</P>
+
+<h4>Reverse conversion</h4>
+
+<P>
+If you ever
+need to get a PHYLIP file from a LAMARC XML file, one way to do so is to run
+LAMARC with "normal" or "verbose" output (see the <A
+HREF="menu.html#io">menu</A> documentation).  This will cause the input data
+to be printed into the output file in a very PHYLIP-like format. You can use
+a text editor to move this into a file of its own and make the minor changes
+needed to create a PHYLIP file.</P>
+
+<h4><a name="gui-mode">Running the converter in GUI mode<a></h4>
+<p>
+To run the lamarc file converter in GUI mode on a Linux or Unix system,
+the command is:
+</p>
+<p>
+<pre>
+  lam_conv [-c <commandfile>] [ <datafile>... ]
+</pre>
+</p>
+<p>
+On Windows or a Mac simply double click on the application icon. You can
+open command files and data files using the <tt>File</tt> menu of the
+application.
+</p>
+
+<h4><a name="batch-mode">Running the converter in batch mode<a></h4>
+
+<P> To run on a Linux, Unix, or Mac
+in batch mode, you'll need to add command-line options:</P>
+<pre>
+  lam_conv -b -c <commandfile>
+</pre>
+<p>
+(The Mac executable can be found in <tt>lam_conv.app/Contents/MacOS/lam_conv</tt>.)
+</p>
+
+<P>On a Windows system, use</P>
+<pre>
+  lam_conv.exe -b -c <commandfile>
+</pre>
+
+<P>The '-b' option tells the converter to run in batch mode, and the '-c'
+option tells the converter the name of an XML file you have created that
+tells the converter where your data is, and what to do with it.  If you
+wish, you can use the "<tt>-c [filename]</tt>" option without the -b option,
+and the converter will load in the data according to that file, and then let
+you further modify things within the GUI.</P>
+
+<P>The command file is not needed to run the converter in interactive mode;
+the single exception (at present) is if you wish to include trait data for
+<A HREF="mapping.html">mapping</a>.
+</p>
+
+
+<h3> <A NAME="simple-example">A Simple Example</a></h3>
+
+
+<p>
+This section demonstrates the converter in use on a simple DNA file
+in migrate format,
+<a href="batch_converter/chrom1.mig">chrom1.mig</a>.
+This file is not real data, but was instead constructed to be 
+easily inspected and checked for correctness.
+</P>
+
+<pre>
+   2 1  Example: chromosome 1 with single dna segment
+9
+6    North
+n_ind0_a  ccccccAcc
+n_ind0_b  TcccccAcc
+n_ind1_a  ccccccTcc
+n_ind1_b  TcccccTcc
+n_ind2_a  ccccccGcc
+n_ind2_b  TcccccGcc
+4    South
+s_ind0_a  cTccTcccc
+s_ind0_b  ccccTcccc
+s_ind1_a  cTccccccc
+s_ind1_b  ccccccccc
+</pre>
+
+<p>Upon reading in the file, the GUI looks like this:
+</p>
+<p><img src="batch_converter/images/DataPartitionsMigTab.png" alt="GUI converter after reading file chrom1.mig"/></p>
+<p>
+Note that there are 4 tabs:
+<ul>
+<li>The <tt>Data Partitions</tt> tab which contains the information read in from the file. </li>
+<li>The <tt>Migration Matrix</tt> tab which contains the default migration matrix.</li>
+<li>The <tt>Data Files</tt> tab which contains the information about what data file(s) have been read and what was found in them. </li>
+<li>The <tt>Debug Log</tt> tab which contains debugging and logging information (not usually very interesting but sometimes useful). </li>
+</ul>
+</p>
+<p>
+There are also 2 buttons which are discussed later:
+<ul>
+<li><tt>Divergence</tt> which turns <A HREF="divergence.html">Divergence</A> off and on (not present if you only have one population). </li>
+<li><tt>Use Panels</tt> which turns the <A HREF="panels.html">Panel Correction</A> method off and on.</li>
+</ul>
+</p>
+<p>
+Focusing first on the <tt>Data Partitions</tt> tab, the following things are worth noting:
+<ul>
+<li>The <tt>Divergence</tt> button is "Off". Unless you are studying Divergence (discussed below) this is correct. Divergence greatly slows down execution of LAMARC so do not turn this on without a good reason. If you have only one population in your data, this button will not be visible.</li>
+<li>The box labeled <tt>contiguous segment</tt> in the <tt>Data Partitions</tt> information panel
+has data type of <tt>???</tt>, indicating that you must select the specific data type
+(in this case, SNP or DNA).</li>
+<li>The converter was able to create population names ("North" and "South")
+from the comments in the migrate file.</li>
+<li>No panels are defined for any population. This is correct unless you are doing SNP panel correction. Do not define panel member counts unless you need them as they markedly slow down LAMARC.</li>
+</ul>
+</p>
+<p>
+The <tt>Migration Matrix</tt> tab contains the default migration matrix 
+between each of your populations (if you have only one population, there will be 
+no migrations allowed). What these values mean and how they are edited is discussed on a <a href="migration_matrix.html">subsequent page</a>.
+</p>
+<p><img src="batch_converter/images/MigrationOnlyMatrixTab.png" alt="DataFiles Tab after reading file chrom1.mig"/></p>
+<p>
+The <tt>Data Files</tt> tab shows that the converter was
+able to determine that the data in file chrom1.mig is of type DNA or SNP.</p>
+<p><img src="batch_converter/images/DataFilesTab.png" alt="DataFiles Tab after reading file chrom1.mig"/></p>
+<p>
+The <tt>Debug Log</tt> tab shows whatever housekeeping comments the software has
+output. This is usually not very interesting, but if we have to debug something for
+you, it will be vital.</p>
+<p><img src="batch_converter/images/DebugLogTab.png" alt="DebugLog Tab after reading file chrom1.mig"/></p>
+<p>
+Returning to the <tt>Data Partitions</tt> tab, if you try to convert the file (using <tt>File > Write Lamarc File</tt> from the GUI
+menu), you will see the following error message:
+</p>
+<p><img src="batch_converter/images/lam_conv_chrom1_export_warn_1.png" alt="GUI converter warning: needs data type"/></p>
+
+<p>
+The problem is that the converter needs you to tell it whether this
+is DNA or SNP data.
+To fix this problem, double click on the text inside
+<tt>contiguous segment</tt> box in the <tt>Data Partitions</tt> panel.
+You will see a new window that looks something like this:
+</p>
+
+<p><img src="batch_converter/images/lam_conv_chrom1_segment_panel.png" alt="GUI converter segment panel"/></p>
+
+<p>
+Select the <tt>DNA</tt> check box and click <tt>Apply</tt>. Now when you
+choose <tt> File > Write Lamarc File </tt> from the file menu, you will get
+a directory browser window like this one in Linux:
+</p>
+
+<p><img src="batch_converter/images/lam_conv_chrom1_export_file_selection.png" alt="GUI converter export file selection"/></p>
+
+<p>
+and like this under OS X:
+</p>
+
+<p><img src="batch_converter/images/lam_conv_export_file_mac_minimal.png" alt="GUI converter export file selection"/></p>
+
+<p>
+Click the button with the triangle at the top for more complete navigation
+through your directory system.
+</p>
+
+<p><img src="batch_converter/images/lam_conv_export_file_mac_expanded.png" alt="GUI converter export file selection"/></p>
+
+
+<p>
+The resulting lamarc file will look like <a href="batch_converter/chrom1_lamarc.html">this</a> 
+(the actual xml is <a href="batch_converter/chrom1_lamarc.xml">here</a>).
+</p>
+
+
+<P>(<A HREF="compiling.html">Back</A> | <A HREF="index.html">Contents</A>
+| <A HREF="genetic_map.html">Next</A>)</P>
+
+<!--
+//$Id: converter.html,v 1.46 2012/02/16 17:13:31 jmcgill Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/converter_cmd.html b/doc/html/converter_cmd.html
new file mode 100644
index 0000000..1fb6031
--- /dev/null
+++ b/doc/html/converter_cmd.html
@@ -0,0 +1,564 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Data file conversion</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+<P>(<A HREF="panels.html">Back</A> | <A HREF="index.html">Contents</A>
+| <A HREF="xmlinput.html">Next</A>)</P>
+<H2> <A NAME="command-file">Converter Command File Reference</A></H2>
+<UL>
+<LI><A HREF="converter_cmd.html#intro">Converter Command File Introduction</A></LI>
+<LI> <A HREF="converter_cmd.html#cmd_overview">Command File Overview</a></LI>
+<LI> <A HREF="converter_cmd.html#traits">Traits</a></LI>
+<LI> <A HREF="converter_cmd.html#inherit">Tags Specifying Inheritance and Mutation Models</a></LI>
+<LI> <A HREF="converter_cmd.html#regions">Regions</a></LI>
+<LI> <A HREF="converter_cmd.html#segments">Segments</a></LI>
+<LI> <A HREF="converter_cmd.html#populations">Populations</a></LI>
+<LI> <A HREF="converter_cmd.html#panels">Panels</a></LI>
+<LI> <A HREF="converter_cmd.html#infiles">Data files</a></LI>
+<LI> <A HREF="converter_cmd.html#outfile">Specifying the Name of the Produced Lamarc file</a></LI>
+<LI> <A HREF="converter_cmd.html#comment">Miscellaneous Tags</a></LI>
+<LI> <A HREF="converter_cmd.html#phase">Specifying Relationships Between Individuals and Data Samples</a></LI>
+</UL>
+
+<h3><A NAME="intro">Converter Command File Introduction</A></h3>
+
+<P>The converter command file is an 
+<A HREF="xmlinput.html#overview">XML-format</a> text file
+which can be used to bypass the <a href="converter.html">converter</a>
+GUI interface and
+directly provide information to the converter.
+</p>
+
+<h4>When to use a Converter Command File</h4>
+
+<P>
+For most LAMARC users, running the lamarc file converter in GUI mode
+will be the quickest and most intuitive way to convert data files
+for use in LAMARC. However, there are a few situations in which 
+it may be necessary to write a converter command file.
+These situations include:
+</P>
+<ul>
+<li>automating conversion for use in simulation studies,</li>
+<li>using a new converter feature for which there is not yet
+a GUI interface, and</li>
+<li>reading in information that is tedious and error prone to
+enter by hand (such as 
+<a href="genetic_map.html#segment-coord">locations</a> for SNP data).</li>
+</ul>
+<P>
+If a command file is needed to access a particular feature,
+it can be read into the converter either in batch mode or
+from the GUI.
+</P>
+
+<h4>An Example Converter Command File</h4>
+
+<p>
+An example converter command file with matching MIGRATE
+data files is provided in the <A
+HREF="batch_converter/">batch_converter/</a> directory.  The file <A
+HREF="batch_converter/sample-conv-cmd.html">sample-conv-cmd.xml</a> (actual xml is is <A HREF="batch_converter/sample-conv-cmd.xml">here</a>)
+ annotated with comments, and should be a good guide to what's going on.
+</p>
+
+<h4>How to Create a Converter Command File</h4>
+
+<p>
+The simplest way to create your own file is probably a combination of:
+</P>
+<ul>
+<li>copying the
+<a href="batch_converter/sample-conv-cmd.xml">provided example</a>,</li>
+<li>preparing an example in the GUI and then using the
+<tt>File > Write Batch Command File</tt> menu command, and </li>
+<li>editing a final version based on the above two items.</li>
+</ul>
+
+<P>
+The rest of this section is provided as a reference in case copying 
+from the examples is not sufficient for your needs.
+</P>
+
+<h4>How to Use a Converter Command File</h4>
+
+<P>
+You can use your converter command file by:
+</P>
+<ul>
+<li>Reading it in from the GUI with the 
+    <tt>"File >Read Command File"</tt> menu item</li>
+<li>Providing it using the <tt>-c</tt> command line
+argument to the converter in either
+<a href="converter.html#gui-mode">GUI</a>
+or <a href="converter.html#batch-mode">batch</a> mode.</li>
+</ul>
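+
+<p>
+For example, a typical batch-mode invocation from the command line looks
+like this (the command file name below is a placeholder):
+</p>
+    <pre>
+    lam_conv -b -c my-conv-cmd.xml
+    </pre>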
+
+<H3 style="page-break-before: always"><A NAME="cmd_overview">Command File Overview</A></H3>
+
+<P>The top level tag of the file is a
+<b><tt><lamarc-converter-cmd></tt></b> tag. 
+Its possible immediate children are listed in the table below.
+Note that none of these child tags are required. This is because,
+generally speaking, fragments of complete converter command files are
+allowed to be read in from the GUI.
+</P>
+
+<table border=1 >
+<tr><th colspan=4>Top Level Tags in Lamarc Converter Command File</th></tr>
+<tr><th>parent tag</th><th>child tag</th><th>child required</th><th>child instances allowed</th></tr>
+<tr><td rowspan=8><lamarc-converter-cmd></td>
+        <td><a href="#traits"><traits></a></td>
+        <td>optional</td><td>SINGLE</td></tr>
+<tr><td><a href="#regions"><regions></a></td>
+        <td>optional</td><td>SINGLE</td></tr>
+<tr><td><a href="#populations"><populations></a></td>
+        <td>optional</td><td>SINGLE</td></tr>
+<tr><td><a href="#phase"><individuals></a></td>
+        <td>optional</td><td>SINGLE</td></tr>
+<tr><td><a href="#panels"><panels></a></td>
+        <td>optional</td><td>SINGLE</td></tr>
+<tr><td><a href="#infiles"><infiles></a></td>
+        <td>optional</td><td>SINGLE</td></tr>
+<tr><td><a href="#outfile"><outfile></a></td>
+        <td>optional</td><td>SINGLE</td></tr>
+<tr><td><a href="#comment"><lamarc-header-comment></a></td>
+        <td>optional</td><td>SINGLE</td></tr>
+</table>
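+
+<p>
+For orientation, a minimal skeleton built from the tags in the table above
+might look like the sketch below.  The population names are taken from the
+chrom1.mig example used elsewhere in this documentation, the outfile name and
+header comment are placeholders, and the elided sections are described later
+on this page; see the
+<a href="batch_converter/sample-conv-cmd.xml">provided example</a>
+for a complete, working command file.
+</p>
+    <pre>
+    <lamarc-converter-cmd>
+      <populations>
+        <population> North </population>
+        <population> South </population>
+      </populations>
+      <regions>
+        ...
+      </regions>
+      <infiles>
+        ...
+      </infiles>
+      <outfile> chrom1_lamarc.xml </outfile>
+      <lamarc-header-comment> converted from chrom1.mig </lamarc-header-comment>
+    </lamarc-converter-cmd>
+    </pre>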
+
+<H3> <A NAME="traits">Traits</A></H3>
+
+<p>The <tt><traits></tt> tag is used only for <a href="mapping.html">trait mapping</a>.
+If you are not mapping traits, you may skip ahead to the <a href="#regions">regions</a> section.
+</p>
+
+<p>
+The <traits> tag contains definitions of one or more
+of the following objects.
+</p>
+<ul>
+<li><trait-info>, used to specify a trait name and associated alleles, and</li>
+<li><phenotype> definitions, used to specify a model for an observed
+trait manifestation.</li>
+</ul>
+Below is 
+<a href="#table-trait">a table discribing the relevant XML tags</a>.
+You can also find an
+<a href="mapping.html#trait-info-defs">examples trait-info definition</a>
+and
+<a href="mapping.html#phenotype-defs">examples of phenotype definitions</a>
+in the <a href="mapping.html">section on trait mapping</a>.</p>
+
+<h4 style="page-break-before: always"><a name="table-trait">Table of Sub-Tags of <traits></a></h4>
+<table border=1 >
+<tr><th colspan=4>Tags Describing Traits in Lamarc Converter Command File</th></tr>
+<tr><th>parent tag</th><th>child tag</th><th>child required</th><th>child instances allowed</th></tr>
+<tr>
+    <td rowspan=2><traits></td>
+            <td><trait-info></td>
+                <td>optional</td><td>multiple</td></tr>
+        <tr><td><phenotype></td>
+                <td>optional</td><td>multiple</td></tr>
+<tr>
+    <td rowspan=2><trait-info></td>
+            <td><name></td>
+                <td>REQUIRED</td><td>SINGLE</td>
+        <tr><td><allele></td>
+                <td>REQUIRED</td><td>multiple</td></tr>
+<tr>
+    <td rowspan=2><phenotype></td>
+            <td><name></td>
+                <td>REQUIRED</td><td>SINGLE</td>
+        <tr><td><genotype-resolutions></td>
+                <td>REQUIRED</td><td>multiple</td></tr>
+<tr>
+    <td rowspan=2><genotype-resolutions></td>
+            <td><trait-name></td>
+                <td>REQUIRED</td><td>SINGLE</td></tr>
+        <tr><td><haplotypes></td>
+                <td>REQUIRED</td><td>multiple</td></tr>
+<tr>
+    <td rowspan=2><haplotypes></td>
+            <td><alleles></td>
+                <td>REQUIRED</td><td>SINGLE</td></tr>
+        <tr><td><penetrance></td>
+                <td>REQUIRED</td><td>SINGLE</td></tr>
+<tr> <th>tag</th><th colspan=3>contents</th></tr>
+<tr><td ><allele></td>
+        <td colspan=3><em>unique name; should not contain spaces</em></td></tr>
+<tr><td ><alleles></td>
+        <td colspan=3><em>ordered list of names (from <allele> tags of corresponding trait), separated by spaces</em></td></tr>
+<tr><td ><penetrance></td>
+        <td colspan=3><em>value between 0 and 1; indicates the chance that an individual with these specific alleles will display the enclosing trait</em></td></tr>
+<tr><td ><name></td>
+        <td colspan=3><em>unique name; should not contain spaces</em></td></tr>
+<tr><td ><trait-name></td>
+        <td colspan=3><em>unique name; should not contain spaces</em></td></tr>
+</table>
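+
+<p>
+As an illustration of the structure described in the table above, a sketch
+of a <traits> block is shown below.  All of the names, the alleles, and the
+penetrance value are invented placeholders; see the
+<a href="mapping.html">trait mapping</a> documentation for worked examples.
+</p>
+    <pre>
+    <traits>
+      <trait-info>
+        <name> trait1 </name>
+        <allele> A </allele>
+        <allele> a </allele>
+      </trait-info>
+      <phenotype>
+        <name> affected </name>
+        <genotype-resolutions>
+          <trait-name> trait1 </trait-name>
+          <haplotypes>
+            <alleles> A A </alleles>
+            <penetrance> 1.0 </penetrance>
+          </haplotypes>
+        </genotype-resolutions>
+      </phenotype>
+    </traits>
+    </pre>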
+
+<H3> <A NAME="inherit">Tags Specifying Inheritance and Mutation Models:
+<regions> and <segments></A></H3>
+
+
+<p>
+The concepts behind these tags are explained in the section <a href="genetic_map.html">
+Modeling Linkage Properties and Relative Mutation Rates of Your Data</a>
+of the documentation; the tables below give the tag-by-tag reference.
+</p>
+
+
+<H3> <A NAME="regions">Regions</A></H3>
+
+<table style="page-break-before: always" border=1 >
+<tr><th colspan=4>Specifying Inheritance Relationships</th></tr>
+<tr><th>parent tag</th><th>child tag</th><th>child required</th><th>child instances allowed</th></tr>
+<tr>
+    <td><regions></td>
+            <td><region></td><td>REQUIRED</td><td>multiple</td></tr>
+<tr>
+    <td rowspan=4><region></td>
+            <td><name></td>
+                <td>REQUIRED</td><td>SINGLE</td></tr>
+        <tr><td><effective-popsize></td>
+                <td>optional</td><td>SINGLE</td></tr>
+        <tr><td><a href="#segments"><segments></a></td>
+                <td>optional</td><td>SINGLE</td></tr>
+        <tr><td><trait-location></td>
+                <td>optional</td><td>multiple</td></tr>
+<tr><td><trait-location></td><td><trait-name></td>
+                <td>REQUIRED for mapping<br>optional for others</td><td>SINGLE</td></tr>
+<tr> <th>tag</th><th colspan=3>contents</th></tr>
+<tr><td ><effective-popsize></td>
+        <td colspan=3><em>value greater than 0; defaults to 1;
+            the relative <a href="glossary.html#effpopsize">effective population size
+            of samples from this region.</em></td></tr>
+<tr><td ><trait-name></td>
+        <td colspan=3><em>unique name; should not contain spaces</em></td></tr>
+</table>
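+
+<p>
+A minimal <regions> block following the table above might look like the
+sketch below (the region name is a placeholder, the effective population
+size shown is the default, and the segment contents are covered in the
+next section):
+</p>
+    <pre>
+    <regions>
+      <region>
+        <name> region1 </name>
+        <effective-popsize> 1.0 </effective-popsize>
+        <segments>
+          ...
+        </segments>
+      </region>
+    </regions>
+    </pre>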
+
+<H3> <A NAME="segments">Segments</A></H3>
+
+
+<table style="page-break-before: always" border=1 >
+<tr><th colspan=4>Specifying Properties of Data Samples</th></tr>
+<tr><th>parent tag</th><th>child tag or <em>attribute</em></th><th>child required</th><th>child instances allowed</th></tr>
+<tr><td><segments></td><td><segment></td>
+                <td>REQUIRED</td><td>multiple</td></tr>
+<tr>
+    <td rowspan=9><segment></td>
+            <td><em>datatype</em></td>
+                <td>REQUIRED</td><td><em>-</em></td></tr>
+        <tr><td><em>marker-proximity</em></td>
+                <td>optional</td><td><em>-</em></td></tr>
+        <tr><td><name></td>
+                <td>REQUIRED</td><td>SINGLE</td></tr>
+        <tr><td><markers></td>
+                <td>REQUIRED</td><td>SINGLE</td></tr>
+        <tr><td><map-position></td>
+                <td>optional</td><td>SINGLE</td></tr>
+        <tr><td><length></td>
+                <td>optional</td><td>SINGLE</td></tr>
+        <tr><td><locations></td>
+                <td>optional</td><td>SINGLE</td></tr>
+        <tr><td><first-position-scanned></td>
+                <td>optional</td><td>SINGLE</td></tr>
+        <tr><td><unresolved-markers></td>
+                <td>optional</td><td>SINGLE</td></tr>
+<tr> <th>tag</th><th colspan=3>contents</th></tr>
+<tr><td ><markers></td>
+        <td colspan=3><em>number of sites with data; for dna this is 
+            the number of sites sequenced; for snp data it is the number 
+            of snps; for kallele and microsat data it is the number
+            of distinct sites at which kallele/msat data was collected.</em></td></tr>
+<tr><td ><map-position></td>
+        <td colspan=3><em>location of <first-position-scanned> in 
+        <a href="genetic_map.html#region-coord">region-wide coordinates</a></em></td></tr>
+<tr><td ><length></td>
+        <td colspan=3><em>total number of bases searched for data</em></td></tr>
+<tr><td ><locations></td>
+        <td colspan=3><em>the location of each particular data site of
+        your data in <a href="genetic_map.html#segment-coord">segment coordinates</a></em></td></tr>
+<tr><td ><first-position-scanned></td>
+        <td colspan=3><em>the location of the first sampled location in
+        your data in <a href="genetic_map.html#segment-coord">segment coordinates</a></em></td></tr>
+<tr><th>attribute</th><th>value</th><th colspan=2>meaning</th></tr>
+<tr><td rowspan=4><em>datatype</em></td>
+        <td>dna</td><td colspan=2>DNA data</td></tr>
+    <tr><td>snp</td><td colspan=2>SNP data</td></tr>
+    <tr><td>kallele</td><td colspan=2>k-allele data</td></tr>
+    <tr><td>microsat</td><td colspan=2>microsatellite data</td></tr>
+<tr><td rowspan=2><em>marker-proximity</em></td>
+        <td>linked</td><td colspan=2>individual data markers likely to be inherited together</td></tr>
+    <tr><td>unlinked</td><td colspan=2>individual data markers are independently inherited</td></tr>
+</table>
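+
+<p>
+As an illustration only, a <segment> entry for mapped SNP data using the
+tags and attributes from the table above might look like the sketch below.
+The segment name, length, and map position are placeholders; the marker
+positions are the ones used by the sample command file discussed later on
+this page, and writing them as a space-separated list inside <locations>
+is an assumption -- check the provided sample-conv-cmd.xml for the exact
+format.
+</p>
+    <pre>
+    <segments>
+      <segment datatype="snp" marker-proximity="linked">
+        <name> segment2 </name>
+        <markers> 7 </markers>
+        <first-position-scanned> 1 </first-position-scanned>
+        <length> 250 </length>
+        <map-position> 1 </map-position>
+        <locations> 13 19 35 77 102 112 204 </locations>
+      </segment>
+    </segments>
+    </pre>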
+
+<H3> <A NAME="populations">Populations</A></H3>
+
+<P>
+The <tt><populations></tt> tag is used to name distinct
+<a href="glossary.html#population">populations</a>.
+If your data files have named populations, the population names here
+should match the names that are in your files.</P>
+
+<table border=1 >
+<tr><th colspan=4>Specifying population names with the <populations> tag</th></tr>
+<tr><th>parent tag</th><th>child tag</th><th>child required</th><th>child instances allowed</th></tr>
+<tr>
+    <td><populations></td>
+        <td><population></td>
+        <td>REQUIRED</td><td>multiple</td>
+        </tr>
+<tr> <th>tag</th><th colspan=3>contents</th></tr>
+    <tr><td><population></td>
+        <td colspan=3><em>a name unique among all populations, regions, and segments</em></td></tr>
+</table>
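+
+<p>
+For example, using the population names from the chrom1.mig example file:
+</p>
+    <pre>
+    <populations>
+      <population> North </population>
+      <population> South </population>
+    </populations>
+    </pre>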
+
+
+
+<h3 style="page-break-before: always"> <A NAME="infiles">Data files</A></H3>
+
+<P>The <tt><infiles></tt> tag tells the converter where to find
+your data, and how to associate each file with the previously-defined
+regions, segments, and populations.
+</p>
+
+<table border=1>
+<tr><th colspan=4>Tags Describing Input Files in Lamarc Converter Command File</th></tr>
+<tr><th>parent tag</th><th>child tag or <em>attribute</em></th><th>child required</th><th>child instances allowed</th></tr>
+<tr><td><infiles></td><td><infile></td>
+                <td>REQUIRED</td><td>multiple</td></tr>
+<tr>
+    <td rowspan=7><infile></td>
+            <td><em>datatype</em></td>
+                <td>REQUIRED</td><td><em>-</em></td></tr>
+        <tr><td><em>format</em></td>
+                <td>optional</td><td><em>-</em></td></tr>
+        <tr><td><em>sequence-alignment</em></td>
+                <td>optional</td><td><em>-</em></td></tr>
+        <tr><td><name></td>
+                <td>REQUIRED</td><td>SINGLE</td></tr>
+        <tr><td><segments-matching></td>
+                <td>REQUIRED</td><td>SINGLE</td></tr>
+        <tr><td><pop-matching></td>
+                <td>optional</td><td>SINGLE</td></tr>
+        <tr><td><individuals-from-samples></td>
+                <td>optional</td><td>SINGLE</td></tr>
+    <td><individuals-from-samples></td>
+            <td><em>type</em></td>
+                <td>REQUIRED</td><td><em>-</em></td></tr>
+    <td rowspan=2><population-matching></td>
+            <td><em>type</em></td>
+                <td>REQUIRED</td><td><em>-</em></td></tr>
+        <tr><td><population-name></td>
+                <td>depends on value of <em>type</em> attribute</td><td>multiple</td></tr>
+    <td rowspan=2><segments-matching></td>
+            <td><em>type</em></td>
+                <td>REQUIRED</td><td><em>-</em></td></tr>
+        <tr><td><segment-name></td>
+                <td>depends on value of <em>type</em> attribute</td><td>multiple</td></tr>
+<tr> <th>tag</th><th colspan=3>contents</th></tr>
+<tr><td><individuals-from-samples></td>
+    <td colspan=3>the number of adjacent samples to bundle into a single individual</td></tr>
+<tr><th>attribute</th><th>value</th><th colspan=2>meaning</th></tr>
+    <tr><td rowspan=4><em>datatype</em></td>
+        <td>dna</td><td colspan=2>DNA data</td></tr>
+    <tr><td>snp</td><td colspan=2>SNP data</td></tr>
+    <tr><td>kallele</td><td colspan=2>k-allele data</td></tr>
+    <tr><td>microsat</td><td colspan=2>microsatellite data</td></tr>
+    <tr><td rowspan=2><em>format</em></td>
+        <td>migrate</td><td colspan=2>input file is a migrate file</td></tr>
+    <tr><td>phylip</td><td colspan=2>input file is a phylip file</td></tr>
+    <tr><td rowspan=2><em>sequence-alignment</em></td>
+        <td>interleaved</td><td colspan=2>the first line of each sequence appears, followed 
+        by all second lines, then all third lines, etc.</td></tr>
+    <tr><td>sequential</td><td colspan=2>each entire sequence appears
+        in the file before the next one starts.</td></tr>
+    <tr><td><em>type</em> for <individuals-from-samples></td>
+        <td>byAdjacency</td>
+        <td colspan=2>bundle adjacent samples into individuals</td></tr>
+    <tr><td rowspan=3><em>type</em> for <population-matching></td>
+        <td>byList</td><td colspan=2>
+            Each population referred to in the file is to be
+            assigned to a particular population defined in this file.  If this type
+            is used, sub-tags of the type <tt><population-name</tt>> should be
+            used to define those populations (each should have a name that matches a
+            population defined in the <tt><populations</tt>> tag, above).
+            </td></tr>
+        <tr><td>byName</td><td colspan=2>
+            The file itself contains information about what
+            populations the data refers to.  These names must match the names given
+            in the 'population' tag, above.
+            </td></tr>
+        <tr><td>single</td><td colspan=2>
+            All individuals in the file are to be assigned to a
+            single population.  That population must then be defined by a
+            <tt><population-name</tt>> subtag.
+            </td></tr>
+    <tr><td rowspan=2><em>type</em> for <segments-matching></td>
+        <td>byList</td><td colspan=2>
+            Each segment referred to in the file is to be
+            assigned to a particular segment defined in this file.  If this type
+            is used, sub-tags of the type <tt><segment-name</tt>> should be
+            used to define those segment (each should have a name that matches a
+            defined segment).
+            </td></tr>
+        <tr><td>single</td><td colspan=2>
+            All individuals in the file are to be assigned to a
+            single segment.  That segment must then be defined by a
+            <tt><segment-name</tt>> subtag.
+            </td></tr>
+</table>
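+
+<p>
+As a sketch only, an <infile> entry for the chrom1.mig example file (DNA
+data in migrate format) might look like the following.  Only the required
+children and the most common attributes are shown, and the segment name is
+a placeholder; the provided sample-conv-cmd.xml shows the full range of
+options, including population matching.
+</p>
+    <pre>
+    <infiles>
+      <infile datatype="dna" format="migrate">
+        <name> chrom1.mig </name>
+        <segments-matching type="single">
+          <segment-name> segment1 </segment-name>
+        </segments-matching>
+      </infile>
+    </infiles>
+    </pre>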
+
+
+
+<H3> <A NAME="outfile">Specifying the Name of the Produced Lamarc file</A></H3>
+
+<P>
+The <tt><outfile></tt> tag specifies the name of the file that
+you want the converter to produce.
+</P>
+
+<table border=1>
+<tr><th colspan=4>Tags Describing Output Files in Lamarc Converter Command File</th></tr>
+<tr><th>tag</th><th>contents</th></tr>
+<tr><td><outfile></td>
+    <td><em>name of outfile to produce; defaults to <tt>infile.xml</tt></em></td></tr>
+</table>
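+
+<p>
+For example (the file name is arbitrary):
+</p>
+    <pre>
+    <outfile> chrom1_lamarc.xml </outfile>
+    </pre>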
+
+
+<H3> <A NAME="comment">Miscellaneous Tags</A></H3>
+
+<table border=1>
+<tr><th colspan=4>Miscellaneous Tags in Lamarc Converter Command File</th></tr>
+<tr><th>tag</th><th>contents</th></tr>
+<tr><td><lamarc-header-comment></td>
+    <td><em>text of comment to be inserted in lamarc file</em></td></tr>
+</table>
+
+<H3 style="page-break-before: always">
+<A NAME="phase">Specifying Relationships Between Individuals and Data Samples</A> </H3>
+
+<p>For most LAMARC analyses, it is not necessary to specify which
+pairs (or more) of data sequences belong to the same individual. 
+However, there are a few cases where it may be necessary, including:
+</p>
+<ul>
+<li><a href="mapping.html">Trait mapping</a>, since traits are observed
+    for individuals.</li>
+<li>When haplotypes are incompletely resolved from individuals.</li>
+<LI>When combining nucleotide data (defined by sample) and microsats
+(defined by individuals).</LI>
+</ul>
+<p>
+Assigning samples to individuals, and optionally assigning trait
+phenotypes or information about haplotype resolution to them
+is done with the <individuals> tag.
+An example can be found in section
+<a href="mapping.html#pheno-to-ind">Assigning Phenotypes to Individuals</a>
+of the
+<a href="mapping.html">Trait Mapping</a> documentation.
+</p>
+
+<table border=1>
+<tr><th colspan=4>Specifying Relationships between Individuals and Sample Data in Converter Command File</th></tr>
+<tr><th>parent tag</th><th>child tag</th><th>child required</th><th>child instances allowed</th></tr>
+<tr><td><individuals></td><td><individual></td>
+                <td>optional</td><td>multiple</td></tr>
+<tr>
+    <td rowspan=5><individual></td>
+            <td><name></td>
+                <td>REQUIRED</td><td>SINGLE</td>
+    <tr><td><sample></td>
+            <td>REQUIRED</td><td>multiple</td></tr>
+    <tr><td><phase></td>
+            <td>optional</td><td>multiple</td></tr>
+    <tr><td><has-phenotype></td>
+            <td>optional</td><td>multiple</td></tr>
+    <tr><td><genotype-resolutions></td>
+            <td>optional</td><td>multiple</td></tr>
+<tr>
+    <td><sample></td>
+            <td><name></td>
+                <td>REQUIRED</td><td>SINGLE</td></tr>
+<tr>
+    <td rowspan=2><phase></td>
+            <td><segment-name></td>
+                <td>REQUIRED</td><td>SINGLE</td></tr>
+            <tr><td><unresolved-markers></td>
+                <td>REQUIRED</td><td>SINGLE</td></tr>
+<tr> <th>tag</th><th colspan=3>contents</th></tr>
+    <tr><td><name></td>
+        <td colspan=3><em>a name unique among all individuals and samples</em></td></tr>
+    <tr><td><has-phenotype></td>
+        <td colspan=3><em>a <phenotype> name already defined in the
+        <a href="#table-trait"><traits></a> section</em></td></tr>
+    <tr><td><genotype-resolutions></td>
+        <td colspan=3><em>an "anonymous" phenotype belonging to the enclosing individual only.
+            See <a href="#table-trait"><traits> subtags table</a> for definition</em></td></tr>
+    <tr><td><segment-name></td>
+        <td colspan=3><em>the name of the segment to which this set of phase information applies</em></td></tr>
+    <tr><td><unresolved-markers></td>
+        <td colspan=3><em>sites for which data markers are unresolved for this individual and segment</em></td></tr>
+</table>
+
+<p>To see an example of the <phase>, <segment-name> and
+<unresolved-markers> tags in use, see the file <a
+HREF="batch_converter/sample-conv-cmd.html">sample-conv-cmd.xml</a> (actual xml is <a HREF="batch_converter/sample-conv-cmd.xml">here</a>)
+
+<P>The values for the 'unresolved-markers' tag should be site labels.  The
+first valid site in a segment is the value of the 'first-position-scanned'
+tag for that segment, and the last valid site is determined by the length of
+the segment.  If the segment does not have as many markers in it as valid
+sites (as for SNP data), the values here should match the values in the
+'locations' tag for the segment.  In the example file, the second segment of
+the second chromosome has SNP data with markers at positions 13, 19, 35, 77,
+102, 112, and 204.  These are therefore the only valid values for the
+'unresolved-markers' tag within the 'phase' tag for this segment.</P>
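+
+<P>As a small illustration of that rule (our own sketch in Python, not part of
+the converter; the positions come from the example file and the function name
+is ours), checking a candidate list amounts to a set-membership test:</P>
+<pre>
+# Sketch: legal 'unresolved-markers' values must come from the segment's
+# 'locations' list (here, the SNP segment of the example file).
+valid_locations = {13, 19, 35, 77, 102, 112, 204}
+
+def check_unresolved(positions):
+    """Return any positions that are NOT legal for this segment."""
+    return sorted(set(positions) - valid_locations)
+
+print(check_unresolved([19, 77, 204]))   # [] -- all legal
+print(check_unresolved([19, 80]))        # [80] -- 80 is not a marker here
+</pre>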
+
+<H3 style="page-break-before: always">
+<A NAME="panels">Specifying Panel Correction Information</A> </H3>
+
+<p>Panel member counts should be entered only if the user wishes to invoke Panel Correction. They need not be specified for all regions, only for those regions for which you know the number of sequences used to create the panel.
+</p>
+<p>WARNING: Do not guess at the number of sequences used to create a panel; doing so will make your results indefensible. If you do not have the actual number of sequences, you should not use Panel Correction. Your mutation-rate estimates will be somewhat low, but that is the best you can do without knowing more about how the panel was created.
+</p>
+<table border=1>
+<tr><th colspan=4>Specifying Panel Correction Information in Converter Command File</th></tr>
+<tr><th>parent tag</th><th>child tag</th><th>child required</th><th>child instances allowed</th></tr>
+<tr><td><panels></td><td><panel></td>
+                <td>optional</td><td>multiple</td></tr>
+<tr>
+    <td rowspan=4><panel></td>
+            <td><panel-name></td>
+                <td>optional</td><td>SINGLE</td></tr>
+    <tr><td><panel-region></td>
+            <td>REQUIRED</td><td>SINGLE</td></tr>
+    <tr><td><panel-pop></td>
+            <td>REQUIRED</td><td>SINGLE</td></tr>
+    <tr><td><panel-size></td>
+            <td>REQUIRED</td><td>SINGLE</td></tr>
+</table>
+
+
+<P>(<A HREF="panels.html">Back</A> | <A HREF="index.html">Contents</A>
+| <A HREF="xmlinput.html">Next</A>)</P>
+
+<!--
+//$Id: converter_cmd.html,v 1.18 2012/05/14 19:55:38 ewalkup Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/curve-smoothing.html b/doc/html/curve-smoothing.html
new file mode 100644
index 0000000..74757c9
--- /dev/null
+++ b/doc/html/curve-smoothing.html
@@ -0,0 +1,113 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Bayesian curve smoothing</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Metropolis-Hastings, Markov chain Monte Carlo
+ simulation, migration rate, effective population size, recombination rate,
+ maximum likelihood -->
+
+<P>(<A HREF="index.html">Previous</A> | <A HREF="index.html">Contents</A> | <A HREF="compiling.html">Next</A>)</P>
+
+<H2>Bayesian-LAMARC tutorial:  Curve smoothing</H2>
+
+<h3>If I wanted to take the same data and smooth it myself, how would I
+reproduce your results?</h3>
+
+<P> After collecting a set of parameters (and Bayesian LAMARC will soon be
+able to output 'summary files', which would contain this information), the
+information is divvied up into groups on a per-parameter and per-replicate
+basis.  The data for each parameter are then smoothed using a biweight
+kernel of the form:
+</P>
+
+<P><center>(15/16)(1-t<sup>2</sup>)<sup>2</sup> for abs(t) < 1.0
+</center></P>
+
+<P> The width of this kernel is set as:</P>
+
+<P><center>2.5 * σ * n<sup>-1/5</sup>
+</center></P>
+
+<P>where n is the number of points in the data set, and σ is the
+smaller of the inter-quartile distance divided by 1.34 and the standard
+deviation of the data set.  (If this value falls to zero or near zero, we
+substitute an arbitrary minimum to allow the program to continue.)</P>
+
+<P> After the data for one parameter from one replicate and one region are
+smoothed, multi-replicate curves are created by averaging the individual
+curves, while multi-region curves are created by multiplying and
+re-normalizing the single-region curves.  (Multiple replicates are
+different views of the same data, and are therefore averaged.  Multiple
+regions are independent views using independent data, and are therefore
+multiplied.)
+</P>
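+
+<P>For readers who want to reproduce this by hand, here is a rough sketch of
+the procedure in Python.  This is our own illustration, not LAMARC code; it
+assumes the sampled values for one parameter are already in a plain list.</P>
+
+<pre>
+import numpy as np
+
+def biweight_smooth(samples, grid):
+    """Kernel-density smooth of one parameter's sampled values.
+
+    Uses the biweight kernel (15/16)(1-t^2)^2 for |t| < 1 with width
+    h = 2.5 * sigma * n^(-1/5), where sigma is the smaller of IQR/1.34
+    and the standard deviation (with a small arbitrary floor).
+    """
+    x = np.asarray(samples, dtype=float)
+    grid = np.asarray(grid, dtype=float)
+    n = len(x)
+    iqr = np.subtract(*np.percentile(x, [75, 25]))
+    sigma = max(min(iqr / 1.34, x.std()), 1e-10)   # arbitrary minimum floor
+    h = 2.5 * sigma * n ** (-1.0 / 5.0)
+    t = (grid[:, None] - x[None, :]) / h           # grid points x samples
+    k = np.where(np.abs(t) < 1.0, (15.0 / 16.0) * (1.0 - t ** 2) ** 2, 0.0)
+    return k.sum(axis=1) / (n * h)
+
+# Multiple replicates are averaged; multiple regions are multiplied and
+# re-normalized, as described above.
+def combine_replicates(curves):
+    return np.mean(curves, axis=0)
+
+def combine_regions(curves, grid):
+    product = np.prod(curves, axis=0)
+    return product / np.trapz(product, grid)
+</pre>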
+
+<h3>Where did these numbers come from?</h3>
+
+<P>The biweight kernel was chosen primarily because it is bounded and quick
+to calculate.  A bounded kernel (unlike a Gaussian kernel) ensures that the
+resulting probability density curve has defined boundaries, which better
+matches the priors for the parameters we estimate.  (An even better kernel
+would be additionally constrained not to spread out over the bounds of the
+prior, but this works for a basic attempt at analysis.)  Silverman (1986)
+has demonstrated that the choice of kernel does not otherwise make an
+appreciable difference in accuracy, so the biweight kernel was chosen over
+other bounded kernels simply because it is easy to calculate.
+</P>
+
+<P>The formula for the kernel width was taken from Silverman (1986) as
+modified for use with the biweight kernel.  The formula for the
+optimal kernel width (h<sub>opt</sub>) is:
+</P>
+
+<P>
+<center>h<sub>opt</sub> = [∫t<sup>2</sup>k(t)dt]<sup>-2/5</sup>
+[∫k(t)<sup>2</sup>dt]<sup>1/5</sup> [∫f"(x)<sup>2</sup>dx]<sup>-1/5</sup>
+n<sup>-1/5</sup>
+</center>
+</P>
+
+<P>where k(t) is the kernel function, f(x) is the function you wish to
+estimate, and n is the number of data points.  If a Gaussian is used as an
+estimate of f(x), and the biweight kernel is used for k(t), this reduces
+to:</P>
+
+<P>
+<center>h<sub>opt</sub> = [1/7]<sup>-2/5</sup> [5/7]<sup>1/5</sup> [3/8
+π<sup>-1/2</sup>σ<sup>-5</sup>]<sup>-1/5</sup> n<sup>-1/5</sup>
+</center>
+</P>
+
+<P>or</P>
+
+<P>
+<center>h<sub>opt</sub> = 2.78σn<sup>-1/5</sup></center>
+</P>
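+
+<P>(A quick numerical check of that constant, using our own throwaway
+Python snippet rather than anything in LAMARC:)</P>
+
+<pre>
+import math
+# [1/7]^(-2/5) * [5/7]^(1/5) * [3/(8*sqrt(pi))]^(-1/5)
+coefficient = ((1 / 7) ** (-2 / 5)
+               * (5 / 7) ** (1 / 5)
+               * (3 / (8 * math.sqrt(math.pi))) ** (-1 / 5))
+print(round(coefficient, 2))   # 2.78
+</pre>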
+
+<P>Silverman argues that you can account for the fact that your function is
+unknown (instead of definitely Gaussian, as we assumed in the initial
+calculation) by reducing the coefficient slightly (which is why we use 2.5
+instead of 2.78), and by using the lesser of the inter-quartile distance divided
+by 1.34 and the standard deviation of the data for σ.  Using both of
+these substitutions, we arrive at the equation listed at the beginning:
+</P>
+
+<P><center>2.5 * σ * n<sup>-1/5</sup>
+</center></P>
+
+
+<P>(<A HREF="index.html">Previous</A> | <A HREF="index.html">Contents</A> | <A HREF="compiling.html">Next</A>)</P>
+<!--
+//$Id: curve-smoothing.html,v 1.5 2007/05/02 00:44:25 lpsmith Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/data_models.html b/doc/html/data_models.html
new file mode 100644
index 0000000..9b0cfc7
--- /dev/null
+++ b/doc/html/data_models.html
@@ -0,0 +1,228 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Data models</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+
+<P>(<A HREF="regions.html">Previous</A> | <A HREF="index.html">Contents</A> |
+<A HREF="gamma.html">Next</A>)</P>
+
+<H2>Data Models (models of the mutational process) </H2>
+
+<P> This article describes available mutational models and how
+to use them. The first section covers basic capabilities shared by
+all models.  Nucleotide models are covered next, followed
+by models suitable for microsatellites and general allelic
+data.</P>
+
+The models supported at this time are:
+
+<UL>
+<LI><A HREF="data_models.html#F84">F84</A></LI>
+<LI><A HREF="data_models.html#GTR">GTR</A></LI>
+<LI><A HREF="data_models.html#stepwise">Stepwise</A></LI>
+<LI><A HREF="data_models.html#brownian">Brownian-Motion</A></LI>
+<LI><A HREF="data_models.html#kallele">K-Allele</A></LI>
+<LI><A HREF="data_models.html#mixed">Mixed Stepwise/K-Allele</A></LI>
+</UL>
+
+<H3>General features of all mutational models</H3>
+
+<P><a name="rateCategories"/>All mutational models offer multiple mutation-rate categories using
+the Hidden Markov Model of Felsenstein and Churchill.  In this
+model, we do not know what rate each site has, but we assume that
+we know how many rates there are, the relative value of each rate,
+and the proportion of sites that are expected to have each rate.
+For example, we might assume that our data has 10% fast-evolving
+sites and 90% slow-evolving sites, and that the fast-evolving sites
+are 5 times faster than the others.</P>
+
+<P>This would be indicated using the "Categories" option of the mutational
+model, giving 2 categories, the first category with a probability of
+0.1 and relative rate of 5, and the second with a probability of 0.9
+and relative rate of 1.</P>
+
+<P>Additionally, you can indicate that the rates at adjacent sites are
+correlated by setting the auto-correlation coefficient.  If you
+expect that, on average, runs of 10 sites will have the same rate,
+set this to 10.  The default is 1--every site chooses its rate
+independently.  Coding sequence has a different kind of 
+correlation--every third site is likely to be much faster than the 
+rest--and unfortunately LAMARC does not yet model this type of correlation.
+Coding sequence is currently best handled by two or three rate
+categories with no correlation.</P>
+
+<P>Programs such as <A HREF="http://paup.csit.fsu.edu/">PAUP*</a> can be useful in fine-tuning the rate model.
+In general, if you suspect your data has multiple rates, a
+multiple-rate model, even if not fully correct, is better than a single-rate
+model which is sure to be wrong.</P>
+
+<P>The program will slow down in proportion to the number of rate
+categories; it is seldom useful to have more than 3-4 categories.</P>
+
+<P>If you use multiple categories, the "mu" in your estimate of
+Theta=4N(mu) will be the weighted average of the rates.  In our
+given example, that would be 1.4 times the slow rate.</P>
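+
+<P>A one-line sketch of that bookkeeping (ours, not LAMARC's):</P>
+
+<pre>
+# Weighted-average relative rate for the two-category example above.
+probabilities = [0.1, 0.9]   # proportion of sites in each category
+rates         = [5.0, 1.0]   # relative rate of each category
+print(sum(p * r for p, r in zip(probabilities, rates)))   # 1.4 times the slow rate
+</pre>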
+
+<P>Sometimes you will know that an entire region or segment has a significantly
+higher or lower mutation rate than the rest of your data.  This
+is best dealt with by setting that segment's relative mutation rate,
+not by using categories.  (This option is found in the data model
+menu as "R  Relative mutation rate".)  Be aware that the parameter
+estimates produced by LAMARC will be scaled proportional to a
+segment of relative mutation rate 1, even if no such segment is
+included in the data.  That is, if you tell LAMARC that you have 
+two segments, one with a relative mutation rate of 5 and the other 
+with a relative mutation rate of 50, your final estimate of Theta
+will describe a fictional segment with a relative mutation rate
+of 1, and you will need to multiply by 5 or 50 to find the
+Theta of your actual segments.</P>
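+
+<P>For example (our own illustration of that rescaling, with an invented
+reported value):</P>
+
+<pre>
+# LAMARC reports Theta scaled to a segment with relative mutation rate 1.
+reported_theta = 0.002               # hypothetical reported estimate
+for relative_rate in (5.0, 50.0):
+    print(relative_rate, reported_theta * relative_rate)
+# 5.0 0.01  -- Theta of the segment with relative mutation rate 5
+# 50.0 0.1  -- Theta of the segment with relative mutation rate 50
+</pre>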
+
+<br>
+<H3>Nucleotide models</H3>
+
+<P>LAMARC provides two models suitable for nucleotide (DNA, RNA or
+SNP) data.  The F84 model allows transitions and transversions to
+differ in rate, and also allows for different nucleotide frequencies.
+It can be used to emulate simpler models such as the Kimura 2-parameter
+or Jukes-Cantor models.  The GTR model allows every combination of
+bases to have its own rate, and can emulate any simpler model, but
+is much slower and should be used only when it's really necessary.
+The Modeltest utility of Posada, used in conjunction with PAUP*,
+can be used to select the most appropriate model.
+Both models allow the user to specify 
+<a href="glossary.html#data-uncertainty">data uncertainty</a>.
+</P>
+
+<H4><A NAME="F84">F84 Model</A></H4>
+The F84 or Felsenstein 84 nucleotide model distinguishes
+transition mutations (purine to purine, pyrimidine to pyrimidine)
+from transversion mutations and allows them to have different
+rates.  This is controlled by the "ttratio" parameter, which is
+the expected ratio of transitions to transversions.  Due to the way that
+these rates are handled internally, the ttratio must be strictly
+greater than 0.5.  If you want the Jukes-Cantor model, in which
+transitions and transversions are equally probable (because
+there are twice as many possible transversions, this corresponds to
+a ratio of 0.5), set the ttratio to a value such as 0.50001.
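+
+<P>(A worked version of that parenthetical, as a tiny sketch of our own:)</P>
+
+<pre>
+# From any base, 1 of the 3 possible substitutions is a transition and
+# 2 are transversions.  If all three are equally likely (Jukes-Cantor),
+# the expected transition:transversion ratio is:
+transitions, transversions = 1, 2
+print(transitions / transversions)   # 0.5 -- hence set ttratio just above 0.5
+</pre>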
+
+<P>PAUP* can be useful in estimating the ttratio.  The value does not
+have to be very precise, but you risk misleading results if
+you analyze, for example, human mtDNA (whose correct ttratio may
+be around 30) using the default ttratio of 2.</P>
+
+<P>The F84 model also allows the frequencies of the four bases to 
+vary.  LAMARC can estimate these frequencies from the data, or
+you can specify values (for example, obtained from PAUP* or from
+a larger data set).  If your data set is very small, it is best
+to specify the values, as the frequencies calculated from the data may
+be misleading.</P>
+
+<H4><A NAME="GTR">GTR (General Time-Reversible) Model</A></H4>
+
+<P>The GTR model is the most complex tractable model of nucleotide
+evolution; it allows for six different rates (each base to each
+other base) and unequal base frequencies.  The combination of
+rates and base frequencies must describe an equilibrium.  We
+recommend use of PAUP* to estimate the rates and base frequencies.
+GTR is a slow model and should only be used if it's really
+necessary, but it frequently is necessary for virus data and
+other high-mutation situations.</P>
+
+<P>Some releases of PAUP* provide only five rates for GTR.  The
+sixth rate is 1 by convention.</P>
+
+<br>
+<H3>Microsatellite models</H3>
+
+<P>The microsatellite models are appropriate for data which is
+expected to vary up and down a ladder.  They can also be
+used for electrophoretic data.  The K-Allele model is, in addition,
+appropriate for allelic data where nothing is known about the
+expected direction of mutations:  an example would be presence/absence
+of some chromosomal rearrangement.</P>
+
+<H4><A NAME="stepwise">Stepwise Model</A></H4>
+
+<P>The stepwise model assumes that each mutation increases or decreases
+the number of repeats by one; larger changes always result from 
+multiple mutations.  It is suitable for microsatellite or
+electrophoretic data.  Even if the microsatellite does not
+evolve in a perfectly stepwise fashion, this model may be
+adequate unless the violations are fairly common or large.</P>
+
+<P> We allow for the possibility of alleles in the ancestry of
+the sample which were up to 5 repeats larger or smaller than the
+most extreme alleles observed in the data.  Currently this
+cannot be adjusted by the user.</P>
+
+<P>This model offers no user-settable parameters except for the
+rate categories options common to all data models.</P>
+
+<H4><A NAME="brownian">Brownian-Motion Model</A></H4>
+
+<P>The Brownian model is a mathematical approximation of the stepwise
+model.  It considers the mutational process as a continuous random walk
+around the starting point.  This is much faster than the full
+stepwise calculation, but can break down if the number of mutations is
+very low.  Breakdown will be signalled by data likelihoods of
+zero.  We normally try the Brownian model first and resort to the
+Stepwise only if bad data likelihoods are observed.  Results should
+be extremely similar.</P>
+
+<H4><A NAME="kallele">K-Allele Model</A></H4>
+
+<P>This model treats all alleles as equivalent, with mutation from any
+allele to any other allele equally probable.  It is the only model
+LAMARC offers for allelic data which are definitely not stepwise.  The 
+program assumes that the alleles observed in the data are the only possible
+alleles, so that if you present data with three different alleles,
+K=3.  This can be a problem if you know that there are actually 4
+alleles but one failed to occur in your data.  At present there is
+no workaround for this.</P>
+
+<H4><A NAME="mixed">Mixed Stepwise/K-Allele Model</A></H4>
+
+<P>This is a hybrid of the Stepwise and K-Allele models and may be
+appropriate for data in which most mutations are
+stepwise but a few much larger mutations occur. 
+It has a parameter 'percent_stepwise' which controls the
+proportion of Stepwise changes:  percent_stepwise of zero is a K-Allele
+model, percent_stepwise of one is a Stepwise model, and percent_stepwise of 1/2 asserts
+that K-Allele and Stepwise mutations are equally probable.  LAMARC's
+implementation can attempt to optimize percent_stepwise to maximize the
+likelihood of the observed data.</P>
+
+<P>Mixed K/S is a new
+and experimental model with which we have little experience.  One
+difficulty is that the stepwise model generally considers the
+possibility of alleles a bit larger or smaller than any observed,
+but allowing those unobserved alleles in the K-Allele model makes
+K very large and may cause the model to rule out K-Allele type mutations.
+We have allowed the Mixed model to consider only alleles one step larger
+or smaller than the largest or smallest observed alleles.  Even so,
+if the microsatellite being studied has only two, adjacent alleles,
+the Mixed model will consider 4 states whereas a normal K-Allele
+model would consider only 2.  As a result, we expect LAMARC's
+estimates of percent_stepwise to be somewhat higher than the truth.</P>
+
+<P>(<A HREF="regions.html">Previous</A> | <A HREF="index.html">Contents</A> |
+<A HREF="gamma.html">Next</A>)</P>
+
+<!--
+//$Id: data_models.html,v 1.13 2009/12/18 00:27:44 ewalkup Exp $
+-->
+</BODY>
+</HTML>
+
diff --git a/doc/html/data_required.html b/doc/html/data_required.html
new file mode 100644
index 0000000..7a8a3c9
--- /dev/null
+++ b/doc/html/data_required.html
@@ -0,0 +1,135 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Suitable data for LAMARC</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+
+<P>(<A HREF="glossary.html">Previous</A> | <A HREF="index.html">Contents</A> | <A HREF="tutorial.html">Next</A>)</P>
+<H2>Suitable data for LAMARC </H2>
+
+<P> This article gives our best available information on the type and amount
+of data needed to use LAMARC successfully.  Remember that LAMARC is an experimental tool, not an oracle. You know your data better than we do. The following guidelines will help, but we don't guarantee success even
+if you meet these criteria. Every data set is different. But if you have
+much less data than described here, you will probably not be happy with your results.</P>
+
+<UL>
+<LI><A HREF="data_required.html#regions">How many unlinked regions?</A></LI>
+<LI><A HREF="data_required.html#samples">How many samples?</A></LI>
+<LI><A HREF="data_required.html#sites">How many linked sites?</A></LI>
+<LI><A HREF="data_required.html#kinds">What kinds of samples?</A></LI>
+<LI><A HREF="data_required.html#pops">What kinds of populations?</A></LI>
+</UL>
+
+
+<H3><A NAME="regions">How many unlinked regions?</A></H3>
+
+<P> The best rule we've found is to count the number of forces you wish to recover and add 1. So for estimation of Theta and
+migration rate it is possible to get results with one or two regions, but the returned values improve markedly with three. This is because each independent region tracks a different evolutionary pathway, so each gives you an independent measure of the forces of interest. Averaging the estimates for each force across regions gives you a much better estimate than any individual region can provide.</p>
+<p> There are a few exceptions:</p>
+<p>Estimation of the growth rate of a single population is very poor with fewer
+than 3 unlinked regions and particularly benefits from having more. </p> <p>Recombination is best estimated from a single lengthy region (there is more discussion of this below in "How many linked sites?").</P>
+
+<P> Note that runtime increases approximately linearly with the number of regions.  But it's hard to have too many, unless you exceed your computer's memory capacity (or your patience).</P>
+
+<H3><A NAME="samples">How many samples?</A></H3>
+
+<P> Surprisingly, Felsenstein has shown that for estimation of Theta the
+optimal number of samples is very low, around 8.  (Reference:  <A
+HREF="http://mbe.oxfordjournals.org/cgi/content/full/23/3/691?ijkey=BKStzV1zbTncUJJ&keytype=ref">
+Felsenstein 2006</A>.) If you can obtain another unlinked region you will
+gain much more than you would gain by increasing the sample size for the current
+region beyond 8.  We generally recommend aiming for 10-15 sequences per population. Again, recombination is an exception; a recombination analysis probably needs 15-20
+sequences.</P>
+
+<P> If you cannot get multiple genes, multiple sequences are some help, but
+we do not recommend going above 30 sequences/subpopulation as the added
+difficulty of the analysis more than outweighs the gain in information. 
+Remember that the more sequences you have, the longer you will have to
+search to find good trees, and the longer each tree will take to process. 
+Runtime per tree goes up with the log of the number of sequences, but as you will
+also have to perform more steps (perhaps many more steps),
+adding sequences causes a worse-than-linear increase in overall runtime.</P>
+
+<H3><A NAME="sites">How many linked sites?</A></H3>
+
+<P> This depends on the expected level of polymorphism.  For DNA or SNP
+data, a region should ideally be long enough to see 10 or more variable
+sites.  If that's not possible you will definitely need multiple unlinked
+regions.  Above 100 or so variable sites, there is little additional
+information to be gained outside of estimating recombination.</P>
+
+<P> Runtime goes up less than linearly with number of sites; how much less
+depends on how polymorphic your data are.  Nearly invariant data are very
+quick, so if you have long sequences, by all means use them (assuming you
+have the memory).</P>
+
+<P> Recombination is an exception.  You would like your sequenced area to
+include tens of recombinations, but not hundreds.  Unfortunately it is hard
+to know this in advance.  A recombinational analysis will definitely need
+20-30 variable sites or more to have much accuracy.  In general, sequences
+for recombination inference should be long, but watch out for signs that you
+are in the hundreds-of-recombinations zone as such runs will take a very
+long time.  (The estimates will be good, but you won't want to wait for
+them.) </p>
+<p>The best approach if you do not know how much recombination you have is to start small and experiment. For example, run a very small (2-4 sample) subset of your data and see how long it takes and how much recombination it finds. If you find you have high levels of recombination you may then want to shorten your sequences. </P>
+
+<H3><A NAME="kinds">What kinds of samples?</A></H3>
+
+<P> LAMARC <B>requires</B> random samples from each subpopulation.  Therefore
+<ul>
+<li><b>DO NOT</b> cherrypick the most divergent or most interesting sequences, 
+<li><b>DO NOT</b> pick one from each major lineage, 
+<li><b>DO NOT</b> discard identical sequences, and
+<li><b>DO NOT</b> do anything of this sort! 
+</ul>
+Such  data will give grossly biased estimates.</P>
+
+<P> It has been stated in print (not by us!) that mutually incompatible
+sites in the data set must be removed.  <B>This is totally untrue.</B>  It
+is true for programs which assume an infinite-sites model, but LAMARC does
+not.  Remove sites only if you cannot be sure they are correctly aligned. 
+It is best to remove all alignment columns for which the alignment is
+doubtful. </P>
+
+<P> Be sure that all of your samples are of the same homolog; throwing in a
+few sequences from a paralog will ruin your analysis.</P>
+
+<P> You do not need to sample from multiple subpopulations evenly, nor in
+proportion to their population sizes.  However, try not to let the sample
+size from any subpopulation go below 2 diploid or 4 haploid individuals as
+the estimates are likely to be unstable.</P>
+
+<H3><A NAME="pops">What kinds of populations?</A></H3>
+
+<P> LAMARC works well for subpopulations which show definite signs of 
+geographic structure.  You may want to use the program STRUCTURE to test
+for this before beginning.  If STRUCTURE does not see any structure in
+your populations, the migration rate is probably too high and you should
+pool those populations together.</P>
+
+<p> One warning here. If STRUCTURE finds clusters, it's tempting to use them to redefine your populations. DON'T DO THAT!!! You would just be returning all the migrants to their original populations. If you find structure, keep your samples in their original spatial groupings.</p>
+
+<P> At the other extreme, if your populations are totally isolated you
+will not, of course, get a good estimate of the migration rate, though
+other parameters can be well estimated.  If your populations have
+probably not exchanged migrants in the last 4N generations (for a diploid,
+or 2N for a haploid) you should analyze each one separately, or use a divergence
+model; it is not appropriate to try to estimate non-divergence migration rates. </P>
+
+<P>(<A HREF="glossary.html">Previous</A> | <A HREF="index.html">Contents</A> | <A HREF="tutorial.html">Next</A>)</P>
+<!--
+//$Id: data_required.html,v 1.9 2012/05/16 17:14:01 mkkuhner Exp $
+-->
+</BODY>
+</HTML>
+
diff --git a/doc/html/divergence.html b/doc/html/divergence.html
new file mode 100644
index 0000000..e09250f
--- /dev/null
+++ b/doc/html/divergence.html
@@ -0,0 +1,113 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Divergence</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+<P>(<A HREF="migration_matrix.html">Back</A> | <A HREF="index.html">Contents</A>
+| <A HREF="panels.html">Next</A>)</P>
+<h2>Divergence</h2>
+
+<p>
+<em>(Note: it is recommended that you be familiar with the material
+in the <a href="converter.html">data file conversion</a>
+section before reading this section.)</em>
+</p>
+
+<p>
+Divergence allows you to define the relationships between populations by linking populations pairwise into parent (or ancestor) populations. This Divergence linkage is defined on the <tt>Data Partitions</tt> tab. Once that is complete, the user can go to the <tt>Migration Matrix</tt> tab to edit the migration matrix.
+</p>
+<p>
+Note: Because you are defining the relationship between pairs of populations and their parent, the process is iterative. You pick the first pair, the software links them to their parent, and then you pick the next pair. Thus there is a bit of back and forth between screens until you get all the connections defined. We would like to have done this graphically, but, given the constraints of the current interface, that was not possible. This interface is, quite frankly, a stop gap that save [...]
+</p>
+
+<h3><a name="div_def">Divergence Definition</a></h3>
+<p>
+Here is the <tt>Data Partitions</tt> tab for a three-population data set before divergence is turned on. Note that the <tt>Divergence</tt> button reads "Off".
+</p>
+<p><img src="batch_converter/images/DivergeOff.png" alt="Divergence off Data Partitions"/>
+</p>
+<p>
+Picking the <tt>Divergence</tt> button yields the following screen:
+</p>
+<p><img src="batch_converter/images/FirstParent.png" alt="Parent_1 with no children checked"/>
+</p>
+<p>
+There are two things that can be done here:
+<ul>
+<li>
+<tt>Rename Parent</tt> allows you to edit the parent name. You should be aware that the XML parser that will eventually read the output from this converter into LAMARC gets confused by spaces in names, so they are automatically replaced with underscores. The software is not being perverse; it is protecting you from an unsophisticated parser.
+</li>
+<li>
+<tt>Pick Children</tt> is a list of all the populations that currently do not have parents. Parents can only have 2 children, so after you check two the rest will gray out. If you make a mistake, just pick the incorrect one again; it will uncheck, and the rest of the names will become active again. <tt>Unselect All</tt> will uncheck all the children. That is not much of a gain in this case, but if there were 4 or 5 children it would be useful.
+</li>
+</ul>
+</p>
+<p><img src="batch_converter/images/FirstParent2Children.png" alt="Parent_1 with 2 children checked"/>
+</p>
+<p>
+Now that the first parent is chosen, the <tt>Data Partitions</tt> tab looks like this. Note that the <tt>Divergence</tt> button now reads "Continue" and there is a <tt>parent</tt> cell bridging the "North" and "South" populations with the name "Parent_1".
+</p>
+<p><img src="batch_converter/images/InterumParentImage.png" alt="Parent_1 with 2 children image"/>
+</p>
+<p>
+One puzzler: if you looked at the <tt>Migration Matrix</tt> tab at this point, you would find only the migration matrix between the populations and no sign of "Parent_1". This is because the Divergence structure is not yet fully defined, so the Migration Matrix does not know what to do.
+</p>
+<p>
+In order to finish the Divergence definition it is necessary to pick the <tt>Divergence</tt> button again. This time the resulting screen looks like this:
+</p>
+<p><img src="batch_converter/images/SecondParent.png" alt="Parent_2 with 2 children checked"/>
+</p>
+<p>
+As there are only two items lacking a parent at this point, both "East" and "Parent_1" are automatically checked. One can still edit the name, of course. 
+</p>
+<p>
+Accepting "Parent_2" leads to the fully populated Divergence <tt>Data Partitions</tt> tab with the <tt>Divergence</tt> button reading "Done".
+</p>
+<p><img src="batch_converter/images/FullParentsImage.png" alt="Full Divergence Parent set image"/>
+</p>
+<p>
+By the way, if this was all a horrible mistake, just pick the <tt>Divergence</tt> button again and it will shut Divergence off and delete all the parent information.
+</p>
+
+<h3><a name="divmig_matrix">Divergence-Migration Matrix</a></h3>
+
+<p>
+When Divergence definition is done the <tt>Migration Matrix</tt> tab displays the Divergence-Migration Matrix. It has an identical form to the Migration Matrix except there are now rows and columns for the parent populations. If you had done any edits in the Migration Matrix before turning Divergence on, they will still be present. And if you turn Divergence off, the edits in the Population part of the matrix will not change, though the Parent part of the matrix will be lost. 
+</p>
+<p>
+[Technical note: Divergence-Migration is actually a different Force from Migration. This is probably irrelevant to the average user but has serious internal consequences for the software.]
+</p>
+<p><img src="batch_converter/images/DivMigMatrixTab.png" alt="Divergence-Migration Matrix"/></p>
+<p>
+One thing that looks different about the Divergence-Migration Matrix is there are "invalid" cells off the diagonal. This is because it is impossible to have migration between populations if they don't exist at the same time. In this case, "North" and "South" are children of "Parent_1" and thus do not exist until "Parent_1" disappears. 
+</p>
+<p>Also note that the matrix has "invalid" in the entire bottom row and last column. This is because there are no other populations defined after the final parent "Parent_2" is defined, so migration cannot occur. Conceptually one could leave "Parent_2" off the matrix, but this format makes clear what is going on and is consistent with the Migration Matrix for a single population, which has a single "invalid" cell in it.
+</p>
+<p>
+<a href="migration_matrix.html#edit_mat_cell">Editing the individual cells</a> works exactly the same as in the Migration Matrix discussed previously.
+</p>
+
+
+<P>(<A HREF="migration_matrix.html">Back</A> | <A HREF="index.html">Contents</A>
+| <A HREF="panels.html">Next</A>)</P>
+
+<!--
+//$Id: divergence.html,v 1.3 2012/03/14 22:07:54 jmcgill Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/forces.html b/doc/html/forces.html
new file mode 100644
index 0000000..5c689fa
--- /dev/null
+++ b/doc/html/forces.html
@@ -0,0 +1,394 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Evolutionary forces</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+
+<P>(<A HREF="gamma.html">Previous</A> | <A HREF="index.html">Contents</A> |
+<A HREF="parameters.html">Next</A>)</P>
+
+<H2>Evolutionary Forces</H2>
+<P> This article describes, in some technical detail, the way we
+model evolutionary forces.  You don't need this information to
+get the program running, but it can be very helpful in understanding
+your results. </P>
+
+The forces supported at this time are:
+
+<UL>
+<LI><A HREF="forces.html#coalescence">Coalescence</A></LI>
+<LI><A HREF="forces.html#migration">Migration</A></LI>
+<LI><A HREF="forces.html#recombination">Recombination</A></LI>
+<LI><A HREF="forces.html#growth">Growth</A></LI>
+<LI><A HREF="forces.html#gamma">Gamma-distributed relative mutation rate over regions</A></LI>
+<LI><A HREF="forces.html#divergence">Divergence</A></LI>
+</UL>
+
+<P> For each force, we describe the parameter being estimated, and
+list some assumptions made in the analysis.  If the assumptions
+are violated, the results will not necessarily be wrong, but
+should be interpreted cautiously.</P>
+
+<H3><A NAME="coalescence">Coalescence</A></H3>
+
+<P>We estimate the parameter Theta for each population.  While most papers
+describe Theta as 4<i>N<sub>e</sub></i>mu (where <i>N<sub>e</sub></i> is the
+effective number of individuals and mu is the
+mutation rate in mutations per generation), this is specific to diploids.
+If you put haploid mtDNA into LAMARC, the Theta estimates will be estimates
+of 2<i>N<sub>f</sub></i>mu instead, where <i>N<sub>f</sub></i> is the effective 
+number of females in the population.  It is best to think of Theta as "number of heritable
+copies in population * 2 * mutation rate," since this definition works no
+matter what the ploidy is.  (The "2" comes from the fact that two sequences
+that have diverged for time <i>t</i> are different by 2 * mu * <i>t</i> 
+mutations, since both diverging lineages accumulate mutations.)</P>
+
+<P> The units of mu are mutations per site per generation.  Please note
+that many studies compute a (lower-case) theta whose units involve
+mutations per <b>locus</b> per generation.  To convert, multiply the
+per-site Theta by the number of sites.</P>
+
+<P> If the "multiple rate categories" option of the data model is
+used, the mutation rate is the weighted mean across all categories.</P>
+
+<P> While it would be helpful to have separate estimates of population 
+size and mutation rate, with genetic data from a single time point
+there is no way to separate them.  If you have outside information
+about either population size or mutation rate, for example from
+outgroup analysis, you can then estimate the other parameter
+directly.</P>
+
+<P> A Frequently Asked Question is how to interpret the parameter
+Theta when analyzing mitochondrial DNA or Y-chromosome DNA.
+In these cases Theta is proportional to the mtDNA or Y-chromosome
+effective population size.  For example, when given mtDNA, the program
+estimates Theta=2<i>N<sub>f</sub></i>mu, and when given Y-chromosome DNA, the
+program estimates Theta=2<i>N<sub>m</sub></i>mu (where "f" and "m" refer to the
+female and male effective population sizes).</P>
+
+<P> To combine estimates of Theta from regions with different Thetas, such
+as mtDNA and nuclear DNA, you can set the relative population sizes in the
+'<A HREF="menu.html#data">Effective Population Size</a>' menu, or set these
+values directly in the XML input.  Reported multi-region Theta estimates
+will be scaled accordingly.</P>
+
+<H4> Assumptions:</H4>
+
+<UL> <LI><P>(unless growth rates are being estimated) The population(s) have been the
+same size for a very long time (though they can be different from one
+another).</P>
+
+<LI><P>All markers being considered are neutral, or at least the variation 
+seen is neutral (a gene which undergoes purifying selection against harmful
+variants will work, but a gene under balancing selection will give
+distorted results).</P>
+
+<LI><P>The markers are also not tightly linked  to any segments undergoing
+directional or balancing selection.</P>
+
+<LI><P>The population is significantly larger than the sample drawn from it. 
+If not, the basic coalescent equations break down.  Results will also be
+questionable if the population size is truly tiny--less than a few dozen. 
+A rule of thumb is that the sample size should be no greater than the
+square root of the population size, so a population of 100 should have a
+sample no greater than 10.</P>
+
+<LI><P>Each population is free of internal subdivisions which would impede
+gene flow.</P>
+
+<LI><P>All regions and segments in the analysis either reflect the same
+underlying Theta (they do not vary in mutation rate or population size),
+have been correctly annotated with their relative mutation rates and
+population sizes, or have mutation rates drawn from a gamma distribution
+(if explicitly estimated).</P> </UL>
+
+
+<H3><A NAME="migration">Migration</A></H3>
+
+<P> We estimate the immigration rate into each population from each
+other population (moving forward in time), so that a three-population case will estimate
+six rates.  The immigration parameter <i>M</i> is expressed as 
+<i>m</i>/mu where <i>m</i>
+is the immigration rate per generation and mu is the neutral
+mutation rate per site per generation.  (As usual, if multiple
+mutation categories are used, mu is the weighted average.)
+</P>
+
+<P> If you would prefer to consider migration in terms of 4<i>Nm</i>, 
+multiply the given value by the recipient population's value of Theta.
+Please be careful of the distinction between immigration and
+emigration.  LAMARC always estimates and reports immigration, so 'M12'
+indicates the rate at which individuals from population 1 have moved to 
+population 2.
+</P>
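+
+<P>As a worked illustration of that conversion (the numbers below are
+invented), in Python:</P>
+
+<pre>
+# Convert LAMARC's immigration parameter M = m/mu into 4Nm by
+# multiplying by the Theta of the *recipient* population.
+M_12       = 150.0   # hypothetical immigration rate into population 2 from 1
+theta_pop2 = 0.01    # hypothetical Theta of the recipient population 2
+print(M_12 * theta_pop2)   # 4Nm = 1.5
+</pre>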
+
+<P> In the current version we cannot meaningfully combine genetic
+regions with different migration rates in the same analysis.  We hope 
+to provide this capability later.</P>
+
+<P> Peter Beerli has argued that it may be useful to include a population in your analysis
+even if you have not sampled any individuals from it.  This can
+guard against biases produced by unacknowledged populations.
+For example, if you believe that your real-life situation involves
+three populations exchanging migrants, but you have been able to
+sample only two of them, you might add the third population
+even with no individuals.  The parameter estimates involving
+this population will be weak, but the estimates involving the
+other two populations may be more accurate than if the unsampled
+population were omitted entirely.  </P>
+
+<P> However, LAMARC is not very stable with unsampled
+populations, because the likelihood surface for the parameters of
+the unsampled population may be flat or multi-peaked. For this
+reason we have not allowed unsampled populations in v2.  If
+you feel this capability would help you, consider using MIGRATE,
+and also please let us know.  We will work on stabilizing
+estimates for unsampled populations if this is of general interest.</P>
+
+<H4> Assumptions:</H4>
+
+<UL>
+<LI>
+<P>The current migration structure has been stable for a long time,
+and the populations have existed for a long time.  (If one population
+is a recent offshoot of another, you will see spurious evidence
+of migration between them.)</P>
+
+<LI> <P> Since the migration situation is assumed to be stable, there must
+be some way for individuals to have reached all of the populations. Thus,
+all populations must be connected at least through one path, and at most one
+population can have no immigrants.   If your data do contain several
+completely isolated populations, you will estimate small, but non-zero,
+migration rates into them. This is a result of assuming a common ancestor
+for all populations.</P>
+
+</UL>
+
+<P> We do not assume that migration is symmetrical, though you can impose
+this assumption if you wish using <A
+HREF="menu.html#constraints">constraints</a>.</P>
+
+<H3><A NAME="recombination">Recombination</A></H3>
+
+<P> We estimate the recombination rate <i>r</i> = <i>C</i>/mu, 
+where <i>C</i> is
+the recombination rate per inter-site link per generation and
+mu is the neutral mutation rate per site per generation.  As
+usual, if multiple rate categories are in effect, mu is the
+weighted average of the categories.</P>
+
+<P> In the current version we cannot meaningfully combine regions or
+populations with different recombination rates.  We hope to provide this 
+capability later.</P>
+
+<P> We do not assume that all recombinations are visible.  An advantage
+of LAMARC and its predecessor RECOMBINE over pairwise methods
+is that their recombination estimates are not dragged downwards
+if the data are nearly invariant (though the error bars, of course,
+increase greatly).</P>
+
+<H4> Assumptions:</H4>
+
+<UL>
+
+<LI><P>The recombination rate is constant across the region and has not
+changed for a long time.</P>
+
+<LI><P>Recombination frequency is not affected by sequence divergence;
+highly similar and highly dissimilar sequences are equally likely to
+recombine.</P>
+
+<LI><P>All recombination is homologous.</P>
+
+<LI><P>There is no gene conversion.  While double recombinants can produce
+events that resemble conversions, they have the dynamics of two independent
+events, not a single event like a true conversion.</P>
+
+<LI><P>There is no interference among adjacent recombinations (this follows
+from the coalescent assumption that no two events happen at the same
+instant).</P>
+
+<LI><P>Recombination events are selectively neutral.</P>
+
+</UL>
+
+<H3><A NAME="growth">Growth</A></H3>
+
+<P> We estimate the exponential growth rate <i>g</i> as defined in the
+following equation, where <i>t</i> is a time before the present:</P>
+
+<P><center>Theta<sub><i>t</i></sub> = Theta<sub>present time</sub> exp(-<i>gt</i>)</center></P>
+
+<P>This means that positive values of <i>g</i> indicate a population which is
+growing, and negative values a population which is shrinking.  They
+are not symmetrical in magnitude, however; <i>g</i> = 10 indicates rather slow
+growth, but <i>g</i> = -10 indicates significant shrinkage for most values of
+Theta.</P>
+
+<P> When migration is also in effect, growth rates are computed
+for each population independently.</P>
+
+<P> In the presence of growth, the reported value of Theta is the
+present-day Theta (at the time when the data were sampled). The equation
+above can be used to determine values of Theta for past times.  Just
+remember that the time parameter <i>t</i> is measured in units of mutations 
+(i.e. 1 <i>t</i> is the average number of generations it takes one site to 
+accumulate one mutation),
+and <i>g</i> is measured in the inverse units of <i>t</i>.
+If mu is known, multiply a number of generations by mu to
+get units of <i>t</i> (conversely, <i>t</i>/mu is a number of generations).</P>
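+
+<P>A small sketch of those conversions in Python (all parameter values below
+are invented for illustration):</P>
+
+<pre>
+import math
+
+theta_now = 0.01    # hypothetical present-day Theta
+g         = 100.0   # hypothetical growth rate, in units of 1/t
+mu        = 1e-8    # hypothetical mutation rate per site per generation
+
+t = 0.005                                        # a past time, in mutational units
+theta_then      = theta_now * math.exp(-g * t)   # Theta at that time (~0.0061)
+generations_ago = t / mu                         # ~500,000 generations
+print(theta_then, generations_ago)
+</pre>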
+
+<H4>Assumptions:</H4>
+
+<UL>
+
+<LI><P> The population has been growing or shrinking at the same
+exponential rate for a long time.  (This is particularly questionable when
+the population is shrinking; except for microorganisms, most biological
+populations do not survive long periods of exponential shrinkage.)</P>
+
+<LI><P> If migration is in effect, immigration rate does not depend on
+population size.</P>
+
+<LI><P> The growth force cannot be used in combination with the gamma "force."
+Please see the comment <A HREF="forces.html#gamma">there</A> for an explanation.
+
+</UL>
+
+<P> Simulation studies show that estimation of growth tends to be biased
+upwards; this bias is reduced by having multiple unlinked regions and/or, in
+cases with recombination, long sequences.  Be cautious in interpreting
+results from one or a few regions.  The profile likelihoods are more reliable
+than the maxima but can also show the bias.</P>
+
+
+<H3><A NAME="gamma">Gamma-distributed relative mutation rate over regions</A></H3>
+
+<P> This isn't exactly a "force," but because LAMARC optionally allows you to estimate
+the parameter (α) of the gamma distribution which best fits data sets composed of
+multiple unlinked genomic regions, it's in the menu for evolutionary forces. More
+information about this is available <A HREF="gamma.html">here</A>.</P>
+
+<P> Because there need to be multiple genomic regions in order for there to
+be relative mutation rates to distribute among them, this "force" will not
+appear in the evolutionary forces menu if the data is annotated as coming from a single
+genomic region.</P>
+
+<P> Due to a mathematical detail of how this feature is implemented in LAMARC,
+it can only be applied to maximum-likelihood analyses.  If you wish to perform
+a Bayesian analysis, your best bet is to estimate the relative single-region
+mutation rates by some other means, then supply these to LAMARC as
+constants.</P>
+
+<H4>Assumptions:</H4>
+
+<UL>
+
+<LI><P>Each population being analyzed is constant in size. This is because
+a certain integrand containing the gamma distribution arises when the populations
+are assumed to be growing or shrinking exponentially, and this integrand
+cannot be integrated analytically.  Hence LAMARC does not allow the gamma
+"force" to be used in combination with growth.</P>
+
+</UL>
+
+<H3><A NAME="divergence">Divergence</A></H3>
+
+<p>
+We estimate the time at which pairs of populations split from their
+common ancestor.  Thus, a case with three populations will estimate two
+divergence times.  For example, a human/chimp/gorilla case might
+estimate the human versus chimp divergence time and the human+chimp versus
+gorilla divergence time.
+</p>
+
+<p>
+LAMARC does not infer the population tree.  The information that we should
+be computing human+chimp and not chimp+gorilla as the first split must be
+provided by the user.  [For a method that attempts to infer population trees,
+see the "*BEAST" program from Drummond's group.] Divergence
+requires not only the topology of the population tree, but its order of
+events.  If we have a tree with an A+B split and a C+D split we must
+specify which split happened most recently.
+</p>
+
+<p>
+Divergence times are scaled by the mutation rate.  A time of 1.0 for the
+divergence of two populations means that it occurred long enough ago that
+each character has an expectation of 1 mutation since then.
+</p>
+
+<p>
+If migration is additionally turned on, migration is allowed between populations that exist at the same time.  This leads to inference of more migration rates
+than a non-divergence case.  For example, in the human/chimp/gorilla
+example the program could maximally infer bidirectional migration rates
+between human and chimp, human and gorilla, chimp and gorilla, and
+between gorilla and the human/chimp ancestor.  Users should beware of
+inferring too many migration rates for the available amount of data.
+Unwanted rates can be fixed at zero.  For example, in this case fixing
+the rates between human and gorilla to zero would seem reasonable.
+</p>
+
+<p>
+An important caveat about models of divergence is that in some cases the
+data will say nothing about one or more parameters (the jargon term is
+"not identifiable").  For example, if a population split very recently
+there will be no power to infer the sizes of the two descendants or the
+migration rate between them.  If
+they split very long ago there will be no power to infer the
+size of the ancestor or its migration rate.  And if the migration rate between two populations is very high there will
+be no power to infer their divergence time.  It is important to look at
+confidence intervals and, in Bayesian runs, the shape of the posterior
+distribution to detect cases in which some parameters are not identifiable. 
+</p>
+
+<p> Quick calculators for starting values (FST, Watterson) are not available in
+cases with divergence.  We don't know how to use them for unsampled ancestral
+populations, so we cravenly don't use them at all.</p>
+
+<H4>Assumptions:</H4>
+
+<UL>
+
+<LI>
+<P>The population tree given by the user is correct.  Grossly distorted
+estimates should be expected if the population tree is wrong, and there
+is no way for the program to detect this problem.
+</P>
+</LI>
+
+<LI>
+<P>The populations have been the same size (unless growth rates are being
+estimated) throughout their existence.  No relationship is assumed between
+pre-split and post-split sizes (the size of the human/chimp ancestor has
+no fixed relationship with the sizes of the human and chimp populations).
+</P>
+</LI>
+
+<LI>
+<P>The migration rates are stable throughout the lifespan of a population.
+</P>
+</LI>
+
+</UL>
+
+<P>(<A HREF="gamma.html">Previous</A> | <A HREF="index.html">Contents</A> |
+<A HREF="parameters.html">Next</A>)</P>
+
+<!--
+//$Id: forces.html,v 1.26 2013/11/07 22:46:06 mkkuhner Exp $
+-->
+</BODY>
+</HTML>
+
diff --git a/doc/html/gamma.html b/doc/html/gamma.html
new file mode 100644
index 0000000..beb88f9
--- /dev/null
+++ b/doc/html/gamma.html
@@ -0,0 +1,169 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Mutation Rate Variation</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+<P>(<A HREF="data_models.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="forces.html">Next</A>)</P>
+
+<H2>Combining data with different mutation rates</H2>
+
+<P> LAMARC offers three ways to handle variation in mutation rate
+among markers.  
+<UL>
+<LI><A HREF="gamma.html#contiguous">Variation within a contiguous segment</A></LI>
+<LI> <A HREF="gamma.html#known">Known variation among segments, regions, and/or data types.</a></LI>
+<LI> <A HREF="gamma.html#unknown">Unknown variation among regions.</a></LI>
+<LI> <A HREF="gamma.html#bottom">Bottom line.</a></LI>
+</UL>
+
+
+This file describes all three and gives some guidelines
+on their correct use.  Note that we will use the term 'segment' for a
+contiguous stretch of sites with markers of the same data type, and 'region'
+to indicate one or more linked segments of the same or different data types.
+</P>
+
+<P><A NAME="contiguous"><B> Variation within a contiguous segment.</B>  If the mutation
+rate (or fixation rate) may vary from site to site within a single
+contiguous genetic segment, such as a DNA sequence or group of
+linked microsatellites, the best approach is to use the "Multiple
+rate categories" option of the appropriate data model.  This
+option is described in the <A HREF="data_models.html">data models</a>
+section of the documentation.</P>
+
+<P><A NAME="known"><B> Known variation among segments, regions, and/or data types.</B>
+If you know in advance
+that one region has, say, a tenfold higher mutation rate than
+another, the best approach is to set the "Relative mutation
+rate" option of the appropriate data model.  Choose one region
+as the standard, and set its relative mutation rate to 1.0;
+set the others proportionally.  This is a good approach if you
+have, for example, a DNA sequence and some microsatellites, and are
+fairly sure that microsatellite shift mutations are about 1000
+times more common than single base pair substitutions.  The
+unit of comparison is the single marker:  one microsatellite,
+one base pair of DNA, one SNP.</P>
+
+<P>This approach can also be used for areas where the mutation rate
+variation is known for a contiguous stretch of markers, for example a DNA
+sequence containing both introns and exons.  If each intron and exon is
+assigned to a unique segment, the known relative mutation rates can be set
+explicitly.</P>
+
+<P> Even if you are not perfectly sure of the ratio between
+your data, using a reasonable guess will still be better than
+allowing the default of identical rates everywhere.  If you
+are not sure whether microsatellites mutate 1000x or only
+100x faster than DNA, pick an intermediate value.  Assuming
+that they mutate at the same rate will definitely give bad
+results.</P>
+
+<P><A NAME="unknown"><B> Unknown variation among regions.</B> If you suspect that
+your regions vary in mutation rate, but you don't have any
+information on their specific rates, you can assume that 
+these rates are drawn from a gamma distribution.  The
+gamma distribution is a somewhat arbitrarily-chosen, flexible statistical
+distribution which varies from looking exponential when its
+scaled shape parameter α is low, to looking like an increasingly narrow
+bell curve as α increases.  Low values of α correspond
+to cases in which most regions are nearly invariant, and a few
+evolve rapidly.  High values of α correspond
+to cases in which the single-region mutation rates are approximately
+normally distributed about the mean single-region mutation rate.
+(The gamma distribution actually has two parameters, a "shape
+parameter" α and a "scale parameter" β, but LAMARC
+sets β = 1/α to avoid overparameterization, and to
+allow it to work with a distribution whose mean, the product αβ,
+is 1.)</P>
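+
+<P>To get a feel for the shape parameter, here is a small sketch of our own
+(not LAMARC's machinery) that draws hypothetical single-region relative rates
+from a gamma distribution with β = 1/α, so the mean is always 1:</P>
+
+<pre>
+import numpy as np
+
+rng = np.random.default_rng(0)
+for alpha in (0.1, 1.0, 10.0):
+    rates = rng.gamma(shape=alpha, scale=1.0 / alpha, size=100000)
+    print(alpha, round(rates.mean(), 3), round(rates.std(), 3))
+# The mean stays near 1 for every alpha; the spread shrinks as alpha grows
+# (std = 1/sqrt(alpha)): low alpha means a few fast regions and many nearly
+# invariant ones, high alpha means rates clustered near the mean.
+</pre>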
+
+<P> LAMARC can estimate α if you have no prior conception of what a
+good value here would be (though a reasonable starting guess will speed up
+maximization).  In practice, it needs more than two or three regions to make
+a reasonable estimate of α.  If you have only 2-3 regions, it is best
+to guess at their ratio, or fix α to a value you find reasonable;
+estimation of α is likely to fail because not enough information is
+available.  (With only one region, α cannot be estimated and will not
+be used.) Information on setting this option is available in the
+<a href="menu.html#gamma">gamma parameter</a> section of the
+<a href="menu.html">LAMARC menu documentation</a>.
+</P>
+
+<P> If your data consist of several microsatellites and
+several DNA or SNP regions, the real distribution of mutation
+rates probably resembles a two-humped camel and not a
+gamma distribution at all.  You can try fitting a gamma
+anyway, but be aware that you are fitting an inappropriate
+model.  A better alternative is to guess the relative
+mutation rates:  the large difference between microsatellites
+and DNA data probably trumps any differences within each
+group.  You can even do both, giving a constant relative mutation rate
+for each region and then adding a gamma on top.  We believe
+that this has the effect of drawing the different regions from
+versions of the same gamma but with its mean shifted by
+the given mutation rate ratio.  However, this combination
+has not been extensively tested:  use it at your own risk. 
+It assumes that α is the same for DNA and microsatellites,
+which is probably not the case, but sometimes a shaky
+assumption is better than nothing.</P>
+
+<P>Please note that Lamarc can only apply a gamma distribution
+to single-region relative mutation rates if all populations
+are assumed to remain constant in their respective sizes.
+This is due to a mathematical complication in the way Lamarc
+implements the gamma distribution (in this case, Lamarc does
+<i>not</i> approximate the gamma distribution by a histogram of
+relative rates).  This means that Lamarc cannot simultaneously
+model the gamma "force" and the force of exponential population
+growth, even for fixed values of α or <i>g</i>.  If you
+believe one or more of your populations is rapidly growing or
+shrinking, and you think the single-region relative mutation
+rates are approximately gamma-distributed for your data,
+then your best bet is to estimate the relative rates by some
+other method and supply these to Lamarc as constants, and then
+to proceed to estimate growth rates. </P>
+
+<P>Also, because of the way Lamarc implements this feature,
+it can only be used for maximum-likelihood analyses.
+If you want to perform a Bayesian analysis, and you think
+the single-region relative mutation rates are approximately
+gamma-distributed for your data, then your best bet is to
+estimate the relative rates by some other method and supply
+these to Lamarc as constants, and then
+proceed with your Bayesian analysis.</P>
+
+<P><A NAME="bottom"><B>Bottom line.</B>  If your data has mutation rate
+variation within a segment, use the "Multiple rate categories"
+option of the mutation model.  If it has variation between
+segments and you know the relative rate of each, use the
+"Variable mutation rate" option of the mutation model.
+If it has variation among regions and you don't know the rates
+of individual regions, you can assume that they are drawn
+from a gamma, but this is likely to work well only if you
+have more than 3 regions, and is not ideal if the regions fall
+into large classes with distinctly different rates.  It
+is best suited for large collections of one data type,
+such as multiple regions with DNA.</P>
+
+<P>(<A HREF="data_models.html">Previous</A> | <A HREF="index.html">Contents</A> |
+<A HREF="forces.html">Next</A>)</P>
+<!--
+//$Id: gamma.html,v 1.12 2011/06/23 21:00:36 jmcgill Exp $
+-->
+</BODY>
+</HTML>
+
diff --git a/doc/html/genetic_map.html b/doc/html/genetic_map.html
new file mode 100644
index 0000000..effb8bd
--- /dev/null
+++ b/doc/html/genetic_map.html
@@ -0,0 +1,385 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation:
+Modeling Linkage Properties and Relative Mutation Rates of Your Data</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+<P>(<A HREF="converter.html">Back</A> | <A HREF="index.html">Contents</A>
+| <A HREF="migration_matrix.html">Next</A>)</P>
+<h2>Modeling Linkage Properties and Relative Mutation Rates of Your Data</h2>
+
+<p>
+<em>(Note: it is recommended that you be familiar with the material
+in the <a href="converter.html">data file conversion</a>
+section before reading this section.)</em>
+</p>
+
+
+<p>
+LAMARC is a coalescent analysis program.
+It constructs and analyzes many different possible genealogies
+(trees) representing the common ancestor relationships among the
+data sequences sampled. You will need to guide LAMARC's search by
+specifying how samples from different portions of your organism's
+genome are related.
+For example,
+<ul>
+<li>
+data from different chromosomes (or from sufficiently
+distant portions of the same chromosome)
+should be modeled with independent genealogies, and
+</li>
+<li>
+samples with known differences in relative mutation rate (such as
+introns and exons within a single sequence)
+should be subdivided into portions corresponding to those
+different mutation rates.
+</li>
+</ul>
+</p>
+
+<p>
+In order to guide LAMARC's search through appropriate
+genealogies, you will need to be able to partition your data samples into
+<ul>
+<li><a href="#coherent-segment">coherent segments</a> (closely linked data )
+<li><a href="#region-coord">linked segment regions</a> (loosely linked 
+data), and
+<li>populations (groups of inter-breeding individuals).
+</ul>
+</p>
+
+<P> Modeling these different properties
+will allow you to do an analysis with multiple chromosomal regions and/or
+multiple populations. You can combine several files together into a
+single LAMARC input file.  These input files do not need to be in the same
+file format, as long as they are all files the converter can read (i.e. you
+can stick together two PHYLIP files and a MIGRATE file into a single LAMARC
+dataset).</P>
+
+<P>Older versions of LAMARC allowed one to mix different data types
+only when they were unlinked from each other (i.e. in different linked
+segment regions).
+As of LAMARC 2.1, we have relaxed even this restriction, allowing you to
+mix and match different data types even when they are linked.  So, for
+example, the increasingly-popular data type of 
+<a href="#microsat-snp">microsatellite next to a SNP</a>
+may now be modeled in LAMARC and will be analyzed appropriately.</P>
+
+<h3><a name="coherent-segment">Coherent Segments</a></h3>
+
+<p>
+A coherent segment (or 'segment' for short) is:
+<ul>
+<li>one or more genetic markers, </li>
+<li>all of the same data type, </li>
+<li>arranged in sequence order as on the genome, </li>
+<li>with (almost) no missing or omitted data, and </li>
+<li>(in most cases) having similar mutation rates. </li>
+</ul>
+</p>
+
+<h4>Missing Data</h4>
+<p>
+Note that "missing data" is defined differently for different
+data types. For example, SNPs
+come as markers and a series
+of spacings.
+However, a set of SNPs counts as a 'coherent segment' --
+while we don't know what the sequence is between the
+SNPs, we do know that it doesn't vary.
+Linked microsatellite data is similar -- the number of microsat
+repeats and their relative locations are relevant, not the sequences
+between them.
+</p>
+<p>
+Occasional missing or corrupted data is represented in the input
+files with a "<tt>?</tt>" character.
+</p>
+
+<h4>Similar Mutation Rates</h4>
+<P>
+LAMARC's default assumption is that sampled locations within a single
+segment have the same mutation rate. This assumption can be changed
+with the <a href="data_models.html#rateCategories">rate categories</a>
+feature in the lamarc menu. However, that feature has LAMARC identify
+different mutation rates for each site.</p>
+<p>
+When you have sections within a stretch of sampled data for which you know
+that the mutation rates differ, it may be more appropriate to model them as
+different coherent segments within the same linked segment region.
+For example, if you have sequenced a gene with multiple
+introns and exons, you can include each intron and exon as a coherent
+segment, each with an appropriate relative mutation rate, and then combine
+them all into a single linked segment region.</P>
+
+<!---LS NOTE:  if we test and provide a
+way to allow 'wobble base' overlapped segments, we can talk about this here.
+It would probably work with an unmodified 2.1, but we should test it
+first.-->
+
+<p>
+<em>
+Note:
+As of LAMARC 2.1, 
+<a href="data_models.html">relative mutation rates</a>
+can only be set from the
+<a href="menu.html#data">data model menu</a>
+of the lamarc program.
+If you wish to model introns and exons with separate relative
+mutation rates, place them in separate coherent segments
+during the conversion process, and set the relative mutation
+rates from the lamarc program itself.
+A more complete discussion of the many ways you can accommodate
+data with different mutation rates is found in section
+<a href="gamma.html">Combining Data with Different Mutation Rates</a>.
+</em>
+</p>
+
+
+
+<h4><a name="segment-coord">Length and Spacing Information with
+Segment Coordinates</a></h4>
+
+<p>
+Below is a screen shot from the lamarc converter showing the
+detail panel for coherent segment <tt>chrom2-segment1</tt> that
+results when <a href="converter_cmd.html">command file</a>
+<a href="batch_converter/sample-conv-cmd.html">sample-conv-cmd.xml</a> 
+(actual xml is <a href="batch_converter/sample-conv-cmd.xml">here</a>)
+is read into the lamarc converter.
+</p>
+
+<img src="batch_converter/images/lam_conv_chrom2_segment1.png" alt="xxx"/>
+
+<p>
+The second column from the left displays the following quantities,
+which together specify the relative spacing of data within a segment.
+<ul>
+<li><em>Number of Markers</em> -- the number of sites with data. For
+    DNA it is the number of sites sequenced; for SNPs it is the number
+    of SNPs; for Kallele and Microsatellite data it is the number of
+    distinct sites at which kallele/msat data was found</li>
+<li><em>Total Length</em> -- total number of bases searched for data</li>
+<li><em>First Position Scanned</em> --
+    the location of the first sampled location in your data
+    </li>
+<li><em>Locations of sampled markers</em> --
+    the location of each particular marker of your data, in the same
+    coordinates as the first position scanned.
+    </li>
+</ul>
+</p>
+
+<p>
+The last two of these quantities are measured in 
+"segment coordinates": they are local to the appropriate
+segment.  Thus, if your first position scanned is -5 and your first 
+location is 2, your first SNP is the 7th position scanned.
+<em>
+(If you're wondering why it isn't the 8th position, see question
+<A HREF="troubleshooting.html#Q14">Does LAMARC use 'site 0'? Do I?</a>.)
+</em>
+</p>
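+<p>
+<em>A small Python sketch of that arithmetic; the only subtlety is that the
+coordinate system has no site 0, so position 1 immediately follows
+position -1:</em>
+</p>
+
+<pre>
+def ordinal_position(first_position_scanned, location):
+    """Which position along the scan a marker falls at, in a
+    coordinate system that skips site 0."""
+    ordinal = location - first_position_scanned + 1
+    # If the span crosses zero, a nonexistent "site 0" was counted; drop it.
+    if first_position_scanned < 0 and location > 0:
+        ordinal -= 1
+    return ordinal
+
+print(ordinal_position(-5, 2))   # 7 -- the SNP is the 7th position scanned
+</pre>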
+<p>
+See also <a href="#region-coord">Region Coordinates.</a>
+</p>
+
+
+<h3><a name="region-coord">Linked Segment Regions and Region Coordinates</a></h3>
+
+<P>Once your data is divided up into coherent segments, if any of these are
+genetically linked to one another, you can combine them into a linked
+segment region (or 'region' for short).  Unrelated coherent segments
+each belong in their own region.
+</p>
+
+<p>
+The Segment Panel pictured above for <tt>chrom2-segment1</tt>
+displays a <tt>Map Position</tt> of <tt>1000</tt>.
+Whenever you have more than one coherent segment in a region, you
+will need to know their relative spacing. This is entered as the
+"map position" and is required to:
+<ul>
+<li>verify that the segments are non-overlapping, and</li>
+<li>model intervening recombinations correctly.</li>
+</ul>
+</p>
+
+<p>
+All Map Positions are given using a single coordinate
+system for the region, the "regional coordinates"
+system.
+If you wish to use region-wide coordinates within segments as
+well, you may. In that case your <tt>first position scanned</tt> would be
+identical to your <tt>map position</tt>, and all <tt>location</tt>
+values should fall between your <tt>map position</tt>
+and the <tt>map position</tt> plus the <tt>length</tt>.
+</p>
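+<p>
+<em>A minimal Python sketch of the consistency this implies (the segment
+values here are hypothetical, not taken from the sample files): segments must
+not overlap, and with region-wide coordinates every marker must lie inside its
+segment's scanned stretch.</em>
+</p>
+
+<pre>
+# Each segment: (map_position, total_length, first_position_scanned, markers)
+segments = [
+    (1000, 500, 1000, [1010, 1200, 1450]),   # hypothetical segment 1
+    (1600, 300, 1600, [1620, 1850]),         # hypothetical segment 2
+]
+
+# Non-overlap: each segment must end before the next one begins.
+spans = sorted((m, m + length) for m, length, _, _ in segments)
+for (s1, e1), (s2, e2) in zip(spans, spans[1:]):
+    assert e1 <= s2, "segments overlap"
+
+# Region-wide coordinates: first position scanned equals the map position,
+# and every marker lies between map position and map position + length.
+for map_pos, length, first_scanned, markers in segments:
+    assert first_scanned == map_pos
+    assert all(map_pos <= loc <= map_pos + length for loc in markers)
+
+print("segment layout looks consistent")
+</pre>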
+
+<P> Do not put samples together in a region if you have reason to
+believe they are actually unlinked.  
+This will result in wrong answers.
+If you put unlinked markers together in a region and also estimate
+recombination, you will get wrong answers and the program will bog down
+horribly as it tries to estimate an infinite recombination rate. 
+</P>
+
+<p>
+The total length of genome that can
+be included in a single region when estimating recombination has an upper
+limit of about a centimorgan, though an extensive study with many samples of
+that length might bog down LAMARC considerably and run very slowly.  A more
+reasonable length is probably half of that.  As a quick rule of thumb, we've
+found that LAMARC runs smoothly for runs where Theta times the recombination
+rate times the total length of a region is 10 or less.</P>
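+<p>
+<em>A quick Python sketch of that rule of thumb; the Theta and recombination
+values below are illustrative only:</em>
+</p>
+
+<pre>
+theta = 0.01          # hypothetical per-site Theta estimate
+rec_rate = 0.2        # hypothetical recombination rate r = C/mu
+region_length = 4000  # sites in the proposed region
+
+product = theta * rec_rate * region_length
+if product <= 10:
+    print(f"Theta * r * length = {product:.1f}: region size should be workable")
+else:
+    print(f"Theta * r * length = {product:.1f}: consider splitting the region")
+</pre>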
+
+
+<h3><a name="microsat-snp">A Multi-Segment Example: Microsatellite next to SNP</a></h3>
+
+<P>If you have data with a microsatellite next to a SNP, you
+want the microsatellite as one coherent segment, the SNP as another coherent
+segment, and both segments as part of the same linked segment region. 
+This section of the documentation walks you through creating such a
+file with the converter in GUI mode.
+</P>
+
+<p>
+Below is a picture of the converter after you have read in the files
+<a href="batch_converter/chrom3microsat.mig">chrom3microsat.mig</a> and
+<a href="batch_converter/chrom3snp.mig">chrom3snp.mig</a>, and have
+set the data types appropriately.
+</p>
+
+
+<img src="batch_converter/images/lam_conv_chrom3_input.png" alt="xxx"/>
+
+<p>
+Since we are modeling an adjacent microsat and SNP, we need to place
+them in the same region. Otherwise, LAMARC will analyze them separately.
+To do this, double click the text inside either of the boxes
+labeled <tt>region</tt> within the <tt>Data Partitions</tt>.
+</p>
+<p>
+You'll see a panel similar to this:
+</p>
+
+<img src="batch_converter/images/lam_conv_chrom3_region_panel.png" alt="xxx"/>
+
+<p>
+Select the single box within the <tt>Merge with selected Region</tt>
+area and click <tt>Apply</tt>. When you return to the main GUI window,
+you will notice that the two coherent segments are now included in
+one region like this:
+</p>
+
+<img src="batch_converter/images/lam_conv_chrom3_region_table.png" alt="xxx"/>
+
+<p>
+Unfortunately the two data files name their samples in different ways:
+the SNP file names each haploid sample while the Microsattelite data
+names each individual. If you try to write a lamarc file at this point,
+you will get this error:
+</p>
+
+<img src="batch_converter/images/lam_conv_chrom3_error_phase_file_needed.png" alt="xxx"/>
+
+<p>
+The solution is to write a 
+<a href="converter_cmd.html#phase">phase information file</a>
+like this:
+</p>
+
+
+<pre>
+    <lamarc-converter-cmd>
+        <individuals>
+            <individual>
+                <name>n_ind0</name>
+                <sample><name>n_ind0_a</name></sample>
+                <sample><name>n_ind0_b</name></sample>
+            </individual>
+            <individual>
+                <name>n_ind1</name>
+                <sample><name>n_ind1_a</name></sample>
+                <sample><name>n_ind1_b</name></sample>
+            </individual>
+            <individual>
+                <name>n_ind2</name>
+                <sample><name>n_ind2_a</name></sample>
+                <sample><name>n_ind2_b</name></sample>
+            </individual>
+            <individual>
+                <name>s_ind0</name>
+                <sample><name>s_ind0_a</name></sample>
+                <sample><name>s_ind0_b</name></sample>
+            </individual>
+            <individual>
+                <name>s_ind1</name>
+                <sample><name>s_ind1_a</name></sample>
+                <sample><name>s_ind1_b</name></sample>
+            </individual>
+        </individuals>
+    </lamarc-converter-cmd>
+</pre>
+
+<p>
+You can also find the actual xml here:
+<a href="batch_converter/chrom3_phase_cmd.xml">chrom3_phase_cmd.xml</a>.
+<br>Read it in using the menu commands <tt>File > Read Command File</tt>.
+</p>
+
+<p>
+Alas, we are still not ready to write a Lamarc file. Attempting to do so results
+in this error:
+</p>
+
+<img src="batch_converter/images/lam_conv_chrom3_error_map_position.png" alt="xxx"/>
+
+<p>
+The problem is that we don't know how close together the microsatellite and
+the SNP are. To solve this problem you'll need to edit fields on the 
+segment panels for each coherent segment.
+</p>
+
+<p>
+Begin by setting the map position of the microsatellite segment to 500.
+Then edit the SNP segment as shown here.
+</p>
+
+<img src="batch_converter/images/lam_conv_chrom3_segment_snp.png" alt="xxx"/>
+
+<p>
+<ul>
+<li> map position -- 501 -- in region-wide coordinates, the location of the
+    start of the area scanned for SNPs,
+<li> total length -- 100 -- the total length scanned for SNPs,
+<li> first position scanned -- 1 -- establishes the start of this segment's
+    local (segment) coordinate system, and
+<li> locations of sampled markers -- 23 -- in segment coordinates, the position
+        at which the SNP was found.
+</ul>
+You should now be able to write the Lamarc file.
+</p>
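+<p>
+<em>As a sanity check (a sketch only, which assumes the microsatellite segment
+occupies just the single position at map position 500), these values can be
+run through the same kind of consistency test as before:</em>
+</p>
+
+<pre>
+# SNP segment values from the panel above.
+map_position = 501
+total_length = 100
+first_position_scanned = 1
+marker_locations = [23]
+
+# The marker must fall inside the scanned stretch (segment coordinates).
+assert all(first_position_scanned <= loc < first_position_scanned + total_length
+           for loc in marker_locations)
+
+# The SNP segment starts at 501, just after the (assumed single-site)
+# microsatellite segment at map position 500, so the two do not overlap.
+assert 500 + 1 <= map_position
+print("chrom3 segment layout looks consistent")
+</pre>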
+
+<P>(<A HREF="converter.html">Back</A> | <A HREF="index.html">Contents</A>
+| <A HREF="migration_matrix.html">Next</A>)</P>
+
+<!--
+//$Id: genetic_map.html,v 1.12 2011/12/09 17:14:03 jmcgill Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/genotype.html b/doc/html/genotype.html
new file mode 100644
index 0000000..a8426d8
--- /dev/null
+++ b/doc/html/genotype.html
@@ -0,0 +1,165 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Haplotypes/Diplotypes</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+<P>(<A HREF="tracer.html">Previous</A> | <A HREF="index.html">Contents</A> |
+<A HREF="viral_data.html">Next</A>)</P>
+<H2>Using genotypic data</H2>
+
+<P>The LAMARC program is not a tool to infer haplotypes in the sense
+of telling you what the best haplotype resolutions would be.  However,
+it can make estimates using genotypic data with unknown phase,
+by including in its search space many different resolutions of the
+haplotypes.</P>
+
+<P>This process is not slow in itself, but it makes the search space
+enormously larger, so you will need to run many more steps.  You will 
+also almost surely need to use heating; otherwise the search tends to 
+get "stuck" as soon as it finds a reasonably compatible tree and set 
+of haplotypes.</P>
+
+<P>If you can get haplotypes by family studies or experimental methods,
+they will produce more accurate results than LAMARC's process.
+However, if you can only get haplotypes by using some other form of
+haplotype-inference software (such as the EM algorithm), it may be
+better, if you have time, to use LAMARC's process instead (using the
+inferred haplotypes as a starting point will speed up the search).  No
+matter how good the haplotype-inference program, it will introduce
+some bias by using only the "best" haplotypes; if these haplotypes
+are not fully correct they will tend to indicate less recombination
+than has actually occurred. </P>
+
+<H3>Input</H3>
+
+<P>The XML format for phase-unknown data includes an extra tag within
+each individual, <phase>.  This tag has an obligatory attribute
+of "type" which can be either "known" or "unknown."  In the "known"
+case, the phase tag encloses a list of all sites whose phase is
+known (and therefore need not be reconsidered during the run).  In
+the "unknown" case, the phase tag encloses a list of all sites
+whose phase is not known (and therefore must be reconsidered).  Use
+whichever is more convenient for your data.</P>
+
+<P>If all sites are known, the phase tag can be omitted, or an empty
+<phase type="unknown"> tag can be used.  If all sites are unknown,
+an empty <phase type="known"> tag can be used.</P>
+
+<P>Only one <phase> tag is allowed per individual.</P>
+
+<P>You do not need to exclude sites which are homozygous, as they will
+not be considered for phase inference anyway; but if any heterozygous
+sites are phase-known, it is much better to indicate them as such
+than to let them be unnecessarily reconsidered.</P>
+
+<P>To indicate that a haplotype-reconsideration arrangement strategy 
+should be used, add a <haplotyping> tag to the <strategy> section, 
+and indicate for each strategy what proportion of the time it should 
+be used.  For example, here is a valid strategy block that will spend 20%
+of its effort on haplotype reconsideration and 80% on tree
+reconsideration:</P>
+
+<PRE>
+ <strategy>
+    <resimulating> 0.8 </resimulating>
+    <haplotyping> 0.2 </haplotyping>
+ </strategy>
+</PRE>
+
+<P>We have used 20% haplotyping with fair success, but you may want
+to experiment.  Note that the resimulating strategy is required;
+100% effort on haplotyping is not a valid option.</P>
+
+<H3>Menu</H3>
+
+<P>To turn haplotype reconsideration on and off, use the Search Strategies
+menu, Rearrangement submenu.  If haplotyping is off, selecting
+its entry will turn it on, and you will be prompted for the 
+frequency of haplotype reconsideration desired (for example, 0.2 
+for 20% effort into reconsideration).  If haplotype reconsideration
+is on, selecting its entry will turn it off.</P>
+
+<P>Note that these options will have no effect unless some
+phase-unknown sites are present in the data set.  Haplotype reconsideration
+is automatically disabled for any region which lacks phase-unknown
+sites.</P>
+
+<H3>Search strategy </H3>
+
+<P>Every step spent reconsidering the haplotypes is time not spent
+reconsidering the tree.  So if you put 20% effort into haplotypes
+you will need at least 25% more steps (1/0.8 times as many) to get the same tree coverage.
+It will probably be safer to double the number of steps so as to
+allow for the increased search space.</P>
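+<P><em>The arithmetic behind that figure (nothing LAMARC-specific; the step
+count is hypothetical):</em></P>
+
+<PRE>
+baseline_steps = 100000        # steps that gave adequate tree searching before
+haplotype_effort = 0.2         # fraction of proposals spent on haplotypes
+
+# Only (1 - effort) of the steps now rearrange the tree, so the chain must be
+# lengthened by 1/(1 - effort) to keep the same number of tree rearrangements.
+steps_needed = baseline_steps / (1 - haplotype_effort)
+print(steps_needed)            # 125000.0, i.e. 25% more steps
+</PRE>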
+
+<P>Haplotype steps are quicker than tree-resimulation steps, so you
+can afford to be fairly generous with the number of haplotype
+steps.  However, we don't recommend putting more than 50% effort
+into haplotype reconsideration, because this may end up optimizing 
+the haplotypes of a very suboptimal tree.</P>
+
+<P>Heating is almost essential for genotypic data unless
+very few sites are phase-unknown.  Signs that the haplotype
+sampler is "stuck" include estimates that don't move far from 
+their starting value, or estimates that are nearly constant within 
+one run but vary wildly between runs.
+Stuck haplotyping runs normally overestimate Theta because they
+are using sub-optimal haplotypes.  Unfortunately the error bars in
+such cases may exclude the truth.</P>
+
+<P>If you have access to a haplotype-inference program, using its
+inferred haplotypes as a starting point may produce better estimates
+more quickly.  We prefer this to simply accepting the externally
+inferred haplotypes as correct.  While they may be nearly 
+correct, they will err in a specific direction (corresponding to
+a downward bias in Theta) because they are "too perfect" compared
+to the truth.</P>
+
+<H3> Evaluating the results </H3>
+
+<P>When haplotype reconsideration is in effect, some extra information
+will be printed in the runtime report and output report to let you
+assess whether it is working well.  (The "runtime report" is the set of 
+messages that Lamarc displays while it is running.  If you select
+"verbose" output, these messages will be appended to your output file.)
+Acceptance rates are given for
+each type of "arranger" (rearrangement strategy) separately, so you might
+see:</P>
+
+<P><PRE>
+Tree-Arranger accepted            149/1591 proposals
+Haplotype-Arranger accepted       277/409 proposals
+</PRE></P>
+
+<P> This means that the resimulating arranger is not accepting very
+many proposed trees, whereas the haplotype-reconsidering arranger is
+accepting over half.  If one of the arrangers is accepting few or no
+trees, your search is probably not working even if the overall acceptance
+rate is satisfactory; at the very least, you will need to run the 
+program for a long time to get adequate searching.</P>
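+<P><em>Expressed as percentages (simple arithmetic on the report above):</em></P>
+
+<PRE>
+arrangers = {"Tree-Arranger": (149, 1591), "Haplotype-Arranger": (277, 409)}
+for name, (accepted, proposed) in arrangers.items():
+    print(f"{name}: {100 * accepted / proposed:.0f}% of proposals accepted")
+# Tree-Arranger: 9% of proposals accepted
+# Haplotype-Arranger: 68% of proposals accepted
+</PRE>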
+
+<P> If multiple temperatures are in use, the reported acceptance
+rates are for the coldest temperature only.</P>
+
+<P>(<A HREF="tracer.html">Previous</A> | <A HREF="index.html">Contents</A> |
+<A HREF="viral_data.html">Next</A>)</P>
+<!--
+//$Id: genotype.html,v 1.17 2007/04/27 19:43:17 mkkuhner Exp $
+-->
+</BODY>
+</HTML>
+
diff --git a/doc/html/glossary.html b/doc/html/glossary.html
new file mode 100644
index 0000000..f854d76
--- /dev/null
+++ b/doc/html/glossary.html
@@ -0,0 +1,430 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Glossary</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF" TEXT="#000000">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+
+<P>(<A HREF="limitations.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="data_required.html">Next</A>)</P>
+
+<H2>Glossary</H2>
+
+<P> This article gives informal definitions of terms used in the
+LAMARC documentation.  For more precise definitions, consult our published
+papers.</P>
+
+<UL>
+
+<A NAME="bayesian"></a><LI><B>Bayesian analysis.</B> In the LAMARC context, an
+analysis which places <A HREF="#prior">priors</a> on the population parameters and then samples
+both possible trees and possible parameters. It reports the relative
+probabilities of the range of parameter values which it visited.  The
+alternative is a <A HREF="#likelihood">Likelihood analysis</a>.</LI> <BR>
+
+<A NAME="brownian"></a><LI><B>Brownian-motion model.</B> A mathematical approximation of the
+<A HREF="#stepwise">stepwise model</a> of microsatellite mutation.  It will run much faster, but
+may be inaccurate if your data are nearly invariant. </LI> <BR>
+
+<LI><B>Burn-in.</B> Discarding some steps of the sampler before
+beginning to record any.  This is done because early steps may
+be highly atypical.  For example, if the sampler starts with a
+deeply unreasonable tree, the first few trees it produces will
+also be unreasonable and probably should be discarded. </LI>
+<BR>
+
+<LI><B>Chain.</B> LAMARC grinds for a while
+producing trees, and then makes an estimate based on
+those trees.  It may repeat this cycle many times, especially in
+a likelihood run.  Each such cycle is a "chain."  "Initial" 
+chains are used to get reasonable starting values of the
+parameters, and are generally shorter; "final" chains are
+used to obtain the actual parameter estimates, and are
+generally longer.  (A Bayesian run is often just one huge
+final chain.)</LI>
+<BR>
+
+<LI><B>Coalescence.</B> Kingman developed the idea of looking at
+a genealogy going backwards in time from modern individuals to
+their ancestors.  He named the resulting pattern the "n-coalescent"
+(later writers have dropped the "n-").  So a coalescent tree is
+the backwards pattern of relationships among modern sampled
+individuals, and a coalescence is a point at which two of those
+individuals have a common ancestor.  In LAMARC, we sometimes
+use "coalescence" to refer to the effect of Theta on the shape
+of the coalescent tree, as opposed to the effects of migration,
+recombination, growth, and so forth.  The proper name of this 
+evolutionary force is "genetic drift." </LI>
+<BR>
+
+<A NAME="curvefile"></a><LI><B>Curve file.</B> In a Bayesian run, a file containing data points
+from the <A HREF="#pdf">probability density function</a> that
+results from collecting the parameters which the chain has visited.  It can
+be read into a spreadsheet (Excel) or math/stat package (Mathematica) to
+produce a picture of the Bayesian <A
+HREF="#postprob">posterior probability curve</a>. </LI> <BR>
+
+<LI><B>Data likelihood.</B> The probability of your sequence data given a
+specific tree (genealogy).  This is also computed by  maximum-likelihood
+phylogeny algorithms such as <A
+HREF="http://evolution.genetics.washington.edu/phylip/progs.data.dna.html">PHYLIP's
+DNAML</a>. 
+Usually expressed as a log-likelihood because it's so small; the resulting
+numbers are negative, and are improved by moving closer to zero.</LI> <BR>
+
+<A NAME="data-uncertainty"></a><LI><B>Data uncertainty</B>
+<P>Lamarc runs on nucleotide data can now model data uncertainty.
+The per-base error rate gives the rate at which each single instance of a
+nucleotide should be assumed to have been miscalled. A value of 0 indicates
+that all were sequenced correctly. A value of 0.001 indicates one in one
+thousand is incorrectly called.
+Note that this is different from using the IUPAC ambiguity codes as it privileges
+one (or more) data values over another. It may be used simultaneously
+with the IUPAC codes.
+As of December, 2009 the per base error rate must be set to a single
+value for each <a href="#segment">segment</a>.
+See the <A HREF="menu.html#data-uncertainty">data uncertainty</A> segment in the
+<A HREF="menu.html">Instructions for using the LAMARC menu</A> 
+documentation for more information.
+</P>
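+<P><em>To get a feel for what a given per-base error rate implies, here is a
+trivial Python illustration with made-up sample sizes:</em></P>
+<PRE>
+error_rate = 0.001      # per-base error rate set for the segment
+n_sequences = 30        # hypothetical number of sampled sequences
+segment_length = 2000   # hypothetical number of nucleotides per sequence
+
+expected_miscalls = error_rate * n_sequences * segment_length
+print(expected_miscalls)   # 60.0 bases expected to be miscalled overall
+</PRE>
+</LI><BR>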
+
+<LI><B>Divergence.</B>  Splitting of
+an ancestral population into two descendant populations.  There may optionally
+be ongoing migration between the descendants after the split.
+</LI><BR>
+
+<A NAME="effpopsize"></a><LI><B>Effective population size.</B> A theoretical concept that converts a
+real population into an idealized Wright-Fisher population.  The effective
+population size is the size of a Wright-Fisher population that would have
+the same amount of genetic drift (same <A
+HREF="#theta">Theta</a>) as our real one.  Most real
+populations have an effective population size much smaller than their
+census size due to factors like non-breeding individuals, overlapping
+generations, and unequal reproductive success between genders. </LI> <BR>
+
+<LI><B>Epoch time. </B>  In Lamarc, an "epoch" is the period of time between one population
+splitting event and the next, in a model with divergence.  The parameter reported
+for Lamarc epoch times is the time of the population split in generations times
+the mutation rate per site in mutations per generation, counting backwards from
+the present (when the samples were collected).  Note that it is a time (the
+time at which the populations split) and not a length of time (the length of time
+between one population split and the next).  If you wish to know the split time in
+generations or years, you will need an external estimate of the mutation rate.
+</LI><BR>
+
+<LI><B>F84 model.</B> Evolutionary model proposed by Joe Felsenstein (in
+1984) in which the frequencies of the four nucleotides and the ratio
+of transitions to transversions may be varied.  Several simpler
+models such as Kimura 2-Parameter and Jukes-Cantor can easily
+be expressed as subsets of F84.  (They can also be expressed as
+subsets of GTR but this will be much slower.) </LI>
+<BR>
+
+<A NAME="growth"></a><LI><b>Growth (g).</b>
+The parameter governing the
+exponential growth model used by lamarc.  Theta at any time
+t (where t is measured increasing into the past, and is
+in "mutational units" where one unit of time is the expected
+time until a site mutates once) is equal
+to modern-day Theta times exp(-gt).  Positive values of g
+thus indicate a growing population and negative values a
+shrinking one.  Interpretation of the actual value requires
+knowledge of the mutation rate, because of the use of
+mutational units in defining time.  However, even without
+this, g values from organisms of similar mutation rate can
+be compared.
+</LI>
+<br>
+
+<LI><B>GTR model.</B> The General Time-Reversible Model, the
+most general easily tractable model of nucleotide sequence
+data.  Can be used to emulate simpler models, but if a model
+can be expressed as a simplification of F84 instead it will
+run faster. </LI>
+<BR>
+
+<A NAME="haplotype"></a><LI><B>Haplotype.</B> A collection of markers known to come from
+the same chromosomal copy in an individual.  See "<A
+HREF="#phase">Phase</a>." </LI>
+<BR>
+
+<LI><B>Heating.</B> Improving the search performance of a
+Markov chain Monte Carlo sampler by adding additional searches
+which see a smoothed-out or "heated" version of the search space.
+We refer to each additional search as a "temperature".  This method is also
+known as 'Metropolis-coupled Markov chain Monte Carlo', or MCMCMC, or
+MC<sup>3</sup>.</LI>
+<BR>
+
+<A NAME="kallele"></a><LI><B>K-Allele model.</B> A mutational model which assumes that
+there are K states which a marker can be in, and change from any
+state to any other is equally probable.  Jukes-Cantor is a
+K-Allele model (with K=4) for DNA data (though in LAMARC you
+will need to use an actual DNA model to get the same effect).
+The K-Allele model is suitable for data where we do not know
+the pattern of mutation, such as presence/absence of a 
+chromosomal modification, or for electrophoretic mobility data.
+It may also be useful for microsatellites
+which do not fit a <A HREF="#stepwise">stepwise model</a> well. </LI>
+<BR>
+
+<A NAME="likelihood"></a><LI><B>Likelihood analysis.</B> In the LAMARC
+context, an analysis which collects trees at a particular driving value of
+the parameter(s)  and then uses those trees to make a likelihood curve for
+other values of the parameters.  The alternative is a <A
+HREF="#bayesian">Bayesian analysis</a>.
+</LI>
+<BR>
+
+<A NAME="locus"></a><LI><B>Locus.</B> In ordinary genetic terminology, a
+gene or other defined piece of chromosome.  In LAMARC, we use the terms <A
+HREF="#segment">segment</a> and <A HREF="#region">region</a> to
+indicate sections of genetic information that would be referred to in other
+places as 'loci'.  We try to be consistent in using our terms, but may have
+slipped on occasion--it's likely we mean 'segment' if we accidentally said
+'locus' somewhere.</LI> <BR>
+
+<A NAME="marker"></a><LI><B>Marker.</B> A position along the chromosome for
+which we have collected data.  This is in contrast to "<A
+HREF="#site">site</a>", which is any position on the chromosome within the
+area we are considering, even if we didn't collect data for it.  SNP data
+involves choosing a few interesting markers out of a much larger collection
+of sites.</LI> <BR>
+
+<LI><B>Markov chain Monte Carlo.</B> A strategy for integrating
+functions which cannot be solved directly, such as finding
+the probability of sequence data given population parameters.
+"Markov chain" means that each step of the search depends only
+on the previous step, and "Monte Carlo" means that random
+choices are used rather than, say, a systematic grid search.
+Abbreviated MCMC.</LI>
+<BR>
+
+<LI><B>Maximizer.</B> The part of LAMARC which analyzes stored trees or
+parameters to determine the best estimate of each parameter.  Can be used
+in conjunction with <A HREF="#profiles">profiling</a> to estimate error
+bars for these parameters.</LI> <BR>
+
+<A NAME="mle"></a><LI><B>Maximum likelihood estimate (MLE).</B>  The set of parameters that
+maximize the <A HREF="#postlike">posterior likelihood</a> (the sum over
+sampled genealogies of the probability of the data given the genealogy and
+the probability of the genealogy given the parameters). In other words,
+this is the best solution found by a <A HREF="#likelihood">likelihood
+run</a>. Contrast with the <A HREF="#mpe">most probable
+estimate</a>, the best solution found in a <A HREF="#bayesian">Bayesian
+analysis</a>.</LI> <BR>
+
+<A NAME="migration"></a><LI><B>Migration rate.</B> LAMARC estimates <b>immigration</b> rates
+(movement of breeding individuals into a population) and
+records them as M=m/mu, where m is the chance of immigration per
+individual per generation, and mu is the chance of mutation per
+site per generation.  If you are interested in 4Nm instead,
+multiply our estimate of M by our estimate of <A HREF="#theta">Theta</a>
+for the recipient population.
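+<em>For example, with made-up numbers, converting LAMARC's M and Theta
+estimates to 4Nm:</em>
+<PRE>
+M = 250.0        # immigration rate M = m/mu reported by LAMARC (hypothetical)
+theta = 0.004    # Theta of the recipient population (hypothetical)
+print(M * theta)  # 1.0, the corresponding 4Nm
+</PRE>
+</LI>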
+<BR>
+
+<A NAME="mixedks"></a><LI><B>Mixed KS model.</B> A microsatellite mutational model that assumes some proportion
+of changes are among random states (like <A HREF="#kallele">K-Allele</a>)
+and the remainder are among adjacent states (like <A
+HREF="#stepwise">Stepwise</a>).  May be a better fit than either model
+alone for some microsatellite data.  LAMARC allows you to specify the
+proportion of stepwise to multistep changes or to allow the program to try
+to optimize this ratio as it runs.  The parameter percent_stepwise
+is the proportion of stepwise changes; percent_stepwise=0 is the K-Allele
+model and percent_stepwise=1 is the Stepwise model.</LI> <BR>
+
+<A NAME="mpe"></a><LI><B>Most probable estimate (MPE).</B>  The highest
+point on the <A HREF="#postprob">posterior probability curve</a> for a given
+parameter.  Effectively, the point that fell closest to the sampled
+parameter the highest number of times (think of the
+tallest bar in a histogram).  In other words, this is the best solution
+found by a <A HREF="#bayesian">Bayesian run</a>. Contrast with <A
+HREF="#mle">maximum likelihood estimate</a>, the best solution found by a <A
+HREF="#likelihood">likelihood analysis</a>.</LI> <BR>
+
+<LI><B>Mu.</B> Neutral mutation rate per <A HREF="#site">site</a> or <A
+HREF="#marker">marker</a> (depending on data type) per generation.  If you
+have multiple data types, the relative mu rate must be set for each to
+ensure accurate co-estimation of your parameters.  Please be careful in
+comparing LAMARC runs with results from other methods, as these often report
+mu per <A HREF="#segment">segment</a> rather than mu per site. </LI>
+<BR>
+
+<LI><b>Parameter.</b>  In LAMARC parlance, a parameter is a numerical
+quality that describes an aspect of a <A HREF="#population">population</a>
+or set of populations. LAMARC can co-estimate <A
+HREF="#effpopsize">effective population sizes</a> (<A
+HREF="#theta">Theta</a>), <A HREF="#migration">migration rates</a>, the <A
+HREF="#recombination">recombination rate</a>, and <A HREF="#growth">growth
+rates</a> for one or more populations.</li> <br>
+
+<A NAME="phase"></a><LI><B>Phase.</B> Phase is information about whether or
+not several variants seen in the same individual are on the same chromosome
+of that individual, or different chromosomes.  (In other words, whether
+they are part of the same <A HREF="#haplotype">haplotype</a>.) If data is of
+"known phase" then we know which variants group together on the same
+chromosome; if it is of "unknown phase" we only know which variants are
+present, but not how they are allocated among the gene copies.  Another way
+of saying this is that haplotype data is phase-known and genotype data is
+phase-unknown.  </LI> <BR>
+
+<A NAME="pointprob"></a><LI><B>Point probability.</B> The height of the <A
+HREF="#postprob">posterior probability curve</a> at a given value of an
+estimated parameter.  Point probabilities can be compared to other point
+probabilities on the same curve (for example, to find the <A HREF="#mpe">most
+probable estimate</a>), but are otherwise not as useful as integrating a
+section of that curve.</LI>
+<BR>
+
+<A NAME="population"></a><LI><B>Population.</B> A group of organisms that are more or less
+freely interbreeding among themselves and isolated from other groups. </LI>
+<BR>
+
+<A NAME="postlike"></a><LI><B>Posterior likelihood.</B> The probability of the observed
+genealogies at their best (MLE) parameter values, divided by
+their probability at the values which produced them ("driving
+values").  This is quoted because it can be useful in diagnosing
+a poorly performing run; if it is very large, the driving values
+were poor ones and the run should be extended until better ones
+can be found.  It can <B>not</B> be used in a likelihood ratio
+test between runs, because it is a ratio of two independent likelihoods,
+neither of which we can actually compute on its own.  Posterior likelihoods
+are produced in a <A HREF="#likelihood">likelihood analysis</a>.</LI>
+<BR>
+
+<A NAME="postprob"></a><LI><b>Posterior probability curve.</b>  A <A
+HREF="#pdf">probability density function</a> that describes the  relative
+probabilities that the true value of a parameter is a particular value. 
+Useful when comparing the relative probabilities of two or more possible
+values for the parameter, or when integrating to find the total probability
+that the true value of the parameter is within a range of values (as during
+<A HREF="#profiles">profiling</a>).  Posterior probability curves are
+produced in a <A HREF="#bayesian">Bayesian analysis</a>, and exported as <A
+HREF="#curvefile">curvefiles</a> by LAMARC.  See also <A
+HREF="#pointprob">Point probability</a>.</li> <br>
+
+<A NAME="prior"></a><LI><B>Prior.</b> A LAMARC prior for a <A
+HREF="#bayesian">Bayesian run</a> is a curve that describes your prior
+knowledge of the possible range of values for a given parameter.  LAMARC
+uses only 'flat' priors, giving the parameter an equal chance to be anywhere
+within the allowed range, but the density of those priors may be either
+linear or logarithmic, depending on how the parameter is expected to vary.
+</LI>
+<br>
+
+<A NAME="pdf"></a><LI><b>Probability Density Function.</b>  A curve that
+describes a probability distribution in terms of integrals (the area under a
+probability density function must be 1.0).  More informally, it can be
+viewed as a 'smoothed out' version of a histogram.</li>
+<br>
+
+<A NAME="profiles"></a><LI><B>Profiles.</B> A picture of the uncertainty
+in LAMARC's results.  In a likelihood run, profiles are produced
+by holding one parameter constant at a series of different values,
+and for each value maximizing all other parameters.  This is like
+taking a slice through the multi-dimensional surface to reveal the landscape
+with respect to one parameter.  In a Bayesian run, profiles
+are produced by showing the probability curve for one parameter
+at a time; no information is available about how other parameters
+co-vary with the chosen one.  In either case, percentile profiles
+show the results at percentiles of the distribution (for example,
+the 95% marks) while the less expensive fixed profiles show
+the results at arbitrary points chosen in advance. </LI>
+<BR>
+
+<A NAME="recombination"></a><LI><b>Recombination rate (r).</B>
+LAMARC measures recombination
+rate as C/mu, where C is the rate of recombination per inter-site
+link per generation, and mu is the mutation rate per site per
+generation.  Be careful in comparing this with other estimates,
+which are often of 4NC, or per-locus rather than per-site.
+</LI><BR>
+
+<A NAME="region"></a><LI><B>Region (or 'Linked Segment Region').</B> A
+stretch of linked markers along a chromosome.  The usual term is "<A
+HREF="#locus">locus</a>", but a LAMARC region can contain what a geneticist
+might consider multiple loci (e.g. coding regions for several genes) as long
+as they are all linked and their map is known.  In the program and in our
+documentation, we usually refer to this kind of region as a "linked segment
+region", because our regions can contain multiple linked <A
+HREF="#segment">segments</a>. </LI> <BR>
+
+<A NAME="replicate"></a><LI><B>Replicate.</B> An internal repetition of a
+LAMARC analysis used to produce a more refined result, and to measure
+whether the run is long enough to produce consistent results.  In a <A
+HREF="#likelihood">likelihood analysis</a>, when multiple replicates are
+run, their results are combined via  Geyer's reverse logistic regression
+method to produce a joint estimate that should be more accurate than the
+individual ones.  In a <A HREF="#bayesian">Bayesian analysis</a>, when
+multiple replicates are run, the resulting <A HREF="#postprob">posterior
+probability curves</a> are simply averaged. </LI> <BR>
+
+<A NAME="runreport"></a><LI><B>Runtime report.</B> The on-screen report
+showing each chain and its acceptance rates, parameter estimates, times, and
+so forth; also reprinted as the last entry in the output file. </LI> <BR>
+
+<A NAME="segment"></a><LI><B>Segment (or Coherent Segment).</B> A contiguous
+stretch of <A HREF="#site">sites</a> containing <A HREF="#marker">markers</a>
+of all the same data type.  Multiple segments can be linked together within
+a <A HREF="#region">region</a>, even when the data types differ. </LI> <BR>
+
+<A NAME="site"></a><LI><B>Site.</B> A position along the chromosome in the
+area under study, whether or not you have observed any data at that
+position. Contrasts with "<A HREF="#marker">marker</a>", which is a site at
+which you have observed data.  Sites are important because we must consider
+every site as a possible location for a recombination, whether or not it is
+a marker; the total chance of recombination is proportional to the number
+of sites. </LI> <BR>
+
+<A NAME="stepwise"></a><LI><B>Stepwise model.</B>  A mutational model that
+assumes mutations happen between adjacent states.  It is generally used
+for  microsatellites, and in this case the assumption is that
+microsatellites increase or decrease by one repeat unit at a time.  (It may
+still work well even if this assumption is occasionally violated; if you
+believe it is often violated, the <A HREF="#kallele">K-Allele</a> or <A
+HREF="#mixedks">Mixed KS</a> models may be more
+appropriate.)  The <A HREF="#brownian">Brownian model</a> approximates Stepwise and runs much
+faster, but may break down if population sizes are very small.</LI> <BR>
+
+<A NAME="theta"></a><LI><B>Theta.</B> Population parameter controlling the
+amount of genetic diversity in a population, and equal to two times the
+neutral mutation rate per site times the number of heritable gene copies
+in the population.  For a diploid population, the Theta for nuclear DNA is
+equal to 4N(mu).  </LI> <BR>
+
+<LI><B>Tip.</B> A single observed sequence; generally a haplotype,
+though if you do not know the phase of your data you can make
+fictional haplotypes to use as initial tips.  Called "tip" because
+it appears at one of the tips of the genealogical tree. </LI>
+<BR>
+
+<LI><B>Tree.</B> An informal word for the coalescent genealogy which
+relates the sampled sequences.  Formally, genealogies with recombination
+should be called Ancestral Recombination Graphs, but we frequently
+call them trees even though they are not strictly tree-like.</LI>
+<BR>
+
+
+</UL>
+
+<P>(<A HREF="limitations.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="data_required.html">Next</A>)</P>
+
+<!--
+//$Id: glossary.html,v 1.19 2012/05/16 17:14:01 mkkuhner Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/growthmenu.html b/doc/html/growthmenu.html
new file mode 100644
index 0000000..8c245d9
--- /dev/null
+++ b/doc/html/growthmenu.html
@@ -0,0 +1,39 @@
+<H4> <A NAME="growth">Growth parameters:  the "Growth" force  </A></H4>
+
+<P> This submenu allows you to turn on and off estimation of population
+growth rates, and to set starting parameters.  </P>
+
+<P> If there is a single population in your data, Lamarc will estimate a
+single growth rate for it.  If there are multiple populations, Lamarc will
+estimate one independent growth rate per population.</P>
+
+<P> If the type of Growth is exponential (the only type currently
+allowed) then if we label growth as <i>g</i>, then the relationship
+between Theta at a time <i>t</i> > 0 in the past and Theta at the present
+day (<i>t</i> = 0) is:</P>
+
+    <center>Theta(<i>t</i>) = Theta<sub>present day</sub> e<sup>-<i>gt</i></sup></center>
+
+<p>This means that a positive value of <i>g</i>
+represents a growing population, and a negative value, a shrinking one. </P>
+
+<P> Time is measured in units of mutations (i.e., 1 <i>t</i> is the average
+number of generations it takes one site to accumulate one mutation), and
+<i>g</i> is measured in the inverse units of time.  If mu is known, multiply
+generations by mu to get units of <i>t</i>, or conversely, divide
+<i>t</i> by mu to get a number of generations.</P>
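+<P><em>A small Python sketch of these conversions and of the growth curve
+itself; the mutation rate and parameter values are purely illustrative:</em></P>
+
+<PRE>
+import math
+
+mu = 1e-8           # hypothetical mutation rate per site per generation
+theta_now = 0.01    # hypothetical present-day Theta
+g = 100.0           # hypothetical growth rate (positive = growing)
+
+generations_ago = 50000
+t = generations_ago * mu                  # time in mutational units
+theta_then = theta_now * math.exp(-g * t)
+
+print(f"t = {t:g} mutational units = {t / mu:g} generations ago")
+print(f"Theta back then was about {theta_then:.5f}")
+</PRE>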
+
+<P> Additionally, it is now possible to choose between a "Stick" or
+"Curve" implementation for the chosen type of growth.  Generally,
+if possible, one should always use the "Curve" implementation, as the
+"Stick" is just an approximation to the "Curve".  We provide the "Stick"
+because in some cases only it is available.</P>
+
+<P> Starting parameter input for growth is similar to that for Theta, 
+except that no quick pairwise calculators are available; you will have to 
+either accept default values or enter values of your own.  Avoid highly
+negative values (less than -10) as these have some risk of producing
+infinitely long trees which must then be rejected.</P>
+
+Type of Growth
+Growth implemented via
diff --git a/doc/html/images/LamarcAnalysisScreen.png b/doc/html/images/LamarcAnalysisScreen.png
new file mode 100644
index 0000000..de2e6fd
Binary files /dev/null and b/doc/html/images/LamarcAnalysisScreen.png differ
diff --git a/doc/html/images/LamarcDataScreen.png b/doc/html/images/LamarcDataScreen.png
new file mode 100644
index 0000000..34a07c4
Binary files /dev/null and b/doc/html/images/LamarcDataScreen.png differ
diff --git a/doc/html/images/LamarcIOScreen.png b/doc/html/images/LamarcIOScreen.png
new file mode 100644
index 0000000..e7f9144
Binary files /dev/null and b/doc/html/images/LamarcIOScreen.png differ
diff --git a/doc/html/images/LamarcMainScreen.png b/doc/html/images/LamarcMainScreen.png
new file mode 100644
index 0000000..6705786
Binary files /dev/null and b/doc/html/images/LamarcMainScreen.png differ
diff --git a/doc/html/images/LamarcOverviewScreen.png b/doc/html/images/LamarcOverviewScreen.png
new file mode 100644
index 0000000..f81ef0f
Binary files /dev/null and b/doc/html/images/LamarcOverviewScreen.png differ
diff --git a/doc/html/images/LamarcSearchScreen.png b/doc/html/images/LamarcSearchScreen.png
new file mode 100644
index 0000000..7c21c8e
Binary files /dev/null and b/doc/html/images/LamarcSearchScreen.png differ
diff --git a/doc/html/images/browser-gtk.gif b/doc/html/images/browser-gtk.gif
new file mode 100644
index 0000000..039ea5b
Binary files /dev/null and b/doc/html/images/browser-gtk.gif differ
diff --git a/doc/html/images/correlated1.gif b/doc/html/images/correlated1.gif
new file mode 100644
index 0000000..b371801
Binary files /dev/null and b/doc/html/images/correlated1.gif differ
diff --git a/doc/html/images/correlated2.gif b/doc/html/images/correlated2.gif
new file mode 100644
index 0000000..59b697a
Binary files /dev/null and b/doc/html/images/correlated2.gif differ
diff --git a/doc/html/images/datatab-2-osx.gif b/doc/html/images/datatab-2-osx.gif
new file mode 100644
index 0000000..a2abc3c
Binary files /dev/null and b/doc/html/images/datatab-2-osx.gif differ
diff --git a/doc/html/images/datatab-gtk.gif b/doc/html/images/datatab-gtk.gif
new file mode 100644
index 0000000..720c0ce
Binary files /dev/null and b/doc/html/images/datatab-gtk.gif differ
diff --git a/doc/html/images/datatab-help-gtk.gif b/doc/html/images/datatab-help-gtk.gif
new file mode 100644
index 0000000..df3f74e
Binary files /dev/null and b/doc/html/images/datatab-help-gtk.gif differ
diff --git a/doc/html/images/gui_lam_conv.gif b/doc/html/images/gui_lam_conv.gif
new file mode 100644
index 0000000..f3b5262
Binary files /dev/null and b/doc/html/images/gui_lam_conv.gif differ
diff --git a/doc/html/images/lam_conv.gif b/doc/html/images/lam_conv.gif
new file mode 100644
index 0000000..f754ee6
Binary files /dev/null and b/doc/html/images/lam_conv.gif differ
diff --git a/doc/html/images/lamarc-128.png b/doc/html/images/lamarc-128.png
new file mode 100644
index 0000000..6969d4d
Binary files /dev/null and b/doc/html/images/lamarc-128.png differ
diff --git a/doc/html/images/lamarc-256.png b/doc/html/images/lamarc-256.png
new file mode 100644
index 0000000..ab4172d
Binary files /dev/null and b/doc/html/images/lamarc-256.png differ
diff --git a/doc/html/images/lamarc.gif b/doc/html/images/lamarc.gif
new file mode 100644
index 0000000..8e6f0e9
Binary files /dev/null and b/doc/html/images/lamarc.gif differ
diff --git a/doc/html/images/loci-gtk.png b/doc/html/images/loci-gtk.png
new file mode 100644
index 0000000..d66fbd2
Binary files /dev/null and b/doc/html/images/loci-gtk.png differ
diff --git a/doc/html/images/loci2-gtk.png b/doc/html/images/loci2-gtk.png
new file mode 100644
index 0000000..ed40188
Binary files /dev/null and b/doc/html/images/loci2-gtk.png differ
diff --git a/doc/html/images/membershiptab-1-gtk.gif b/doc/html/images/membershiptab-1-gtk.gif
new file mode 100644
index 0000000..a78832a
Binary files /dev/null and b/doc/html/images/membershiptab-1-gtk.gif differ
diff --git a/doc/html/images/membershiptab-2-gtk.gif b/doc/html/images/membershiptab-2-gtk.gif
new file mode 100644
index 0000000..c0047c7
Binary files /dev/null and b/doc/html/images/membershiptab-2-gtk.gif differ
diff --git a/doc/html/images/membershiptab-help-1-gtk.gif b/doc/html/images/membershiptab-help-1-gtk.gif
new file mode 100644
index 0000000..df54d09
Binary files /dev/null and b/doc/html/images/membershiptab-help-1-gtk.gif differ
diff --git a/doc/html/images/membershiptab-help-2-gtk.gif b/doc/html/images/membershiptab-help-2-gtk.gif
new file mode 100644
index 0000000..b589e89
Binary files /dev/null and b/doc/html/images/membershiptab-help-2-gtk.gif differ
diff --git a/doc/html/images/partitions-gtk.png b/doc/html/images/partitions-gtk.png
new file mode 100644
index 0000000..f0ec55e
Binary files /dev/null and b/doc/html/images/partitions-gtk.png differ
diff --git a/doc/html/images/partitions2-gtk.png b/doc/html/images/partitions2-gtk.png
new file mode 100644
index 0000000..27d463c
Binary files /dev/null and b/doc/html/images/partitions2-gtk.png differ
diff --git a/doc/html/images/populations-gtk.png b/doc/html/images/populations-gtk.png
new file mode 100644
index 0000000..6359d14
Binary files /dev/null and b/doc/html/images/populations-gtk.png differ
diff --git a/doc/html/images/regiontab-1-gtk.gif b/doc/html/images/regiontab-1-gtk.gif
new file mode 100644
index 0000000..7965086
Binary files /dev/null and b/doc/html/images/regiontab-1-gtk.gif differ
diff --git a/doc/html/images/regiontab-2-gtk.gif b/doc/html/images/regiontab-2-gtk.gif
new file mode 100644
index 0000000..080314d
Binary files /dev/null and b/doc/html/images/regiontab-2-gtk.gif differ
diff --git a/doc/html/images/startup-gtk.gif b/doc/html/images/startup-gtk.gif
new file mode 100644
index 0000000..44e3d3b
Binary files /dev/null and b/doc/html/images/startup-gtk.gif differ
diff --git a/doc/html/images/startup-gtk.png b/doc/html/images/startup-gtk.png
new file mode 100644
index 0000000..0090978
Binary files /dev/null and b/doc/html/images/startup-gtk.png differ
diff --git a/doc/html/images/startup-win.gif b/doc/html/images/startup-win.gif
new file mode 100644
index 0000000..3d7b569
Binary files /dev/null and b/doc/html/images/startup-win.gif differ
diff --git a/doc/html/images/tracer_trend.png b/doc/html/images/tracer_trend.png
new file mode 100644
index 0000000..57ac73c
Binary files /dev/null and b/doc/html/images/tracer_trend.png differ
diff --git a/doc/html/images/uncorrelated.gif b/doc/html/images/uncorrelated.gif
new file mode 100644
index 0000000..849ac2b
Binary files /dev/null and b/doc/html/images/uncorrelated.gif differ
diff --git a/doc/html/images/variably_correlated.gif b/doc/html/images/variably_correlated.gif
new file mode 100644
index 0000000..ae04d44
Binary files /dev/null and b/doc/html/images/variably_correlated.gif differ
diff --git a/doc/html/index.html b/doc/html/index.html
new file mode 100644
index 0000000..d3fbf56
--- /dev/null
+++ b/doc/html/index.html
@@ -0,0 +1,97 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation</TITLE>
+</HEAD>
+
+<BODY BGCOLOR="#FFFFFF" TEXT="#000000">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+<H1 ALIGN="CENTER"> LAMARC - Likelihood Analysis with Metropolis Algorithm
+using Random Coalescence<P>
+<IMG SRC="images/lamarc.gif" BORDER="1"  HEIGHT="100" ALT="[LAMARC logo]" 
+ALIGN="MIDDLE"></P>
+</H1>
+
+<P> This documentation covers use of the program LAMARC, which can estimate
+population sizes, migration rates, growth rates, and recombination rate
+using a Metropolis-Hastings Monte Carlo Markov Chain algorithm.  LAMARC's
+'sister program' is <A HREF="http://popgen.sc.fsu.edu/">MIGRATE</a>, with
+which it shares history, algorithms, and some code base, but which is now
+being maintained exclusively by Peter Beerli at Florida State
+University.</P>
+
+<UL>
+<H3>General:</H3>
+<LI> <A HREF="overview.html">Overview</A></LI>
+<LI> <A HREF="changes.html">Changes in Version 2.1.8</A></LI>
+<LI> <A HREF="upcoming.html">Plans for the Future</A></LI>
+<LI> <A HREF="troubleshooting.html">Frequently Asked Questions</A></LI>
+<LI> <A HREF="messages.html">Error and Warning Messages</A></LI>
+<LI> <A HREF="limitations.html">Limitations of LAMARC</A></LI>
+<LI> <A HREF="glossary.html">Glossary of Terms</A></LI>
+<H3>Tutorials:</H3>
+<LI> <A HREF="data_required.html">What kind of data to use with LAMARC</A></LI>
+<LI> <A HREF="tutorial.html">How to design a LAMARC analysis</A></LI>
+<LI> <A HREF="bayes_howto.html">How to design a Bayesian LAMARC analysis</A></LI>
+<LI> <A HREF="tutorial2.html">Interpreting the results</A></LI>
+<LI> <A HREF="parallel.html">Combining multiple computers to complete a LAMARC run</A></LI>
+<LI> <A HREF="mapping.html">Using LAMARC for fine-scale mapping</A></LI>
+<H3>Specific Topics:</H3>
+<LI> <A HREF="compiling.html">Compiling LAMARC</A></LI>
+<LI> <A HREF="converter.html">File conversion utilities</A></LI>
+<LI> <A HREF="genetic_map.html">Modeling Linkage Properties and Relative Mutation Rates</A></LI>
+<LI> <A HREF="migration_matrix.html">Migration Matrix</A></LI>
+<LI> <A HREF="divergence.html">Divergence</A></LI>
+<LI> <A HREF="panels.html">Panel Corrections</A></LI>
+<LI> <A HREF="converter_cmd.html">The Converter Command File</A></LI>
+<LI> <A HREF="xmlinput.html">XML data input format</A></LI>
+<LI> <A HREF="menu.html">Using the LAMARC interactive menu</A></LI>
+<LI> <A HREF="regions.html">Dividing your data into regions and segments</A></LI>
+<LI> <A HREF="data_models.html">Modelling the mutational process</A></LI>
+<LI> <A HREF="gamma.html">Handling variation in mutation rate (several types, including gamma-distributed rates)</A></LI>
+<LI> <A HREF="forces.html">Evolutionary forces</A></LI>
+<LI> <A HREF="parameters.html">What do the parameters mean?</A></LI>
+<LI> <A HREF="search.html">Search strategies</A></LI>
+<LI> <A HREF="output.html">Interpreting the output</A></LI>
+<LI> <A HREF="bayes.html">Bayesian analysis</A></LI>
+<LI> <A HREF="tracer.html">Using Tracer to examine LAMARC results</A></LI>
+<LI> <A HREF="genotype.html">Genotypic (phase-unknown) data</A></LI>
+<LI> <A HREF="viral_data.html">Specific issues with virus data</A></LI>
+</UL>
+<H3>References:</H3>
+<P><A HREF="http://www.genetics.org/cgi/content/abstract/175/1/155">Kuhner,
+M. K. and L. P. Smith, 2007  <i>"Comparing Likelihood and Bayesian Coalescent
+Estimation of Population Parameters"</i> Genetics 175: 155-165.</a></P>
+<P>
+
+<P><A
+HREF="http://bioinformatics.oxfordjournals.org/cgi/content/abstract/22/6/768">
+Kuhner, M. K., 2006  <i>"LAMARC 2.0: maximum likelihood and Bayesian estimation  of
+population parameters."</i>  Bioinformatics 22(6): 768-770.</a> </P>
+
+
+<A HREF="overview.html">Next</A>
+<HR>
+Please report problems to <A HREF="MAILTO:lamarc at u.washington.edu">
+lamarc at u.washington.edu</A>. The LAMARC package is maintained by the
+lab of
+<A HREF="http://evolution.gs.washington.edu/lamarc/mkkuhner.html"> Mary K. Kuhner </A>
+<ADDRESS> Mary Kuhner,
+Department of Genome Sciences,
+University of Washington, Box 355065,
+Seattle, WA  98195-5065,  USA.  Phone: (206) 543-8751, Fax: (206) 543-0754</ADDRESS>
+
+<!--
+//$Id: index.html,v 1.50 2012/05/25 23:28:10 ewalkup Exp $
+-->
+</BODY>
+</HTML>
+
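
The index page above describes LAMARC as estimating population sizes, migration rates, growth rates, and recombination rate with a Metropolis-Hastings Markov chain Monte Carlo sampler, and the summary file diffed next records per-chain acceptance rates (the accrate entries) and parameter estimates from runs of exactly that kind of sampler. What follows is only a minimal sketch of the Metropolis acceptance rule, written in Python under the assumption of a symmetric proposal; the toy likelihood, proposal, and parameter names are invented for illustration and are not LAMARC code or its file formats.

    import math
    import random

    def metropolis(log_likelihood, propose, theta0, n_steps=5000, seed=42):
        """Minimal Metropolis sampler (symmetric proposal assumed).

        log_likelihood(theta) -> float; propose(theta, rng) -> candidate theta.
        Returns the drawn samples and the fraction of accepted proposals.
        """
        rng = random.Random(seed)
        theta, log_l = theta0, log_likelihood(theta0)
        samples, accepted = [], 0
        for _ in range(n_steps):
            cand = propose(theta, rng)
            cand_log_l = log_likelihood(cand)
            delta = cand_log_l - log_l
            # Accept with probability min(1, L(cand)/L(theta)), computed in log space.
            if delta >= 0 or rng.random() < math.exp(delta):
                theta, log_l = cand, cand_log_l
                accepted += 1
            samples.append(theta)
        return samples, accepted / n_steps

    if __name__ == "__main__":
        # Toy target: a made-up "true" Theta value; nothing here comes from LAMARC output.
        target = 0.005
        log_l = lambda t: -((t - target) ** 2) / (2 * 0.001 ** 2) if t > 0 else float("-inf")
        propose = lambda t, rng: t + rng.gauss(0.0, 0.0005)   # symmetric random-walk step
        draws, acc = metropolis(log_l, propose, theta0=0.01)
        print("acceptance rate %.2f, posterior mean %.5f" % (acc, sum(draws) / len(draws)))

In this sketch the printed acceptance rate plays a diagnostic role broadly similar to the accrate values in the chain summaries below: values near zero or near one usually mean the proposal steps are too large or too small for efficient mixing.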
diff --git a/doc/html/insumfile.2reg3rep.html b/doc/html/insumfile.2reg3rep.html
new file mode 100644
index 0000000..2c71604
--- /dev/null
+++ b/doc/html/insumfile.2reg3rep.html
@@ -0,0 +1,3327 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+&ltXML-summary-file&gt
+&lt!-- Lamarc v. 2.0
+     Please do not modify. --&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.195000000000000007 &lt/accrate&gt
+		&ltllikemle&gt 4.50058419396642684 &lt/llikemle&gt
+		&ltllikedata&gt -3620.90996263017314 &lt/llikedata&gt
+		&ltstarttime&gt 1113933890 &lt/starttime&gt
+		&ltendtime&gt 1113933911 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 39 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00832313455063505084 0.00734484914891693919 &lt/thetas&gt
+			&ltmigrates&gt 0 96.6630952829531083 46.6299496864950527 0 &lt/migrates&gt
+			&ltgrowthrates&gt 2365.3441048272216 184.41163985471303 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.160000000000000003 &lt/accrate&gt
+		&ltllikemle&gt 1.18053638385976001 &lt/llikemle&gt
+		&ltllikedata&gt -3516.0909920893173 &lt/llikedata&gt
+		&ltstarttime&gt 1113933912 &lt/starttime&gt
+		&ltendtime&gt 1113933934 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 32 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00513756085360574273 0.0082400193242912461 &lt/thetas&gt
+			&ltmigrates&gt 0 131.185343491258408 42.1077506833302877 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1821.55597636667744 149.760781972604775 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.130000000000000004 &lt/accrate&gt
+		&ltllikemle&gt 1.6599393589321001 &lt/llikemle&gt
+		&ltllikedata&gt -3271.75316866892263 &lt/llikedata&gt
+		&ltstarttime&gt 1113933934 &lt/starttime&gt
+		&ltendtime&gt 1113933960 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 26 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00505700949907143815 0.00613707484094671175 &lt/thetas&gt
+			&ltmigrates&gt 0 124.679362905292081 34.8937725369221496 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1761.27593665903237 -84.0195656658781189 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 1 &lt/tinytrees&gt
+		&ltaccrate&gt 0.149999999999999994 &lt/accrate&gt
+		&ltllikemle&gt 0.596513557975481556 &lt/llikemle&gt
+		&ltllikedata&gt -3269.61178480345779 &lt/llikedata&gt
+		&ltstarttime&gt 1113933961 &lt/starttime&gt
+		&ltendtime&gt 1113933971 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 6 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0051461021148077107 0.00589555271016268308 &lt/thetas&gt
+			&ltmigrates&gt 0 125.68905479949639 48.865939986393137 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1835.77616186834098 82.0043403675645095 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 0 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0629273638426249143 0.105548312215536552 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0079654795218820091 0.0206451375053703819 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.09657490636763079e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2102355624254017e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.33961400471318623e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.77695915387843852e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10446887053792051e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 19 &lt/xpartlines&gt
+			&ltpartlines&gt 17 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.27457247455224152e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.44016581525568737e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.63431475801879578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.55098716603418459e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.39112816930027905e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.18973368575778886e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.3802045298549127e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00010629432546176642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000111507477845693503 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000150975907355033011 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000165638034054806815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219061924145474876 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219707138094496452 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 12 &lt/xpartlines&gt
+			&ltpartlines&gt 11 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000258213852917746063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 11 &lt/xpartlines&gt
+			&ltpartlines&gt 11 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000273652113921858643 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 10 &lt/xpartlines&gt
+			&ltpartlines&gt 11 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000280583868742954141 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 10 &lt/xpartlines&gt
+			&ltpartlines&gt 10 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000331106053916933646 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 9 &lt/xpartlines&gt
+			&ltpartlines&gt 10 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000344952279767115331 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 8 &lt/xpartlines&gt
+			&ltpartlines&gt 10 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345874033941911106 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 8 &lt/xpartlines&gt
+			&ltpartlines&gt 9 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000348861238031590646 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 7 &lt/xpartlines&gt
+			&ltpartlines&gt 9 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000424599242926313077 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000439947186235675422 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000481070344401667829 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000528930654355221855 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000592184315630871314 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 6 &lt/xpartlines&gt
+			&ltpartlines&gt 5 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000857591267505941114 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000866354211828237845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00091353575187592918 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 3 5 &lt/xpartlines&gt
+			&ltpartlines&gt 3 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000919185689826623655 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000955912424407952743 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118557134547170773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00153425735560147494 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0019246211065550127 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00206538502262878197 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00305508566675113469 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00533986494186010525 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0575370553514418445 0.104633534940785464 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00794664505963699544 0.0202802222566785852 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.09657490636763079e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2102355624254017e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.25411707266569827e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.33961400471318623e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.77695915387843852e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10446887053792051e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.27457247455224152e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.44016581525568737e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.63431475801879578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.55098716603418459e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.39112816930027905e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.3802045298549127e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00010617490035119024 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000111507477845693503 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000150975907355033011 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 15 &lt/xpartlines&gt
+			&ltpartlines&gt 11 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000151946997256986488 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000165638034054806815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000191522903992015264 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219707138094496452 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000258213852917746063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000273652113921858643 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 10 &lt/xpartlines&gt
+			&ltpartlines&gt 10 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000280583868742954141 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 10 &lt/xpartlines&gt
+			&ltpartlines&gt 9 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000331106053916933646 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 9 &lt/xpartlines&gt
+			&ltpartlines&gt 9 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345874033941911106 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 8 &lt/xpartlines&gt
+			&ltpartlines&gt 9 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000348861238031590646 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 7 &lt/xpartlines&gt
+			&ltpartlines&gt 9 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000424599242926313077 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000439947186235675422 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000481070344401667829 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000528930654355221855 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000592184315630871314 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 6 &lt/xpartlines&gt
+			&ltpartlines&gt 5 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000866354211828237845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00091353575187592918 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 4 5 &lt/xpartlines&gt
+			&ltpartlines&gt 4 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000919185689826623655 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000955912424407952743 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00114689035080777412 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118557134547170773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00153425735560147494 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0019246211065550127 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00206538502262878197 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00267205676641757645 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00533986494186010525 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00505700949907143815 0.00613707484094671175 &lt/thetas&gt
+			&ltmigrates&gt 0 124.679362905292081 34.8937725369221496 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1761.27593665903237 -84.0195656658781189 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.265000000000000013 &lt/accrate&gt
+		&ltllikemle&gt 8.03638549246886313 &lt/llikemle&gt
+		&ltllikedata&gt -3574.6386248622166 &lt/llikedata&gt
+		&ltstarttime&gt 1113933971 &lt/starttime&gt
+		&ltendtime&gt 1113933986 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 53 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00238967973895868897 0.00857314189478221474 &lt/thetas&gt
+			&ltmigrates&gt 0 178.594354360432533 201.711534027463927 0 &lt/migrates&gt
+			&ltgrowthrates&gt 286.265570187478374 522.584629259533926 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.174999999999999989 &lt/accrate&gt
+		&ltllikemle&gt 2.24292366391492948 &lt/llikemle&gt
+		&ltllikedata&gt -3366.95583651043171 &lt/llikedata&gt
+		&ltstarttime&gt 1113933986 &lt/starttime&gt
+		&ltendtime&gt 1113934003 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 35 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00342828243898930232 0.00938815584810562438 &lt/thetas&gt
+			&ltmigrates&gt 0 359.333313010233837 170.709781451344668 0 &lt/migrates&gt
+			&ltgrowthrates&gt 376.258762176827872 608.980866754854333 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.115000000000000005 &lt/accrate&gt
+		&ltllikemle&gt 0.331232649867971241 &lt/llikemle&gt
+		&ltllikedata&gt -3277.07186443434921 &lt/llikedata&gt
+		&ltstarttime&gt 1113934003 &lt/starttime&gt
+		&ltendtime&gt 1113934021 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 23 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00382351077572252833 0.0101148980666503786 &lt/thetas&gt
+			&ltmigrates&gt 0 266.63302494001374 180.090955280637019 0 &lt/migrates&gt
+			&ltgrowthrates&gt 589.048754968938624 732.840714912730732 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.100000000000000006 &lt/accrate&gt
+		&ltllikemle&gt 0.712638597764832182 &lt/llikemle&gt
+		&ltllikedata&gt -3264.58251001428516 &lt/llikedata&gt
+		&ltstarttime&gt 1113934021 &lt/starttime&gt
+		&ltendtime&gt 1113934029 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 4 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.004074651441941083 0.00837181988651936049 &lt/thetas&gt
+			&ltmigrates&gt 0 118.942422560571757 182.899111430931441 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1023.10652786136313 621.693756977592329 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 1 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0539318434248482123 0.124884165154835192 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 3 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00820356565115269894 0.0167862026647611975 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.86113155575850924e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.14780854199569499e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2291023672518374e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.66635848884044525e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 18 &lt/xpartlines&gt
+			&ltpartlines&gt 19 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.22961141141907646e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.48673742353794826e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.91186994294289797e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.82376276028000035e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.19910298629359333e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.45549455970045656e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 16 &lt/xpartlines&gt
+			&ltpartlines&gt 15 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.48515305769414904e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.63938283360383356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109224715491037562 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125328787711941807 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134238691874153575 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 15 &lt/xpartlines&gt
+			&ltpartlines&gt 11 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015730396931299724 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208869889075072063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000215094766862807343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000225539801544489551 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00024281367284408395 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 13 &lt/xpartlines&gt
+			&ltpartlines&gt 8 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000339783045919729956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 12 &lt/xpartlines&gt
+			&ltpartlines&gt 8 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000349763100889471208 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 12 &lt/xpartlines&gt
+			&ltpartlines&gt 7 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000380891571910665103 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 12 &lt/xpartlines&gt
+			&ltpartlines&gt 6 12 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000430449683486230515 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 6 11 &lt/xpartlines&gt
+			&ltpartlines&gt 6 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000457714375841571639 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477876206426440411 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000491278080421270061 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000618305099660766818 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 9 &lt/xpartlines&gt
+			&ltpartlines&gt 5 9 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000711694454938392642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 5 8 &lt/xpartlines&gt
+			&ltpartlines&gt 5 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000735873160967268547 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000752775920475186756 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 8 &lt/xpartlines&gt
+			&ltpartlines&gt 4 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000767134447803857842 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 7 &lt/xpartlines&gt
+			&ltpartlines&gt 4 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000783650082327605664 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000848841654435085577 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000957093185831530183 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00113398823503352373 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 5 &lt/xpartlines&gt
+			&ltpartlines&gt 2 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00120225473233144119 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125272233077632815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 5 &lt/xpartlines&gt
+			&ltpartlines&gt 1 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00148623295494489864 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00243994510677220068 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00250266179027173021 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00288283320062619926 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00290053205202245312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0609748580652498956 0.11278304974572527 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 3 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0085322498443588142 0.0161685184391479192 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.86113155575850924e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.14780854199569499e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2291023672518374e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.22961141141907646e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 18 &lt/xpartlines&gt
+			&ltpartlines&gt 19 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.48673742353794826e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.91186994294289797e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.82376276028000035e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.19910298629359333e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.45549455970045656e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 16 &lt/xpartlines&gt
+			&ltpartlines&gt 16 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.48515305769414904e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 16 &lt/xpartlines&gt
+			&ltpartlines&gt 15 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.63938283360383356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109224715491037562 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 15 &lt/xpartlines&gt
+			&ltpartlines&gt 14 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000118188935353991798 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125328787711941807 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134238691874153575 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015730396931299724 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208869889075072063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000215094766862807343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 12 &lt/xpartlines&gt
+			&ltpartlines&gt 11 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000225539801544489551 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00024281367284408395 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000339783045919729956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345347778094520265 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000349763100889471208 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000380891571910665103 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 11 &lt/xpartlines&gt
+			&ltpartlines&gt 6 11 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000430449683486230515 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000457714375841571639 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 9 &lt/xpartlines&gt
+			&ltpartlines&gt 7 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477876206426440411 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 9 &lt/xpartlines&gt
+			&ltpartlines&gt 6 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000491278080421270061 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 9 &lt/xpartlines&gt
+			&ltpartlines&gt 5 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000618305099660766818 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 8 &lt/xpartlines&gt
+			&ltpartlines&gt 5 8 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000711694454938392642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000752775920475186756 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 8 &lt/xpartlines&gt
+			&ltpartlines&gt 4 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000767134447803857842 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 7 &lt/xpartlines&gt
+			&ltpartlines&gt 4 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000783650082327605664 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000848841654435085577 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000957093185831530183 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00113398823503352373 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 5 &lt/xpartlines&gt
+			&ltpartlines&gt 2 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00120225473233144119 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125272233077632815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 5 &lt/xpartlines&gt
+			&ltpartlines&gt 1 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00148623295494489864 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00243994510677220068 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00250266179027173021 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00288283320062619926 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00290053205202245312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00382351077572252833 0.0101148980666503786 &lt/thetas&gt
+			&ltmigrates&gt 0 266.63302494001374 180.090955280637019 0 &lt/migrates&gt
+			&ltgrowthrates&gt 589.048754968938624 732.840714912730732 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.234999999999999987 &lt/accrate&gt
+		&ltllikemle&gt 4.42696635227687985 &lt/llikemle&gt
+		&ltllikedata&gt -3492.15130892703792 &lt/llikedata&gt
+		&ltstarttime&gt 1113934029 &lt/starttime&gt
+		&ltendtime&gt 1113934045 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 47 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0051932689740616959 0.0095267210917029492 &lt/thetas&gt
+			&ltmigrates&gt 0 0.0030934444336862682 46.6542500393172119 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1401.00175791445054 338.376238311546331 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.149999999999999994 &lt/accrate&gt
+		&ltllikemle&gt 11.6317746234713901 &lt/llikemle&gt
+		&ltllikedata&gt -3382.65609923619468 &lt/llikedata&gt
+		&ltstarttime&gt 1113934046 &lt/starttime&gt
+		&ltendtime&gt 1113934062 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 30 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00363910230419641195 0.00949364169215391289 &lt/thetas&gt
+			&ltmigrates&gt 0 113.28617225286888 88.1277594808196767 0 &lt/migrates&gt
+			&ltgrowthrates&gt 411.557560954860037 233.86142612045623 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.110000000000000001 &lt/accrate&gt
+		&ltllikemle&gt 0.636063492994214719 &lt/llikemle&gt
+		&ltllikedata&gt -3299.08922066821424 &lt/llikedata&gt
+		&ltstarttime&gt 1113934062 &lt/starttime&gt
+		&ltendtime&gt 1113934079 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 22 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00344721894934559969 0.0137468371177546927 &lt/thetas&gt
+			&ltmigrates&gt 0 113.720626268037961 82.7670768811458402 0 &lt/migrates&gt
+			&ltgrowthrates&gt 320.377044363307391 443.960643777887299 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.174999999999999989 &lt/accrate&gt
+		&ltllikemle&gt 0.411652243146974017 &lt/llikemle&gt
+		&ltllikedata&gt -3301.112857149506 &lt/llikedata&gt
+		&ltstarttime&gt 1113934079 &lt/starttime&gt
+		&ltendtime&gt 1113934087 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 7 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00274284032250226347 0.0117803366876250235 &lt/thetas&gt
+			&ltmigrates&gt 0 120.893070720992441 85.9918539079791771 0 &lt/migrates&gt
+			&ltgrowthrates&gt 26.2473005315673831 368.198849330621101 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 2 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 19 20 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0517692740770155577 0.168835716039738043 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00827170676388711348 0.0236086927070103779 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.65136899805835341e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.04215566305798666e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.43604566212563868e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.84336441146756059e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.24324521605382133e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.02727378794709678e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.66779769093841711e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.70209786261414327e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.7820260710313902e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.28621094428023613e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107706203473348862 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 18 &lt/xpartlines&gt
+			&ltpartlines&gt 12 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000114816939483791875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000126163485711554112 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0001343202142583523 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157502040375572878 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000158198275534539187 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000185924618990777787 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195139416618541169 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000195144856593906958 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208892591389243015 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000221729134487027137 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000231540140137400703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000286748264076826459 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000296568169067036559 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00032780787636151992 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00040351349954208406 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000469019333789370591 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000501156369385404206 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 10 &lt/xpartlines&gt
+			&ltpartlines&gt 4 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00051564770169723541 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000720822165058714829 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 9 &lt/xpartlines&gt
+			&ltpartlines&gt 3 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00096916231842889629 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 8 &lt/xpartlines&gt
+			&ltpartlines&gt 3 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00117293932612708303 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 7 &lt/xpartlines&gt
+			&ltpartlines&gt 3 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00150380041638617967 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00173696782656019471 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00185054353054030468 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 7 &lt/xpartlines&gt
+			&ltpartlines&gt 1 7 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00191839064111927254 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00194182526794987008 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 7 &lt/xpartlines&gt
+			&ltpartlines&gt 0 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00225255911291697688 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 6 &lt/xpartlines&gt
+			&ltpartlines&gt 0 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00258123599239381586 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00295407111169069311 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00324495225912871431 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00382175977738545905 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 19 20 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0517692740770155577 0.159664105752183261 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00827170676388711348 0.0229534802929423874 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.65136899805835341e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.04215566305798666e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.43604566212563868e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.84336441146756059e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.24324521605382133e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.02727378794709678e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.66779769093841711e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.70209786261414327e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.7820260710313902e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.28621094428023613e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107706203473348862 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 18 &lt/xpartlines&gt
+			&ltpartlines&gt 12 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000114816939483791875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000126163485711554112 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0001343202142583523 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157502040375572878 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000158198275534539187 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000185924618990777787 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195139416618541169 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000195144856593906958 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208892591389243015 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000221729134487027137 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000231540140137400703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000286748264076826459 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000296568169067036559 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00032780787636151992 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00040351349954208406 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000469019333789370591 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000501156369385404206 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 10 &lt/xpartlines&gt
+			&ltpartlines&gt 4 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00051564770169723541 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000517726912059090507 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 9 &lt/xpartlines&gt
+			&ltpartlines&gt 3 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000720822165058714829 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 8 &lt/xpartlines&gt
+			&ltpartlines&gt 3 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00096916231842889629 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 7 &lt/xpartlines&gt
+			&ltpartlines&gt 3 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00150380041638617967 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00173696782656019471 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00185054353054030468 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 7 &lt/xpartlines&gt
+			&ltpartlines&gt 1 7 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00191839064111927254 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00194182526794987008 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 7 &lt/xpartlines&gt
+			&ltpartlines&gt 0 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00225255911291697688 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 6 &lt/xpartlines&gt
+			&ltpartlines&gt 0 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00258123599239381586 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00295407111169069311 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00324495225912871431 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00382175977738545905 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00344721894934559969 0.0137468371177546927 &lt/thetas&gt
+			&ltmigrates&gt 0 113.720626268037961 82.7670768811458402 0 &lt/migrates&gt
+			&ltgrowthrates&gt 320.377044363307391 443.960643777887299 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltreplicate-summary&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00356219464250582358 0.00792721479150828613 &lt/thetas&gt
+			&ltmigrates&gt 0 121.904055450476534 100.015915512533283 0 &lt/migrates&gt
+			&ltgrowthrates&gt 722.893455415187532 257.932768127086547 &lt/growthrates&gt
+		&lt/estimates&gt
+		&ltmaxlike&gt -4.6390765097207467 &lt/maxlike&gt
+&lt/replicate-summary&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 0 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.195000000000000007 &lt/accrate&gt
+		&ltllikemle&gt 2.70726265416057954 &lt/llikemle&gt
+		&ltllikedata&gt -3922.87305464996825 &lt/llikedata&gt
+		&ltstarttime&gt 1113934177 &lt/starttime&gt
+		&ltendtime&gt 1113934192 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 39 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00380011726327808552 0.00713604248963370554 &lt/thetas&gt
+			&ltmigrates&gt 0 0.0014665879208418085 76.5973069163458717 0 &lt/migrates&gt
+			&ltgrowthrates&gt 125.906070220065573 10.9556147174487499 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 0 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.200000000000000011 &lt/accrate&gt
+		&ltllikemle&gt 19.4577047358918414 &lt/llikemle&gt
+		&ltllikedata&gt -3507.94370195006422 &lt/llikedata&gt
+		&ltstarttime&gt 1113934192 &lt/starttime&gt
+		&ltendtime&gt 1113934207 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 40 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00557328352740996819 0.00767691676407612593 &lt/thetas&gt
+			&ltmigrates&gt 0 72.5171669311285285 50.3899810789623572 0 &lt/migrates&gt
+			&ltgrowthrates&gt 239.320949207652802 15.0158998451551309 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 0 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.195000000000000007 &lt/accrate&gt
+		&ltllikemle&gt 1.41077196606614153 &lt/llikemle&gt
+		&ltllikedata&gt -3388.85530318494739 &lt/llikedata&gt
+		&ltstarttime&gt 1113934207 &lt/starttime&gt
+		&ltendtime&gt 1113934221 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 39 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00526638488263829647 0.00636010206007683442 &lt/thetas&gt
+			&ltmigrates&gt 0 145.033451919934123 78.9828742546580571 0 &lt/migrates&gt
+			&ltgrowthrates&gt 101.354431821265237 42.7824995714557517 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 0 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.0500000000000000028 &lt/accrate&gt
+		&ltllikemle&gt 0.106842797016683602 &lt/llikemle&gt
+		&ltllikedata&gt -3387.66439523280769 &lt/llikedata&gt
+		&ltstarttime&gt 1113934222 &lt/starttime&gt
+		&ltendtime&gt 1113934229 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 2 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0048297296701252353 0.00632472336672777573 &lt/thetas&gt
+			&ltmigrates&gt 0 158.359159064075584 79.436028487943048 0 &lt/migrates&gt
+			&ltgrowthrates&gt 48.5740506820224667 12.3249628941104206 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 1 0 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 21 18 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0985596947596808493 0.112500144905919378 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 3 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0189681695385016322 0.0251774919104673957 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.75402443608963525e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.67126558256159515e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 19 &lt/xpartlines&gt
+			&ltpartlines&gt 20 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.24882792998111863e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.28842625068443356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 19 &lt/xpartlines&gt
+			&ltpartlines&gt 18 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.34298732185740874e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 19 &lt/xpartlines&gt
+			&ltpartlines&gt 17 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.46656309975548847e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.81601036755640491e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.9553870703353184e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 18 &lt/xpartlines&gt
+			&ltpartlines&gt 15 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.65634921089525913e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.53160440134389236e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.84400137091464748e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000104908914799906663 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107378906806900444 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134386781925198401 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 16 &lt/xpartlines&gt
+			&ltpartlines&gt 11 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015095815823508343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 16 &lt/xpartlines&gt
+			&ltpartlines&gt 10 16 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000187294338352160755 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 16 &lt/xpartlines&gt
+			&ltpartlines&gt 9 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000194311967981214922 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000207082475182713985 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000216519914862317283 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000240917252117856754 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000275177350551769888 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000277961519944129815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000327184596530212871 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 10 &lt/xpartlines&gt
+			&ltpartlines&gt 9 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000375434817240545321 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000421956732764341578 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 9 &lt/xpartlines&gt
+			&ltpartlines&gt 8 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000454327709213169504 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 8 &lt/xpartlines&gt
+			&ltpartlines&gt 8 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000455301780358049633 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 8 &lt/xpartlines&gt
+			&ltpartlines&gt 7 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477060867071809822 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000495771892855436606 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 6 &lt/xpartlines&gt
+			&ltpartlines&gt 7 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000577304059643193 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 5 &lt/xpartlines&gt
+			&ltpartlines&gt 7 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000728271605769075263 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000928286291063316091 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 4 &lt/xpartlines&gt
+			&ltpartlines&gt 6 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00136280706740197761 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00157448633617017866 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00170993270529683282 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 3 &lt/xpartlines&gt
+			&ltpartlines&gt 4 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00199676444517621777 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00200728073872961498 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00213154474570202875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00229629270831637703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 3 2 &lt/xpartlines&gt
+			&ltpartlines&gt 3 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00279444689547627594 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 1 &lt/xpartlines&gt
+			&ltpartlines&gt 4 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00362185244963840556 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 1 &lt/xpartlines&gt
+			&ltpartlines&gt 3 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00370319681038052849 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 1 &lt/xpartlines&gt
+			&ltpartlines&gt 2 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00482422222342331845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00968056818678340696 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 21 18 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0968437428145017959 0.112500144905919378 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 3 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.018921333345451638 0.0251774919104673957 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.78294379475588584e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.75402443608963525e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.67126558256159515e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.24882792998111863e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 19 &lt/xpartlines&gt
+			&ltpartlines&gt 18 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.28842625068443356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 19 &lt/xpartlines&gt
+			&ltpartlines&gt 17 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.34298732185740874e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.81601036755640491e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.9553870703353184e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 18 &lt/xpartlines&gt
+			&ltpartlines&gt 15 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.65634921089525913e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.53160440134389236e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.84400137091464748e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000104908914799906663 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107378906806900444 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134386781925198401 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 16 &lt/xpartlines&gt
+			&ltpartlines&gt 11 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015095815823508343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 16 &lt/xpartlines&gt
+			&ltpartlines&gt 10 16 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000187294338352160755 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 16 &lt/xpartlines&gt
+			&ltpartlines&gt 9 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000194311967981214922 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000207082475182713985 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000216519914862317283 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000240917252117856754 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000275177350551769888 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000277961519944129815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000327184596530212871 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 10 &lt/xpartlines&gt
+			&ltpartlines&gt 9 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000375434817240545321 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000421956732764341578 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 9 &lt/xpartlines&gt
+			&ltpartlines&gt 8 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000454327709213169504 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 8 &lt/xpartlines&gt
+			&ltpartlines&gt 8 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000455301780358049633 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 8 &lt/xpartlines&gt
+			&ltpartlines&gt 7 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477060867071809822 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000495771892855436606 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 6 &lt/xpartlines&gt
+			&ltpartlines&gt 7 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000577304059643193 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 5 &lt/xpartlines&gt
+			&ltpartlines&gt 7 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000728271605769075263 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000928286291063316091 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 4 &lt/xpartlines&gt
+			&ltpartlines&gt 6 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00136280706740197761 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00157448633617017866 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00170993270529683282 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 3 &lt/xpartlines&gt
+			&ltpartlines&gt 4 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00199676444517621777 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00200728073872961498 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00213154474570202875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00229629270831637703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 3 2 &lt/xpartlines&gt
+			&ltpartlines&gt 3 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00279444689547627594 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 1 &lt/xpartlines&gt
+			&ltpartlines&gt 4 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00362185244963840556 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 1 &lt/xpartlines&gt
+			&ltpartlines&gt 3 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00370319681038052849 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 1 &lt/xpartlines&gt
+			&ltpartlines&gt 2 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00482422222342331845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00968056818678340696 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00526638488263829647 0.00636010206007683442 &lt/thetas&gt
+			&ltmigrates&gt 0 145.033451919934123 78.9828742546580571 0 &lt/migrates&gt
+			&ltgrowthrates&gt 101.354431821265237 42.7824995714557517 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 1 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.239999999999999991 &lt/accrate&gt
+		&ltllikemle&gt 2.34750730772453364 &lt/llikemle&gt
+		&ltllikedata&gt -3525.30915507333066 &lt/llikedata&gt
+		&ltstarttime&gt 1113934229 &lt/starttime&gt
+		&ltendtime&gt 1113934243 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 48 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00633684542395633252 0.00414801483484614902 &lt/thetas&gt
+			&ltmigrates&gt 0 189.840646683791618 111.21338721037425 0 &lt/migrates&gt
+			&ltgrowthrates&gt 134.29937737766619 -261.501604057297811 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 1 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 2 &lt/tinytrees&gt
+		&ltaccrate&gt 0.135000000000000009 &lt/accrate&gt
+		&ltllikemle&gt 1.87776764509904415 &lt/llikemle&gt
+		&ltllikedata&gt -3404.56028299746959 &lt/llikedata&gt
+		&ltstarttime&gt 1113934244 &lt/starttime&gt
+		&ltendtime&gt 1113934259 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 27 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0053726478023596964 0.0034495368114013979 &lt/thetas&gt
+			&ltmigrates&gt 0 175.867668937235891 153.579579990926845 0 &lt/migrates&gt
+			&ltgrowthrates&gt 37.0453100154095694 -121.00495810761069 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 1 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.104999999999999996 &lt/accrate&gt
+		&ltllikemle&gt 0.449286917034601607 &lt/llikemle&gt
+		&ltllikedata&gt -3321.70936524524359 &lt/llikedata&gt
+		&ltstarttime&gt 1113934259 &lt/starttime&gt
+		&ltendtime&gt 1113934274 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 21 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00422505900112751364 0.00395290224643087169 &lt/thetas&gt
+			&ltmigrates&gt 0 160.037181408148427 147.594989310094263 0 &lt/migrates&gt
+			&ltgrowthrates&gt -173.121613861979398 -52.9903878137056665 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 1 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 1 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.0749999999999999972 &lt/accrate&gt
+		&ltllikemle&gt 4.54069966505540101 &lt/llikemle&gt
+		&ltllikedata&gt -3318.52699208625927 &lt/llikedata&gt
+		&ltstarttime&gt 1113934274 &lt/starttime&gt
+		&ltendtime&gt 1113934304 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 3 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00295615018924887769 0.00491936629532284831 &lt/thetas&gt
+			&ltmigrates&gt 0 235.929972373142618 308.986288378157326 0 &lt/migrates&gt
+			&ltgrowthrates&gt -631.659188775292023 226.807009056767953 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 1 1 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0648405565130086708 0.0877252307909633239 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 4 6 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0169381769238374467 0.0194182320038754526 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.30550527315558047e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.162795826446426e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.26737235869386312e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.55014581129354168e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.40932985043456985e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.90702713266381754e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 20 &lt/xpartlines&gt
+			&ltpartlines&gt 15 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.10411853492643601e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.12290248204213804e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.67472162385812578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.65590413200949371e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000103869320417860665 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107286941773631806 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107550821053514076 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109709891069030956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000114694502065424022 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00012158499114137082 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 13 &lt/xpartlines&gt
+			&ltpartlines&gt 13 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000122718853104433789 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 12 &lt/xpartlines&gt
+			&ltpartlines&gt 13 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125375411254551923 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 11 &lt/xpartlines&gt
+			&ltpartlines&gt 13 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000142189216663119957 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 11 &lt/xpartlines&gt
+			&ltpartlines&gt 12 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000142843686193008286 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 11 &lt/xpartlines&gt
+			&ltpartlines&gt 11 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000171515317423868074 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000218046136466306875 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0002790329859263787 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00030767075159027072 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000326602024127066991 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000336138739569854291 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 9 &lt/xpartlines&gt
+			&ltpartlines&gt 7 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000411029437867106761 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 8 &lt/xpartlines&gt
+			&ltpartlines&gt 7 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000455284747279604488 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000539893327729701073 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000560189187505523802 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 6 &lt/xpartlines&gt
+			&ltpartlines&gt 6 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000724982991669875874 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000800823245136290159 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 5 &lt/xpartlines&gt
+			&ltpartlines&gt 5 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00101663478186505727 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00117786220475134804 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118517172371627641 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 5 &lt/xpartlines&gt
+			&ltpartlines&gt 3 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125529723210753312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00128846095940146773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00172659403406359149 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 3 &lt/xpartlines&gt
+			&ltpartlines&gt 2 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00213157854076952711 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 2 &lt/xpartlines&gt
+			&ltpartlines&gt 2 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00256248379887218016 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00262033009215670038 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 2 &lt/xpartlines&gt
+			&ltpartlines&gt 1 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00262735858334228268 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.0031959883447954078 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 0 &lt/xpartlines&gt
+			&ltpartlines&gt 2 0 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00551501377964465876 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00596028250037733409 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00803817495345396371 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00803938998428121825 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.0086849234925416869 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00905185738010444788 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0659711430564781931 0.0877252307909633239 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 4 6 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0169723921539429272 0.0194182320038754526 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.30550527315558047e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.26737235869386312e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.55014581129354168e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.93780259319326687e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.40932985043456985e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.90702713266381754e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 20 &lt/xpartlines&gt
+			&ltpartlines&gt 15 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.10411853492643601e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.12290248204213804e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.67472162385812578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.65590413200949371e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000103869320417860665 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107286941773631806 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107550821053514076 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109709891069030956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000114694502065424022 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00012158499114137082 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 13 &lt/xpartlines&gt
+			&ltpartlines&gt 13 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000122718853104433789 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 12 &lt/xpartlines&gt
+			&ltpartlines&gt 13 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125375411254551923 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 11 &lt/xpartlines&gt
+			&ltpartlines&gt 13 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000142189216663119957 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 11 &lt/xpartlines&gt
+			&ltpartlines&gt 12 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000142843686193008286 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 11 &lt/xpartlines&gt
+			&ltpartlines&gt 11 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000171515317423868074 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000218046136466306875 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0002790329859263787 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00030767075159027072 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000326602024127066991 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000336138739569854291 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 9 &lt/xpartlines&gt
+			&ltpartlines&gt 7 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000411029437867106761 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 8 &lt/xpartlines&gt
+			&ltpartlines&gt 7 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000455284747279604488 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000539893327729701073 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000560189187505523802 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 6 &lt/xpartlines&gt
+			&ltpartlines&gt 6 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000724982991669875874 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000800823245136290159 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 5 &lt/xpartlines&gt
+			&ltpartlines&gt 5 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00101663478186505727 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00117786220475134804 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118517172371627641 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 5 &lt/xpartlines&gt
+			&ltpartlines&gt 3 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125529723210753312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00128846095940146773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00172659403406359149 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 3 &lt/xpartlines&gt
+			&ltpartlines&gt 2 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00213157854076952711 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 2 &lt/xpartlines&gt
+			&ltpartlines&gt 2 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00256248379887218016 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00262033009215670038 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 2 &lt/xpartlines&gt
+			&ltpartlines&gt 1 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00262735858334228268 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.0031959883447954078 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 0 &lt/xpartlines&gt
+			&ltpartlines&gt 2 0 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00551501377964465876 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00596028250037733409 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00803817495345396371 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00803938998428121825 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.0086849234925416869 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00905185738010444788 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00422505900112751364 0.00395290224643087169 &lt/thetas&gt
+			&ltmigrates&gt 0 160.037181408148427 147.594989310094263 0 &lt/migrates&gt
+			&ltgrowthrates&gt -173.121613861979398 -52.9903878137056665 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 2 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.260000000000000009 &lt/accrate&gt
+		&ltllikemle&gt 2.99420902451789361 &lt/llikemle&gt
+		&ltllikedata&gt -4299.50526781021654 &lt/llikedata&gt
+		&ltstarttime&gt 1113934304 &lt/starttime&gt
+		&ltendtime&gt 1113934319 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 52 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00756617832740799981 0.00659856362452725716 &lt/thetas&gt
+			&ltmigrates&gt 0 111.176155934682811 69.5707103933484632 0 &lt/migrates&gt
+			&ltgrowthrates&gt 409.88939497805228 543.16021718378704 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 2 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.179999999999999993 &lt/accrate&gt
+		&ltllikemle&gt 1.07089522228415079 &lt/llikemle&gt
+		&ltllikedata&gt -3490.4063004547229 &lt/llikedata&gt
+		&ltstarttime&gt 1113934320 &lt/starttime&gt
+		&ltendtime&gt 1113934335 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 36 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00656792579496304294 0.00584459175444660209 &lt/thetas&gt
+			&ltmigrates&gt 0 57.9007335206334091 156.694294591854742 0 &lt/migrates&gt
+			&ltgrowthrates&gt 451.288616099099272 497.009379113105979 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 2 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.140000000000000013 &lt/accrate&gt
+		&ltllikemle&gt 1.41480621744143908 &lt/llikemle&gt
+		&ltllikedata&gt -3434.66438504787766 &lt/llikedata&gt
+		&ltstarttime&gt 1113934336 &lt/starttime&gt
+		&ltendtime&gt 1113934351 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 28 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00550575271147208094 0.00696470662214750645 &lt/thetas&gt
+			&ltmigrates&gt 0 121.568711323335009 63.7627893168077549 0 &lt/migrates&gt
+			&ltgrowthrates&gt 354.601203542231758 606.129048483006613 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 2 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.100000000000000006 &lt/accrate&gt
+		&ltllikemle&gt 0.109493731119181953 &lt/llikemle&gt
+		&ltllikedata&gt -3423.70440531923623 &lt/llikedata&gt
+		&ltstarttime&gt 1113934352 &lt/starttime&gt
+		&ltendtime&gt 1113934359 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 4 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00514192935091333826 0.0062288990703713602 &lt/thetas&gt
+			&ltmigrates&gt 0 124.55408129628016 61.5396353290182461 0 &lt/migrates&gt
+			&ltgrowthrates&gt 335.346278823775265 527.046201636540445 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 1 2 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0861184261675420754 0.0989240815983647992 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 2 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0161108601645228097 0.0163539778587721008 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10705440289478283e-06 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.25399383339123163e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 19 &lt/xpartlines&gt
+			&ltpartlines&gt 20 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.24885254542245494e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.11189546645828539e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 19 &lt/xpartlines&gt
+			&ltpartlines&gt 18 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.85012116407944336e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.78107834741533388e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 17 &lt/xpartlines&gt
+			&ltpartlines&gt 18 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000118112773039976836 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000127737720313619163 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000129662442835112526 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000131648496800567045 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000131860052917847297 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000140310461463006713 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000155023093005773421 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157657160551731915 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000162080373620438725 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 15 &lt/xpartlines&gt
+			&ltpartlines&gt 11 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195611028967059892 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000233034957421893385 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00023544280363578128 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 14 &lt/xpartlines&gt
+			&ltpartlines&gt 9 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000262195006109537632 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000262906955612288607 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000299377625930490846 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 12 &lt/xpartlines&gt
+			&ltpartlines&gt 8 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000304231828846860254 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00035306417392301292 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000385710705381445032 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 9 &lt/xpartlines&gt
+			&ltpartlines&gt 8 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00043664221284098057 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 8 &lt/xpartlines&gt
+			&ltpartlines&gt 8 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000458570971503667059 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000462182592971902917 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000529375689534849147 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 6 &lt/xpartlines&gt
+			&ltpartlines&gt 7 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000531151851086229484 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 5 &lt/xpartlines&gt
+			&ltpartlines&gt 7 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000570644372201619386 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000628012110491958956 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 5 &lt/xpartlines&gt
+			&ltpartlines&gt 5 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00126002496707287252 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00133172776547203667 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00138378313545306345 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 4 3 &lt/xpartlines&gt
+			&ltpartlines&gt 4 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00139440804359624269 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0015125657858844324 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00151492720306570521 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 3 &lt/xpartlines&gt
+			&ltpartlines&gt 2 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00180363775393179489 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00272922260577865804 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 2 &lt/xpartlines&gt
+			&ltpartlines&gt 1 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00303636024375119338 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 1 &lt/xpartlines&gt
+			&ltpartlines&gt 2 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00598338892698212308 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0064621861657712101 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 0 &lt/xpartlines&gt
+			&ltpartlines&gt 2 0 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0838664731195002539 0.0940195460747020734 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 2 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0160107008550887178 0.0161577536563222747 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10705440289478283e-06 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.25399383339123163e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 19 &lt/xpartlines&gt
+			&ltpartlines&gt 20 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.24885254542245494e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.11189546645828539e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 19 &lt/xpartlines&gt
+			&ltpartlines&gt 18 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.85012116407944336e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.20971519398864496e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 17 &lt/xpartlines&gt
+			&ltpartlines&gt 18 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.78107834741533388e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000127737720313619163 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000129662442835112526 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000131648496800567045 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000131860052917847297 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000137097093409470431 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000140310461463006713 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000155023093005773421 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157657160551731915 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000158891269087890025 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000162080373620438725 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195611028967059892 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000206353336623343831 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000262195006109537632 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000262906955612288607 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000299377625930490846 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00035306417392301292 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000385710705381445032 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 9 &lt/xpartlines&gt
+			&ltpartlines&gt 8 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00043664221284098057 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 8 &lt/xpartlines&gt
+			&ltpartlines&gt 8 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000458570971503667059 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000462182592971902917 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000529375689534849147 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 6 &lt/xpartlines&gt
+			&ltpartlines&gt 7 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000531151851086229484 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 5 &lt/xpartlines&gt
+			&ltpartlines&gt 7 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000570644372201619386 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000628012110491958956 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 5 &lt/xpartlines&gt
+			&ltpartlines&gt 5 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00126002496707287252 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00133172776547203667 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00138378313545306345 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 4 3 &lt/xpartlines&gt
+			&ltpartlines&gt 4 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00139440804359624269 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0015125657858844324 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00151492720306570521 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 3 &lt/xpartlines&gt
+			&ltpartlines&gt 2 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00180363775393179489 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00272922260577865804 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 2 &lt/xpartlines&gt
+			&ltpartlines&gt 1 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00303636024375119338 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 1 &lt/xpartlines&gt
+			&ltpartlines&gt 2 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00598338892698212308 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0064621861657712101 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 0 &lt/xpartlines&gt
+			&ltpartlines&gt 2 0 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00550575271147208094 0.00696470662214750645 &lt/thetas&gt
+			&ltmigrates&gt 0 121.568711323335009 63.7627893168077549 0 &lt/migrates&gt
+			&ltgrowthrates&gt 354.601203542231758 606.129048483006613 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltreplicate-summary&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00459610289535911792 0.00543752715647888643 &lt/thetas&gt
+			&ltmigrates&gt 0 173.235122299289941 147.938467429627138 0 &lt/migrates&gt
+			&ltgrowthrates&gt 151.703172633675905 86.8286879559442326 &lt/growthrates&gt
+		&lt/estimates&gt
+		&ltmaxlike&gt -3.86601975441648626 &lt/maxlike&gt
+&lt/replicate-summary&gt
+&lt/XML-summary-file&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/insumfile.2reg3rep.xml b/doc/html/insumfile.2reg3rep.xml
new file mode 100644
index 0000000..75b0b8e
--- /dev/null
+++ b/doc/html/insumfile.2reg3rep.xml
@@ -0,0 +1,3320 @@
+<XML-summary-file>
+<!-- Lamarc v. 2.0
+     Please do not modify. -->
+<chainpack>
+	<number> 0 0 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.195000000000000007 </accrate>
+		<llikemle> 4.50058419396642684 </llikemle>
+		<llikedata> -3620.90996263017314 </llikedata>
+		<starttime> 1113933890 </starttime>
+		<endtime> 1113933911 </endtime>
+		<rates> <map> Tree-Arranger 39 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00832313455063505084 0.00734484914891693919 </thetas>
+			<migrates> 0 96.6630952829531083 46.6299496864950527 0 </migrates>
+			<growthrates> 2365.3441048272216 184.41163985471303 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.160000000000000003 </accrate>
+		<llikemle> 1.18053638385976001 </llikemle>
+		<llikedata> -3516.0909920893173 </llikedata>
+		<starttime> 1113933912 </starttime>
+		<endtime> 1113933934 </endtime>
+		<rates> <map> Tree-Arranger 32 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00513756085360574273 0.0082400193242912461 </thetas>
+			<migrates> 0 131.185343491258408 42.1077506833302877 0 </migrates>
+			<growthrates> 1821.55597636667744 149.760781972604775 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.130000000000000004 </accrate>
+		<llikemle> 1.6599393589321001 </llikemle>
+		<llikedata> -3271.75316866892263 </llikedata>
+		<starttime> 1113933934 </starttime>
+		<endtime> 1113933960 </endtime>
+		<rates> <map> Tree-Arranger 26 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00505700949907143815 0.00613707484094671175 </thetas>
+			<migrates> 0 124.679362905292081 34.8937725369221496 0 </migrates>
+			<growthrates> 1761.27593665903237 -84.0195656658781189 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 1 </tinytrees>
+		<accrate> 0.149999999999999994 </accrate>
+		<llikemle> 0.596513557975481556 </llikemle>
+		<llikedata> -3269.61178480345779 </llikedata>
+		<starttime> 1113933961 </starttime>
+		<endtime> 1113933971 </endtime>
+		<rates> <map> Tree-Arranger 6 40 </map> </rates>
+		<estimates>
+			<thetas> 0.0051461021148077107 0.00589555271016268308 </thetas>
+			<migrates> 0 125.68905479949639 48.865939986393137 0 </migrates>
+			<growthrates> 1835.77616186834098 82.0043403675645095 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 0 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0629273638426249143 0.105548312215536552 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 1 0 </shortpoint>
+			<shortwait> 0.0079654795218820091 0.0206451375053703819 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 5.09657490636763079e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2102355624254017e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.33961400471318623e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.77695915387843852e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.10446887053792051e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 19 </xpartlines>
+			<partlines> 17 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.27457247455224152e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.44016581525568737e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.63431475801879578e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.55098716603418459e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.39112816930027905e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.18973368575778886e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.3802045298549127e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00010629432546176642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000111507477845693503 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000150975907355033011 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000165638034054806815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219061924145474876 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219707138094496452 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 12 </xpartlines>
+			<partlines> 11 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000258213852917746063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 11 </xpartlines>
+			<partlines> 11 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000273652113921858643 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 10 </xpartlines>
+			<partlines> 11 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000280583868742954141 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 10 </xpartlines>
+			<partlines> 10 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000331106053916933646 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 9 </xpartlines>
+			<partlines> 10 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000344952279767115331 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 8 </xpartlines>
+			<partlines> 10 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345874033941911106 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 8 </xpartlines>
+			<partlines> 9 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000348861238031590646 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 7 </xpartlines>
+			<partlines> 9 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000424599242926313077 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000439947186235675422 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000481070344401667829 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000528930654355221855 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000592184315630871314 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 6 </xpartlines>
+			<partlines> 5 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000857591267505941114 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000866354211828237845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00091353575187592918 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 3 5 </xpartlines>
+			<partlines> 3 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000919185689826623655 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000955912424407952743 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118557134547170773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00153425735560147494 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0019246211065550127 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00206538502262878197 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00305508566675113469 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00533986494186010525 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0575370553514418445 0.104633534940785464 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 1 0 </shortpoint>
+			<shortwait> 0.00794664505963699544 0.0202802222566785852 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 5.09657490636763079e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2102355624254017e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.25411707266569827e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.33961400471318623e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.77695915387843852e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.10446887053792051e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.27457247455224152e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.44016581525568737e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.63431475801879578e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.55098716603418459e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.39112816930027905e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.3802045298549127e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00010617490035119024 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000111507477845693503 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000150975907355033011 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 15 </xpartlines>
+			<partlines> 11 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000151946997256986488 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000165638034054806815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000191522903992015264 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219707138094496452 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000258213852917746063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000273652113921858643 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 10 </xpartlines>
+			<partlines> 10 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000280583868742954141 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 10 </xpartlines>
+			<partlines> 9 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000331106053916933646 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 9 </xpartlines>
+			<partlines> 9 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345874033941911106 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 8 </xpartlines>
+			<partlines> 9 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000348861238031590646 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 7 </xpartlines>
+			<partlines> 9 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000424599242926313077 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000439947186235675422 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000481070344401667829 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000528930654355221855 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000592184315630871314 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 6 </xpartlines>
+			<partlines> 5 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000866354211828237845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00091353575187592918 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 4 5 </xpartlines>
+			<partlines> 4 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000919185689826623655 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000955912424407952743 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00114689035080777412 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118557134547170773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00153425735560147494 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0019246211065550127 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00206538502262878197 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00267205676641757645 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00533986494186010525 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00505700949907143815 0.00613707484094671175 </thetas>
+			<migrates> 0 124.679362905292081 34.8937725369221496 0 </migrates>
+			<growthrates> 1761.27593665903237 -84.0195656658781189 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 0 1 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.265000000000000013 </accrate>
+		<llikemle> 8.03638549246886313 </llikemle>
+		<llikedata> -3574.6386248622166 </llikedata>
+		<starttime> 1113933971 </starttime>
+		<endtime> 1113933986 </endtime>
+		<rates> <map> Tree-Arranger 53 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00238967973895868897 0.00857314189478221474 </thetas>
+			<migrates> 0 178.594354360432533 201.711534027463927 0 </migrates>
+			<growthrates> 286.265570187478374 522.584629259533926 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.174999999999999989 </accrate>
+		<llikemle> 2.24292366391492948 </llikemle>
+		<llikedata> -3366.95583651043171 </llikedata>
+		<starttime> 1113933986 </starttime>
+		<endtime> 1113934003 </endtime>
+		<rates> <map> Tree-Arranger 35 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00342828243898930232 0.00938815584810562438 </thetas>
+			<migrates> 0 359.333313010233837 170.709781451344668 0 </migrates>
+			<growthrates> 376.258762176827872 608.980866754854333 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.115000000000000005 </accrate>
+		<llikemle> 0.331232649867971241 </llikemle>
+		<llikedata> -3277.07186443434921 </llikedata>
+		<starttime> 1113934003 </starttime>
+		<endtime> 1113934021 </endtime>
+		<rates> <map> Tree-Arranger 23 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00382351077572252833 0.0101148980666503786 </thetas>
+			<migrates> 0 266.63302494001374 180.090955280637019 0 </migrates>
+			<growthrates> 589.048754968938624 732.840714912730732 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.100000000000000006 </accrate>
+		<llikemle> 0.712638597764832182 </llikemle>
+		<llikedata> -3264.58251001428516 </llikedata>
+		<starttime> 1113934021 </starttime>
+		<endtime> 1113934029 </endtime>
+		<rates> <map> Tree-Arranger 4 40 </map> </rates>
+		<estimates>
+			<thetas> 0.004074651441941083 0.00837181988651936049 </thetas>
+			<migrates> 0 118.942422560571757 182.899111430931441 0 </migrates>
+			<growthrates> 1023.10652786136313 621.693756977592329 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 1 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0539318434248482123 0.124884165154835192 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 3 0 </shortpoint>
+			<shortwait> 0.00820356565115269894 0.0167862026647611975 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 9.86113155575850924e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.14780854199569499e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2291023672518374e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.66635848884044525e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 18 </xpartlines>
+			<partlines> 19 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.22961141141907646e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.48673742353794826e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.91186994294289797e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.82376276028000035e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.19910298629359333e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.45549455970045656e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 16 </xpartlines>
+			<partlines> 15 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.48515305769414904e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.63938283360383356e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109224715491037562 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125328787711941807 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134238691874153575 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 15 </xpartlines>
+			<partlines> 11 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015730396931299724 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208869889075072063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000215094766862807343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000225539801544489551 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00024281367284408395 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 13 </xpartlines>
+			<partlines> 8 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000339783045919729956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 12 </xpartlines>
+			<partlines> 8 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000349763100889471208 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 12 </xpartlines>
+			<partlines> 7 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000380891571910665103 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 12 </xpartlines>
+			<partlines> 6 12 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000430449683486230515 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 6 11 </xpartlines>
+			<partlines> 6 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000457714375841571639 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477876206426440411 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000491278080421270061 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000618305099660766818 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 9 </xpartlines>
+			<partlines> 5 9 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000711694454938392642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 5 8 </xpartlines>
+			<partlines> 5 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000735873160967268547 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000752775920475186756 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 8 </xpartlines>
+			<partlines> 4 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000767134447803857842 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 7 </xpartlines>
+			<partlines> 4 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000783650082327605664 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000848841654435085577 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000957093185831530183 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00113398823503352373 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 5 </xpartlines>
+			<partlines> 2 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00120225473233144119 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125272233077632815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 5 </xpartlines>
+			<partlines> 1 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00148623295494489864 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00243994510677220068 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00250266179027173021 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00288283320062619926 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00290053205202245312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0609748580652498956 0.11278304974572527 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 3 0 </shortpoint>
+			<shortwait> 0.0085322498443588142 0.0161685184391479192 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 9.86113155575850924e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.14780854199569499e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2291023672518374e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.22961141141907646e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 18 </xpartlines>
+			<partlines> 19 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.48673742353794826e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.91186994294289797e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.82376276028000035e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.19910298629359333e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.45549455970045656e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 16 </xpartlines>
+			<partlines> 16 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.48515305769414904e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 16 </xpartlines>
+			<partlines> 15 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.63938283360383356e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109224715491037562 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 15 </xpartlines>
+			<partlines> 14 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000118188935353991798 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125328787711941807 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134238691874153575 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015730396931299724 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208869889075072063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000215094766862807343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 12 </xpartlines>
+			<partlines> 11 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000225539801544489551 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00024281367284408395 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000339783045919729956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345347778094520265 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000349763100889471208 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000380891571910665103 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 11 </xpartlines>
+			<partlines> 6 11 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000430449683486230515 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000457714375841571639 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 9 </xpartlines>
+			<partlines> 7 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477876206426440411 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 9 </xpartlines>
+			<partlines> 6 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000491278080421270061 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 9 </xpartlines>
+			<partlines> 5 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000618305099660766818 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 8 </xpartlines>
+			<partlines> 5 8 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000711694454938392642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000752775920475186756 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 8 </xpartlines>
+			<partlines> 4 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000767134447803857842 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 7 </xpartlines>
+			<partlines> 4 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000783650082327605664 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000848841654435085577 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000957093185831530183 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00113398823503352373 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 5 </xpartlines>
+			<partlines> 2 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00120225473233144119 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125272233077632815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 5 </xpartlines>
+			<partlines> 1 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00148623295494489864 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00243994510677220068 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00250266179027173021 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00288283320062619926 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00290053205202245312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00382351077572252833 0.0101148980666503786 </thetas>
+			<migrates> 0 266.63302494001374 180.090955280637019 0 </migrates>
+			<growthrates> 589.048754968938624 732.840714912730732 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 0 2 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.234999999999999987 </accrate>
+		<llikemle> 4.42696635227687985 </llikemle>
+		<llikedata> -3492.15130892703792 </llikedata>
+		<starttime> 1113934029 </starttime>
+		<endtime> 1113934045 </endtime>
+		<rates> <map> Tree-Arranger 47 200 </map> </rates>
+		<estimates>
+			<thetas> 0.0051932689740616959 0.0095267210917029492 </thetas>
+			<migrates> 0 0.0030934444336862682 46.6542500393172119 0 </migrates>
+			<growthrates> 1401.00175791445054 338.376238311546331 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.149999999999999994 </accrate>
+		<llikemle> 11.6317746234713901 </llikemle>
+		<llikedata> -3382.65609923619468 </llikedata>
+		<starttime> 1113934046 </starttime>
+		<endtime> 1113934062 </endtime>
+		<rates> <map> Tree-Arranger 30 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00363910230419641195 0.00949364169215391289 </thetas>
+			<migrates> 0 113.28617225286888 88.1277594808196767 0 </migrates>
+			<growthrates> 411.557560954860037 233.86142612045623 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.110000000000000001 </accrate>
+		<llikemle> 0.636063492994214719 </llikemle>
+		<llikedata> -3299.08922066821424 </llikedata>
+		<starttime> 1113934062 </starttime>
+		<endtime> 1113934079 </endtime>
+		<rates> <map> Tree-Arranger 22 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00344721894934559969 0.0137468371177546927 </thetas>
+			<migrates> 0 113.720626268037961 82.7670768811458402 0 </migrates>
+			<growthrates> 320.377044363307391 443.960643777887299 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.174999999999999989 </accrate>
+		<llikemle> 0.411652243146974017 </llikemle>
+		<llikedata> -3301.112857149506 </llikedata>
+		<starttime> 1113934079 </starttime>
+		<endtime> 1113934087 </endtime>
+		<rates> <map> Tree-Arranger 7 40 </map> </rates>
+		<estimates>
+			<thetas> 0.00274284032250226347 0.0117803366876250235 </thetas>
+			<migrates> 0 120.893070720992441 85.9918539079791771 0 </migrates>
+			<growthrates> 26.2473005315673831 368.198849330621101 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 2 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 19 20 </shortpoint>
+			<shortwait> 0.0517692740770155577 0.168835716039738043 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 2 0 </shortpoint>
+			<shortwait> 0.00827170676388711348 0.0236086927070103779 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 4.65136899805835341e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.04215566305798666e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.43604566212563868e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.84336441146756059e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.24324521605382133e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.02727378794709678e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.66779769093841711e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.70209786261414327e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.7820260710313902e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.28621094428023613e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107706203473348862 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 18 </xpartlines>
+			<partlines> 12 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000114816939483791875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000126163485711554112 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0001343202142583523 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157502040375572878 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000158198275534539187 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000185924618990777787 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195139416618541169 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000195144856593906958 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208892591389243015 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000221729134487027137 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000231540140137400703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000286748264076826459 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000296568169067036559 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00032780787636151992 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00040351349954208406 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000469019333789370591 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000501156369385404206 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 10 </xpartlines>
+			<partlines> 4 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00051564770169723541 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000720822165058714829 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 9 </xpartlines>
+			<partlines> 3 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00096916231842889629 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 8 </xpartlines>
+			<partlines> 3 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00117293932612708303 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 7 </xpartlines>
+			<partlines> 3 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00150380041638617967 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00173696782656019471 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00185054353054030468 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 7 </xpartlines>
+			<partlines> 1 7 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00191839064111927254 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00194182526794987008 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 7 </xpartlines>
+			<partlines> 0 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00225255911291697688 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 6 </xpartlines>
+			<partlines> 0 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00258123599239381586 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00295407111169069311 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00324495225912871431 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00382175977738545905 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 19 20 </shortpoint>
+			<shortwait> 0.0517692740770155577 0.159664105752183261 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 2 0 </shortpoint>
+			<shortwait> 0.00827170676388711348 0.0229534802929423874 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 4.65136899805835341e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.04215566305798666e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.43604566212563868e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.84336441146756059e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.24324521605382133e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.02727378794709678e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.66779769093841711e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.70209786261414327e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.7820260710313902e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.28621094428023613e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107706203473348862 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 18 </xpartlines>
+			<partlines> 12 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000114816939483791875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000126163485711554112 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0001343202142583523 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157502040375572878 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000158198275534539187 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000185924618990777787 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195139416618541169 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000195144856593906958 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208892591389243015 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000221729134487027137 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000231540140137400703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000286748264076826459 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000296568169067036559 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00032780787636151992 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00040351349954208406 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000469019333789370591 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000501156369385404206 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 10 </xpartlines>
+			<partlines> 4 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00051564770169723541 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000517726912059090507 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 9 </xpartlines>
+			<partlines> 3 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000720822165058714829 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 8 </xpartlines>
+			<partlines> 3 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00096916231842889629 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 7 </xpartlines>
+			<partlines> 3 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00150380041638617967 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00173696782656019471 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00185054353054030468 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 7 </xpartlines>
+			<partlines> 1 7 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00191839064111927254 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00194182526794987008 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 7 </xpartlines>
+			<partlines> 0 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00225255911291697688 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 6 </xpartlines>
+			<partlines> 0 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00258123599239381586 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00295407111169069311 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00324495225912871431 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00382175977738545905 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00344721894934559969 0.0137468371177546927 </thetas>
+			<migrates> 0 113.720626268037961 82.7670768811458402 0 </migrates>
+			<growthrates> 320.377044363307391 443.960643777887299 </growthrates>
+		</estimates>
+</chainsum>
+<replicate-summary>
+		<estimates>
+			<thetas> 0.00356219464250582358 0.00792721479150828613 </thetas>
+			<migrates> 0 121.904055450476534 100.015915512533283 0 </migrates>
+			<growthrates> 722.893455415187532 257.932768127086547 </growthrates>
+		</estimates>
+		<maxlike> -4.6390765097207467 </maxlike>
+</replicate-summary>
+<chainpack>
+	<number> 1 0 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.195000000000000007 </accrate>
+		<llikemle> 2.70726265416057954 </llikemle>
+		<llikedata> -3922.87305464996825 </llikedata>
+		<starttime> 1113934177 </starttime>
+		<endtime> 1113934192 </endtime>
+		<rates> <map> Tree-Arranger 39 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00380011726327808552 0.00713604248963370554 </thetas>
+			<migrates> 0 0.0014665879208418085 76.5973069163458717 0 </migrates>
+			<growthrates> 125.906070220065573 10.9556147174487499 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 0 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.200000000000000011 </accrate>
+		<llikemle> 19.4577047358918414 </llikemle>
+		<llikedata> -3507.94370195006422 </llikedata>
+		<starttime> 1113934192 </starttime>
+		<endtime> 1113934207 </endtime>
+		<rates> <map> Tree-Arranger 40 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00557328352740996819 0.00767691676407612593 </thetas>
+			<migrates> 0 72.5171669311285285 50.3899810789623572 0 </migrates>
+			<growthrates> 239.320949207652802 15.0158998451551309 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 0 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.195000000000000007 </accrate>
+		<llikemle> 1.41077196606614153 </llikemle>
+		<llikedata> -3388.85530318494739 </llikedata>
+		<starttime> 1113934207 </starttime>
+		<endtime> 1113934221 </endtime>
+		<rates> <map> Tree-Arranger 39 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00526638488263829647 0.00636010206007683442 </thetas>
+			<migrates> 0 145.033451919934123 78.9828742546580571 0 </migrates>
+			<growthrates> 101.354431821265237 42.7824995714557517 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 0 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.0500000000000000028 </accrate>
+		<llikemle> 0.106842797016683602 </llikemle>
+		<llikedata> -3387.66439523280769 </llikedata>
+		<starttime> 1113934222 </starttime>
+		<endtime> 1113934229 </endtime>
+		<rates> <map> Tree-Arranger 2 40 </map> </rates>
+		<estimates>
+			<thetas> 0.0048297296701252353 0.00632472336672777573 </thetas>
+			<migrates> 0 158.359159064075584 79.436028487943048 0 </migrates>
+			<growthrates> 48.5740506820224667 12.3249628941104206 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 1 0 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 21 18 </shortpoint>
+			<shortwait> 0.0985596947596808493 0.112500144905919378 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 3 2 0 </shortpoint>
+			<shortwait> 0.0189681695385016322 0.0251774919104673957 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 2.75402443608963525e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.67126558256159515e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 19 </xpartlines>
+			<partlines> 20 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.24882792998111863e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.28842625068443356e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 19 </xpartlines>
+			<partlines> 18 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.34298732185740874e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 19 </xpartlines>
+			<partlines> 17 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.46656309975548847e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.81601036755640491e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.9553870703353184e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 18 </xpartlines>
+			<partlines> 15 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.65634921089525913e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.53160440134389236e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.84400137091464748e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000104908914799906663 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107378906806900444 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134386781925198401 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 16 </xpartlines>
+			<partlines> 11 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015095815823508343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 16 </xpartlines>
+			<partlines> 10 16 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000187294338352160755 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 16 </xpartlines>
+			<partlines> 9 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000194311967981214922 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000207082475182713985 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000216519914862317283 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000240917252117856754 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000275177350551769888 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000277961519944129815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000327184596530212871 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 10 </xpartlines>
+			<partlines> 9 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000375434817240545321 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000421956732764341578 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 9 </xpartlines>
+			<partlines> 8 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000454327709213169504 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 8 </xpartlines>
+			<partlines> 8 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000455301780358049633 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 8 </xpartlines>
+			<partlines> 7 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477060867071809822 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000495771892855436606 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 6 </xpartlines>
+			<partlines> 7 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000577304059643193 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 5 </xpartlines>
+			<partlines> 7 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000728271605769075263 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000928286291063316091 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 4 </xpartlines>
+			<partlines> 6 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00136280706740197761 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00157448633617017866 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00170993270529683282 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 3 </xpartlines>
+			<partlines> 4 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00199676444517621777 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00200728073872961498 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00213154474570202875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00229629270831637703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 3 2 </xpartlines>
+			<partlines> 3 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00279444689547627594 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 1 </xpartlines>
+			<partlines> 4 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00362185244963840556 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 1 </xpartlines>
+			<partlines> 3 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00370319681038052849 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 1 </xpartlines>
+			<partlines> 2 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00482422222342331845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00968056818678340696 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 21 18 </shortpoint>
+			<shortwait> 0.0968437428145017959 0.112500144905919378 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 3 2 0 </shortpoint>
+			<shortwait> 0.018921333345451638 0.0251774919104673957 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 1.78294379475588584e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.75402443608963525e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.67126558256159515e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.24882792998111863e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 19 </xpartlines>
+			<partlines> 18 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.28842625068443356e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 19 </xpartlines>
+			<partlines> 17 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.34298732185740874e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.81601036755640491e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.9553870703353184e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 18 </xpartlines>
+			<partlines> 15 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.65634921089525913e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.53160440134389236e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.84400137091464748e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000104908914799906663 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107378906806900444 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134386781925198401 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 16 </xpartlines>
+			<partlines> 11 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015095815823508343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 16 </xpartlines>
+			<partlines> 10 16 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000187294338352160755 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 16 </xpartlines>
+			<partlines> 9 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000194311967981214922 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000207082475182713985 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000216519914862317283 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000240917252117856754 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000275177350551769888 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000277961519944129815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000327184596530212871 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 10 </xpartlines>
+			<partlines> 9 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000375434817240545321 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000421956732764341578 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 9 </xpartlines>
+			<partlines> 8 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000454327709213169504 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 8 </xpartlines>
+			<partlines> 8 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000455301780358049633 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 8 </xpartlines>
+			<partlines> 7 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477060867071809822 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000495771892855436606 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 6 </xpartlines>
+			<partlines> 7 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000577304059643193 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 5 </xpartlines>
+			<partlines> 7 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000728271605769075263 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000928286291063316091 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 4 </xpartlines>
+			<partlines> 6 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00136280706740197761 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00157448633617017866 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00170993270529683282 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 3 </xpartlines>
+			<partlines> 4 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00199676444517621777 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00200728073872961498 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00213154474570202875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00229629270831637703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 3 2 </xpartlines>
+			<partlines> 3 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00279444689547627594 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 1 </xpartlines>
+			<partlines> 4 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00362185244963840556 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 1 </xpartlines>
+			<partlines> 3 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00370319681038052849 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 1 </xpartlines>
+			<partlines> 2 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00482422222342331845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00968056818678340696 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00526638488263829647 0.00636010206007683442 </thetas>
+			<migrates> 0 145.033451919934123 78.9828742546580571 0 </migrates>
+			<growthrates> 101.354431821265237 42.7824995714557517 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 1 1 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.239999999999999991 </accrate>
+		<llikemle> 2.34750730772453364 </llikemle>
+		<llikedata> -3525.30915507333066 </llikedata>
+		<starttime> 1113934229 </starttime>
+		<endtime> 1113934243 </endtime>
+		<rates> <map> Tree-Arranger 48 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00633684542395633252 0.00414801483484614902 </thetas>
+			<migrates> 0 189.840646683791618 111.21338721037425 0 </migrates>
+			<growthrates> 134.29937737766619 -261.501604057297811 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 1 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 2 </tinytrees>
+		<accrate> 0.135000000000000009 </accrate>
+		<llikemle> 1.87776764509904415 </llikemle>
+		<llikedata> -3404.56028299746959 </llikedata>
+		<starttime> 1113934244 </starttime>
+		<endtime> 1113934259 </endtime>
+		<rates> <map> Tree-Arranger 27 200 </map> </rates>
+		<estimates>
+			<thetas> 0.0053726478023596964 0.0034495368114013979 </thetas>
+			<migrates> 0 175.867668937235891 153.579579990926845 0 </migrates>
+			<growthrates> 37.0453100154095694 -121.00495810761069 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 1 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.104999999999999996 </accrate>
+		<llikemle> 0.449286917034601607 </llikemle>
+		<llikedata> -3321.70936524524359 </llikedata>
+		<starttime> 1113934259 </starttime>
+		<endtime> 1113934274 </endtime>
+		<rates> <map> Tree-Arranger 21 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00422505900112751364 0.00395290224643087169 </thetas>
+			<migrates> 0 160.037181408148427 147.594989310094263 0 </migrates>
+			<growthrates> -173.121613861979398 -52.9903878137056665 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 1 3 </number>
+	<chainout>
+		<badtrees> 1 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.0749999999999999972 </accrate>
+		<llikemle> 4.54069966505540101 </llikemle>
+		<llikedata> -3318.52699208625927 </llikedata>
+		<starttime> 1113934274 </starttime>
+		<endtime> 1113934304 </endtime>
+		<rates> <map> Tree-Arranger 3 40 </map> </rates>
+		<estimates>
+			<thetas> 0.00295615018924887769 0.00491936629532284831 </thetas>
+			<migrates> 0 235.929972373142618 308.986288378157326 0 </migrates>
+			<growthrates> -631.659188775292023 226.807009056767953 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 1 1 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0648405565130086708 0.0877252307909633239 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 4 6 0 </shortpoint>
+			<shortwait> 0.0169381769238374467 0.0194182320038754526 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 1.30550527315558047e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.162795826446426e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.26737235869386312e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.55014581129354168e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.40932985043456985e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.90702713266381754e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 15 20 </xpartlines>
+			<partlines> 15 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.10411853492643601e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.12290248204213804e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.67472162385812578e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.65590413200949371e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000103869320417860665 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107286941773631806 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107550821053514076 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109709891069030956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000114694502065424022 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00012158499114137082 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 13 </xpartlines>
+			<partlines> 13 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000122718853104433789 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 12 </xpartlines>
+			<partlines> 13 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125375411254551923 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 11 </xpartlines>
+			<partlines> 13 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000142189216663119957 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 11 </xpartlines>
+			<partlines> 12 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000142843686193008286 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 11 </xpartlines>
+			<partlines> 11 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000171515317423868074 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000218046136466306875 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0002790329859263787 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00030767075159027072 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000326602024127066991 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000336138739569854291 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 9 </xpartlines>
+			<partlines> 7 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000411029437867106761 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 8 </xpartlines>
+			<partlines> 7 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000455284747279604488 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000539893327729701073 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000560189187505523802 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 6 </xpartlines>
+			<partlines> 6 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000724982991669875874 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000800823245136290159 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 5 </xpartlines>
+			<partlines> 5 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00101663478186505727 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00117786220475134804 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118517172371627641 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 5 </xpartlines>
+			<partlines> 3 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125529723210753312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00128846095940146773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00172659403406359149 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 3 </xpartlines>
+			<partlines> 2 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00213157854076952711 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 2 </xpartlines>
+			<partlines> 2 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00256248379887218016 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00262033009215670038 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 2 </xpartlines>
+			<partlines> 1 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00262735858334228268 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.0031959883447954078 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 0 </xpartlines>
+			<partlines> 2 0 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00551501377964465876 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00596028250037733409 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00803817495345396371 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00803938998428121825 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.0086849234925416869 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00905185738010444788 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0659711430564781931 0.0877252307909633239 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 4 6 0 </shortpoint>
+			<shortwait> 0.0169723921539429272 0.0194182320038754526 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 1.30550527315558047e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.26737235869386312e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.55014581129354168e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.93780259319326687e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.40932985043456985e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.90702713266381754e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 15 20 </xpartlines>
+			<partlines> 15 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.10411853492643601e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.12290248204213804e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.67472162385812578e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.65590413200949371e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000103869320417860665 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107286941773631806 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107550821053514076 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109709891069030956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000114694502065424022 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00012158499114137082 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 13 </xpartlines>
+			<partlines> 13 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000122718853104433789 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 12 </xpartlines>
+			<partlines> 13 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125375411254551923 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 11 </xpartlines>
+			<partlines> 13 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000142189216663119957 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 11 </xpartlines>
+			<partlines> 12 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000142843686193008286 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 11 </xpartlines>
+			<partlines> 11 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000171515317423868074 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000218046136466306875 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0002790329859263787 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00030767075159027072 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000326602024127066991 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000336138739569854291 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 9 </xpartlines>
+			<partlines> 7 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000411029437867106761 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 8 </xpartlines>
+			<partlines> 7 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000455284747279604488 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000539893327729701073 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000560189187505523802 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 6 </xpartlines>
+			<partlines> 6 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000724982991669875874 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000800823245136290159 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 5 </xpartlines>
+			<partlines> 5 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00101663478186505727 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00117786220475134804 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118517172371627641 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 5 </xpartlines>
+			<partlines> 3 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125529723210753312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00128846095940146773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00172659403406359149 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 3 </xpartlines>
+			<partlines> 2 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00213157854076952711 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 2 </xpartlines>
+			<partlines> 2 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00256248379887218016 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00262033009215670038 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 2 </xpartlines>
+			<partlines> 1 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00262735858334228268 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.0031959883447954078 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 0 </xpartlines>
+			<partlines> 2 0 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00551501377964465876 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00596028250037733409 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00803817495345396371 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00803938998428121825 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.0086849234925416869 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00905185738010444788 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00422505900112751364 0.00395290224643087169 </thetas>
+			<migrates> 0 160.037181408148427 147.594989310094263 0 </migrates>
+			<growthrates> -173.121613861979398 -52.9903878137056665 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 1 2 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.260000000000000009 </accrate>
+		<llikemle> 2.99420902451789361 </llikemle>
+		<llikedata> -4299.50526781021654 </llikedata>
+		<starttime> 1113934304 </starttime>
+		<endtime> 1113934319 </endtime>
+		<rates> <map> Tree-Arranger 52 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00756617832740799981 0.00659856362452725716 </thetas>
+			<migrates> 0 111.176155934682811 69.5707103933484632 0 </migrates>
+			<growthrates> 409.88939497805228 543.16021718378704 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 2 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.179999999999999993 </accrate>
+		<llikemle> 1.07089522228415079 </llikemle>
+		<llikedata> -3490.4063004547229 </llikedata>
+		<starttime> 1113934320 </starttime>
+		<endtime> 1113934335 </endtime>
+		<rates> <map> Tree-Arranger 36 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00656792579496304294 0.00584459175444660209 </thetas>
+			<migrates> 0 57.9007335206334091 156.694294591854742 0 </migrates>
+			<growthrates> 451.288616099099272 497.009379113105979 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 2 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.140000000000000013 </accrate>
+		<llikemle> 1.41480621744143908 </llikemle>
+		<llikedata> -3434.66438504787766 </llikedata>
+		<starttime> 1113934336 </starttime>
+		<endtime> 1113934351 </endtime>
+		<rates> <map> Tree-Arranger 28 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00550575271147208094 0.00696470662214750645 </thetas>
+			<migrates> 0 121.568711323335009 63.7627893168077549 0 </migrates>
+			<growthrates> 354.601203542231758 606.129048483006613 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 2 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.100000000000000006 </accrate>
+		<llikemle> 0.109493731119181953 </llikemle>
+		<llikedata> -3423.70440531923623 </llikedata>
+		<starttime> 1113934352 </starttime>
+		<endtime> 1113934359 </endtime>
+		<rates> <map> Tree-Arranger 4 40 </map> </rates>
+		<estimates>
+			<thetas> 0.00514192935091333826 0.0062288990703713602 </thetas>
+			<migrates> 0 124.55408129628016 61.5396353290182461 0 </migrates>
+			<growthrates> 335.346278823775265 527.046201636540445 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 1 2 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0861184261675420754 0.0989240815983647992 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 2 1 0 </shortpoint>
+			<shortwait> 0.0161108601645228097 0.0163539778587721008 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 2.10705440289478283e-06 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.25399383339123163e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 19 </xpartlines>
+			<partlines> 20 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.24885254542245494e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.11189546645828539e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 18 19 </xpartlines>
+			<partlines> 18 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.85012116407944336e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.78107834741533388e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 17 </xpartlines>
+			<partlines> 18 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000118112773039976836 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000127737720313619163 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000129662442835112526 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000131648496800567045 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000131860052917847297 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000140310461463006713 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000155023093005773421 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157657160551731915 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000162080373620438725 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 15 </xpartlines>
+			<partlines> 11 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195611028967059892 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000233034957421893385 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00023544280363578128 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 14 </xpartlines>
+			<partlines> 9 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000262195006109537632 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000262906955612288607 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000299377625930490846 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 12 </xpartlines>
+			<partlines> 8 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000304231828846860254 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00035306417392301292 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000385710705381445032 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 9 </xpartlines>
+			<partlines> 8 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00043664221284098057 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 8 </xpartlines>
+			<partlines> 8 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000458570971503667059 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000462182592971902917 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000529375689534849147 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 6 </xpartlines>
+			<partlines> 7 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000531151851086229484 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 5 </xpartlines>
+			<partlines> 7 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000570644372201619386 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000628012110491958956 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 5 </xpartlines>
+			<partlines> 5 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00126002496707287252 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00133172776547203667 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00138378313545306345 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 4 3 </xpartlines>
+			<partlines> 4 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00139440804359624269 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0015125657858844324 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00151492720306570521 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 3 </xpartlines>
+			<partlines> 2 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00180363775393179489 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00272922260577865804 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 2 </xpartlines>
+			<partlines> 1 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00303636024375119338 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 1 </xpartlines>
+			<partlines> 2 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00598338892698212308 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0064621861657712101 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 0 </xpartlines>
+			<partlines> 2 0 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0838664731195002539 0.0940195460747020734 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 2 1 0 </shortpoint>
+			<shortwait> 0.0160107008550887178 0.0161577536563222747 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 2.10705440289478283e-06 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.25399383339123163e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 19 </xpartlines>
+			<partlines> 20 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.24885254542245494e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.11189546645828539e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 18 19 </xpartlines>
+			<partlines> 18 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.85012116407944336e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.20971519398864496e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 17 </xpartlines>
+			<partlines> 18 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.78107834741533388e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000127737720313619163 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000129662442835112526 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000131648496800567045 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000131860052917847297 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000137097093409470431 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000140310461463006713 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000155023093005773421 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157657160551731915 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000158891269087890025 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000162080373620438725 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195611028967059892 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000206353336623343831 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000262195006109537632 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000262906955612288607 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000299377625930490846 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00035306417392301292 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000385710705381445032 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 9 </xpartlines>
+			<partlines> 8 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00043664221284098057 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 8 </xpartlines>
+			<partlines> 8 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000458570971503667059 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000462182592971902917 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000529375689534849147 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 6 </xpartlines>
+			<partlines> 7 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000531151851086229484 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 5 </xpartlines>
+			<partlines> 7 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000570644372201619386 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000628012110491958956 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 5 </xpartlines>
+			<partlines> 5 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00126002496707287252 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00133172776547203667 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00138378313545306345 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 4 3 </xpartlines>
+			<partlines> 4 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00139440804359624269 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0015125657858844324 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00151492720306570521 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 3 </xpartlines>
+			<partlines> 2 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00180363775393179489 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00272922260577865804 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 2 </xpartlines>
+			<partlines> 1 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00303636024375119338 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 1 </xpartlines>
+			<partlines> 2 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00598338892698212308 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0064621861657712101 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 0 </xpartlines>
+			<partlines> 2 0 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00550575271147208094 0.00696470662214750645 </thetas>
+			<migrates> 0 121.568711323335009 63.7627893168077549 0 </migrates>
+			<growthrates> 354.601203542231758 606.129048483006613 </growthrates>
+		</estimates>
+</chainsum>
+<replicate-summary>
+		<estimates>
+			<thetas> 0.00459610289535911792 0.00543752715647888643 </thetas>
+			<migrates> 0 173.235122299289941 147.938467429627138 0 </migrates>
+			<growthrates> 151.703172633675905 86.8286879559442326 </growthrates>
+		</estimates>
+		<maxlike> -3.86601975441648626 </maxlike>
+</replicate-summary>
+</XML-summary-file>
diff --git a/doc/html/insumfile.3rep.html b/doc/html/insumfile.3rep.html
new file mode 100644
index 0000000..1168ccc
--- /dev/null
+++ b/doc/html/insumfile.3rep.html
@@ -0,0 +1,1607 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+&ltXML-summary-file&gt
+&lt!-- Lamarc v. 2.0
+     Please do not modify. --&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.195000000000000007 &lt/accrate&gt
+		&ltllikemle&gt 4.50058419396642684 &lt/llikemle&gt
+		&ltllikedata&gt -3620.90996263017314 &lt/llikedata&gt
+		&ltstarttime&gt 1113933890 &lt/starttime&gt
+		&ltendtime&gt 1113933911 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 39 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00832313455063505084 0.00734484914891693919 &lt/thetas&gt
+			&ltmigrates&gt 0 96.6630952829531083 46.6299496864950527 0 &lt/migrates&gt
+			&ltgrowthrates&gt 2365.3441048272216 184.41163985471303 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.160000000000000003 &lt/accrate&gt
+		&ltllikemle&gt 1.18053638385976001 &lt/llikemle&gt
+		&ltllikedata&gt -3516.0909920893173 &lt/llikedata&gt
+		&ltstarttime&gt 1113933912 &lt/starttime&gt
+		&ltendtime&gt 1113933934 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 32 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00513756085360574273 0.0082400193242912461 &lt/thetas&gt
+			&ltmigrates&gt 0 131.185343491258408 42.1077506833302877 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1821.55597636667744 149.760781972604775 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.130000000000000004 &lt/accrate&gt
+		&ltllikemle&gt 1.6599393589321001 &lt/llikemle&gt
+		&ltllikedata&gt -3271.75316866892263 &lt/llikedata&gt
+		&ltstarttime&gt 1113933934 &lt/starttime&gt
+		&ltendtime&gt 1113933960 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 26 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00505700949907143815 0.00613707484094671175 &lt/thetas&gt
+			&ltmigrates&gt 0 124.679362905292081 34.8937725369221496 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1761.27593665903237 -84.0195656658781189 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 1 &lt/tinytrees&gt
+		&ltaccrate&gt 0.149999999999999994 &lt/accrate&gt
+		&ltllikemle&gt 0.596513557975481556 &lt/llikemle&gt
+		&ltllikedata&gt -3269.61178480345779 &lt/llikedata&gt
+		&ltstarttime&gt 1113933961 &lt/starttime&gt
+		&ltendtime&gt 1113933971 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 6 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0051461021148077107 0.00589555271016268308 &lt/thetas&gt
+			&ltmigrates&gt 0 125.68905479949639 48.865939986393137 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1835.77616186834098 82.0043403675645095 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 0 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0629273638426249143 0.105548312215536552 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0079654795218820091 0.0206451375053703819 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.09657490636763079e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2102355624254017e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.33961400471318623e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.77695915387843852e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10446887053792051e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 19 &lt/xpartlines&gt
+			&ltpartlines&gt 17 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.27457247455224152e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.44016581525568737e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.63431475801879578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.55098716603418459e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.39112816930027905e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.18973368575778886e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.3802045298549127e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00010629432546176642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000111507477845693503 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000150975907355033011 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000165638034054806815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219061924145474876 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219707138094496452 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 12 &lt/xpartlines&gt
+			&ltpartlines&gt 11 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000258213852917746063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 11 &lt/xpartlines&gt
+			&ltpartlines&gt 11 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000273652113921858643 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 10 &lt/xpartlines&gt
+			&ltpartlines&gt 11 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000280583868742954141 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 10 &lt/xpartlines&gt
+			&ltpartlines&gt 10 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000331106053916933646 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 9 &lt/xpartlines&gt
+			&ltpartlines&gt 10 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000344952279767115331 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 8 &lt/xpartlines&gt
+			&ltpartlines&gt 10 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345874033941911106 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 8 &lt/xpartlines&gt
+			&ltpartlines&gt 9 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000348861238031590646 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 7 &lt/xpartlines&gt
+			&ltpartlines&gt 9 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000424599242926313077 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000439947186235675422 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000481070344401667829 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000528930654355221855 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000592184315630871314 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 6 &lt/xpartlines&gt
+			&ltpartlines&gt 5 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000857591267505941114 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000866354211828237845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00091353575187592918 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 3 5 &lt/xpartlines&gt
+			&ltpartlines&gt 3 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000919185689826623655 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000955912424407952743 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118557134547170773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00153425735560147494 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0019246211065550127 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00206538502262878197 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00305508566675113469 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00533986494186010525 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0575370553514418445 0.104633534940785464 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00794664505963699544 0.0202802222566785852 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.09657490636763079e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2102355624254017e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.25411707266569827e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.33961400471318623e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.77695915387843852e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10446887053792051e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.27457247455224152e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.44016581525568737e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.63431475801879578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.55098716603418459e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.39112816930027905e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.3802045298549127e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00010617490035119024 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000111507477845693503 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000150975907355033011 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 15 &lt/xpartlines&gt
+			&ltpartlines&gt 11 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000151946997256986488 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000165638034054806815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000191522903992015264 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219707138094496452 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000258213852917746063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000273652113921858643 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 10 &lt/xpartlines&gt
+			&ltpartlines&gt 10 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000280583868742954141 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 10 &lt/xpartlines&gt
+			&ltpartlines&gt 9 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000331106053916933646 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 9 &lt/xpartlines&gt
+			&ltpartlines&gt 9 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345874033941911106 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 8 &lt/xpartlines&gt
+			&ltpartlines&gt 9 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000348861238031590646 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 7 &lt/xpartlines&gt
+			&ltpartlines&gt 9 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000424599242926313077 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000439947186235675422 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000481070344401667829 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000528930654355221855 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000592184315630871314 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 6 &lt/xpartlines&gt
+			&ltpartlines&gt 5 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000866354211828237845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00091353575187592918 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 4 5 &lt/xpartlines&gt
+			&ltpartlines&gt 4 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000919185689826623655 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000955912424407952743 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00114689035080777412 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118557134547170773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00153425735560147494 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0019246211065550127 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00206538502262878197 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00267205676641757645 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00533986494186010525 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00505700949907143815 0.00613707484094671175 &lt/thetas&gt
+			&ltmigrates&gt 0 124.679362905292081 34.8937725369221496 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1761.27593665903237 -84.0195656658781189 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.265000000000000013 &lt/accrate&gt
+		&ltllikemle&gt 8.03638549246886313 &lt/llikemle&gt
+		&ltllikedata&gt -3574.6386248622166 &lt/llikedata&gt
+		&ltstarttime&gt 1113933971 &lt/starttime&gt
+		&ltendtime&gt 1113933986 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 53 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00238967973895868897 0.00857314189478221474 &lt/thetas&gt
+			&ltmigrates&gt 0 178.594354360432533 201.711534027463927 0 &lt/migrates&gt
+			&ltgrowthrates&gt 286.265570187478374 522.584629259533926 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.174999999999999989 &lt/accrate&gt
+		&ltllikemle&gt 2.24292366391492948 &lt/llikemle&gt
+		&ltllikedata&gt -3366.95583651043171 &lt/llikedata&gt
+		&ltstarttime&gt 1113933986 &lt/starttime&gt
+		&ltendtime&gt 1113934003 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 35 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00342828243898930232 0.00938815584810562438 &lt/thetas&gt
+			&ltmigrates&gt 0 359.333313010233837 170.709781451344668 0 &lt/migrates&gt
+			&ltgrowthrates&gt 376.258762176827872 608.980866754854333 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.115000000000000005 &lt/accrate&gt
+		&ltllikemle&gt 0.331232649867971241 &lt/llikemle&gt
+		&ltllikedata&gt -3277.07186443434921 &lt/llikedata&gt
+		&ltstarttime&gt 1113934003 &lt/starttime&gt
+		&ltendtime&gt 1113934021 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 23 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00382351077572252833 0.0101148980666503786 &lt/thetas&gt
+			&ltmigrates&gt 0 266.63302494001374 180.090955280637019 0 &lt/migrates&gt
+			&ltgrowthrates&gt 589.048754968938624 732.840714912730732 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.100000000000000006 &lt/accrate&gt
+		&ltllikemle&gt 0.712638597764832182 &lt/llikemle&gt
+		&ltllikedata&gt -3264.58251001428516 &lt/llikedata&gt
+		&ltstarttime&gt 1113934021 &lt/starttime&gt
+		&ltendtime&gt 1113934029 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 4 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.004074651441941083 0.00837181988651936049 &lt/thetas&gt
+			&ltmigrates&gt 0 118.942422560571757 182.899111430931441 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1023.10652786136313 621.693756977592329 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 1 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0539318434248482123 0.124884165154835192 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 3 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00820356565115269894 0.0167862026647611975 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.86113155575850924e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.14780854199569499e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2291023672518374e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.66635848884044525e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 18 &lt/xpartlines&gt
+			&ltpartlines&gt 19 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.22961141141907646e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.48673742353794826e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.91186994294289797e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.82376276028000035e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.19910298629359333e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.45549455970045656e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 16 &lt/xpartlines&gt
+			&ltpartlines&gt 15 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.48515305769414904e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.63938283360383356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109224715491037562 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125328787711941807 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134238691874153575 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 15 &lt/xpartlines&gt
+			&ltpartlines&gt 11 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015730396931299724 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208869889075072063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000215094766862807343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000225539801544489551 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00024281367284408395 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 13 &lt/xpartlines&gt
+			&ltpartlines&gt 8 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000339783045919729956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 12 &lt/xpartlines&gt
+			&ltpartlines&gt 8 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000349763100889471208 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 12 &lt/xpartlines&gt
+			&ltpartlines&gt 7 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000380891571910665103 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 12 &lt/xpartlines&gt
+			&ltpartlines&gt 6 12 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000430449683486230515 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 6 11 &lt/xpartlines&gt
+			&ltpartlines&gt 6 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000457714375841571639 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477876206426440411 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000491278080421270061 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000618305099660766818 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 9 &lt/xpartlines&gt
+			&ltpartlines&gt 5 9 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000711694454938392642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 5 8 &lt/xpartlines&gt
+			&ltpartlines&gt 5 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000735873160967268547 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000752775920475186756 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 8 &lt/xpartlines&gt
+			&ltpartlines&gt 4 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000767134447803857842 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 7 &lt/xpartlines&gt
+			&ltpartlines&gt 4 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000783650082327605664 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000848841654435085577 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000957093185831530183 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00113398823503352373 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 5 &lt/xpartlines&gt
+			&ltpartlines&gt 2 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00120225473233144119 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125272233077632815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 5 &lt/xpartlines&gt
+			&ltpartlines&gt 1 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00148623295494489864 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00243994510677220068 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00250266179027173021 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00288283320062619926 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00290053205202245312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0609748580652498956 0.11278304974572527 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 3 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0085322498443588142 0.0161685184391479192 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.86113155575850924e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.14780854199569499e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2291023672518374e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.22961141141907646e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 18 &lt/xpartlines&gt
+			&ltpartlines&gt 19 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.48673742353794826e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.91186994294289797e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.82376276028000035e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.19910298629359333e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.45549455970045656e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 16 &lt/xpartlines&gt
+			&ltpartlines&gt 16 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.48515305769414904e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 16 &lt/xpartlines&gt
+			&ltpartlines&gt 15 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.63938283360383356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109224715491037562 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 15 &lt/xpartlines&gt
+			&ltpartlines&gt 14 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000118188935353991798 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125328787711941807 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134238691874153575 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015730396931299724 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208869889075072063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000215094766862807343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 12 &lt/xpartlines&gt
+			&ltpartlines&gt 11 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000225539801544489551 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00024281367284408395 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000339783045919729956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345347778094520265 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000349763100889471208 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000380891571910665103 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 11 &lt/xpartlines&gt
+			&ltpartlines&gt 6 11 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000430449683486230515 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000457714375841571639 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 9 &lt/xpartlines&gt
+			&ltpartlines&gt 7 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477876206426440411 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 9 &lt/xpartlines&gt
+			&ltpartlines&gt 6 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000491278080421270061 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 9 &lt/xpartlines&gt
+			&ltpartlines&gt 5 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000618305099660766818 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 8 &lt/xpartlines&gt
+			&ltpartlines&gt 5 8 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000711694454938392642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000752775920475186756 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 8 &lt/xpartlines&gt
+			&ltpartlines&gt 4 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000767134447803857842 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 7 &lt/xpartlines&gt
+			&ltpartlines&gt 4 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000783650082327605664 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000848841654435085577 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000957093185831530183 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00113398823503352373 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 5 &lt/xpartlines&gt
+			&ltpartlines&gt 2 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00120225473233144119 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125272233077632815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 5 &lt/xpartlines&gt
+			&ltpartlines&gt 1 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00148623295494489864 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00243994510677220068 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00250266179027173021 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00288283320062619926 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00290053205202245312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00382351077572252833 0.0101148980666503786 &lt/thetas&gt
+			&ltmigrates&gt 0 266.63302494001374 180.090955280637019 0 &lt/migrates&gt
+			&ltgrowthrates&gt 589.048754968938624 732.840714912730732 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.234999999999999987 &lt/accrate&gt
+		&ltllikemle&gt 4.42696635227687985 &lt/llikemle&gt
+		&ltllikedata&gt -3492.15130892703792 &lt/llikedata&gt
+		&ltstarttime&gt 1113934029 &lt/starttime&gt
+		&ltendtime&gt 1113934045 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 47 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0051932689740616959 0.0095267210917029492 &lt/thetas&gt
+			&ltmigrates&gt 0 0.0030934444336862682 46.6542500393172119 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1401.00175791445054 338.376238311546331 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.149999999999999994 &lt/accrate&gt
+		&ltllikemle&gt 11.6317746234713901 &lt/llikemle&gt
+		&ltllikedata&gt -3382.65609923619468 &lt/llikedata&gt
+		&ltstarttime&gt 1113934046 &lt/starttime&gt
+		&ltendtime&gt 1113934062 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 30 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00363910230419641195 0.00949364169215391289 &lt/thetas&gt
+			&ltmigrates&gt 0 113.28617225286888 88.1277594808196767 0 &lt/migrates&gt
+			&ltgrowthrates&gt 411.557560954860037 233.86142612045623 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.110000000000000001 &lt/accrate&gt
+		&ltllikemle&gt 0.636063492994214719 &lt/llikemle&gt
+		&ltllikedata&gt -3299.08922066821424 &lt/llikedata&gt
+		&ltstarttime&gt 1113934062 &lt/starttime&gt
+		&ltendtime&gt 1113934079 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 22 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00344721894934559969 0.0137468371177546927 &lt/thetas&gt
+			&ltmigrates&gt 0 113.720626268037961 82.7670768811458402 0 &lt/migrates&gt
+			&ltgrowthrates&gt 320.377044363307391 443.960643777887299 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.174999999999999989 &lt/accrate&gt
+		&ltllikemle&gt 0.411652243146974017 &lt/llikemle&gt
+		&ltllikedata&gt -3301.112857149506 &lt/llikedata&gt
+		&ltstarttime&gt 1113934079 &lt/starttime&gt
+		&ltendtime&gt 1113934087 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 7 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00274284032250226347 0.0117803366876250235 &lt/thetas&gt
+			&ltmigrates&gt 0 120.893070720992441 85.9918539079791771 0 &lt/migrates&gt
+			&ltgrowthrates&gt 26.2473005315673831 368.198849330621101 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 2 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 19 20 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0517692740770155577 0.168835716039738043 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00827170676388711348 0.0236086927070103779 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.65136899805835341e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.04215566305798666e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.43604566212563868e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.84336441146756059e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.24324521605382133e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.02727378794709678e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.66779769093841711e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.70209786261414327e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.7820260710313902e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.28621094428023613e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107706203473348862 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 18 &lt/xpartlines&gt
+			&ltpartlines&gt 12 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000114816939483791875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000126163485711554112 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0001343202142583523 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157502040375572878 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000158198275534539187 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000185924618990777787 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195139416618541169 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000195144856593906958 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208892591389243015 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000221729134487027137 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000231540140137400703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000286748264076826459 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000296568169067036559 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00032780787636151992 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00040351349954208406 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000469019333789370591 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000501156369385404206 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 10 &lt/xpartlines&gt
+			&ltpartlines&gt 4 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00051564770169723541 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000720822165058714829 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 9 &lt/xpartlines&gt
+			&ltpartlines&gt 3 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00096916231842889629 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 8 &lt/xpartlines&gt
+			&ltpartlines&gt 3 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00117293932612708303 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 7 &lt/xpartlines&gt
+			&ltpartlines&gt 3 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00150380041638617967 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00173696782656019471 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00185054353054030468 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 7 &lt/xpartlines&gt
+			&ltpartlines&gt 1 7 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00191839064111927254 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00194182526794987008 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 7 &lt/xpartlines&gt
+			&ltpartlines&gt 0 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00225255911291697688 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 6 &lt/xpartlines&gt
+			&ltpartlines&gt 0 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00258123599239381586 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00295407111169069311 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00324495225912871431 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00382175977738545905 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 19 20 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0517692740770155577 0.159664105752183261 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00827170676388711348 0.0229534802929423874 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.65136899805835341e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.04215566305798666e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.43604566212563868e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.84336441146756059e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.24324521605382133e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.02727378794709678e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.66779769093841711e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.70209786261414327e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.7820260710313902e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.28621094428023613e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107706203473348862 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 18 &lt/xpartlines&gt
+			&ltpartlines&gt 12 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000114816939483791875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000126163485711554112 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0001343202142583523 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157502040375572878 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000158198275534539187 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000185924618990777787 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195139416618541169 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000195144856593906958 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208892591389243015 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000221729134487027137 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000231540140137400703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000286748264076826459 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000296568169067036559 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00032780787636151992 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00040351349954208406 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000469019333789370591 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000501156369385404206 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 10 &lt/xpartlines&gt
+			&ltpartlines&gt 4 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00051564770169723541 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000517726912059090507 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 9 &lt/xpartlines&gt
+			&ltpartlines&gt 3 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000720822165058714829 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 8 &lt/xpartlines&gt
+			&ltpartlines&gt 3 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00096916231842889629 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 7 &lt/xpartlines&gt
+			&ltpartlines&gt 3 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00150380041638617967 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00173696782656019471 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00185054353054030468 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 7 &lt/xpartlines&gt
+			&ltpartlines&gt 1 7 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00191839064111927254 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00194182526794987008 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 7 &lt/xpartlines&gt
+			&ltpartlines&gt 0 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00225255911291697688 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 6 &lt/xpartlines&gt
+			&ltpartlines&gt 0 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00258123599239381586 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00295407111169069311 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00324495225912871431 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00382175977738545905 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00344721894934559969 0.0137468371177546927 &lt/thetas&gt
+			&ltmigrates&gt 0 113.720626268037961 82.7670768811458402 0 &lt/migrates&gt
+			&ltgrowthrates&gt 320.377044363307391 443.960643777887299 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&lt/XML-summary-file&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/insumfile.3rep.xml b/doc/html/insumfile.3rep.xml
new file mode 100644
index 0000000..6733494
--- /dev/null
+++ b/doc/html/insumfile.3rep.xml
@@ -0,0 +1,1600 @@
+<XML-summary-file>
+<!-- Lamarc v. 2.0
+     Please do not modify. -->
+<chainpack>
+	<number> 0 0 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.195000000000000007 </accrate>
+		<llikemle> 4.50058419396642684 </llikemle>
+		<llikedata> -3620.90996263017314 </llikedata>
+		<starttime> 1113933890 </starttime>
+		<endtime> 1113933911 </endtime>
+		<rates> <map> Tree-Arranger 39 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00832313455063505084 0.00734484914891693919 </thetas>
+			<migrates> 0 96.6630952829531083 46.6299496864950527 0 </migrates>
+			<growthrates> 2365.3441048272216 184.41163985471303 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.160000000000000003 </accrate>
+		<llikemle> 1.18053638385976001 </llikemle>
+		<llikedata> -3516.0909920893173 </llikedata>
+		<starttime> 1113933912 </starttime>
+		<endtime> 1113933934 </endtime>
+		<rates> <map> Tree-Arranger 32 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00513756085360574273 0.0082400193242912461 </thetas>
+			<migrates> 0 131.185343491258408 42.1077506833302877 0 </migrates>
+			<growthrates> 1821.55597636667744 149.760781972604775 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.130000000000000004 </accrate>
+		<llikemle> 1.6599393589321001 </llikemle>
+		<llikedata> -3271.75316866892263 </llikedata>
+		<starttime> 1113933934 </starttime>
+		<endtime> 1113933960 </endtime>
+		<rates> <map> Tree-Arranger 26 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00505700949907143815 0.00613707484094671175 </thetas>
+			<migrates> 0 124.679362905292081 34.8937725369221496 0 </migrates>
+			<growthrates> 1761.27593665903237 -84.0195656658781189 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 1 </tinytrees>
+		<accrate> 0.149999999999999994 </accrate>
+		<llikemle> 0.596513557975481556 </llikemle>
+		<llikedata> -3269.61178480345779 </llikedata>
+		<starttime> 1113933961 </starttime>
+		<endtime> 1113933971 </endtime>
+		<rates> <map> Tree-Arranger 6 40 </map> </rates>
+		<estimates>
+			<thetas> 0.0051461021148077107 0.00589555271016268308 </thetas>
+			<migrates> 0 125.68905479949639 48.865939986393137 0 </migrates>
+			<growthrates> 1835.77616186834098 82.0043403675645095 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 0 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0629273638426249143 0.105548312215536552 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 1 0 </shortpoint>
+			<shortwait> 0.0079654795218820091 0.0206451375053703819 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 5.09657490636763079e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2102355624254017e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.33961400471318623e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.77695915387843852e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.10446887053792051e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 19 </xpartlines>
+			<partlines> 17 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.27457247455224152e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.44016581525568737e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.63431475801879578e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.55098716603418459e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.39112816930027905e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.18973368575778886e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.3802045298549127e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00010629432546176642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000111507477845693503 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000150975907355033011 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000165638034054806815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219061924145474876 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219707138094496452 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 12 </xpartlines>
+			<partlines> 11 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000258213852917746063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 11 </xpartlines>
+			<partlines> 11 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000273652113921858643 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 10 </xpartlines>
+			<partlines> 11 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000280583868742954141 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 10 </xpartlines>
+			<partlines> 10 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000331106053916933646 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 9 </xpartlines>
+			<partlines> 10 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000344952279767115331 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 8 </xpartlines>
+			<partlines> 10 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345874033941911106 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 8 </xpartlines>
+			<partlines> 9 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000348861238031590646 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 7 </xpartlines>
+			<partlines> 9 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000424599242926313077 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000439947186235675422 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000481070344401667829 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000528930654355221855 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000592184315630871314 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 6 </xpartlines>
+			<partlines> 5 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000857591267505941114 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000866354211828237845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00091353575187592918 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 3 5 </xpartlines>
+			<partlines> 3 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000919185689826623655 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000955912424407952743 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118557134547170773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00153425735560147494 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0019246211065550127 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00206538502262878197 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00305508566675113469 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00533986494186010525 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0575370553514418445 0.104633534940785464 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 1 0 </shortpoint>
+			<shortwait> 0.00794664505963699544 0.0202802222566785852 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 5.09657490636763079e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2102355624254017e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.25411707266569827e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.33961400471318623e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.77695915387843852e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.10446887053792051e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.27457247455224152e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.44016581525568737e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.63431475801879578e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.55098716603418459e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.39112816930027905e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.3802045298549127e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00010617490035119024 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000111507477845693503 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000150975907355033011 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 15 </xpartlines>
+			<partlines> 11 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000151946997256986488 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000165638034054806815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000191522903992015264 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219707138094496452 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000258213852917746063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000273652113921858643 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 10 </xpartlines>
+			<partlines> 10 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000280583868742954141 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 10 </xpartlines>
+			<partlines> 9 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000331106053916933646 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 9 </xpartlines>
+			<partlines> 9 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345874033941911106 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 8 </xpartlines>
+			<partlines> 9 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000348861238031590646 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 7 </xpartlines>
+			<partlines> 9 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000424599242926313077 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000439947186235675422 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000481070344401667829 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000528930654355221855 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000592184315630871314 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 6 </xpartlines>
+			<partlines> 5 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000866354211828237845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00091353575187592918 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 4 5 </xpartlines>
+			<partlines> 4 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000919185689826623655 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000955912424407952743 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00114689035080777412 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118557134547170773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00153425735560147494 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0019246211065550127 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00206538502262878197 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00267205676641757645 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00533986494186010525 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00505700949907143815 0.00613707484094671175 </thetas>
+			<migrates> 0 124.679362905292081 34.8937725369221496 0 </migrates>
+			<growthrates> 1761.27593665903237 -84.0195656658781189 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 0 1 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.265000000000000013 </accrate>
+		<llikemle> 8.03638549246886313 </llikemle>
+		<llikedata> -3574.6386248622166 </llikedata>
+		<starttime> 1113933971 </starttime>
+		<endtime> 1113933986 </endtime>
+		<rates> <map> Tree-Arranger 53 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00238967973895868897 0.00857314189478221474 </thetas>
+			<migrates> 0 178.594354360432533 201.711534027463927 0 </migrates>
+			<growthrates> 286.265570187478374 522.584629259533926 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.174999999999999989 </accrate>
+		<llikemle> 2.24292366391492948 </llikemle>
+		<llikedata> -3366.95583651043171 </llikedata>
+		<starttime> 1113933986 </starttime>
+		<endtime> 1113934003 </endtime>
+		<rates> <map> Tree-Arranger 35 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00342828243898930232 0.00938815584810562438 </thetas>
+			<migrates> 0 359.333313010233837 170.709781451344668 0 </migrates>
+			<growthrates> 376.258762176827872 608.980866754854333 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.115000000000000005 </accrate>
+		<llikemle> 0.331232649867971241 </llikemle>
+		<llikedata> -3277.07186443434921 </llikedata>
+		<starttime> 1113934003 </starttime>
+		<endtime> 1113934021 </endtime>
+		<rates> <map> Tree-Arranger 23 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00382351077572252833 0.0101148980666503786 </thetas>
+			<migrates> 0 266.63302494001374 180.090955280637019 0 </migrates>
+			<growthrates> 589.048754968938624 732.840714912730732 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.100000000000000006 </accrate>
+		<llikemle> 0.712638597764832182 </llikemle>
+		<llikedata> -3264.58251001428516 </llikedata>
+		<starttime> 1113934021 </starttime>
+		<endtime> 1113934029 </endtime>
+		<rates> <map> Tree-Arranger 4 40 </map> </rates>
+		<estimates>
+			<thetas> 0.004074651441941083 0.00837181988651936049 </thetas>
+			<migrates> 0 118.942422560571757 182.899111430931441 0 </migrates>
+			<growthrates> 1023.10652786136313 621.693756977592329 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 1 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0539318434248482123 0.124884165154835192 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 3 0 </shortpoint>
+			<shortwait> 0.00820356565115269894 0.0167862026647611975 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 9.86113155575850924e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.14780854199569499e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2291023672518374e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.66635848884044525e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 18 </xpartlines>
+			<partlines> 19 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.22961141141907646e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.48673742353794826e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.91186994294289797e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.82376276028000035e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.19910298629359333e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.45549455970045656e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 16 </xpartlines>
+			<partlines> 15 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.48515305769414904e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.63938283360383356e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109224715491037562 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125328787711941807 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134238691874153575 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 15 </xpartlines>
+			<partlines> 11 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015730396931299724 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208869889075072063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000215094766862807343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000225539801544489551 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00024281367284408395 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 13 </xpartlines>
+			<partlines> 8 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000339783045919729956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 12 </xpartlines>
+			<partlines> 8 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000349763100889471208 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 12 </xpartlines>
+			<partlines> 7 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000380891571910665103 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 12 </xpartlines>
+			<partlines> 6 12 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000430449683486230515 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 6 11 </xpartlines>
+			<partlines> 6 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000457714375841571639 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477876206426440411 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000491278080421270061 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000618305099660766818 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 9 </xpartlines>
+			<partlines> 5 9 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000711694454938392642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 5 8 </xpartlines>
+			<partlines> 5 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000735873160967268547 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000752775920475186756 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 8 </xpartlines>
+			<partlines> 4 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000767134447803857842 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 7 </xpartlines>
+			<partlines> 4 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000783650082327605664 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000848841654435085577 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000957093185831530183 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00113398823503352373 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 5 </xpartlines>
+			<partlines> 2 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00120225473233144119 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125272233077632815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 5 </xpartlines>
+			<partlines> 1 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00148623295494489864 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00243994510677220068 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00250266179027173021 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00288283320062619926 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00290053205202245312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0609748580652498956 0.11278304974572527 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 3 0 </shortpoint>
+			<shortwait> 0.0085322498443588142 0.0161685184391479192 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 9.86113155575850924e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.14780854199569499e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2291023672518374e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.22961141141907646e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 18 </xpartlines>
+			<partlines> 19 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.48673742353794826e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.91186994294289797e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.82376276028000035e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.19910298629359333e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.45549455970045656e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 16 </xpartlines>
+			<partlines> 16 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.48515305769414904e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 16 </xpartlines>
+			<partlines> 15 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.63938283360383356e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109224715491037562 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 15 </xpartlines>
+			<partlines> 14 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000118188935353991798 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125328787711941807 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134238691874153575 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015730396931299724 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208869889075072063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000215094766862807343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 12 </xpartlines>
+			<partlines> 11 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000225539801544489551 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00024281367284408395 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000339783045919729956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345347778094520265 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000349763100889471208 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000380891571910665103 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 11 </xpartlines>
+			<partlines> 6 11 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000430449683486230515 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000457714375841571639 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 9 </xpartlines>
+			<partlines> 7 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477876206426440411 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 9 </xpartlines>
+			<partlines> 6 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000491278080421270061 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 9 </xpartlines>
+			<partlines> 5 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000618305099660766818 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 8 </xpartlines>
+			<partlines> 5 8 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000711694454938392642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000752775920475186756 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 8 </xpartlines>
+			<partlines> 4 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000767134447803857842 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 7 </xpartlines>
+			<partlines> 4 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000783650082327605664 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000848841654435085577 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000957093185831530183 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00113398823503352373 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 5 </xpartlines>
+			<partlines> 2 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00120225473233144119 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125272233077632815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 5 </xpartlines>
+			<partlines> 1 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00148623295494489864 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00243994510677220068 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00250266179027173021 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00288283320062619926 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00290053205202245312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00382351077572252833 0.0101148980666503786 </thetas>
+			<migrates> 0 266.63302494001374 180.090955280637019 0 </migrates>
+			<growthrates> 589.048754968938624 732.840714912730732 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 0 2 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.234999999999999987 </accrate>
+		<llikemle> 4.42696635227687985 </llikemle>
+		<llikedata> -3492.15130892703792 </llikedata>
+		<starttime> 1113934029 </starttime>
+		<endtime> 1113934045 </endtime>
+		<rates> <map> Tree-Arranger 47 200 </map> </rates>
+		<estimates>
+			<thetas> 0.0051932689740616959 0.0095267210917029492 </thetas>
+			<migrates> 0 0.0030934444336862682 46.6542500393172119 0 </migrates>
+			<growthrates> 1401.00175791445054 338.376238311546331 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.149999999999999994 </accrate>
+		<llikemle> 11.6317746234713901 </llikemle>
+		<llikedata> -3382.65609923619468 </llikedata>
+		<starttime> 1113934046 </starttime>
+		<endtime> 1113934062 </endtime>
+		<rates> <map> Tree-Arranger 30 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00363910230419641195 0.00949364169215391289 </thetas>
+			<migrates> 0 113.28617225286888 88.1277594808196767 0 </migrates>
+			<growthrates> 411.557560954860037 233.86142612045623 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.110000000000000001 </accrate>
+		<llikemle> 0.636063492994214719 </llikemle>
+		<llikedata> -3299.08922066821424 </llikedata>
+		<starttime> 1113934062 </starttime>
+		<endtime> 1113934079 </endtime>
+		<rates> <map> Tree-Arranger 22 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00344721894934559969 0.0137468371177546927 </thetas>
+			<migrates> 0 113.720626268037961 82.7670768811458402 0 </migrates>
+			<growthrates> 320.377044363307391 443.960643777887299 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.174999999999999989 </accrate>
+		<llikemle> 0.411652243146974017 </llikemle>
+		<llikedata> -3301.112857149506 </llikedata>
+		<starttime> 1113934079 </starttime>
+		<endtime> 1113934087 </endtime>
+		<rates> <map> Tree-Arranger 7 40 </map> </rates>
+		<estimates>
+			<thetas> 0.00274284032250226347 0.0117803366876250235 </thetas>
+			<migrates> 0 120.893070720992441 85.9918539079791771 0 </migrates>
+			<growthrates> 26.2473005315673831 368.198849330621101 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 2 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 19 20 </shortpoint>
+			<shortwait> 0.0517692740770155577 0.168835716039738043 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 2 0 </shortpoint>
+			<shortwait> 0.00827170676388711348 0.0236086927070103779 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 4.65136899805835341e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.04215566305798666e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.43604566212563868e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.84336441146756059e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.24324521605382133e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.02727378794709678e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.66779769093841711e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.70209786261414327e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.7820260710313902e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.28621094428023613e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107706203473348862 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 18 </xpartlines>
+			<partlines> 12 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000114816939483791875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000126163485711554112 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0001343202142583523 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157502040375572878 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000158198275534539187 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000185924618990777787 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195139416618541169 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000195144856593906958 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208892591389243015 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000221729134487027137 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000231540140137400703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000286748264076826459 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000296568169067036559 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00032780787636151992 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00040351349954208406 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000469019333789370591 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000501156369385404206 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 10 </xpartlines>
+			<partlines> 4 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00051564770169723541 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000720822165058714829 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 9 </xpartlines>
+			<partlines> 3 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00096916231842889629 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 8 </xpartlines>
+			<partlines> 3 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00117293932612708303 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 7 </xpartlines>
+			<partlines> 3 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00150380041638617967 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00173696782656019471 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00185054353054030468 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 7 </xpartlines>
+			<partlines> 1 7 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00191839064111927254 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00194182526794987008 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 7 </xpartlines>
+			<partlines> 0 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00225255911291697688 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 6 </xpartlines>
+			<partlines> 0 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00258123599239381586 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00295407111169069311 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00324495225912871431 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00382175977738545905 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 19 20 </shortpoint>
+			<shortwait> 0.0517692740770155577 0.159664105752183261 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 2 0 </shortpoint>
+			<shortwait> 0.00827170676388711348 0.0229534802929423874 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 4.65136899805835341e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.04215566305798666e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.43604566212563868e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.84336441146756059e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.24324521605382133e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.02727378794709678e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.66779769093841711e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.70209786261414327e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.7820260710313902e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.28621094428023613e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107706203473348862 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 18 </xpartlines>
+			<partlines> 12 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000114816939483791875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000126163485711554112 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0001343202142583523 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157502040375572878 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000158198275534539187 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000185924618990777787 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195139416618541169 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000195144856593906958 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208892591389243015 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000221729134487027137 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000231540140137400703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000286748264076826459 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000296568169067036559 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00032780787636151992 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00040351349954208406 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000469019333789370591 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000501156369385404206 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 10 </xpartlines>
+			<partlines> 4 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00051564770169723541 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000517726912059090507 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 9 </xpartlines>
+			<partlines> 3 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000720822165058714829 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 8 </xpartlines>
+			<partlines> 3 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00096916231842889629 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 7 </xpartlines>
+			<partlines> 3 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00150380041638617967 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00173696782656019471 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00185054353054030468 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 7 </xpartlines>
+			<partlines> 1 7 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00191839064111927254 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00194182526794987008 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 7 </xpartlines>
+			<partlines> 0 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00225255911291697688 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 6 </xpartlines>
+			<partlines> 0 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00258123599239381586 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00295407111169069311 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00324495225912871431 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00382175977738545905 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00344721894934559969 0.0137468371177546927 </thetas>
+			<migrates> 0 113.720626268037961 82.7670768811458402 0 </migrates>
+			<growthrates> 320.377044363307391 443.960643777887299 </growthrates>
+		</estimates>
+</chainsum>
+</XML-summary-file>
diff --git a/doc/html/limitations.html b/doc/html/limitations.html
new file mode 100644
index 0000000..0224514
--- /dev/null
+++ b/doc/html/limitations.html
@@ -0,0 +1,97 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Limitations of LAMARC</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+
+<P>(<A HREF="messages.html">Previous</A> | <A HREF="index.html">Contents</A>
+| <A HREF="glossary.html">Next</A>)</P>
+
+<H2> Limitations of LAMARC </H2>
+
+<P> This documentation has a lot to say about what LAMARC can do.
+Here is a concise guide to what it cannot do, with some suggestions
+for other approaches.</P>
+
+<H3> Some combinations of analyses are not possible. </H3>
+
+<P> Due to program or mathematical limitations, some combinations
+of analyses are not possible and will be rejected if attempted:</P>
+
+<UL>
+
+<LI> Gamma-distributed variation in mutation rate among regions is
+not compatible with estimation of growth, nor can it currently be
+used in a Bayesian run.</LI>
+
+<LI> Newick trees cannot be written out if migration or recombination
+are allowed.  </LI>
+
+<LI> Migration rate estimation requires at least two populations.</LI>
+
+<LI> Recombination estimation requires at least two linked markers. </LI>
+
+<LI> Mapping requires recombination.</LI>
+
+<LI> Divergence can only be inferred in a Bayesian run, not a likelihood
+run.</LI>
+
+</UL>
+
+<H3> Some cases cannot be well modelled by LAMARC. </H3>
+
+LAMARC has a wide range of evolutionary models but not all possible
+ones by any means.  Some significant omissions:
+
+<UL>
+
+<LI> Samples from multiple time points in a fast-evolving population. 
+Consider the <A HREF="http://beast.bio.ed.ac.uk/Main_Page">BEAST</a> 
+program of Drummond and Rambaut for this.</LI>
+
+<LI> Multiple population divergence cases where the population tree
+is not known.  Consider the *BEAST program of Drummond and Rambaut
+when it is available.</LI>
+
+<LI> RFLP, AFLP, or insertion/deletion data.  You may be able to use the
+<A HREF="http://cmpg.unibe.ch/software/arlequin3/">ARLEQUIN</A> program of Excoffier; 
+we know of no full coalescent likelihood or
+Bayesian analysis which can handle these data. </LI>
+
+<LI> Growth models other than simple exponential growth or decline. Consider
+the <A HREF="http://beast.bio.ed.ac.uk/Main_Page">BEAST</a> program of
+Drummond and Rambaut.</LI>
+
+<LI> Geographic isolation as a function of distance, rather than
+via separation into distinct subpopulations.  Consider the Dancing
+Trees algorithm of <A
+HREF="http://ib.berkeley.edu/labs/moritz/research/theory.html">Baird</a>,
+if an implementation becomes available.</LI>
+
+<LI> Combining data which have recombination and data which do not
+(i.e. nuclear and mitochondrial DNA) in the same recombination-aware
+analysis.  We know of no alternative.  You may want to do two separate
+analyses with LAMARC. </LI>
+
+<LI> Sequences from multi-gene families.  The underlying coalescent
+model in LAMARC is not correct for such data.  Consider the gene-families
+ML algorithm of Dubb, if an implementation becomes available.</LI>
+
+<LI> Recombination rates which vary among regions or across the sequence.
+Consider the <A HREF="http://www.stats.ox.ac.uk/~mcvean/LDhat/">LDHAT</A>
+program of McVean.</LI>
+
+</UL>
+
+<P>(<A HREF="messages.html">Previous</A> | <A HREF="index.html">Contents</A>
+| <A HREF="glossary.html">Next</A>)</P>
+
+<!--
+//$Id: limitations.html,v 1.10 2012/05/16 17:14:01 mkkuhner Exp $
+-->
+</BODY>
+</HTML>
+
diff --git a/doc/html/mapping.html b/doc/html/mapping.html
new file mode 100644
index 0000000..96c7dd4
--- /dev/null
+++ b/doc/html/mapping.html
@@ -0,0 +1,466 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Mapping</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Metropolis-Hastings, Markov chain Monte Carlo
+ simulation, migration rate, effective population size, recombination rate,
+ maximum likelihood, trait mapping -->
+
+(<A HREF="parallel.html">Previous</A> | <A HREF="index.html">Contents</A> | 
+<A HREF="compiling.html">Next</A>)
+<H2>Mapping Traits Using LAMARC</H2>
+
+<P>As of LAMARC version 2.1, we now have the capacity to perform fine-scale
+mapping of traits and diseases that have been localized to a single stretch
+of DNA (in LAMARC terms, to one genomic region).  The way it works is that
+as LAMARC searches among trees, it calculates the likelihood of the observed
+trait data being created at each site in the sequence.  Assuming that each
+site has an equal chance of being the true site, it then converts these
+likelihoods into probabilities, and averages the resulting values over all the
+trees LAMARC collects.
+</P>
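+
+<P>As an illustration of that bookkeeping (a minimal sketch only, not
+LAMARC's actual code; the likelihood values are invented), the per-site
+likelihoods from each sampled tree are rescaled to sum to one, reflecting
+the equal-prior assumption, and the rescaled values are then averaged
+across trees:</P>
+
+<pre>
+# Sketch: turn per-site trait-data likelihoods into mapping probabilities.
+# Assumes a uniform prior over sites; the numbers below are invented.
+def site_probabilities(per_tree_likelihoods):
+    n_trees = len(per_tree_likelihoods)
+    n_sites = len(per_tree_likelihoods[0])
+    totals = [0.0] * n_sites
+    for likes in per_tree_likelihoods:
+        norm = sum(likes)                 # equal prior over sites
+        for i, like in enumerate(likes):
+            totals[i] += like / norm      # per-tree posterior for site i
+    return [t / n_trees for t in totals]
+
+# Two hypothetical trees, three candidate sites:
+print(site_probabilities([[0.2, 0.5, 0.3], [0.1, 0.7, 0.2]]))
+</pre>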
+
+<P>We have one submitted paper and another in the works (as of early 2007)
+that take this approach.  Until those appear, more detailed theory can be
+provided upon request.
+
+<P>To use LAMARC as a mapping tool, you'll have to collect the data, convert
+it into a LAMARC input file, set up the parameters for your search, and run
+the analysis.  We'll go through each step in succession.
+During our discussion, we'll make use of an example data file and converter
+command file for a simple trait mapping case. Included are
+<a href="trait_mapping/traitData.mig">sample trait data</a>
+(in migrate format),
+and a <a href="trait_mapping/traitCmd.html">sample converter command</a> file.
+(actual xml is <a href="trait_mapping/traitCmd.xml">here</a>.)
+
+<UL>
+<LI><A HREF="mapping.html#collect">Collecting Data</A></LI>
+<LI> <A HREF="mapping.html#convert">Converting Data</a></LI>
+<LI> <A HREF="mapping.html#move">Moving to LAMARC</a></LI>
+<LI> <A HREF="mapping.html#setup">Setting up parameters</a></LI>
+<LI> <A HREF="mapping.html#analyze">Analyzing your results</a></LI>
+</UL>
+
+
+<h3><A NAME="collect">Collecting Data</h3>
+
+<P>One of the assumptions of LAMARC is that the samples are collected at
+random from the population.  This affects the types of trees one would
+expect to see that relate your samples to one another.  However, researchers
+who study a particular disease often have one set of samples from affected
+individuals, and another set of samples from unaffected individuals.  This
+introduces an ascertainment bias that makes the true tree less likely to be
+produced than it should be.  We have not yet investigated how strongly this
+affects the accuracy of mapping results, but it is important to keep in
+mind.
+
+<P>The ideal trait for a LAMARC analysis would be one that is ubiquitously
+diverse in the studied population (like eye color in humans).  A random
+sample from the population as a whole could then be classified into
+different phenotypic groups.  More information on the ascertainment question
+will be provided as our own experiments proceed.
+
+
+
+<H3><A NAME="convert">Converting Data</H3>
+
+<P>Once you have collected data (of any type--marker SNPs, full sequence
+information, or microsatellites), you will need to get the following 
+information into lamarc:
+<ul>
+    <li> trait and allele names,
+    <li> the specific genomic region in which the trait is to be searched, and
+    <li> the correspondence between data samples, individuals, and the
+        phenotypes they express.
+</ul>
+
+<p>At this writing, most of this information can only be entered
+into the lamarc file by writing a
+<a href="converter_cmd.html">converter command file</a>.
+You should refer to that document for the complete syntax of that
+file. Below is a brief overview of the mapping-specific components
+of that file.
+
+<H4><a name="trait-info-defs">Defining Trait and Allele Names</a></H4>
+
+Below is the section of our example 
+<a href="converter_cmd.html">converter command file</a>
+which defines our trait and its alleles.
+
+<pre>
+     <lamarc-converter-cmd>
+        ...
+        <traits>
+            <trait-info>
+                <name>funny-nose</name>
+                <allele>normal</allele>
+                <allele>affected</allele>
+            </trait-info>
+            <phenotype>
+                ...
+            </phenotype>
+        </traits>
+        ...
+     </lamarc-converter-cmd>
+</pre>
+
+<P>This would define the 'funny-nose' trait, with its two alleles, normal
+and affected.</P>
+
+
+<H4><a name="phenotype-defs">Defining Phenotypic Groups</a></H4>
+
+<p>Phenotypic groups are collections of individuals who all
+display the same phenotype.  This requires that one know or have a
+reasonable model for the mechanism and penetrance of the trait in question. 
+In a simple dominant/recessive case, individuals displaying the dominant
+phenotype could be any of AA, Aa, or aA, while individuals displaying the
+recessive phenotype all must be aa.</P>
+
+<p>If your data set includes many individuals with the same phenotype, the
+easiest way to specify them is to define <phenotype> tags in the
+converter command file. Below are the definitions for our simple example.
+
+<pre>
+     <lamarc-converter-cmd>
+        ...
+        <traits>
+            ...
+            <trait-info>
+                ...
+            </trait-info>
+            <phenotype>
+                <name>straight</name>
+                <genotype-resolutions>
+                    <trait>funny-nose</trait>
+                    <haplotypes>
+                        <penetrance> 1.0 </penetrance>
+                        <alleles> normal normal </alleles>
+                    </haplotypes>
+                </genotype-resolutions>
+            </phenotype>
+            <phenotype>
+                <name>bent</name>
+                <genotype-resolutions>
+                    <trait>funny-nose</trait>
+                    <haplotypes>
+                        <penetrance> 1.0 </penetrance>
+                        <alleles> affected normal </alleles>
+                    </haplotypes>
+                    <haplotypes>
+                        <penetrance> 1.0 </penetrance>
+                        <alleles> normal affected </alleles>
+                    </haplotypes>
+                </genotype-resolutions>
+            </phenotype>
+            <phenotype>
+                <name>broken</name>
+                <genotype-resolutions>
+                    <trait>funny-nose</trait>
+                    <haplotypes>
+                        <penetrance> 1.0 </penetrance>
+                        <alleles> affected affected </alleles>
+                    </haplotypes>
+                </genotype-resolutions>
+            </phenotype>
+        </traits>
+        ...
+     </lamarc-converter-cmd>
+</pre>
+
+
+<P>Note that the 'penetrance' in all instances is the chance that an
+individual with those alleles displays the observed phenotype.  If
+heterozygotes were 80% likely to display the same phenotype as all
+normal/normal individuals, and 20% likely to display the same phenotype as
+all broken/broken individuals, there would be only 2 phenotypes to define
+and they would be as follows:
+
+<pre>
+    <phenotype>
+        <name>straight</name>
+        <genotype-resolutions>
+            <trait>funny-nose</trait>
+            <haplotypes>
+                <penetrance> 1.0 </penetrance>
+                <alleles> normal normal </alleles>
+            </haplotypes>
+            <haplotypes>
+                <penetrance> 0.8 </penetrance>
+                <alleles> normal affected </alleles>
+            </haplotypes>
+            <haplotypes>
+                <penetrance> 0.8 </penetrance>
+                <alleles> affected normal </alleles>
+            </haplotypes>
+        </genotype-resolutions>
+    </phenotype>
+    <phenotype>
+        <name>broken</name>
+        <genotype-resolutions>
+            <trait>funny-nose</trait>
+            <haplotypes>
+                <penetrance> 1.0 </penetrance>
+                <alleles> affected affected </alleles>
+            </haplotypes>
+            <haplotypes>
+                <penetrance> 0.2 </penetrance>
+                <alleles> normal affected </alleles>
+            </haplotypes>
+            <haplotypes>
+                <penetrance> 0.2 </penetrance>
+                <alleles> affected normal </alleles>
+            </haplotypes>
+        </genotype-resolutions>
+    </phenotype>
+</pre>
+
+<P>If additional information is available about an individual that provides a
+clue to their genotype, that information can also be included as a part of
+the phenotype.  For example, an individual with a dominant phenotype who has
+a recessive parent or child must be a heterozygote, and should be classified
+separately from other individuals displaying a dominant phenotype.</P>
+
+
+<h4><a name="pheno-to-ind">Assigning phenotypes to individuals</a></h4>
+
+In order to identify which data samples correspond to an individual
+and what the phenotype of that individual is, use the <individual>
+tag.
+
+<pre>
+     <lamarc-converter-cmd>
+        ...
+        <individuals>
+            <individual>
+                <name>ind_a</name>
+                <sample><name>s0</name></sample>
+                <sample><name>s1</name></sample>
+                <has-phenotype>broken</has-phenotype>
+            </individual>
+            ...
+        </individuals>
+        ...
+     </lamarc-converter-cmd>
+</pre>
+
+<p>This indicates that the samples "s0" and "s1" belong
+to the same individual and that that individual displays the
+"broken" phenotype.  (If your individuals are named in your data
+input file but do not have sample names, as in migrate-format microsatellite
+data, you do not need to provide extra sample names here.)</P>
+
+<p>
+It is also possible to explicitly specify genotype resolutions for
+an individual instead of using the <tt><has-phenotype></tt>
+tag.
+See the section
+<a href="converter_cmd.html#phase">
+Specifying Relationships Between Individuals and Data Samples</a> in the
+<a href="converter_cmd.html">Converter Command File</a> documentation.
+</p>
+
+<h4>Identifying a trait with a genomic region</h4>
+
+<p>Finally, the trait mapper needs to know in which genomic region to search
+for your trait. This is an additional tag inside the "region" tag.
+
+<pre>
+     <lamarc-converter-cmd>
+         ...
+         <regions>
+             <region>
+                 <name>region1</name>
+                 ...
+                 <trait-location>
+                     <trait-name>funny-nose</trait-name>
+                 </trait-location>
+                 ...
+             </region>
+             ...
+         </regions>
+     </lamarc-converter-cmd>
+</pre>
+
+<P>Data from other regions may be included in your analysis, but will not
+affect the results from your mapping analysis one way or another.  If you
+have two unlinked regions, either of which might contain the trait allele,
+LAMARC will be able to tell you the most likely place within each region
+for the trait to map, but will not be able to provide any
+information about whether one region fits the data better than the
+other.</P>
+
+
+<h3><A NAME="move">Moving to Lamarc</h3>
+
+<P>At this point, you are done creating a converter input file, and you can
+run the converter and output a LAMARC input file (<a
+HREF="trait_mapping/lamarc-trait-input.html">lamarc-trait-input.xml</a> [actual xml is <a HREF="trait_mapping/lamarc-trait-input.xml">here]</a>). 
+Run the converter in batch mode using this command file with:</P> 
+
+<pre>
+lam_conv -b -c traitCmd.xml
+</pre>
+
+<P>You should also be able to further modify your run within the converter
+by leaving off the '-b' to run in interactive mode.  Once you're done,
+select 'Write Lamarc File' from the 'File' menu.</P>
+
+<P>Then, start up lamarc, using your newly-minted file as the input.</P>
+
+
+<h3><A NAME="setup">Setting up parameters</h3>
+
+<P>The settings for mapping are under the Analysis menu ('A', from the main
+menu), then 'A' again for the 'Trait Data analysis' menu.  The next menu
+shows a list of all the traits you are mapping; the settings for each can be
+changed independently.  Select the trait for which you want to change the 
+settings.</P>
+
+<P>There are two things you can do from this menu.  The first is to restrict
+the range of possible sites for your trait.  For some analyses, you might
+have sequence data in the same general region as the area where you've
+mapped your trait, but you know that the trait alleles are not there.  In
+this case, you can Add ('A') sites to, or Remove ('R') sites from,
+consideration as possible locations within the genomic region for the trait you're mapping. 
+The numbering scheme used here is the region-wide numbering, as defined by
+the 'map-position' numbers both in the converter and in the lamarc input
+file.  Also note that by default, there is no 'site 0' in this scheme:  the
+position to the left of site 1 is defined to be site -1.  (This can be
+changed with the lamarc input file XML tag <tt><A
+HREF="xmlinput.html#options">convert-output-to-eliminate-zero</a></tt>.)</P>
+
+<P>The second option here is to select the type of mapping analysis you wish to
+perform, a 'floating' ('F') or a 'jumping' ('J') analysis.</P>
+
+<P>The 'floating' analysis (the default) will not affect the search through
+tree-space at all.  Instead, as trees are collected, each is analyzed, and
+the likelihood that the observed trait data could have been produced at each site is
+calculated and stored.  This can be a somewhat time-consuming process, but
+the results are more robust.  The final result is a complete analysis, for
+every tree collected in the final chain, of the relative likelihoods of each
+site in the genomic region under analysis.</P>
+
+<P>In the 'jumping' analysis, the trait alleles are actually placed at a
+particular site.  A new arranger is turned on that moves the trait
+alleles from site to site based on the relative likelihoods of the data
+being created at each site.  This arranger is a 'Gibbs' arranger, in that it
+calculates the likelihood at each site, and then chooses one according to
+their relative probabilities.  In this way, it always accepts its new
+choice, without regard to where it used to be.  The results of this analysis
+are an average of where the trait alleles were placed during the run.</P>
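+
+<P>A minimal sketch of that Gibbs-style choice (illustrative only, not
+LAMARC's internal arranger; the likelihood values are placeholders): the new
+location is drawn in proportion to the per-site data likelihoods, so the
+move is always accepted, regardless of where the trait was before.</P>
+
+<pre>
+import random
+
+# Sketch: choose a new trait location with probability proportional to
+# the data likelihood at each candidate site (Gibbs-style update).
+def gibbs_choose_site(site_likelihoods):
+    total = sum(site_likelihoods)
+    draw = random.uniform(0.0, total)
+    running = 0.0
+    for site, like in enumerate(site_likelihoods, start=1):
+        running += like
+        if draw <= running:
+            return site        # accepted regardless of the previous location
+    return len(site_likelihoods)
+
+print(gibbs_choose_site([0.001, 0.004, 0.002]))   # made-up likelihoods
+</pre>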
+
+<P>Based on our preliminary analyses, we recommend the 'floating' analysis for
+most studies.  This analysis is more thorough, and while it takes somewhat
+longer while it is running, it only needs to run during the final
+chain, so on balance, the time difference is relatively small.  In addition,
+because the trait data are never incorporated into tree acceptance or
+rejection, they cannot influence the tree and 'lock' it into place by
+settling into a fairly reasonable position and then forcing the tree to fit
+the data.  With known data, this is exactly what you want, but in this case,
+we want to know where on the sequence the allele resides, without unduly
+biasing the results.</P>
+
+<P>Still, the 'jumping' analysis has its uses.  Since its arranger is on at all
+times, it provides updates on the current state of the mapper at the end of
+each chain.  And while normally we believe that not incorporating trait
+data into tree acceptance and rejection is good for mapping, there may be
+some cases (perhaps with sparse markers) where using the data from the trait
+may lead to finding trees with overall better likelihood at the correct
+position, with a corresponding lower likelihood at incorrect positions.  In
+this case, the range of sites where the trait might be located would narrow
+(a good thing, when mapping).</P>
+
+<P>When performing a Jumping analysis, two new rearrangers are created, and the
+relative amount of time spent on each can be set from the Rearrangers menu
+('S', then 'R' from the main menu.)  The basic arranger here is the Trait
+Location rearranger ('L'), which accomplishes the 'jumping' part of the
+analysis by moving the trait alleles to various positions in the sequence. 
+The other arranger you might find here is the Trait haplotypes rearranger
+('M', for no good reason), which will rearrange the unknown haplotypes (if
+any) of your trait data.  This rearranger will take the various
+genotype resolutions you defined for your phenotypes and individuals, and
+swap among them in proportion to their relative likelihoods.  If there are a
+lot of individuals in your data with unknown haplotypes, this can be another
+reason to use the 'jumping' analysis, since this analysis searches among
+possible haplotype resolutions, while the 'floating' analysis sums over all
+possible combinations of haplotype resolutions for each analyzed tree, which
+can be very slow.</P>
+
+
+<h3><A NAME="analyze">Analyzing your results</h3>
+
+<P>Running LAMARC in interactive mode (i.e. non-batch mode) will result in
+output to the screen that looks something like this:</P>
+
+<pre>
+15:44:03  Most likely site(s) for funny-nose:  919:923.  Relative data likelihood = 0.0025891
+          The top 5% of all sites in this region:  919:936, 946:947
+          The top 50% of all sites in this region:  678:709, 776:803, 811:953
+          The top 95% of all sites in this region:  1:212, 214:215, 217:220, 641:953
+          You have a total of 531 sites in your 95% range.
+</pre>
+
+
+<P>These <A HREF="trait_mapping/outfile.txt">results</a> were for a mock
+data set for the 'funny-nose' trait, in a region 1000 base pairs long.  The
+top scorers were sites 919:923, although each site in this range had only
+about a 0.26% chance of actually being the correct one.  The last line informs us
+that 531 sites are included at 95% confidence, so about half the sequence
+was excluded.  The true site for this trait happened to be site 905, which
+was included in the top 50% of all sites.  This was the result of a default
+run, which is on the short side; it may be the case that we'd get better
+results from spending more time on a longer run of LAMARC.</P>
+
+<P>Note that these results are disjoint--the most likely sites are not
+guaranteed to be next to one another.  This can be due to both random chance
+and shared heritage.  Random chance can produce two different historical
+pathways which are both about equally good explanations of the trait. 
+Shared genetic heritage can sometimes mean that distant segments of DNA
+might share a crucial bit of genetic history, while the intervening segment
+experienced a widely divergent history.</P>
+
+<P>More detailed results are available in the mapping output file for each
+trait.   For our example, the default name for this file is
+<a HREF="trait_mapping/mapfile_funny-nose.txt">mapfile_funny-nose.txt</a>.
+There, you will find a list like this:</P>
+
+<pre>
+Site    Data likelihood
+1   0.00090735
+2   0.00090871
+3   0.00090877
+4   0.00091575
+5   0.00091603
+6   0.00091603
+7   0.00091603
+8   0.00091756
+[...]
+</pre>
+
+<P>This column of numbers should add up to 1.0, and indicates the
+chance that your trait maps to each site (so, for the above, a 0.090735%
+chance of being at site 1, a 0.090871% chance for site 2, and so on.)  The
+list can be imported into a graphing utility or spreadsheet program to
+better visualize your results.</p>
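+
+<P>If you prefer a script to a spreadsheet, a short sketch along the
+following lines (it assumes the two-column layout shown above with a single
+header line, and a hypothetical file name) will check that the column sums
+to one and list the most likely sites:</P>
+
+<pre>
+# Sketch: read a LAMARC mapping file (site, data likelihood) and report
+# the highest-probability sites.  Assumes the layout shown above.
+def read_mapfile(path):
+    sites = []
+    with open(path) as handle:
+        next(handle)                       # skip the header line
+        for line in handle:
+            fields = line.split()
+            if len(fields) == 2:
+                sites.append((int(fields[0]), float(fields[1])))
+    return sites
+
+sites = read_mapfile("mapfile_funny-nose.txt")
+print("column total (should be close to 1.0):", sum(p for _, p in sites))
+top = sorted(sites, key=lambda pair: pair[1], reverse=True)[:5]
+print("five most likely sites:", top)
+</pre>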
+
+<P>(<A HREF="parallel.html">Previous</A> | <A HREF="index.html">Contents</A> | 
+<A HREF="compiling.html">Next</A>)</P>
+
+<!--
+//$Id: mapping.html,v 1.22 2011/06/23 21:00:36 jmcgill Exp $
+-->
+</BODY>
+</HTML>
+
+
diff --git a/doc/html/menu.html b/doc/html/menu.html
new file mode 100644
index 0000000..a27fd20
--- /dev/null
+++ b/doc/html/menu.html
@@ -0,0 +1,1179 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using
+genetic data using a maximum likelihood approach with Metropolis-Hastings
+Monte Carlo Markov chain importance sampling">  
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo,
+Metropolis-Hastings, population, parameters, migration rate, population
+size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Menu</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF"> <!-- coalescent, coalescence, Markov chain Monte
+Carlo simulation, migration rate, effective population size, recombination
+rate, maximum likelihood -->
+
+
+<P>(<A HREF="xmlinput.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="regions.html">Next</A>)</P>
+<H2>The Interactive LAMARC menu system</H2>
+<H3>Introduction</H3>
+<p>
+LAMARC's user interface is fairly awkward to use. This is because LAMARC is mainly a batch program and this interface reflects the way LAMARC does things internally, not the way users think about things. LAMARC is designed to run off XML input. What this interface does is read in and appropriately decorate the output of the <A HREF="converter.html">File Conversion Utilities</A> and allow users to tweak existing XML (often the output of this interface) to do different analyses than it was [...]
+</p>
+<p>This interface reflects how LAMARC is organized internally, which is not necessarily obvious. We recommend that you take a tour through all the menus before you do anything to get a sense of where things are. For example, "Migration" is found under "Analysis", which may seem odd until you realize that if "Migration" is on, the "Analysis" of the data changes. There are a lot of other examples of seemingly normal terms meaning slightly different t [...]
+</p>
+
+<H3><A NAME="conventions">General Conventions</A></H3>
+<p>
+LAMARC has essentially a command line interface, which is fairly uncommon in the modern era. So here are some general conventions that will help you understand it:
+<UL>
+<LI>
+When the menu redisplays, the old screen just scrolls up, so make sure you stay at the bottom of the window you are working in or it will get confusing.
+</LI>
+<LI>
+Each line you can interact with has a single character at the left side (for example <b>J</b>), some text to explain what that line is about, and the current value on the right side. In order to change the value on that line, enter that character <b>J</b> at the bottom of the screen. 
+</LI>
+<LI>
+If there are multiple similar lines that can be edited individually, for example rows of a Migration Matrix, they will have numbers in the left column rather than letters.
+</LI>
+<LI>
+Case does not matter. You can enter <b>J</b> or <b>j</b> in the above example and get the same effect.
+</LI>
+<LI>
+Booleans (Yes/No or True/False items) are toggled by entering the character listed at the left. So if an item with an <b>A</b> on the left is "Yes" and you want "No", enter <b>A</b>, the screen will redisplay and <b>A</b> will now be "No".
+</LI>
+<LI>
+Things can come and go in menus in logical, but not necessarily obvious ways. For example, in Forces, if you only have one population, Migration will not appear, because it cannot happen. This can get a bit confusing because when you change something on one screen it can cause something on another screen to appear or disappear. Until you get used to your analysis, it's wise to review everything using the "Overview" pages when you think you are ready to start a run, just to make sure noth [...]
+</LI>
+</UL>
+</p>
+
+<H3>Start Up</H3>
+<p>When you start up LAMARC the first thing you will be asked is what your output directory is. You will then be asked for your input file. This may seem a bit backwards, since usually you would want to read data in before putting it out, but it reflects the internal workings of LAMARC. If you don't specify an input file, LAMARC will follow the <A HREF="http://evolution.genetics.washington.edu/phylip/">Phylip</A> conventions and look in the output directory for a file called "infile".
+</p>
+<p>
+The data in the input file defines the kinds of analyses which are possible.  If you don't see the kind of analysis you wish to do listed on the "Analysis" menu, you will need to modify your input file so that kind of analysis is possible. For example, if you wish to study migration, you need at least two populations. If "Migration" is not an option in "Analysis", you only have one population defined in your input file. You will need to fix that, either usi [...]
+</p> 
+<p> Once the data have been located and processed (which may take several seconds), the first screen you see upon starting LAMARC is the top level menu: </p>
+<p><img src="images/LamarcMainScreen.png" alt="LAMARC main screen"/></p>
+
+
+<P> The menu may appear in a different form depending on your computer
+system, but the basic ideas are always the same. You can now review and set values in the following areas:
+
+<UL>
+<LI><A HREF="menu.html#data">Data options</A></LI>
+<LI><A HREF="menu.html#analysis">Analysis methods</A></LI>
+<LI><A HREF="menu.html#search">Search Strategy menu</A></LI>
+<LI><A HREF="menu.html#io">Input and Output related tasks</A></LI>
+<LI><A HREF="menu.html#current">Overview of current settings</A></LI>
+</UL>
+
+<P>On all LAMARC menus, the bottom line will give two options: 
+<UL>
+<LI><b>Run</b> the program ('.')</LI>
+<LI><b>Quit</b> ('q')</LI>
+</UL>
+</P>
+<P> If you are viewing a
+sub-menu, you will also have the option to: 
+<UL>
+<LI><b>Go Up</b> to a previous menu('<return>')</LI>
+</UL>
+</P>
+<P>If you have made any changes to the initial
+setup within a submenu, there will be the:
+<UL>
+<LI><b>Undo</b> option ('-') which will undo your last change</LI>
+</UL>
+<P> If you have performed any Undo operations, there will be the:
+<UL>
+<LI><b>Redo</b> option ('+') which will redo your last change</LI>
+</UL>
+<P> Any time that you can create a new valid LAMARC input file based on the current menu
+settings, there will be the:
+<UL>
+<LI><b>Create</b> option ('>').</LI>
+</UL>
+</P>
+
+<P> <B>Warning 1:</B>  LAMARC's search defaults are fairly small.  This
+allows a first-time user to get visible results right away, but for serious
+publishable analysis you will almost surely want to increase some of the
+settings in the <A HREF="menu.html#search">"Search Strategy menu"</A>.</P>
+
+<P> <B>Warning 2:</B>  Once you have selected "Run" you will have no further
+interaction with the program; you'll have to kill and restart it in order
+to change its behavior.  However, Lamarc does save a modified version
+of its input file, updated with any changes made via the menu, into
+file "menusettings_infile.xml" when you exit the menu.  If you want to re-run LAMARC
+starting with the same data and menu options as you last selected, choose
+"menusettings_infile.xml" as your input file when restarting LAMARC.  </P> 
+
+
+<hr>
+<H3><A NAME="data">Data options</A></H3>
+<p><img src="images/LamarcDataScreen.png" alt="LAMARC data screen"/></p>
+
+<P> This menu allows you to define what your data is and how you want to model it.</P>
+
+<P> The first two items (<b>C</b> and <b>S</b>) define the source of the random number seed used to start the analysis. Normally the seed is set
+from the system clock, so this option is set to "<b>Yes</b>" by default. To toggle it off and use an explicit seed, type <b>C</b>. 
+</P>
+
+<P> A very few systems lack a system clock; on those you will need to set
+this value by hand (either here or in the input file).</P>
+
+<P>The explicit random seed is used if you wish to do exactly the same analysis
+twice. You can hand-set the seed by entering <b>S</b>. You will be queried for the number to be used.
+LAMARC will then find the closest integer of the form 4N+1 to the number
+you entered. 
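+
+<P>For example (a sketch of the rule just described, not LAMARC's own code,
+and the tie-breaking direction is a guess), entering 1000 would give
+1001:</P>
+
+<pre>
+# Sketch of the "closest integer of the form 4N+1" rule described above.
+def closest_4n_plus_1(n):
+    lower = ((n - 1) // 4) * 4 + 1    # largest 4N+1 value that is not above n
+    upper = lower + 4                 # next 4N+1 value after that
+    # Ties are broken toward the lower value here; LAMARC may differ.
+    return lower if n - lower <= upper - n else upper
+
+print(closest_4n_plus_1(1000))   # 1001
+print(closest_4n_plus_1(997))    # 997 (already of the form 4N+1)
+</pre>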
+
+<P> The <b>E</b> option (Effective population size menu) will only appear if you have data from multiple regions. It provides a way to combine
+data collected from different regions of the genome that have unique
+effective population sizes.  For example, data
+from nuclear chromosomes of a diploid organism reflect an effective
+population size four times larger than
+data from the mitochondrion of the same organism.  Data from sex
+chromosomes also have unique effective population sizes--the relative
+effective population sizes of a non-sex chromosome, an X
+chromosome, and a Y chromosome are in the ratio 4:3:1.  Selecting <b>E</b> takes you to a sub-menu
+where you can select a particular genomic region and set its effective population
+size.</P>
+
+<P> The next set of menus allows you to modify the data-analysis model for each segment of your data. You can either modify the model for each segment
+individually (<b>1</b>, ...), or you can modify a default model for the different types of
+data LAMARC found in your infile.  If you have DNA or RNA or SNP data, <b>N</b> allows you to edit the default data model for all Nucleotide data. If you
+have microsatellite data, <b>M</b> allows you to edit that data's default
+model. If you have K-Allele data, <b>K</b> allows you to edit that
+data's default model.  To assign the appropriate default data model to all
+segments, select <b>D</b>. 
+</P>
+
+<P> For nucleotide data, you can either choose the Felsenstein '84 model
+(F84) or the General Time Reversible model (GTR).  Microsatellite data may
+take the Brownian, Stepwise, K-Allele, or Mixed K-Allele/Stepwise models
+(MixedKS).  K-Allele data may only take the K-Allele model.</P>
+
+<H4>Options common to all <b>data model</b> submenus</H4>
+
+<P> Several menu options are common to all evolutionary models; for
+conciseness these are described here.</P>
+
+<P> If you are editing the data model for a particular segment (and
+not for a default), the first line displays the type of data found in that
+segment, and you are given the option (<b>D</b>) of using the appropriate
+default data model for that segment.  The <b>M</b> option (Data Model) allows you
+to cycle through the possible data models appropriate for that data type.
+</P>
+
+<P> The next two menu lines (<b>C</b> and <b>A</b>) describe the current state of the
+categories model of variation in mutation rates among sites.  LAMARC uses
+the Hidden Markov Model of Felsenstein and Churchill (1996).  In this model,
+you must determine how many rate categories you wish to assume, then
+provide the relative mutation rates for each category and the probability
+that a site will fall into that category.  However, you do not need to
+specify which category each site actually falls into.  The program will sum
+over all possibilities.</P>
+
+<P> If you choose to have multiple categories, select <b>C</b> (Number of
+Categories), which will take you to a sub-menu.  Here, you can change the
+number of categories with the <b>N</b> option, then select particular
+rate/probability pairs to change their values on a further sub-menu.  For
+example, if you wish to model two categories with 80% of sites evolving at
+a base rate and the remaining 20% evolving ten times faster, you would set
+the number of categories to 2, then set one rate/probability pair to 1 and
+.8, and the second rate/probability pair to 10 and .2.</P>
+
+<P> Internally, the program will normalize the rates so that the mean rate
+across categories, weighted by the category probabilities, is 1.0. </P>
+
+<P> In data modeled with categories of mutation rates, the "mu" value (a
+component of various forces such as theta and M) is the weighted average of
+the individual mutation rates.  In the above example, if you determine that
+mu is 0.028, you can solve the equation:</P>
+
+<P><center>0.028 = (0.8 * 1x) + (0.2 * 10x)<br>
+x = 0.028 / 2.8 = 0.01<br>
+10x = 0.1</center></P>
+
+<P>and thus determine that the two individual mutation rates are 0.01 and 0.1.</P>
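+
+<P>The same arithmetic in a small sketch (illustrative only), using the
+two-category example above:</P>
+
+<pre>
+# Sketch: recover per-category mutation rates from mu, using the
+# weighted-mean normalization described above.
+relative_rates = [1.0, 10.0]
+probabilities  = [0.8, 0.2]
+mu = 0.028
+
+weighted_mean = sum(r * p for r, p in zip(relative_rates, probabilities))   # 2.8
+actual_rates = [mu * r / weighted_mean for r in relative_rates]
+print(actual_rates)    # [0.01, 0.1] (up to floating-point rounding)
+</pre>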
+
+<P> The program will slow down approximately in proportion to the number of
+rate categories.  We do not recommend using more than five as the gain in
+accuracy is unlikely to be worth the loss in speed.  Do not use two
+categories with the same rate:  this accomplishes nothing and slows the
+program down.</P>
+
+<P> A category rate of zero is legal and can be useful to represent
+invariant sites; however, at least one rate must be non-zero.</P>
+
+<P> If you wish to use the popular gamma distribution to guide your rates,
+use another program to calculate a set of categories that will approximate
+a gamma distribution, then enter the corresponding rates and probabilities
+manually into LAMARC.  There is currently no provision to 
+infer gamma distributed rate variation within a single segment. For
+gamma distributed mutation rate variation across independent regions,
+see the <a href="#gamma">gamma parameter</a>
+of the <a href="#analysis">analysis menu.</a></P>
+
+<P> The <b>A</b> (Auto-Correlation) option provides an autocorrelation 
+coefficient which controls the tendency of rates to "clump".  The
+coefficient can be interpreted as the average length of a run of sites with
+the same rate.  If you believe there is no clumping (each site has an
+independent rate), set this coefficient to 1.  If you believe that, for
+example, groups of about 100 sites tend to have the same rate, set it to
+100.</P>
+
+<P> While auto-correlation values may be set for any model, it is likely to
+make sense biologically only in the case of contiguous DNA or RNA data. It
+is not sensible to use it for widely separated SNPs or microsatellites.</P>
+
+<P> After other model-specific options, the <b>R</b> (Relative mutation rate)
+option provides a coefficient which controls the comparison of mutation
+rates (mu) between segments and/or data types.  If, for example, you have
+both microsatellite data and nuclear chromosome data in your file, and you
+know that changes accrue in your microsatellite data ten times faster than
+changes accrue in the DNA, you can use this option to set the relative mu
+rate for the microsat segment(s) to be 10, and the relative mu rate for the
+DNA segment(s) to be 1.  Overall estimates of parameters with mu in them (like
+Theta) will be reported relative to the DNA values.  If you want overall
+estimates reported relative to the microsat values, you can set the
+microsat mu rate to 1 and the DNA mu rate to 0.1.
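+
+<P>A small sketch of what that reference choice does to reported values
+(the population size and mutation rates below are invented for
+illustration): Theta is reported using the mutation rate of whichever data
+type has relative rate 1.</P>
+
+<pre>
+# Sketch: how the choice of reference data type rescales reported Theta.
+# All values below are assumptions for illustration only.
+Ne = 250000              # hypothetical effective population size
+mu_dna = 1e-8            # assumed DNA mutation rate per site per generation
+mu_msat = 10 * mu_dna    # microsatellite rate assumed ten times faster
+
+theta_dna_reference  = 4 * Ne * mu_dna    # DNA as reference (rate 1):  0.01
+theta_msat_reference = 4 * Ne * mu_msat   # microsats as reference:     0.1
+print(theta_dna_reference, theta_msat_reference)
+</pre>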
+
+<H4>Model-specific menus:  nucleotide data</H4>
+
+<H5>F84 model </H5>
+
+<P> The Felsenstein '84 (F84) model is a fairly general nucleotide
+evolutionary model, able to accommodate unequal nucleotide frequencies and 
+unequal rates of transition and transversion. It has the flexibility to
+mimic simpler models such as Kimura's (set all nucleotide frequencies to
+0.25) and Jukes and Cantor's (set all nucleotide frequencies to 0.25 and
+the transition/transversion ratio to 0.500001).</P>
+
+<P> The <b>T</b> option (TT Ratio) allows you to set the ratio between transitions
+(A/G, C/T) and transversions (all other mutations). If bases mutated
+completely at random this ratio would be 0.5 (1:2).  If you want a
+random-mutation model (corresponding to the model of Jukes and Cantor)
+LAMARC will use 0.500001 instead of 0.5, due to a limitation of the
+algorithm used in LAMARC that would otherwise divide by zero.</P>
+
+<P> Programs such as <A HREF="http://paup.csit.fsu.edu/">PAUP*</A> can be
+used to estimate the transition/transversion ratio from your data.  In
+practice it probably does not need to be very exact.</P>
+
+<P> The <b>B</b> option (Base Frequencies) will take you to a submenu where you
+can either tell LAMARC to calculate the base frequencies directly from the
+data (the <b>F</b> option), or enter the values for the relative base frequencies
+yourself. Unless your sequences are very short, it is probably best to
+calculate these frequencies from the data.  If a particular nucleotide does
+not exist in your data, you may set its frequency to a very small non-zero
+value (0.00001 is probably low enough).</P>
+
+<H5> General Time-Reversible (GTR) model </H5>
+
+<P> The GTR model is the most general tractable model for nucleotide 
+data.  It allows both unequal nucleotide frequencies and unequal rates for
+each pair of nucleotides.  The most practical way to use GTR in LAMARC is
+to estimate its rates with another program, such as <A
+HREF="http://paup.csit.fsu.edu/">PAUP*</A>.  LAMARC does not have any
+facility to estimate the GTR rates itself, but requires them to be
+provided.</P>
+
+<P> It is wasteful to use GTR when a simpler model is adequate, since it
+runs rather slowly.  PAUP* with 
+<a href="http://darwin.uvigo.es/software/modeltest.html">MODELTEST</a> 
+can be used to assess the adequacy of simpler models. </P>
+
+<P> The <b>G</b> option (GTR rates) requests input of the six base-specific
+mutational rates.  These are symmetrical rates before consideration of
+nucleotide frequencies, and can be obtained from PAUP*.  PAUP* may provide
+only the first 5 rates, in which case the [GT] rate is always 1.0.</P>
+
+<P> The <b>B</b> option (Base Frequencies) allows you to set the base frequencies
+of the four nucleotides, in the order A, C, G, and T.  
+The "Base frequencies computed from data" option is not
+given here, since the third-party program you use to determine GTR rates
+will also give you base frequencies, and you should use those.</P>
+
+<H5><A NAME="data-uncertainty">Modeling data uncertainty in F84 and GTR models</A></H5>
+
+<P>Lamarc runs on nucleotide data can now accommodate modeling of data uncertainty.
+This is option <b>P</b> in both the F84 and GTR models.
+The per-base error rate gives the rate at which each single instance of a
+nucleotide should be assumed to have been miscalled. A value of 0 indicates
+that all bases were sequenced correctly. A value of 0.001 indicates that one in one
+thousand is incorrect.
+The default value is 0.
+This feature is in beta test as of December, 2009.
+</P>
+
+
+<H4>Model-specific menus:  microsatellite data</H4>
+
+<P> Apart from the choice of which model to use, the only choices for
+the microsatellite models, except for the MixedKS model,
+are those common to all models: handling of rate differences among
+markers, and normalization.  These are discussed above.  It is not
+meaningful or useful to ask for autocorrelation when analyzing only a
+single microsatellite marker per segment.  </P>
+
+<H5> Stepwise model </H5>
+
+<P> The stepwise microsatellite model assumes that microsatellite mutations
+are always single-step changes, so that the larger observed differences
+have been built up via successive single-step mutations.</P>
+
+<H5> Brownian-motion model </H5>
+
+<P>  The Brownian-motion microsatellite model is an approximation of the
+stepwise model.  Rather than a discrete model of single mutational steps,
+we use a continuous Brownian-motion function and then truncate it to the
+nearest step.  This is much faster than the full stepwise model and returns
+comparable results on fairly polymorphic data, but is less accurate on
+nearly invariant data. </P>
+
+<H5> K-Allele model </H5>
+
+<P> This model assumes that only the alleles detected in the data exist,
+and that  mutation from any such allele to any other is equally likely. 
+The Jukes-Cantor DNA model, for example, is a K-Allele model for k=4. </P>
+
+<H5> Mixed K-Allele/Stepwise model </H5>
+
+<P> The Mixed K-Allele/Stepwise model (MixedKS) considers both
+mutation to adjacent states (like the Stepwise model) and mutation
+to arbitrary states (like the K-Allele model).  The relative
+frequency of the two is expressed by the proportion constant percent_stepwise,
+available as menu option <b>L</b>.  It indicates the proportion
+of changes which are stepwise, so that percent_stepwise=0 is K-Allele and
+percent_stepwise=1 is Stepwise.  An initial value can be
+set here, and either used throughout the run, or optimized at
+the end of every chain if the Optimize (<b>O</b>) option is set.  The
+program finds the value of percent_stepwise that maximizes the likelihood
+of the data on the final genealogy of each chain, using a
+bisection approach.</P>
+
+<H4>Model-specific menus:  K-Allele data</H4>
+
+<P> The single model available for K-Allele data is the K-Allele model. 
+"K-allele data" is defined as any genetic data collected as discrete units,
+such as electrophoretic data or phenotypic data.  As for microsatellite
+data, the K-allele model assumes equally likely one-step mutations from any
+state to any other state.</P>
+
+
+<hr>
+<H3><A NAME="analysis"> Analysis </A></H3>
+<p><img src="images/LamarcAnalysisScreen.png" alt="LAMARC analysis screen"/></p>
+
+<P> The Analysis option leads to a submenu that will allow you to specify
+the evolutionary forces you're going to infer, as well as the starting
+values, constraints, and profiling options for each force's parameters. 
+More or fewer options will appear here depending on your data. If there is
+more than one population, you will have an <b>M</b> option for estimating Migration parameters. Similarly, if you have more than one region in your data, you can turn on or
+off estimation of varying mutational rates over regions (gamma), and if you
+have trait data, you can set up your mapping analysis.</P>
+
+<P> Each force currently in effect is marked as Enabled, and forces not in
+effect are marked as Disabled. If you wish to add or remove a force, or to
+change the parameters associated with a force, enter that force's
+submenu.</P>
+
+<P> One point to bear in mind is that
+for nucleotide data the mutation rate mu is always expressed as mutation
+per site, not mutation per locus as in many studies.  You may need to do a
+conversion in order to compare your results with those from other
+studies.</P>
+
+<P> Each force is explained below, and following that is a description of
+the various options available on the submenus:  constraints, profiling, and
+Bayesian priors.  For more information on evolutionary forces, consult the
+<A HREF="forces.html"> forces </A> documentation.</P>
+
+<H4> <A NAME="theta">Theta (Effective Population Size): the "Coalescence"
+force </A></H4>
+
+<P> Coalescence is obligatory on all data sets, so there is no provision
+for turning it off.</P>
+
+<P> The Theta submenu allows you to customize estimation of Theta, the
+number of heritable units in the population times the neutral mutation rate
+times two.  This is 4N<sub>e</sub>mu for ordinary diploid data, 
+N<sub>e</sub>mu for mitochondrial data, and so forth. </P>
+
+<P> Starting values of Theta cannot be less than or equal to zero.  They 
+should not be tiny (less than 0.0001), because the program will take a long 
+time to move away from a tiny starting value and explore larger values.</P>
+
+<P> This program provides Watterson and FST estimates for use as starting
+values.  It should never be quoted as a correct calculation of
+Watterson or FST, because if it finds the result unsatisfactory as a
+starting value, it will substitute a default.</P>
+
+<P> The <b>G</b> option allows you to hand-set all of the Thetas to the same initial 
+value.  The <b>W</b> option allows you to set all of them to the Watterson value. 
+(This will cause re-computation of the Watterson value, and can take
+several seconds with a large data set.)  The <b>F</b> option allows you to set all
+of them to the FST value.  You can then fine-tune by using the numbered
+options to hand-set the starting Thetas of individual populations.  The FST
+option is only available if there is more than one population.</P>
+
+<H4> <A NAME="growth">Growth parameters:  the "Growth" force  </A></H4>
+
+<P> This submenu allows you to turn on and off estimation of population
+growth rates, and to set starting parameters.  </P>
+
+<P> If there is a single population in your data, Lamarc will estimate a
+single growth rate for it.  If there are multiple populations, Lamarc will
+estimate one independent growth rate per population.</P>
+
+<P> If we label growth as <i>g</i>, then the relationship between Theta 
+at a time <i>t</i> > 0 in the past and Theta at the present day (<i>t</i> = 0) 
+is:</P>
+
+    <center>Theta<sub><i>t</i></sub> = Theta<sub>present day</sub> e<sup>-<i>gt</i></sup></center>
+
+<p>This means that a positive value of <i>g</i>
+represents a growing population, and a negative value, a shrinking one. </P>
+
+<P> Time is measured in units of mutations (i.e., 1 <i>t</i> is the average
+number of generations it takes one site to accumulate one mutation), and
+<i>g</i> is measured in the inverse units of time.  If mu is known, multiply
+a number of generations by mu to get units of <i>t</i>, or conversely, divide
+<i>t</i> by mu to get a number of generations.</P>
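+
+<P>A hedged worked example (the mutation rate and parameter values below are
+invented, not defaults or recommendations) combining the growth relationship
+and the time scaling described above:</P>
+
+<pre>
+import math
+
+# Sketch: interpreting g and t.  All values below are invented.
+mu = 1e-8      # assumed neutral mutation rate per site per generation
+g  = 300.0     # growth parameter, in units of 1/t
+t  = 0.001     # a time in the past, in mutational units
+
+generations_ago = t / mu              # 0.001 / 1e-8 = 100,000 generations
+theta_ratio = math.exp(-g * t)        # Theta_t / Theta_present_day
+print(generations_ago, theta_ratio)   # 100000.0  and about 0.74
+</pre>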
+
+<P> Starting parameter input for growth is similar to that for Theta, 
+except that no quick pairwise calculators are available; you will have to 
+either accept default values or enter values of your own.  Avoid highly
+negative values (less than -10) as these have some risk of producing
+infinitely long trees which must then be rejected.</P>
+
+<H4> <A NAME="migration">Migration parameters and model:  the "Migration"
+force </A></H4>
+
+<P> This submenu allows you to customize estimation of the migration rates
+among your populations.  The rates are reported as <i>M</i> = <i>m</i>/mu,
+where <i>m</i> is the immigration rate per generation and mu is the neutral
+mutation rate per site per generation.  Note that many other programs
+compute 4<i>N<sub>e</sub>m</i> instead; be sure to convert units before
+making comparisons with such results.</P>
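+
+<P>One way to make that conversion (a sketch under the definitions given
+above, <i>M</i> = <i>m</i>/mu and Theta = 4<i>N<sub>e</sub></i>mu; the
+numbers are examples only): multiplying the <i>M</i> into a population by
+that population's Theta gives 4<i>N<sub>e</sub>m</i>.</P>
+
+<pre>
+# Sketch: convert LAMARC's M = m/mu into the 4*Ne*m scaling used by some
+# other programs.  Since Theta = 4*Ne*mu, the product M * Theta = 4*Ne*m
+# for the receiving population.  Example values only.
+M_into_pop1 = 113.7      # immigration rate into population 1, scaled by mu
+theta_pop1  = 0.0034     # Theta estimate for population 1
+
+four_Ne_m = M_into_pop1 * theta_pop1
+print(four_Ne_m)         # about 0.39
+</pre>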
+
+<P> You do not have the option to turn migration on and off; if there is
+only one population migration must be off, and if there is more than one
+population then migration must be on.  (Otherwise there is no way for the
+genealogy to connect to a common ancestor.) </P>
+
+<P> The main choice for migration is how to set the starting values for 
+the migration parameters.  You can use an <i>F<sub>ST</sub></i>-based
+estimator or hand-set the values, either hand-setting all to the same
+value, or setting each one individually. </P>
+
+<P> The <i>F<sub>ST</sub></i> estimator does not always return a sensible 
+result (for example, if there is more within-population than
+between-population variability), and in those cases we substitute an
+arbitrary default value. If you see strange <i>F<sub>ST</sub></i> 
+estimates you may wish to hand-set those values.  Please do not quote
+LAMARC as a source of reliable  <i>F<sub>ST</sub></i> estimates, since we
+do not indicate which have been replaced by defaults.</P>
+
+<P> The final menu entry sets the maximum number of migrations allowed in a
+genealogy.  An otherwise normal run may occasionally propose a genealogy
+with a huge number of migrations.  This could exhaust computer memory; in
+any case it would slow the analysis down horribly. Therefore, we provide a
+way to limit the maximum number of migrations.  This limit should be set
+high enough that it disallows only a small proportion of genealogies, or
+it will produce a downward bias in the estimate of <i>M</i>.</P>
+
+<P> If you find that you are sometimes running out of memory late in a
+program run that involves migration, try setting this limit a bit lower. 
+If you find, on examining your runtime reports, that a huge number of
+genealogies are being dropped due to excessive events, set it a bit
+higher.  (The "runtime reports" are the messages displayed on the screen 
+while the Markov chains are evolving; a copy of these messages is provided 
+at the end of each output file.)  You may also want to try lower starting 
+values if many genealogies are dropped in the early chains.</P>
+
+<H4> <A NAME="recombination">Recombination parameter:  the "Recombination"
+force </A></H4>
+
+<P> This submenu allows you to customize estimation of the recombination
+rate parameter <i>r</i>, defined as <i>C</i>/mu where <i>C</i> is the 
+recombination rate per
+site per generation and mu is the neutral mutation rate per site per
+generation.  We do not currently allow segment-specific or
+population-specific recombination rates; only one value of <i>r</i> will be
+estimated.</P>
+
+<P> The first menu line allows you to turn recombination estimation on and
+off.  Estimating recombination slows the program down a great deal, but if
+recombination is actually occurring in your data, allowing inference of
+recombination will not only tell you about recombination, but may improve
+inference of all other parameters.</P>
+
+<P> You cannot estimate recombination rate if there is only one site, and
+in practice you cannot estimate it unless there is some variation in your
+data--at least two variable sites.  Your estimate will be very poor unless
+there are many variable sites.</P>
+
+<P> The <b>S</b> option allows you to set a starting value of <i>r</i>. No
+pre-calculated value is available, so your choices are to set it yourself
+or accept an arbitrary default.</P>
+
+<P> Starting values of <i>r</i> should be greater than zero.  If you do not
+want to infer recombination, turn the recombination force off completely
+instead.  If you believe that <i>r</i> is zero, but wish to infer it to
+test this belief, start with a small non-zero value such as 0.01. It is
+unwise to set the starting value of <i>r</i> greater than 1.0, because the
+program will probably bog down under huge numbers of recombinations as a
+result. A rate of 1 would indicate that recombination is as frequent as
+mutation, and this state of affairs cannot generally be distinguished from
+complete lack of linkage.</P>
+
+<P> The <b>M</b> option sets the maximum number of recombinations allowed
+in a genealogy.  An otherwise normal run may occasionally propose a
+genealogy with a huge number of recombinations.  This could exhaust
+computer memory; in any case it would slow the analysis down horribly.
+Therefore, we provide a way to limit the maximum number of recombinations. 
+This limit should be set high enough that it disallows only a small
+proportion of genealogies, or it will produce a downward bias in the
+estimate of <i>r</i>.</P>
+
+<P> If you find that you are sometimes running out of memory late in a
+program run that involves recombination, try setting this limit a bit
+lower.  If you find, on examining your runtime reports, that many
+genealogies are being dropped due to excessive events, set it a bit
+higher.  (You may also want to try lower starting values if many
+genealogies are dropped in the early chains.)</P>
+
+<H4> <A NAME="gamma">Gamma parameter: allowing the background mutation rate
+to vary over regions</A></H4>
+
+<P> If you suspect that the mutation rate varies between your genomic
+regions, but do not know the specifics of how exactly it varies, you can
+turn on estimation of this force to allow for gamma-distributed rate
+variation.  The shape parameter of the gamma ('alpha') can be estimated, or
+you can set it to a value you believe to be reasonable.  While the gamma
+distribution is a convenient way to allow for different kinds of variation,
+it is unlikely that the true variation is drawn from an actual gamma
+distribution, so the important thing here is mainly that you allow mutation
+rates to vary, not necessarily which particular value is estimated for the shape
+parameter.  For more information, see the section, <A
+HREF="gamma.html">"Combining data with different mutation rates"</a>.</P>
+
+<H4> <A NAME="trait"> Trait Data analysis</A></H4>
+
+<P> This section provides the capability to map the location of a
+measured trait within one of your genomic regions.  You will need to have
+provided trait data in your input file.  For more details about trait
+mapping, see the <A HREF="mapping.html">mapping documentation</A>.</P>
+
+<P> The Trait Data analysis menu will show you all of the traits for which
+you have provided data and which LAMARC can attempt to map, with an indication
+of which genomic region each is in.  To modify the model for a trait,
+choose that trait by number; you will be taken to a specific menu
+for mapping that trait.  It will start by reminding you of the trait
+name, and then show the type of analysis you are using.  The two
+kinds of mapping analysis are discussed in more detail in <A HREF="mapping.html">
+"Trait Mapping."</A>  As a brief reminder, a "float" analysis 
+collects the trees without use of the trait data, and then finds the
+best fit of trait data to trees after the fact.  A "jump" analysis
+assigns a trial location to the trait and then allows it to be reconsidered
+as the run progresses.</P>
+
+<P> In this menu, you can also restrict the range of sites which you
+are considering as potential locations for your trait.  For example,
+you may be quite sure that the trait is not located in the first 100
+sites, but you still wish to analyze them because they may add useful
+information about Theta and recombination rate.  You can remove the
+range 1-100 from consideration using the <b>R</b> option.  You can also
+add sites to consideration using the <b>A</b> option:  for example, if you
+know that your trait is either in sites 1-100 or 512-750, one approach
+is to remove all sites, then add these two ranges specifically.</P>
+
+<P> If you have turned on a "jump" analysis, the necessary rearrangement
+strategies will appear in the Strategy:Rearrangement submenu.  You may
+wish to inspect them and make sure that you like the proportion of
+effort used for each strategy.</P>
+
+<H4> <A NAME="divergence">Divergence parameters and model:  the "Divergence"
+force </A></H4>
+<p>
+The only value that can be edited in Divergence is the
+Epoch Boundary Time (scaled by the mutation rate) of each Divergence event.  You can
+set starting values, priors, etc. for these as usual.  However, if you wish
+to redefine the ancestor/descendant relationships, you need to either return to the <A HREF="converter.html">file converter</A> or edit the input file XML.
+</p>
+
+<H4> <A NAME="divergencemigration">Divergence-Migration parameters and model:  the "Divergence-Migration"
+force </A></H4>
+<p>
+This force is presented in exactly the same way as a regular migration matrix, except that there
+are also entries for ancestor populations. Note that even though you can potentially enter
+migration rates between invalid population pairs (for example, an ancestor and one of its children),
+these will be ignored by the calculation.  (Be warned that if you manage to create an XML input
+file with values for migration rates between invalid pairs, for example by hand-editing your
+XML, the program will produce confused and meaningless results.)  Also note that pairwise calculators
+for starting values are not available for cases with divergence.
+</p>
+
+<H4> Options common to all Force submenus </H4>
+
+<P> Three options are available on all Force submenus (except
+for Trait mapping), and they all behave
+in the same fashion.  Constraints allow you to hold parameters constant or
+constrain them to be equal to other parameters.  Profiling affects the
+reported support intervals around the estimates (and can affect how long it
+takes the program to run).  If you are running LAMARC in <A
+HREF="bayes.html">Bayesian mode</A>, the Bayesian Priors menu allows you to
+set the priors for the parameters.</P>
+
+<h5><A NAME="constraints">Constraints</A></h5>
+
+<P> Beginning with version 2.0 of LAMARC, we allow constraints on all
+parameters.  All parameters can be unconstrained (the default, and
+equivalent to pre-2.0 runs), constant, or grouped.  Grouped parameters all
+have the same starting value, and can either be constrained to be identical
+(and vary together as a unit), or be set constant.  In addition, we allow
+some parameters to be set 'invalid', which means 'constant, equal to zero,
+and not reported on in the output'.</P>
+
+<P> Say, for example, you know that the recombination rate for your
+population is 0.03.  In this case, you can set the recombination starting
+value to 0.03 and set the recombination constraint to 'constant'.  Or say
+you have a set of populations from islands in a river; you may know that
+all downstream migration rates will be equal to each other, and that all
+upstream migration rates will be equal to each other.  In this case, you
+can put all the downstream rates together in one group, all the upstream
+rates together in another group, and set each group's constraint to
+'identical'.  If you have another set of populations and know that
+migration is impossible between two of them, you could set those migration
+rates to be 'invalid' (or simply set them constant and set the starting
+values to zero).</P>
+
+<P> In general, a LAMARC run with constraints will be somewhat faster than
+one without, since fewer parameters have to be estimated.  This can be
+particularly helpful for complex systems where you already have some
+information, and are interested in estimating just a few parameters. </P>
+
+<P> Select 'C' to go to the Constraints sub-menu for any force.  To change
+the constraint on a particular parameter, enter that parameter's menu index
+number.  To group parameters, pick one of them and enter 'A' (Add a
+parameter), then the number of your parameter, then 'N' (for a new group). 
+Then pick another parameter that should be grouped with the first one,
+enter 'A' again, the number of your new parameter, then the group number of
+the group you just created (probably 1).  Groups are created with the
+automatic constraint of 'identical', meaning that they will vary, but be
+co-estimated.  You may also set a group 'constant', which has the same
+effect as setting the individual parameters constant, but guarantees they
+will all have the same value.</P>
+
+
+
+<h5><A NAME="profiling">Profiling</A></h5>
+
+<P> Each force's Profiling option (<b>P</b>) takes you to a sub-menu where you
+can adjust how LAMARC gives you feedback about the support intervals for
+your data.  Setting the various profiling options is important in a
+likelihood run, since it is the only way to obtain confidence limits of
+your estimates, and can drastically affect total program time.  It is less
+important in a Bayesian run, since the produced curvefiles have the same
+information, and profiling simply reports key points from those curves (and
+it takes essentially no time to calculate, as a result).  Profiling is
+automatically turned on in a Bayesian run, and it doesn't make a lot of
+difference which type of profiling is used in that instance, so most of the
+discussion below will be most applicable to a likelihood run.</P>
+
+<P> For each force, you can turn profiling on (<b>A</b>) or off (<b>X</b>) for all
+parameters of a given force, though you cannot profile any parameter you
+set constant.  The next option (<b>P</b>) toggles between percentile and
+fixed-point profiling.  Selecting this option will cause all parameters
+with some sort of profiling on to become either percentile or fixed.  You
+can turn on and off profiling for individual parameters by selecting that
+parameter's menu index number.</P>
+
+<P> Both kinds of profiling try to give some information about the shape of
+the likelihood (or posterior, in a Bayesian analysis) curve, including both
+how accurate the estimates are, and how tightly correlated estimates for
+the different parameters are.</P>
+
+<P> Fixed-point profiling considers each parameter in turn.  For a variety
+of values of that parameter (five times the MLE, ten times the MLE, etc.)
+it computes the optimal values for all other parameters, and the log
+likelihood value at that point.  This gives some indication of the shape of
+the curve, but the fixed points are not always very informative.  In the
+case of growth, some values are set to multiples of the MLE, while others
+are set to some generally-useful values unrelated to the MLE, such as
+0. </P>
+
+<P> Percentile profiling, instead of using fixed points, gives you the
+value below which your parameter is X% likely to fall.  A value
+for theta of 0.00323 at the .05 percentile means that the true value of
+theta has only a 5% chance of being less than or equal to 0.00323, and a
+95% chance of being greater than 0.00323.  In a likelihood run, LAMARC will
+then calculate the best values for all other parameters with the first
+parameter fixed at that percentile.  If the above example were calculated in
+a run estimating recombination and growth rates, LAMARC would
+calculate the best values for recombination and growth given a theta of
+0.00323.  This gives a much nicer picture of the shape of the curve, but it
+is very slow.  If you use percentile profiling for likelihood, expect 
+it to take a significant proportion of your total run time.</P>
+
+<P> The accuracy of the percentile profiling in a likelihood run is
+dependent on the likelihood surface for your data fitting a Gaussian in
+every dimension.  When the surface is Gaussian, the percentiles for each
+parameter can be determined by finding the values which result in
+particular log likelihoods.  For example, the .05 percentile is
+mathematically defined to be the point at which the log likelihood is
+exactly 1.35 less than the log likelihood for the MLE, while the .25
+percentile can be found at the point where the log likelihood is exactly
+0.23 less.  LAMARC finds the parameter values that yield these log
+likelihoods, but cannot determine whether the actual likelihood surface for your
+data has a truly Gaussian shape.  Hence, percentile profiling cannot be
+used to report absolute confidence intervals, but it is at least a step in
+that direction.</P>
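+
+<P> (For readers who want to see where the 1.35 and 0.23 offsets come from:
+under the Gaussian assumption, each offset is half of a chi-square quantile
+with one degree of freedom.  The sketch below is not part of LAMARC; it
+assumes you have Python with SciPy available, and simply reproduces those
+two numbers.)</P>
+
+<pre>
+# Sketch: log-likelihood drops corresponding to two-sided profile
+# percentiles, assuming the profile likelihood is Gaussian so that
+# 2 * (lnL_max - lnL) follows a chi-square distribution with 1 df.
+from scipy.stats import chi2
+
+for pct in (0.05, 0.25):
+    two_sided = 1.0 - 2.0 * pct          # the .05/.95 pair spans 90%, etc.
+    drop = chi2.ppf(two_sided, df=1) / 2.0
+    print(pct, round(drop, 2))           # 0.05 -> 1.35, 0.25 -> 0.23
+</pre>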
+
+<P> You may want to turn off profiling completely, or use fixed-point
+profiling, for exploratory runs.  Percentile profiling gives the best
+information for final runs, but may be too slow.  If you save your data to
+a summary file (see <A HREF="menu.html#summary">summary files</a>), you can
+go back and change the profiling options in a subsequent run, which then
+won't have to recalculate the Markov chains; it will merely calculate new
+profiles.</P>
+
+<P> If you turn off profiling, you will lose both the profile tables
+themselves and the approximate confidence intervals in the MLE tables.  A
+good compromise is to set the <A HREF="menu.html#output">output file
+verbosity</a> to "concise", which causes LAMARC to only calculate two
+profiles (for percentile profiling, the 95% support intervals) instead of
+about 10.</P>
+
+
+<h5>Bayesian Priors</h5>
+
+<P> If you are running LAMARC in Bayesian mode (see the <A
+HREF="menu.html#search">Search Strategy</a> menu), each force will have the
+option to edit the Bayesian priors (<b>B</b>) for that force.  A more detailed
+discussion of a Bayesian run can be found <A
+HREF="menu.html#search">below</a>, as well as in the <A
+HREF="bayes.html">tutorial</a>.
+
+    
+<hr>
+<H3><A NAME="search"> Search Strategy </A></H3>
+<p><img src="images/LamarcSearchScreen.png" alt="LAMARC search strategy screen"/></p>
+
+<P> This menu allows you to fine-tune your search strategy, to get
+the best results from LAMARC in the least amount of time.  Consider tuning these
+settings if you are not satisfied with the performance of your searches. 
+For advice on choosing the best settings here, see the article <A
+HREF="search.html">"Search Strategies."</A> </P>
+
+<P> The first option in the Search Strategy menu (<b>P</b>, 'Perform Bayesian or
+Likelihood analysis') toggles your setup between a likelihood run and a
+Bayesian run.  This choice can have a profound impact on the course of your
+run, though hopefully both have a reasonable chance of arriving at the
+truth at the end.  A likelihood run (the only option for versions of LAMARC
+earlier than 2.0) searches tree-space with a fixed set of 'driving values'
+per chain, and searches the resulting likelihood surface to find the best
+set of parameter estimates.  A Bayesian run searches tree-space at the same
+time as it searches parameter-space, then uses its parameter-space search as
+a Bayesian posterior to report the best values for individual parameters. 
+For more details about a Bayesian search with some comparisons to the
+likelihood search, see the <A HREF="bayes.html">Bayesian tutorial</a>.
+
+<h4><a NAME="priors">Bayesian priors menu</a></h4>
+<P> If you have elected to run a Bayesian search, you will get the option
+(<b>B</b>) to set the priors for the various forces in your data.  Selecting the
+option will take you to a sub-menu listing all active forces and a summary
+of the current priors for each force.  Once you select a particular force,
+you get the option to edit the default prior for that force (<b>D</b>), and a
+list of parameters, each of which may be selected to edit that parameter's
+prior.</P>
+
+<P> When editing the prior for a particular parameter, you may select
+whether you wish to use the default prior with the <b>D</b> option, re-setting the
+current prior to the default.  For all priors, you may then set three
+options:  the shape of the prior (<b>S</b>), which may be linear or (natural)
+logarithmic, and the upper (<b>U</b>) and lower (<b>L</b>) bounds of the prior.  There
+is currently no provision for other prior shapes.</P>
+
+<h4><a NAME="rearrangers">Rearrangers menu</a></h4>
+
+<P> Selecting <b>R</b> from the Search Strategy menu takes you to a sub-menu where
+you can set the relative frequencies of the various arrangers.  The main
+arranger in a LAMARC run is the Topology rearranger (<b>T</b>), which
+works by selecting and breaking a
+branch of its current tree, then re-simulating that branch to add it back
+to the tree.  It should almost always be set greater than the other tree
+rearrangers (the size and haplotype arrangers), and any decrease in its
+relative frequency probably requires a concomitant increase in chain
+length (see <A HREF="menu.html#chains">sampling strategy</a>, below).
+
+<P> A new arranger for version 2.0 is the Tree-Size rearranger (<b>S</b>).  This
+rearranger leaves the topology of the tree constant, but re-samples branch
+lengths root-wards below a selected branch (biased with a triangular
+distribution towards the root).  Our initial experiments with this
+rearranger indicate that it is helpful in a variety of situations, but
+particularly helpful for runs with growth and migration.  It should be used
+sparingly, however:  we've found that setting this rearranger's frequency to 1/5
+that of the topology rearranger is generally a good ratio.</p>
+
+<P> If your data appears to have phase-unknown sites,
+you will have the option to set the relative frequency of the Haplotype
+rearranger (<b>H</b>).  The haplotype rearranger considers new phase assignments
+for a pair of haplotypes.  Like the tree-size rearranger, setting this
+frequency to 1/5 that of the topology rearranger has been found to produce
+good results. </p>
+
+<P> If you have chosen to do a Bayesian run, you will have the option to
+set the relative frequency of the Bayesian rearranger (<b>B</b>).  This
+rearranger considers new parameter values on the current tree.  By default,
+this is set to the same frequency as the topology rearranger, and this
+seems to be adequate for runs with a small number of variable parameters. 
+This can be increased for runs with a larger number of parameters, but you
+probably don't want a relative frequency of more than 2-3 times that of the
+topology arranger--increase the <A HREF="menu.html#chains">length of your
+chains</a> instead.
+
+<P> If you are doing trait mapping using the "jump" strategy (in which
+the trait alleles are assigned a chromosomal location, and this location is
+reconsidered during the run) two additional rearrangers become available.
+The Trait haplotypes rearranger (<b>M</b>) allows reconsideration of 
+ambiguous trait haplotypes:  for example, it can switch between
+DD, Dd and dD as haplotype resolutions for someone showing a dominant
+phenotype.  The Trait Location rearranger (<b>L</b>) moves the trait
+position within the region.  We have little information about the
+best proportions of effort to put into these two rearrangers, but the
+Trait Location rearranger probably needs to get at least 20% effort
+to function well.  These arrangers are not needed in "float" mapping
+or in non-mapping runs and will not be shown.</P>
+
+<H4> <A NAME="chains">Sampling Strategy (chains and replicates)</a></H4>
+
+<P> This sub-menu allows you to adjust the time LAMARC spends sampling
+trees.  It can (and should) be adjusted to reflect whether you want an
+'exploratory' run vs. a 'production' run, how complicated your parameter
+model is, and whether you are performing a likelihood or Bayesian run. 
+Options germane to each of the above scenarios will be discussed in turn.
+
+<P> The first option (<b>R</b>) allows you to use replication--repeating the
+entire analysis of each genomic region a number of times, and consolidating the
+results.  This is more accurate than running LAMARC several times and
+attempting to fuse the results by hand, because LAMARC will compute profiles
+over the entire likelihood surface, including all replicates, rather than
+over each replicate individually.  It will, of course, increase the time taken by the current
+run in proportion to the number of replicates chosen (both the time spent
+generating chains and, typically, the time spent profiling the
+results).  The minimum number of replicates is one, for a single run-through
+of LAMARC.  A reasonable upper limit is 5 if your runs produce reasonably
+consistent results, though you may want to use a higher number to try to
+overcome more inconsistent runs.  Replication is useful for 'production'
+runs more than exploratory runs, and can help both likelihood and Bayesian
+searches.</P>
+
+<P> LAMARC's search is divided into two phases.  First, the program will
+run "initial" chains.  In a likelihood run it is useful to make 
+these relatively numerous and short as they
+serve mainly to get the driving values and starting genealogy into a
+reasonable range.  When all the initial chains are done, the program will
+run "final" chains.  These should generally be longer, and are used to
+polish the final estimate.  Exploratory runs can have both short initial
+and short (but somewhat longer) final chains, and production runs should have
+longer sets of both chains.  Because a likelihood run is highly dependent
+on the driving values, you will probably need several initial chains (10 is
+a reasonable number), and a few final chains (the default of 2 usually
+works). A Bayesian run is not at all dependent on the driving values, and
+while you might use several initial chains for an exploratory run just to
+see what's happening with the estimates, you should probably simply use
+zero or one initial chains and one quite-long final chain to obtain your
+production estimates.</P>
+
+<P> For both initial and final chains, there are four parameters to set. 
+"Number of chains" determines how many chains of that type will be run. 
+"Number of recorded genealogies" determines how many genealogies (in a
+likelihood run), while "Number of recorded parameter sets" determines how
+many sets of parameters (in a Bayesian run) will actually be used to make the
+parameter estimates.  "Interval between recorded items" determines how many
+rearrangements will be performed between samples.  Thus, if you ask for 100
+recorded items per chain, and set the interval between them to 20, the program will
+perform a total of 2000 rearrangements, sampling 100 times to make the
+parameter estimates.  The total number of samples will determine the length
+of your run, and can be shorter for exploratory runs but should be long
+enough to produce stable results for production runs.  In a Bayesian run,
+as mentioned, you will want one long chain for your production run.  If
+you are seeing spurious spikes in your curvefiles, you probably need to
+increase the sampling interval, too--because each rearrangement only
+changes a single parameter (and also takes time to rearrange trees),
+certain parameters can stay the same simply by neglect, and will end up
+being oversampled in the output.  Increasing the sampling interval can
+overcome this artifact.</P>
+
+<P> "Number of samples to discard" controls the burn-in phase before
+sampling begins.  LAMARC can be biased by its initial genealogy and initial
+parameter values, and discarding the first several samples can help to
+reduce this bias.  To continue with the example above, if you ask for 100
+samples, an interval of 20, and 100 samples to be discarded, the program
+will create a total of 2100 samples, throwing away the first 100 and
+sampling every 20th one thereafter.  In a likelihood run, you want the
+burn-in phase to be long enough to get away from your initial driving
+values, which will be longer the more complex your model is.  In a Bayesian
+run, you also want the burn-in phase to be long enough to get away from
+your initial set of values and the initial genealogy.  Again, this will
+need to be longer if you have a more complex model with lots of
+parameters.</P>
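+
+<P> (A minimal sketch of the bookkeeping in the two examples above, using
+the same hypothetical settings; this is just arithmetic, not LAMARC
+code.)</P>
+
+<pre>
+# Hypothetical settings matching the examples in the text above.
+samples  = 100   # "Number of recorded genealogies" / "parameter sets"
+interval = 20    # "Interval between recorded items"
+discard  = 100   # "Number of samples to discard" (burn-in)
+
+total_rearrangements = discard + samples * interval
+print(total_rearrangements)   # 2100: the first 100 are discarded, then
+                              # every 20th of the remaining 2000 is recorded
+</pre>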
+
+<H4> <A NAME="heating">Multiple simultaneous searches with heating</a></H4>
+
+<P> The last menu item on the Search Strategy menu allows you to help your
+search explore a wider sampling of trees by setting multiple
+"temperatures."  A search through possible trees at a higher temperature
+accepts proportionally less likely trees, in the hopes that future
+rearrangements will find a new, well-formed tree with a higher 
+likelihood.  This approach will often rescue a search that otherwise 
+becomes stuck in one group of trees and does not find other
+alternatives.</P>
+
+<P> (The reason that the word "temperature" is used here may be understood 
+by means of an analogy.  Imagine, on a snowy winter day, that there are 
+several snowmen on the lawn in front of a house, and you want to identify 
+the tallest one; you do not want to determine the exact height, you just 
+want to determine the tallest snowman.  One way of doing this would be to 
+raise the temperature so that all of the snowmen melt; you could then 
+identify the tallest snowman as the one that disappears last.  Using 
+multiple "heated" Markov chains simultaneously provides smoothed-out, 
+compressed views of the space of possible genealogy arrangements.)</P>
+
+<P> To set multiple temperatures, select the <b>M</b> option (Multiple
+simultaneous searches with heating) menu, then select <b>S</b> (Number of
+Simultaneous Searches) and enter the number of temperatures you want.  You
+will then get a list of new menu options, and be able to set the various
+temperatures. For best results, temperatures should progress in value
+pseudo-exponentially.  A typical series of temperatures might be "1 1.1 2 3
+8", but different data sets might have different optimal magnitudes, from
+"1 2 10 20 50" to "1 1.01 1.05 1.1 1.3".  Watching the Swap Rates between
+temperatures during the run is important for determining an optimal series
+here--rates should vary somewhere between 10 and 40 (the numbers given are
+percents).  Below about 5% you are getting little return for a huge
+increase in computation, and above 50% the two chains are so close to each
+other that they are unlikely to be exploring significantly distinct areas
+of parameter space (a process more efficiently handled by using <A
+HREF="menu.html#chains">replicates</A>). </P>
+
+<P> Should finding an optimal series of temperatures by hand become too
+difficult, or if the optimal series of temperatures varies during a run,
+LAMARC can be told to try to optimize the temperatures automatically, by
+switching from "static" to "adaptive" heating (the <b>A</b> option that appears
+if you have more than one temperature).  With static heating, the
+temperatures you specify will be used throughout the run.  With adaptive
+heating, the program will continually adjust the temperatures during the
+run to keep swapping rates between 10% and 40%.  We make no guarantees that
+adaptive heating will be superior to static heating, but it should at least
+drive the values to the right magnitudes, and keep them there during the
+course of the run.</P>
+
+<P> A second option that appears if you have multiple temperatures is the
+ability to set the swap interval for different temperatures (<b>I</b>).  The
+default is "1", which means LAMARC picks two adjacent temperatures and
+checks to see if the higher-temperature chain has a better likelihood than
+the lower-temperature chain after each rearrangement.  To check less
+frequently, set this value to a higher number (3 for every third
+rearrangement, etc.).  A higher value will speed up the program incrementally,
+but typically does not represent a significant gain.</P>
+
+<P> In general, a run will increase in length proportionally to the number
+of temperatures chosen, though the time spent profiling will be the same as
+without heating.</P>
+
+<hr>
+<H3><A NAME="io"> Input and Output related tasks </A></H3>
+<p><img src="images/LamarcIOScreen.png" alt="LAMARC I/O screen"/></p>
+
+<P> This menu controls most of the interactions between the program, the
+computer, and the user.  You can use it to modify the names and content of
+files LAMARC produces, as well as the information printed to the screen
+during a LAMARC run.</P>
+
+<h4> Verbosity of Progress Reports </h4>
+
+<P> This first option on the input and output menu controls the reports
+that appear on-screen as the program runs.  Type <b>V</b> to toggle among the
+four options.  NONE suppresses all output, and might be used when running
+LAMARC in the background, after you know what to expect. CONCISE only
+periodically reports about LAMARC's progress, noting that a new region
+has begun, or that profiling has started.  NORMAL adds some
+real-time feedback on the program's progress and additionally guesses at
+completion time for each program phase (the guesses are not always
+completely reliable, however).  VERBOSE provides the maximum amount of
+feedback, reporting additionally on some of the internal states of the
+program, including overflow/underflow warnings and the like.  If something
+has gone wrong with your LAMARC run, having this option set to VERBOSE is
+your best chance at a diagnosis.</P>
+
+<h4><A NAME="output">Output File Options</A></h4>
+
+<P> The next menu item sends you to a submenu where you can set various
+aspects of the final output file.  Select <b>O</b> to go to this menu.</P>
+
+<P> Selecting <b>N</b> allows you to set the name of the output report file. 
+Please be aware that if you specify an existing file, you will overwrite
+(and destroy) its contents.</P>
+
+<P> Selecting <b>V</b> allows you to toggle between the three levels of content
+for the output file. VERBOSE will give a very lengthy report with
+everything you might possibly want, including a copy of the input data (to
+check to make sure the data were read correctly and are aligned).  NORMAL
+will give a moderate report with less detail, and CONCISE will give an
+extremely bare-bones report with just the parameter estimates and minimal
+profiling.  We recommend trying VERBOSE first and going to NORMAL if you
+know you don't need the extra information.  CONCISE is mainly useful if
+your output file is going to be read by another computer program rather
+than a human being, or if speed is of the essence, since it speeds up
+profiling in a likelihood run by roughly a factor of five.</P>
+
+<P> The "Profile likelihood settings" option (<b>P</b>) leads to a new sub-menu
+that lists all forces and gives you an overview of how they are going to be
+profiled.  You can turn on (<b>A</b>) or off (<b>X</b>) profiling for all parameters
+from this menu, or set the type of profiling to percentile (<b>P</b>) or fixed
+(<b>F</b>).  The other menu options take you to the force-specific profiling
+menus discussed <A HREF="menu.html#profiling">above</a>. </P>
+
+<h4> <A NAME="menuinfile">Name of menu-modified version of input file</A></h4>
+
+<P> The "Name of menu-modified version of input file" option (<b>M</b>) allows
+you to change the name of the automatically-generated file which will be
+created by LAMARC when the menu is exited ("menusettings_infile.xml", by default).  This
+file contains all the information in the infile, but also contains any
+information that may have been set in the menu.  If you want to repeat
+your run with exactly the same options that you chose from the menu this
+time, you can rerun using this file as your input file.</P>
+
+<h4> <A NAME="summary">Writing and Reading Summary Files</A></h4>
+
+<P> The next two menu items on the "Input and Output Related Tasks" menu
+are used to enable or disable reading and writing of summary files. If
+summary file writing is enabled, LAMARC saves the information it calculates
+as it goes; enough information is saved to recover from a failed run, or to repeat
+the numerical analysis of a successful run.  If a run fails while
+generating chains, LAMARC will take the parameter estimates from the last
+completed chain, use them to generate trees in a new chain, then use those
+trees and the original estimates to start a new chain where it had crashed
+before.  In this scenario, LAMARC cannot produce numerically identical
+results to what it would have produced had the run finished, but should
+produce similar results for non-fragile data sets.  However, if profiling
+had begun in the failed run, the summary files do contain enough
+information to produce numerically identical results, at least to a certain
+degree of precision.</P>
+
+<P> To turn on summary file writing, select <b>W</b> from the menu, then <b>X</b> to
+toggle activation.  The name of the summary file may be changed with the
+<b>N</b> menu option.  This will produce a summary file as LAMARC runs.  To then
+read that summary file, turn on summary file reading the next time LAMARC
+is run (from the same data set!) with the <b>R</b> option from this menu, then
+<b>X</b> to toggle activation, and finally <b>N</b> to set the correct name of the
+file.  LAMARC will then begin either where the previous run stopped, or, if
+the previous run was successful, will start again from the Profiling
+stage.</P>
+
+<P> For particularly long runs on flaky systems, it may be necessary to
+both read and write summary files.  If both reading and writing are on,
+LAMARC will read from the first file, write that information to the second
+file, and then proceed.  If that run is then stopped, the new file may be
+used as input to start LAMARC further along its path than before.  If this
+option is chosen, be sure to have different names for the input summary file
+and the output summary file.</P>
+
+<P> If reading from a summary file, most of the options set when writing
+the summary file must remain the same, or unpredictable results may occur,
+including LAMARC mysteriously crashing or producing unreliable results. 
+However, since all profiling occurs after reading in the summary file,
+options related to that may be changed freely.  For example, in order to
+get preliminary results, you may run LAMARC with "fixed" profiling,
+"concise" output reports, and writing summary files on.  Afterwards, if
+more detail is needed about the statistical support of your estimates, 
+you may run LAMARC again, this time
+with summary file reading, "percentile" profiling, and "verbose" output
+files.</P>
+
+<H4><A NAME="tracer">Tracer output</A></H4>
+
+<P> LAMARC will automatically write files for the utility <A
+HREF="http://tree.bio.ed.ac.uk/software/tracer/">Tracer</a>
+written by Drummond and Rambaut (see the <A HREF="tracer.html">"Using
+Tracer with LAMARC"</A> documentation in this package).  
+LAMARC's Tracer output files are named [prefix]_[region]_[replicate].txt.
+You can turn on or off Tracer output and set the prefix here.</P>
+
+<H4><A NAME="newick">NEWICK tree output</A></H4>
+
+<P>If there is no migration or recombination, LAMARC can optionally
+write out the tree of highest data likelihood it encounters for each
+region, in Newick format.  Options in this menu control whether such
+a Newick file will be written, and what its prefix will be.  This
+option is not needed for normal use of the program, but it is sometimes
+interesting to see what the best tree was, and how it compares with
+the best tree found by phylogeny-inference programs.  (Sometimes,
+surprisingly, LAMARC is able to outdo normal inference programs.)</P>
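+
+<P> (If you would like to look at such a tree programmatically rather than
+by eye, something like the sketch below works.  It is not part of LAMARC;
+it assumes you have Python with Biopython installed, and the file name is
+only a hypothetical example of a LAMARC Newick output file.)</P>
+
+<pre>
+# Sketch: inspect a Newick-format tree using Biopython.
+# "best_tree_region1.txt" is a hypothetical file name; use your own output.
+from Bio import Phylo
+
+tree = Phylo.read("best_tree_region1.txt", "newick")
+print(tree.count_terminals())   # number of tips in the tree
+Phylo.draw_ascii(tree)          # quick text rendering of the topology
+</pre>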
+
+<h4> <A NAME="curvefiles">Bayesian curvefiles</A></h4>
+
+<P> A Bayesian run of LAMARC will produce additional output for each
+region/parameter combination that details the probability density curve for
+that parameter.  Each file can be read into a spreadsheet program (like
+Excel) to produce a graphic of your output.  If you decide you don't have
+enough disk space for these files, or don't want them for some other
+reason, you can turn off this feature by toggling the <b>U</b> option ('Write
+Bayesian results to individual files').  You can change the prefix given to
+all of these curvefiles with the <b>C</b> option ('Prefix to use for all
+Bayesian curvefiles').  Curvefile names are of the format
+[prefix]_[region]_[parameter].txt, where '[prefix]' is the option settable
+here, '[region]' is the region number, and '[parameter]' is the parameter
+in question.  More details about Bayesian curvefiles are available in the
+<A HREF="bayes.html#results">Bayesian tutorial</a>.
+
+<H4><A NAME="reclocfiles">Recombination location output</A></H4>
+
+<P> In runs modeling recombination, LAMARC can dump a file listing
+each recombination location in every sampled tree in the last final chain.
+A recombination between data position <tt>-13</tt> and <tt>-12</tt> is
+recorded as <tt>-13</tt>, one between <tt>340</tt> and <tt>341</tt> is
+recorded as <tt>340</tt>.
+You can read the file into <tt>R</tt> or another statistical computing
+tool, and plot a histogram to see where the recombinations are most
+often accepted. Keep in mind that there is a slight bias toward accepting
+recombinations near the ends of the input sequences, since there is less
+data available there to demonstrate that a recombination is unsupported.
+</p>
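+
+<P> (The paragraph above suggests <tt>R</tt>; an equivalent sketch in
+Python appears below.  It is not part of LAMARC; the file name is only a
+hypothetical example of the naming pattern described in the next paragraph,
+and it assumes one numeric location per whitespace-separated entry--check
+your own files first.)</P>
+
+<pre>
+# Sketch: histogram of recombination locations from a LAMARC reclocfile.
+# "reclocfile_1_1.txt" is a hypothetical file name; any non-numeric text
+# in the file is ignored.
+import matplotlib.pyplot as plt
+
+locations = []
+with open("reclocfile_1_1.txt") as f:
+    for token in f.read().split():
+        try:
+            locations.append(int(token))
+        except ValueError:
+            continue
+
+plt.hist(locations, bins=50)
+plt.xlabel("recombination location (site)")
+plt.ylabel("count in sampled trees")
+plt.show()
+</pre>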
+
+<p>
+These files are named [prefix]_[region]_[replicate].txt.
+You can turn this output on or off and set the prefix here. (The default
+prefix is 'reclocfile'.)
+This option is off by default, since these files can produce a large amount of output.</P>
+
+
+<hr>
+<H3><A NAME="current"> Show current settings </A></H3>
+<p><img src="images/LamarcOverviewScreen.png" alt="LAMARC overview screen"/></p>
+
+<P> This menu option provides reports on all current settings, so that you
+can see what you've done before starting the program.  You cannot change
+the settings here, but each display will indicate which menu should be used
+to change the displayed settings. </P>
+
+<P>(<A HREF="xmlinput.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="regions.html">Next</A>)</P>
+
+<!--
+//$Id: menu.html,v 1.51 2013/11/07 22:46:06 mkkuhner Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/messages.html b/doc/html/messages.html
new file mode 100644
index 0000000..822d798
--- /dev/null
+++ b/doc/html/messages.html
@@ -0,0 +1,1123 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2//EN">
+
+<HTML>
+<HEAD>
+	
+    <META HTTP-EQUIV="CONTENT-TYPE" CONTENT="text/html; charset=iso-8859-1">
+    <TITLE>LAMARC Documentation: Error and warning messages in LAMARC</TITLE>
+    <META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+    <META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+	
+</HEAD>
+<BODY BGCOLOR="#FFFFFF" TEXT="#000000">
+
+<P>(<A HREF="troubleshooting.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="limitations.html">Next</A>)</P>
+
+    <h2>Error and warning messages in LAMARC</h2>
+    <P> Almost everything LAMARC might tell you unexpectedly should be listed below, alphabetically, along with a further explanation of what might have happened to cause the message to be displayed.  Many are error messages, some are only warnings, and a few are just there to let you know what's going on.  Messages with a plus in the 'XML Errors' column can be produced when reading in a LAMARC input file.  Messages with a plus in the 'Menu' column can be produced when working in the men [...]
+<P>
+    <TABLE CELLSPACING=0 CELLPADDING=5 COLS=7 TOP BORDER=0>
+	<COLGROUP><COL WIDTH=25%><COL WIDTH=50%><COL WIDTH=5%><COL WIDTH=5%><COL WIDTH=5%><COL WIDTH=5%><COL WIDTH=5%></COLGROUP>
+	<TBODY>
+		<TR>
+			<TD ALIGN=LEFT><b>Message</b></TD>
+			<TD ALIGN=LEFT><b>Explanation</b></TD>
+			<TD ALIGN=CENTER><b>XML errors</b></TD>
+			<TD ALIGN=CENTER><b>Menu</b></TD>
+			<TD ALIGN=CENTER><b>Urgent</b></TD>
+			<TD ALIGN=CENTER><b>Normal</b></TD>
+			<TD ALIGN=CENTER><b>Informal</b></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>All chain temperatures must be positive.</TD>
+			<TD ALIGN=LEFT>The simultaneous searches are all given temperatures relative to one another, but the ratios must be positive.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>All parameters are fixed in value, which in a Bayesian run means no estimation is possible.  Please check your xml input file.</TD>
+			<TD ALIGN=LEFT>A Bayesian run needs parameters to vary, or else it's not going to do anything.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>All of the parameters are set constant or invalid, which means that this Bayesian run has nothing to do.  If you truly want no parameter to vary, set one parameter's prior to be very thin.</TD>
+			<TD ALIGN=LEFT>As above, a Bayesian run needs parameters to vary--this variation is the very thing that makes the parameter estimation possible.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>All rates for the GTR model must be greater than 0.</TD>
+			<TD ALIGN=LEFT>You may use a very small value for a GTR rate, but it still must be greater than zero.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+<!-- no simulated data released yet
+	<TR>
+			<TD ALIGN=LEFT>All simulated data was nigh invariant for the source segment, giving us nigh invariant data for the simulated segment [name] as well.</TD>
+			<TD ALIGN=LEFT>For the sort of simulation you're running, the source data is simulated, then a variable site is selected as a 'home' for the segment to be mapped.  However, when this source data was simulated, the mutation rate was low enough that there were no variable sites to select, so an invariant (or nigh-invariant) site was selected instead.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+-->
+        <TR>
+			<TD ALIGN=LEFT>Arranger timing cannot be < 0.0</TD>
+			<TD ALIGN=LEFT>To turn off an arranger, set this to zero.  To use, set to a positive number, relative to the other arrangers.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Base frequencies must be greater than zero</TD>
+			<TD ALIGN=LEFT>Even if a particular base does not show up in your data, it needs to have at least a small positive value for its frequency in order to get the math to work.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Cannot add a parameter to a new group because it is already in a group.</TD>
+			<TD ALIGN=LEFT>If you want to change which group a parameter is in, you must first remove it from its current group, and then add it to an existing or new group.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Cannot open or read file [filename] for summary file reading.  Check that the name is valid and that the permissions are correct, or simply turn off summary file reading for this run.</TD>
+			<TD ALIGN=LEFT>Summary file reading is for when a previous run of LAMARC produced a summary file, and you wish to pick up where that run left off (or repeat the analysis).  However, the filename you gave LAMARC cannot be found and/or read.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Couldn't find a place to wrap columns for output.  Just using the max instead.</TD>
+			<TD ALIGN=LEFT>This is a formatting issue for the output report.  You are unlikely to see it, but if you do, expect slight formatting oddnesses in the resulting output file.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Data error--zero populations?  There should be at least one.</TD>
+			<TD ALIGN=LEFT>You don't seem to have any data.  This is a significant problem for a data-analysis program like LAMARC.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Debug message:  [message]</TD>
+			<TD ALIGN=LEFT>If LAMARC is compiled in debug mode, you might see messages that begin this way that you wouldn't see otherwise.  You shouldn't run LAMARC on real data in debug mode because it will take probably twice as long.  Any messages you get here should be relatively self-explanatory, but are mainly meant for the programming team.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Duplicate locations in marker location list</TD>
+			<TD ALIGN=LEFT>You have told LAMARC that two different markers refer to the same site, which is impossible.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Error from the maximizer:  [text]</TD>
+			<TD ALIGN=LEFT>After collecting possible trees, LAMARC searches for the set of parameters that will result in the highest likelihood for that set of trees.  Occasionally, this search simply fails.  There are a variety of potential reasons for this failure, and the particular message will give you a clue about why, but in general, it means LAMARC isn't doing its job and finding the best set of parameters for your data.  The particular message is of less importance than where in the run [...]
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Error:  empty vector for input that required [numbers/integers].</TD>
+			<TD ALIGN=LEFT>In the LAMARC input file is a line with nothing but whitespace between an opening and closing tag where we needed some values.  If you just want the defaults, you can often simply delete the tag entirely, and change the values if necessary in the menu.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Error:  empty vector for the [name of tag] tag.</TD>
+			<TD ALIGN=LEFT>In the LAMARC input file is a line with nothing but whitespace between the opening and closing tag in question.  If you just want the defaults, you can often simply delete the tag entirely, and change the values if necessary in the menu.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Error:  group index value greater than the number of groups.</TD>
+			<TD ALIGN=LEFT>This is an internal error that is supposedly unreachable.  It indicates that something is referring to a group that doesn't exist.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Error:  group index value less than zero.</TD>
+			<TD ALIGN=LEFT>This is an internal error that is supposedly unreachable.  It indicates that something is referring to a group that doesn't exist.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Error:  The force [force type] was turned off in this run of LAMARC, but was on in the LAMARC run that created this summary file.  Re-run LAMARC with [force type] turned on to accurately continue the old run.</TD>
+			<TD ALIGN=LEFT>When running LAMARC from a summary file, you must reproduce the settings that were used to create that summary file exactly.  Look for the 'menusettings_infile.xml' from the initial run if you changed things in the menu.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Error:  The force [force type] was turned on in this run of LAMARC, but was not on in the LAMARC run that created this summary file.  Re-run LAMARC with [force type] turned off to accurately continue the old run.</TD>
+			<TD ALIGN=LEFT>When running LAMARC from a summary file, you must reproduce the settings that were used to create that summary file exactly.  Look for the 'menusettings_infile.xml' from the initial run if you changed things in the menu.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Expected a prefix but got "".</TD>
+			<TD ALIGN=LEFT>You have to define some actual text to use for this prefix for your filenames.  (This error typically occurs when you accidentally hit 'return' in the menu when setting the prefix.)</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Expected a real number, but got [text]</TD>
+			<TD ALIGN=LEFT>Your input contained letters (or nothing) when it wanted a number.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Expected an integer, but got [text]</TD>
+			<TD ALIGN=LEFT>Your input contained letters, punctuation, or nothing when it wanted an integer.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Expected the name of a file but got "".</TD>
+			<TD ALIGN=LEFT>You have to define an actual name for your file.  (This error typically occurs when you accidentally hit 'return' in the menu when setting the filename.)</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>File size exceeded for summary file writing--continuing, but without writing to the summary file any more.  Try the command 'unlimit filesize' if on a UNIX system.</TD>
+			<TD ALIGN=LEFT>Your summary file got too large for your system--we're continuing, but it's possible your disk space has been completely used up.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Finished integrating a probability density function.  The total area under the curve should be 1.0; it's actually [value].  The most likely cause of this is having too few data points; consider longer runtimes while collecting data.</TD>
+			<TD ALIGN=LEFT>This message is more important if you get it for your final chain than it would be if you got it for an intermediate chain.  It means that there was a parameter that probably only has 1-5 unique data points, so shouldn't be trusted.  Each probability density curve mentions at the beginning of the file how many unique data points were used in its creation--check that value in your output.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+<!-- no simulated data released yet
+		<TR>
+			<TD ALIGN=LEFT>Gave up trying to simulate non-invariant data for segment [name].</TD>
+			<TD ALIGN=LEFT>The mutation rate is probably too low--when simulating data, all sites being simulated ended up being nigh-identical to each other.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+-->
+		<TR>
+			<TD ALIGN=LEFT>GTR rates must be greater than zero</TD>
+			<TD ALIGN=LEFT>For the math to work, rates that are zero must be entered as a very small number instead.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Illegal force-type setting [text]</TD>
+			<TD ALIGN=LEFT>Valid forces are 'coalescence', 'migration', 'recombination' and 'growth'</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Illegal growth-type setting [text]</TD>
+			<TD ALIGN=LEFT>Valid growth-type settings are 'curve' and 'stick'</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Illegal method-type setting [text]</TD>
+			<TD ALIGN=LEFT>Valid methods (for determining parameter starting values) are 'User', 'FST' (some forces), and 'Watterson' (coalescence only).</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Illegal model-type setting [text]</TD>
+			<TD ALIGN=LEFT>Valid model settings are 'F84', 'GTR', 'Stepwise', 'Brownian', 'KAllele', and 'MixedKS'.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Illegal parameter constraint type [text]</TD>
+			<TD ALIGN=LEFT>Valid parameter constraints are 'unconstrained', 'constant', 'invalid', 'joint', and 'identical'.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Illegal prior type [text]</TD>
+			<TD ALIGN=LEFT>Valid prior types are 'logarithmic' or 'linear'.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Illegal profile type setting [text]</TD>
+			<TD ALIGN=LEFT>Valid profile types are 'percentile', 'fixed', and 'none'.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Illegal verbosity setting [text]</TD>
+			<TD ALIGN=LEFT>Valid verbosity settings are 'none', 'concise', 'normal', and 'verbose'.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>In function, [function name], file [filename] unexpectedly closed or could not be opened and is no longer valid.  Possible causes of this problem include, but are not limited to: 1) read permissions for this file are not or are no longer enabled.  2) this file was unexpectedly moved, renamed, or deleted.  Continuing, but without writing to the summary file any more.</TD>
+			<TD ALIGN=LEFT>This message is reachable from a variety of summary file writing functions within LAMARC.  If any of them are told by the OS that they cannot write to the file any more, you will see this message.  The problem is almost certainly with something that happened to your computer, as opposed to something that happened within the LAMARC program.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Inconsistent number of markers</TD>
+			<TD ALIGN=LEFT>Some of your individuals have different amounts of data (strings of DNA, linked microsats, etc.).  The converter will disallow this--try recreating your LAMARC input file using the converter.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Incorrect number of GTR rates:  expected 6, found [#]</TD>
+			<TD ALIGN=LEFT>The six GTR rates are (in order):  AC, AG, AT, CG, CT, and GT.  If the program you use normalizes so that one of these is 1, you must still include '1' on your list.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Internal error: defaults::geyeriters needs to be increased.</TD>
+			<TD ALIGN=LEFT>The internal default 'geyeriters' is supposed to be large enough to handle the majority of situations, but yours seems to be an outlier.  If you wish to recompile LAMARC with this value increased, you can find it in control/defaults.cpp.  Or you can write to us with a complaint at lamarc at u.washington.edu</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Invalid (negative) parameter index in checkIndexValue.</TD>
+			<TD ALIGN=LEFT>Somewhere within the '&lt;force&gt;' tag, you refer to a parameter with a negative index, which is impossible.  Check your group constraints.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Invalid (too large) parameter index in checkIndexValue.</TD>
+			<TD ALIGN=LEFT>Somewhere within the '&lt;force&gt;' tag, you refer to a parameter that doesn't exist, because there aren't that many parameters.  Check your group constraints.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Invalid parameter indexes in group constraints for force [force]:  The same parameter index is included in more than one group.</TD>
+			<TD ALIGN=LEFT>You have tried to include the same parameter in more than one group for constraining purposes.  This is illegal.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Invalid settings for force [forcetype].  Too many parameters are set invalid or have a start value of 0.0.</TD>
+			<TD ALIGN=LEFT>Depending on the force involved, even one parameter with an 'invalid' constraint or a starting value of 0.0 may be too many.  For a force like migration, if too many of the parameters are set to zero, you can make coalescence impossible.  Check your starting values and your constraints.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Maximization failed when re-calculating the estimates for this data set.  If that's what happened the last time, at least it's consistent.  The resulting estimates were obtained from the last successfully-maximized chain.</TD>
+			<TD ALIGN=LEFT>You can get this message if running LAMARC from a summary file when the attempted re-maximization of the final chain fails.  If maximization didn't fail in the original LAMARC run, something has gone wrong.  If it failed before, it means something is wrong with the trees themselves.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Maximization failure when calculating the best parameters for all replicates in region [name].  Using the mean values for all replicates instead.</TD>
+			<TD ALIGN=LEFT>LAMARC has failed to make any sense of your data, probably because your different regions all say completely different things.  The overall results presented in your output file will be the product of the results from the individual regions.  It's hard to say exactly what went wrong, but check your assumptions, and check to be sure the per-region estimates had reached stationarity--if not, try extending your search with (say) more initial chains.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Normalization cannot be set for Brownian Model</TD>
+			<TD ALIGN=LEFT>Normalization must be 'false' for a Brownian microsatellite model.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Number of sites in region must be > 0</TD>
+			<TD ALIGN=LEFT>You have defined a region with no space for any data in it.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Overflow error:  Attempted to compute [value or exp(value)] [*/-] exp([value]).  Returning [+/-very large value] = [+/-] EXP_OF_EXPMAX.  (Further overflow errors of this type will not be reported.)</TD>
+			<TD ALIGN=LEFT>Overflow errors are more serious than underflow errors, since what should be a ridiculously large number is replaced by a merely monstrously large number.  A few probably won't hurt, but you may want to check your assumptions to see if you can eliminate these types of errors.  This message is only reported once, but a tally is kept and reported in your output file.  Check that to make sure there weren't too many of them.  EXCEPTION:  When calculating theta over regions u [...]
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Please enter a valid group index or 'N'.</TD>
+			<TD ALIGN=LEFT>When adding a parameter to a constraint group, you have to use a pre-existing group, or type 'N' to create a new group.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Re-calculating best parameter values.  Differences may indicate a true maximum was not found, or that an older version of LAMARC was used to create the original summary file.</TD>
+			<TD ALIGN=LEFT>LAMARC now always re-calculates the best parameter values for the final chain in a region and/or replicate when reading from a summary file.  This ensures that subsequent profiling will be accurate and in line with the values now stored in memory, which might differ slightly from those of the original run due to rounding errors during translation.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Running denovo case--no rearrangement!</TD>
+			<TD ALIGN=LEFT>LAMARC was compiled with the DENOVO flag, which means it will not rearrange trees as it usually does, but instead, each new tree it proposes is unrelated to the previous tree, and generated without regard to the data.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Subtree of likelihood 0.0 found:  Turning on normalization and re-calculating.</TD>
+			<TD ALIGN=LEFT>If any subtree has a likelihood of zero, it means the whole tree will also have a likelihood of zero.  This is not very helpful when trying to compare the relative fit of two different trees.  Normalization can help avoid this, so that option is being turned on and LAMARC is continuing.  Normalization used to be a user-settable option, but nobody ever knew what to do with it, so nowadays we just turn it on when you need it.  And we just did.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>That number is not the index of a grouped parameter.</TD>
+			<TD ALIGN=LEFT>If you want to remove a parameter from a group, it must be in a group in the first place.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>That number is not the index of an ungrouped parameter.</TD>
+			<TD ALIGN=LEFT>When setting constraints in the menu, you must add ungrouped parameters to the grouped parameters--if you want to change what group a parameter is in, first remove it from its current group, then add it to the new one.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The [force name] force estimate [value] was accurately re-calculated from the summary file data.</TD>
+			<TD ALIGN=LEFT>The re-calculated value in this run of LAMARC matches the value stored in the summary file.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The autocorrelation must be one or greater.</TD>
+			<TD ALIGN=LEFT>The autocorrelation is the average length of a run of adjacent sites that share the same mutation rate.  If the mutation rate for every site is uncorrelated with the next, the autocorrelation is one.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The bayesian rearranger frequency must be greater than zero.</TD>
+			<TD ALIGN=LEFT>The rearrangers are all given relative values, but these values must be positive.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The data model for region [region], segment [segment] has multiple categories with the same rate, which serves no purpose but to slow the program down considerably.  Please change this before running the program.<BR>[Or:  The global data model for [data type] data, etc.]</TD>
+			<TD ALIGN=LEFT>The purpose of having multiple categories for your data models is so that LAMARC can accommodate the fact that some sites have different mutation rates than other sites.  (For example, the mutation rate differences between coding and non-coding DNA.)  Multiple categories with the same rate, however, do absolutely nothing at considerable computational cost.  So we're assuming you made some sort of mistake in your setup here--either you don't actually want or need multiple [...]
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+<!-- no simulated data released yet
+		<TR>
+			<TD ALIGN=LEFT>The do-nothing rearranger frequency must be positive.</TD>
+			<TD ALIGN=LEFT>The do-nothing rearranger does nothing, and is only on when you are simulating data.  If you want LAMARC to only have the true tree, use this arranger instead of the topology rearranger.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+-->
+		<TR>
+			<TD ALIGN=LEFT>The effective population size must be positive.</TD>
+			<TD ALIGN=LEFT>A population with no members cannot be said to really be a population, now, can it?</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>We assume that because you had no '0's for any map positions, your data follows the traditional biologist convention of not having a site 0, and placing site -1 next to site 1.  If this is incorrect, you must edit your LAMARC input file to set the '&lt;convert_output_to_eliminate_zeroes&gt;' tag to 'false'.</TD>
+			<TD ALIGN=LEFT>Mapping is done on the regional scale, which follows the 'map-position' tag values and includes or excludes 0 according to the convert_output_to_eliminate_zeroes tag in the input file.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The FST method is only available for data sets with more than one population.</TD>
+			<TD ALIGN=LEFT>The FST method relies on patterns of genetic differentiation between populations.  If you have only one population, it cannot estimate anything.  Use a different method to determine starting values.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The haplotype rearranger frequency must be positive.</TD>
+			<TD ALIGN=LEFT>You are allowed to turn off haplotype rearranging by setting this value to zero (which will treat your data as if it was completely resolved), but it must not be negative.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The trait location rearranger frequency must be greater than zero.</TD>
+			<TD ALIGN=LEFT>If the trait location rearranger is on at all, it is needed by the mapping analysis, and must be set to greater than zero.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The lower bound of the prior must be less than the upper bound of the prior.</TD>
+			<TD ALIGN=LEFT>A prior defines a range of values, and the lower bound must be the, uh, lower.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The lower bound of the prior must be less than the upper bound of the prior.  If you wish to hold this parameter constant, go to the constraints menu and set it constant there.</TD>
+			<TD ALIGN=LEFT>A prior defines a range of values, not a single point.  For fixed values for your parameters, do as suggested and constrain the parameter to be constant.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The maximizer failed for position [number], without a corresponding catastrophically low likelihood.  Re-setting to -[large number] and continuing.</TD>
+			<TD ALIGN=LEFT>You really shouldn't get this message; if the maximizer fails, the corresponding likelihood should always be the reported value.  However, checking it here and continuing should fix any problems that might have occurred.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The maximum number of categories of mutation rates you may set is 10, though the effectiveness of adding more categories drops considerably after about 5.</TD>
+			<TD ALIGN=LEFT>At much more than 5 categories, any signal you may have in your data is almost completely washed out.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The maximum number of events must be greater than zero.</TD>
+			<TD ALIGN=LEFT>If you are going to estimate migration, you must allow some migration events,
+and similarly for other forces.  You must always allow some coalescence events, as without them there is no tree.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The MLE for a parameter was zero, which means no profiling can be performed below that value.  Normal profiles should result from values greater than the parameter.</TD>
+			<TD ALIGN=LEFT>For all parameters but growth, negative values are disallowed and/or nonsensical.  If the MLE is zero, you have a one-tailed situation instead of the more normal two-tailed situation, so you must change your interpretation of the confidence intervals accordingly.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The model [model] may not be used for data type [type] in region [number], segment [number].</TD>
+			<TD ALIGN=LEFT>DNA data may use the F84 and GTR models.  Microsat data may use the Brownian, Stepwise, K-allele, and Mixed K/S models.  K-allele data may only use the K-allele model.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The multi-step:single-step ratio for a MixedKS model must be between zero and one.</TD>
+			<TD ALIGN=LEFT>This ratio determines the relative frequencies of multi-step mutation (under the K-Allele model) vs. single-step mutations (under the Stepwise model).  Zero would mean all multi-step, one would mean all single-step (though those values are disallowed--use the K-Allele or Stepwise models directly if that's what you really want), while one-half would model an equal number of each type.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The newly-calculated maximum likelihood for region [name] ([value]) is different from the maximum likelihood as read in from the summary file ([value]).  (A difference of [value], which exceeds the recommended minimum difference of [value].)</TD>
+			<TD ALIGN=LEFT>The summary file has a somewhat different maximum likelihood value.  This is probably attributable to precision issues and can be safely ignored, but should be kept in mind as a possible cause of later further divergence.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The number of [force] parameters for the read in values differ in number from the calculated values, which probably means the read-in data differs from that used to write out the data.  It is strongly recommended that you exit the program now and correct this.</TD>
+			<TD ALIGN=LEFT>The settings you're using for this run of LAMARC are different from the settings you used when you created the summary file you're trying to use.  This will probably end up failing horribly.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The number of replicates must be one or greater.</TD>
+			<TD ALIGN=LEFT>A value of 'one' for replicate number means not 'one repeated analysis' but 'one analysis in total'.  Two replicates means two analyses.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The parameter [name] maximized at [value] but is being constrained to [less extreme value].</TD>
+			<TD ALIGN=LEFT>When maximizing, parameters are allowed to be any value.  However, some values for some parameters do not make good driving values for the next chain.  For this reason, we constrain particularly extreme estimates to be less extreme for the purposes of being a driving value for the next chain.  You should not get this message for the final chain in any region or replicate.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The relative mutation rate must be greater than zero.</TD>
+			<TD ALIGN=LEFT>You can't have a negative mutation rate.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The second value in the range must be larger than the first.</TD>
+			<TD ALIGN=LEFT>When defining a range, the lower value should always come first.  A range of a single point is defined by a single number.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The size rearranger frequency must be positive.</TD>
+			<TD ALIGN=LEFT>The size rearranger is an optional rearranger, and will not do anything if set to zero.  It must be positive, however.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The summary file was [complete/incomplete] [...]</TD>
+			<TD ALIGN=LEFT>This message tells you how far into the run the summary file went before LAMARC had to start calculating new information.  If this message doesn't match how far you thought you got when creating the summary file, go back and check the file again.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The swapping interval must be one or greater.</TD>
+			<TD ALIGN=LEFT>The swapping interval is the number of steps LAMARC takes before checking to see if a higher-temperature chain has found a better tree than a lower-temperature chain.  If you want it to check every step, this number should be  one.  This is probably unwarranted, however, and will slow down your run.  In any case, you can't set it to a negative number or zero.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The topology rearranger frequency must be greater than zero.</TD>
+			<TD ALIGN=LEFT>The topology rearranger is the principal rearranger that modifies trees.  Since this is what LAMARC was made to do, the frequency at which it does it must be positive.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The TT Ratio must be greater than 0.5.</TD>
+			<TD ALIGN=LEFT>Our implementation of the F84 mutational model requires the TT (transition/transversion)
+ratio to be greater than 0.5.  If you wish to model Jukes-Cantor, where TT equals 0.5, set it very slightly higher.  If you
+are sure that TT should be lower than 0.5, use an appropriate GTR model instead of F84.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The upper bound of the prior must be greater than the lower bound of the prior.</TD>
+			<TD ALIGN=LEFT>A prior defines a range of values, and the upper bound must be the greater.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The upper bound of the prior must be greater than the lower bound of the prior.  If you wish to hold this parameter constant, go to the constraints menu and set it constant there.</TD>
+			<TD ALIGN=LEFT>A prior defines a range of values, not a single point.  For fixed values for your parameters, do as suggested and constrain the parameter to be constant.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The value [your number] is greater than DBL_MAX [large number]</TD>
+			<TD ALIGN=LEFT>The number you entered is larger than the largest floating-point value (DBL_MAX) your machine can represent.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The value [your number] is greater than LONG_MAX [large number]</TD>
+			<TD ALIGN=LEFT>The number you entered is larger than the largest integer value (LONG_MAX) your machine can represent.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The value [your number] is greater than LONG_MIN [large negative number]</TD>
+			<TD ALIGN=LEFT>The number you entered is outside the range of integer values your machine can represent.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>The value [your number] is less than NEGMAX [large negative number]</TD>
+			<TD ALIGN=LEFT>The number you entered is more negative than the smallest value the program can represent.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>There must be a positive number of categories for all mutational models.</TD>
+			<TD ALIGN=LEFT>A single mutation rate means one category--if you want multiple categories, you must define at least two.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>There must be a positive number of simultaneous searches</TD>
+			<TD ALIGN=LEFT>The number of simultaneous searches (heating) includes the 'main' search itself.  'Two simultaneous searches' means two searches happening at once, one at a different temperature than the first.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>There must be less than 30 multiple searches for a given run.   (A reasonably high number is 5.)</TD>
+			<TD ALIGN=LEFT>If there are too many simultaneous searches, LAMARC will bog down considerably.  5 should be more than enough.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Transition/transversion ratio must be > 0.5</TD>
+			<TD ALIGN=LEFT>The transition/transversion ratio is set for a datamodel for DNA.  It must be greater than one-half.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Tried to set the chainpack for region [#], replicate [#], and chain [#], but we needed the chainpack for region [#], replicate [#], and chain [#].</TD>
+			<TD ALIGN=LEFT>While reading in the input summary file, LAMARC encountered a 'chainpack' with a different label than it expected.  You should only get this error if you have been editing the input summary file by hand.  Search your file for the initial set of numbers and either correct it, or correct the surrounding data.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Unable to find tip: [text]</TD>
+			<TD ALIGN=LEFT>This error message should be unreachable; something has gone wrong internally with how LAMARC stores and retrieves data.  Send us an error report at <A HREF="mailto:lamarc at u.washington.edu">lamarc at u.washington.edu</A></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Unable to read or find your file in 3 attempts.  To create a LAMARC input file, please run the converter (lam_conv or gui_lam_conv).</TD>
+			<TD ALIGN=LEFT>LAMARC takes as input a specially formatted XML file which is typically created using the converter.  Either you have not done this, the file you created is in a different directory, or there is a problem with the file's permissions.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Underflow error:  Attempted to compute [value or exp(value)] [*/-] exp[value]).  Returning 0.  (Further underflow errors of this type will not be reported.)</TD>
+			<TD ALIGN=LEFT>Underflow errors are not extremely serious, since they replace an inordinately small number with zero.  LAMARC should not attempt to divide by the result--if it does, you'll see a completely different error, and the program may well simply crash.  A tally of the number of times this error was seen is kept as the program runs, and reported to the output file at the end.  You can see how often this occurs there.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Unknown data type encountered: [tag]</TD>
+			<TD ALIGN=LEFT>The valid data types are:  DNA, MICROSAT, and K-ALLELE.  Your XML has a tag that is none of those.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Warning from the maximizer:  [text]</TD>
+			<TD ALIGN=LEFT>After collecting possible trees, LAMARC searches for the set of parameters that will result in the highest likelihood for that set of trees.  This search petered out for some reason; probably because it was taking too long.  There are a variety of potential reasons for this failure, and the particular message will give you a clue about why, but in general, it means LAMARC didn't find the absolute best set of parameters for your data.  The least worrying message to get h [...]
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Warning:  A newly-calculated value for force "[force]" ([value]) is different from that read in from the summary file [value] (A difference of [value] which exceeds the recommended minimum difference of 0.1%).</TD>
+			<TD ALIGN=LEFT>The values from the summary file are different from the values from the current maximization.  This can be due to running a different version of LAMARC, running LAMARC on different computers, or precision issues from reading/writing a decimal number but storing it as hex.  In general, it's something to keep in mind--you may want to re-run LAMARC with longer run-times if it's serious.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Warning:  attempted to get the maximum of a curve with [value] points when we wanted [value].</TD>
+			<TD ALIGN=LEFT>This is probably an error in LAMARC internally--if you get it, you should contact us at lamarc at u.washington.edu and cite this message in your e-mail.  There's no known way to get this error normally, but that's why they call them 'bugs'.</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Warning: calculating FST estimates for [parameter name(s)] is impossible due to the data for the populations involved.  If the FST method is invoked to obtain starting values for those parameters, defaults will be used instead.</TD>
+			<TD ALIGN=LEFT>LAMARC failed to calculate FST values from your data.  This might be because FST is actually impossible to calculate for your data set (this happens if the variability within a population is greater than the variability between populations), or it might be because LAMARC simply failed to perform the calculation properly.  In either event, this will only affect the starting values for these parameters, and then only if you explicitly tell LAMARC that you want to use FST to give it starting values.  If you do, LAMARC will use defaults instead.  The defaults will almost certainly work fine; we just don't want you to later report "we used FST values as our starting parameters" when in actual fact LAMARC used defaults.  But the general LAMARC algorithms do not use FST in any way, shape, or form, and are therefore immune to irregularities in FST calculation.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Warning:  maximization failure for this chain.  Using the parameter estimates from the previous chain and continuing.</TD>
+			<TD ALIGN=LEFT>LAMARC failed to figure out what the trees produced from this chain mean.  This could be because the chain was too short--if this happens a lot, try increasing the length.  The upshot of this situation is that while the final tree is probably better than the initial tree, the estimated parameters are no better.  Since LAMARC is designed to estimate parameters, not make trees (per se), this can be bad if it happens a lot, especially if it happens at a later chain (and is [...]
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Warning:  the maximizer was unable to find the same maximum likelihood estimates as before for region "[name]".  The parameter values it found have a log likelihood of [value], which is significantly different from the maximum likelihood of the old set of parameters, [value] (A difference of [value]).  If you were running from older summary files with replication, this is to be expected.  Otherwise, we recommend you re-run this data set with any or all of a) l [...]
+			<TD ALIGN=LEFT>You can get this message if running LAMARC from a summary file when the attempted re-maximization of the final chain produces a different answer than it did the first time.  Newer versions of LAMARC have a more robust maximizer, so if an older version created the summary file, you should be OK.  In addition, if you extended an old summary file with a newer version of LAMARC, the old values will persist in the summary file (though they will be correct in the output file) [...]
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>You may not have a negative number of discarded genealogies.</TD>
+			<TD ALIGN=LEFT>The discarded genealogies are the 'burn-in' phase of a chain, and may be zero, but not negative.  Though that would be a great time-saving device if they could be.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>You may not have a negative number of final chains.</TD>
+			<TD ALIGN=LEFT>You may have zero final chains if you have at least one initial chain, but no cheating to gain extra time by having a negative number of them.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>You may not have a negative number of final samples.</TD>
+			<TD ALIGN=LEFT>You may have zero final samples, but not a negative number.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>You may not have a negative number of initial chains.</TD>
+			<TD ALIGN=LEFT>You can have zero initial chains, but not negative.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>You may not have a negative number of initial samples.</TD>
+			<TD ALIGN=LEFT>You can have zero initial samples, but not negative.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+<!-- no simulated data released yet
+		<TR>
+			<TD ALIGN=LEFT>You may not use the do-nothing arranger if you are not simulating data.</TD>
+			<TD ALIGN=LEFT>The do-nothing rearranger does nothing, and is only potentially useful when simulating data.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+-->
+		<TR>
+			<TD ALIGN=LEFT>You must have a positive sampling interval.</TD>
+			<TD ALIGN=LEFT>The sampling interval is the number of steps LAMARC takes before sampling the tree again.  Even if LAMARC is moving backwards in time, it will not realize this fact, so must be given a positive number here.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>You must leave at least one allowable site for this trait.  The sites may be chosen from the range [range].</TD>
+			<TD ALIGN=LEFT>Since you have declared that this trait is somewhere in this region, you must let it live somewhere in that region.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>You must set a positive or zero relative rate for each category.</TD>
+			<TD ALIGN=LEFT>A mutation rate of zero means an invariant site, but a negative mutation rate is meaningless.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>You must set a positive probability to go with each rate.</TD>
+			<TD ALIGN=LEFT>The probabilities are scaled to be relative to one another, but all must be positive.</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+		<TR>
+			<TD ALIGN=LEFT>Your data must consist of numbers, not letters or punctuation.  The Mixed KS model is inappropriate for DNA or RNA data.</TD>
+			<TD ALIGN=LEFT>Some of the data to which you are applying the 'Mixed KS' model are not numbers (numbers being the standard format for microsatellite data).</TD>
+			<TD ALIGN=CENTER>+</TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+			<TD ALIGN=CENTER><BR></TD>
+		</TR>
+	</TBODY>
+</TABLE>
+
+<P>(<A HREF="troubleshooting.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="limitations.html">Next</A>)</P>
+
+<!--
+//$Id: messages.html,v 1.13 2008/07/30 22:43:00 ewalkup Exp $
+-->
+</BODY>
+
+</HTML>
diff --git a/doc/html/migration_matrix.html b/doc/html/migration_matrix.html
new file mode 100644
index 0000000..ed69f53
--- /dev/null
+++ b/doc/html/migration_matrix.html
@@ -0,0 +1,150 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation:
+Modeling Linkage Properties and Relative Mutation Rates of Your Data</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+<P>(<A HREF="genetic_map.html">Back</A> | <A HREF="index.html">Contents</A>
+| <A HREF="divergence.html">Next</A>)</P>
+<h2>Defining The Migration Matrix</h2>
+
+<p>
+<em>(Note: it is recommended that you be familiar with the material
+in the <a href="converter.html">data file conversion</a>
+section before reading this section.)</em>
+</p>
+<p>
+The <a name="mig_mat">Migration Matrix</a> defines how populations mix. Each cell displays the parameters for migration from one population to another. The populations may mix randomly, symmetrically, at a constant rate, or not at all. If they mix, various parameters including the rate can be defined. The values in the cells can be <a href="#edit_mat_cell">edited</a> to match the user's current model. This same format is used for displaying and defining <a href="divergence.html">Divergen [...]
+</p>
+<p>
+Smart modeling of how your populations mix will yield much better results from LAMARC's analysis. 
+Think about how your populations relate to each other geographically or temporally. For example, 
+if you are looking at populations in a series of valleys separated by mountains, 
+symmetrical migration makes sense. But if you are looking at barnacle populations 
+along an island chain that has a strong prevailing current, migration downstream 
+is easy but migration upstream isn't.  You should not infer symmetrical rates here, and
+perhaps should consider setting the upstream rates to zero.
+</p>
+<p>
+Of course, if you are not sure what model to use, try an ensemble of runs testing each model and compare the results. Each will give you different insights into your data. Coalescence analysis is a tool to investigate your data, not an oracle. 
+</p>
+<p>
+This is the <tt>Migration Matrix</tt> tab of the LAMARC Converter interface which displays the currently defined Migration Matrix.
+</p>
+<p><img src="batch_converter/images/MigrationOnlyMatrixTab.png" alt="Migration Matrix for chrom1.mig"/></p>
+<p>
+<a name="edit_mat_cell">There are several things you should note:</a>
+<ul>
+<li>
+The "From" Population is listed at the start of each row and the "To" Population is at the top of each column. 
+</li>
+<li>
+A population cannot migrate to itself, so all the cells on the diagonal are marked "invalid".
+</li>
+<li>
+Only cells with black text can be edited. Those that are grayed out cannot. So if you want to modify a population name you have to go back to the <tt>Data Partitions</tt> tab.
+</li>
+</ul>
+</p>
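+
+<p>
+As a minimal illustration (this is not LAMARC code, just a sketch of the
+indexing convention described above), the matrix can be thought of as a
+two-dimensional lookup keyed first by the "From" population and then by the
+"To" population, with the diagonal invalid:
+</p>
+<pre>
+# Sketch of the matrix layout described above, with made-up population
+# names and a default rate of 1.0 everywhere off the diagonal.
+pops = ["pop1", "pop2", "pop3"]
+rate = {f: {t: ("invalid" if f == t else 1.0) for t in pops} for f in pops}
+print(rate["pop1"]["pop2"])   # 1.0 -- migration from pop1 into pop2
+print(rate["pop2"]["pop2"])   # 'invalid' -- a population cannot migrate to itself
+</pre>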
+
+<p>
+By default LAMARC allows unconstrained migration between all populations in your data set and sets the rates all equal. If you wish to change any of the values, double click on an editable cell and the following dialog will appear:
+</p>
+<p><img src="batch_converter/images/EditMigration.png" alt="Edit Migration Matrix Cell"/></p>
+
+<p>
+You can edit four settings here: 
+<ul>
+<li>
+<tt>Migration Rate</tt> : Any positive real value.  This represents m/(mu), the ratio of the
+per-lineage migration rate to the per-site mutation rate.
+</li>
+<li>
+<tt>Method</tt> : Toggle between
+<ul>
+<li>
+"USER" : User defined (the starting value you give will be used).
+</li>
+<li>
+"FST" : Starting values will be determined using the FST statistic if possible;
+otherwise your value will be used.
+</li>
+</ul>
+</li>
+<li>
+<tt>Profile</tt> : This determines whether the program will compute profiles (slices
+through the posterior curve to show its shape and give approximate confidence intervals).
+In a Bayesian run LAMARC will always do percentile profiles--the most informative type--
+because there is minimal computational cost to doing so.  In a likelihood run, however,
+profiles can be quite expensive, and you may choose to ask for fixed profiles or no profiles
+instead.  Thus, the three options here are: 
+<ul>
+<li>"percentile": Percentile profiles (most expensive)
+<li>"fixed": Fixed profiles (somewhat expensive)
+<li>"none": No profiles (usually not recommended as you will get no information
+about the confidence intervals of your estimates)
+</ul>
+<li>
+<tt>Constraint</tt> : Select from: 
+<ul>
+<li>
+"Invalid" : Migration cannot happen between the "From" Population and the "To" Population.
+</li>
+<li>
+"Constant" : The Migration rate will be held constant at the starting value you
+give.  If that value is zero, this will have the same effect as Invalid.  Use this option when
+you have an idea of the migration rate and do not want to infer it. 
+</li>
+<li>
+"Symmetric" The <tt>Migration Rate</tt> between the "From" Population and 
+the "To" Population must be the same as that between the "To" 
+Population and the "From". The effect of this it to change both 
+the <tt>Migration Rate</tt> and <tt>Constraint</tt> of the symmetric cell (across 
+the diagonal) to the values of the cell being edited. It also tells LAMARC that the 
+two linked rates must remain equal through out the analysis.  Only one value is inferred,
+and it will be the one that best fits evidence from both populations.
+</li>
+<li>
+"Unconstrained" This migration rate will be inferred, starting at the given
+value.  This is the normal setting for a parameter of interest.
+</li>
+</ul>
+</li>
+</ul>
+</p>
+<p>
+Generally you will want to make your migration rates Unconstrained unless that is
+leading to inferring too many parameters, which it easily can.  For cases in which
+there is no reason to expect asymmetry, Symmetrical rates greatly reduce the number
+of parameters.  If your populations are laid out in such a way that only a few
+migration routes are feasible, you should set all other routes to Invalid.  Finally,
+setting rates to Constant is relatively seldom used, but is useful when you already
+have a Migration rate estimate in hand and want to concentrate your statistical efforts on some
+other parameter.
+</p>
+
+
+<P>(<A HREF="genetic_map.html">Back</A> | <A HREF="index.html">Contents</A>
+| <A HREF="divergence.html">Next</A>)</P>
+
+<!--
+//$Id: migration_matrix.html,v 1.4 2012/05/14 19:55:38 ewalkup Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/output.html b/doc/html/output.html
new file mode 100644
index 0000000..c80eefc
--- /dev/null
+++ b/doc/html/output.html
@@ -0,0 +1,375 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Output</TITLE>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+<P>(<A HREF="search.html">Previous</A> | <A HREF="index.html">Contents</A> |
+<A HREF="bayes.html">Next</A>)</P>
+
+<H2>Interpreting the Output File</H2>
+
+<P> The output file is divided into four main sections, some of which
+will appear only if you ask for them (by setting output to "verbose"): </P>
+
+<UL>
+<LI><A HREF="output.html#mle"> MLE/MPE Estimates of Parameters </A></LI>
+<LI><A HREF="output.html#profile"> Profile Likelihoods </A></LI>
+<LI><A HREF="output.html#user"> User Specified Options </A></LI>
+<LI><A HREF="output.html#runreport"> Run Reports </A></LI>
+</UL>
+
+<P> The output report is best viewed and printed in a monospaced font such
+as Courier.</P>
+
+<H3><A NAME="mle"> Maximum Likelihood Estimates (MLEs) of
+Parameters</A></H3> (or)
+<H3>Most Probable Estimates (MPEs) of Parameters</H3>
+
+<P> This section presents the actual results:  maximum likelihood estimates
+(MLEs) in a likelihood run or most probable estimates (MPEs) in a Bayesian
+run of whichever parameters were specified.  Each evolutionary force
+(coalescence, migration, recombination, growth, etc.) used in the analysis
+has its own column or set of columns in the output.</P>
+
+<P> If profiling has been turned on, each parameter
+will be presented with some information about its possible error.
+This information is calculated during profiling.</P>
+
+<P> If you specified "percentile" profiling, you will be given approximate
+confidence intervals around the estimate of each parameter.  These are only
+asymptotically correct, so take them with a grain of salt; in too-short
+runs, particularly, they are likely to be much too narrow. 
+
+For the most
+accurate confidence intervals you will need to run multiple replicates (see
+the <A HREF="search.html">"Search Strategy"</A> article).  
+
+If you selected
+"concise" output with percentile profiling, only the 95% confidence
+intervals will be shown (the .025 and .975 percentiles).  If "normal" or
+"verbose" output was selected, a full range of percentiles will be shown,
+from .005 to .995.  </P>
+
+<P> If you specified "fixed" profiling, no information about the support
+intervals is
+given in this section, but can be deduced from the information in the next
+section (<A HREF="output.html#profile">Profile Likelihoods</A>).
+
+<H4> Theta ("coalescence" force) </H4>
+
+<P>The "Theta" data presents estimates of Theta for each population. Within
+each population an estimate is given for each region, along with an overall
+estimate combining information from all regions.
+
+<P> While most papers describe Theta as 4Nmu, this is true only for
+diploids. If you put haploid data into LAMARC (like that from mtDNA) the
+Theta estimates will be estimates of 2Nf(mu) instead.  It is best to think
+of Theta as "number of heritable copies in population * 2 * mutation rate"
+since this definition works no matter what the ploidy is.  (The "2" comes
+from the fact that two sequences that have diverged for time T are
+different by 2 * mu * T mutations, since both diverging lineages accumulate
+mutations.)</P>
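+
+<P> As a hypothetical worked example (the Theta estimate and mutation rate
+below are made-up numbers, not LAMARC output), you can solve Theta = 4N(mu)
+for N if you are willing to assume a per-site mutation rate:</P>
+
+<PRE>
+# Hypothetical worked example: solve Theta = 4 * N * mu for N (diploid data).
+theta = 0.004          # example Theta estimate taken from an output report
+mu = 1e-8              # assumed per-site mutation rate per generation
+N = theta / (4 * mu)   # effective population size
+print(N)               # 100000.0
+</PRE>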
+
+<P> If you have used the "multiple mutation rates" option of the data
+model, then the mu in Theta is relative to the mean mutation rate
+across all your categories, weighted by the probabilities of each
+category.  The categories reported in the output will be normalized
+to have a mean of 1.0.  They may therefore appear different from
+the values you put in.</P>
+
+<P> You can combine regions with different N or different mu, but
+you must know the relative N or mu of each region or segment, and must
+inform the program of this.  If you believe your mu rates to vary over
+regions, you can tell LAMARC these rates were drawn from a gamma
+distribution (see the 'alpha' section, below).</P>
+
+
+<H4> r (recombination force)</H4>
+
+<P> The "Recombination Rate" information presents estimates of r for each
+region. There are no per-population estimates; we model only
+the case where the recombination rate is constant within a phylogenetic
+tree.</P>
+
+<P> The parameter r is C/mu, with C being the per-site recombination rate
+and mu the per-site mutation rate.  Thus r=1 describes a situation where
+the risk of recombination at a site is the same as the risk of
+mutation at that site.  (Values of r as high as this will be difficult
+to estimate, and the program will tend to bog down.)</P>
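+
+<P> As a hypothetical worked example (both rates below are assumed, not
+estimated by LAMARC):</P>
+
+<PRE>
+# Hypothetical worked example of r = C / mu.
+C = 1e-9    # assumed per-site recombination rate per generation
+mu = 1e-8   # assumed per-site mutation rate per generation
+r = C / mu
+print(r)    # 0.1 -- recombination at a site is ten times less likely than mutation
+</PRE>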
+
+<H4> g (growth force)</H4>
+
+<P> The "Growth Rate" information presents estimates of the exponential
+growth or shrinkage rate for each population.  </P>
+
+<P> The parameter g shows the relationship between Theta, which is now the
+estimate of modern-day population size, and population size in the past 
+through the equation Theta(t) = Theta(now) exp(-gt) where t is
+a time in the past.  Positive 
+values of g indicate that the population has been growing, and negative 
+values indicate that it has been shrinking. </P>
+
+<P>The units of t in this equation are mutational units; one unit of time is
+the expected time for one mutation to occur.  To interpret the magnitude
+of g (in contrast to its sign, which has a straightforward meaning) you
+will need information about the mutation rate.  When such information
+is unavailable you have two options:  (1) use the values of g only to
+compare among organisms which presumably have the same mutation rate, or
+(2) consider a range of possible values.</P>
+
+<P>If you have information about the mutation rate mu, you can solve for
+values of Theta a given number of generations in the past using the
+relationship:</P>
+
+<P>Theta(T generations in the past) = Theta(now) exp(-gT(mu))</P>
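+
+<P> As a hypothetical worked example of this relationship (all numbers below
+are made up for illustration):</P>
+
+<PRE>
+import math
+
+# Hypothetical worked example: Theta(T generations ago) = Theta(now) * exp(-g * T * mu)
+theta_now = 0.01    # example present-day Theta
+g = 100.0           # example growth rate estimate (positive means growing)
+mu = 1e-8           # assumed per-site mutation rate per generation
+T = 100000          # generations in the past
+theta_past = theta_now * math.exp(-g * T * mu)
+print(theta_past)   # about 0.00905 -- the population was smaller in the past
+</PRE>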
+
+<H4> M (migration force) </H4>
+
+<P> The "Migration Rate" data are more complex, since we estimate the
+immigration rate into each population from every other population.
+There is an estimate for each migration rate parameter.  For example
+if there are three populations we present immigration from 1 to 2,
+from 1 to 3, from 2 to 1, from 2 to 3, from 3 to 1 and from 3 to 2,
+a total of 6 parameters.  If multiple genetic regions are present,
+then each parameter will have a separate estimate for each region
+and a joint "overall" estimate involving all the regions together.</P>
+
+<P> The parameter M is m, the per-generation migration rate, divided by mu,
+the per-site mutation rate.  Be careful in comparing results with other
+studies; there are two common ways to report migration rates, and many
+studies use 4Nm (where N is the population size of the receiving
+population) instead.  To convert the M value into 4Nm, multiply it by the
+Theta value of the recipient population.  For example, to convert a
+migration rate (M) from population 1 into population 3 to 4Nm, multiply by
+the Theta of population 3.</P>
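+
+<P> As a hypothetical worked example (the M and Theta values below are made
+up), converting an immigration rate into population 3 to 4Nm:</P>
+
+<PRE>
+# Hypothetical worked example: convert LAMARC's M (m/mu) into 4Nm.
+M_1_into_3 = 50.0     # example immigration rate estimate into population 3
+theta_3 = 0.01        # example Theta estimate for recipient population 3
+four_Nm = M_1_into_3 * theta_3
+print(four_Nm)        # 0.5, i.e. 4Nm = 0.5 (Nm = 0.125 immigrants per generation)
+</PRE>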
+
+<P> Please bear in mind that we always estimate immigration rates--rates
+at which migrants enter a population.  This may seem backwards if one
+thinks in terms of the fate of individuals, but to a population as a whole
+the individuals entering it are much more significant to its future than
+the individuals leaving it.</P>
+
+<H4> alpha (scaled shape parameter of the best-fit gamma distribution
+of mutation rate over unlinked regions) </H4>
+
+<P>For data collected from multiple unlinked genomic regions, if you enable the
+gamma "force," you can have LAMARC distribute the unknown relative single-region
+mutation rates according to the gamma distribution which best fits your data.
+The general gamma distribution has two parameters, the "shape parameter" alpha
+(α) and the "scale parameter" beta (β); to avoid overparameterization,
+LAMARC internally sets β = 1/α so that the mean of the distribution
+is the product αβ = 1.  The value of α that LAMARC estimates
+is a pure, positive number which best fits the landscape of rate variation among genomic
+regions in your data set.  α = 1 corresponds to exponentially-distributed
+relative mutation rates; smaller α values imply most of your regions are
+nearly invariant and one or two are highly variable (data that are completely
+invariant everywhere would have α = 0).  Large values of α imply
+your regions are mutating at similar relative rates that are approximately
+distributed according to a normal distribution (data in which each region mutates at
+exactly the same rate would have an infinite value for α).  Some more
+information is available <A HREF="gamma.html">here</A>.</P>
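+<P>The constraint β = 1/α can be seen with a short sketch
+(illustrative only; the α value is arbitrary): relative rates drawn
+from such a gamma distribution average out to about 1 no matter which
+α is used.</P>
+<PRE>
+# Illustrative only: with shape alpha and scale beta = 1/alpha, the gamma
+# distribution has mean alpha * beta = 1, so simulated relative mutation
+# rates average out to roughly 1 regardless of the chosen alpha.
+import random
+
+alpha = 0.5                       # arbitrary shape parameter for the example
+beta = 1.0 / alpha                # LAMARC's constraint, giving mean 1
+rates = [random.gammavariate(alpha, beta) for _ in range(100000)]
+print(sum(rates) / len(rates))    # close to 1.0
+</PRE>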
+
+<P>Because there is very little power available to distinguish between very
+high values of α, LAMARC might, during the course of its analysis,
+decide to hold α constant at an arbitrarily high value such as 100,
+to avoid being pulled too close to infinity, where the likelihood calculations
+would become invalid.</P>
+
+<H3><A NAME="profile"> Profile Likelihoods </A> </H3>
+
+<P> This section gives more detailed information about the possible error of
+each estimate, and the relationships between the parameters.</P>
+
+<P> A profile likelihood table is a way of visualizing how change in one
+parameter affects the estimates of the other parameters.  For each table,
+one parameter is set to several interesting values and held constant
+at those values while all other parameters are maximized.  For example,
+we may hold Theta1 constant at 10x its MLE and see how that affects the
+best values of the other parameters.</P>
+
+<P> If varying one parameter causes another to vary wildly, the two are
+correlated.  If varying one parameter leaves another nearly constant,
+the two are uncorrelated.  An example of correlated parameters would
+be the migration rates from North to South America and South to North
+America.  If there are considerable similarities between the North
+and South American populations, then lowering the N->S migration rate
+will force a compensating raising of the S->N rate.  This would be
+visible in profile likelihood tables as a marked curve in the S->N
+rate when the N->S rate is being profiled, and vice versa.</P>
+
+<P> Profiles can be done in two ways, "percentile" or "fixed", but only one
+of those can be used per force.  You can also turn off
+profiling of any individual parameter, perhaps because you already know it
+cannot be sensibly estimated.</P>
+
+<P> "Percentile" profiles are estimated at percentiles of the likelihood
+curve; for example, they may hold Theta1 fixed at the value which could
+just be rejected at the 95% level, and see what happens to the other
+parameters.  These are the most informative kind of profiles, but they
+are expensive to calculate because the program must find the correct
+percentile.</P>
+
+<P> "Fixed" profiles are estimated at multiples of the MLE parameter value;
+for example, they may hold Theta1 fixed at 1/10 of its MLE value and
+at 10x its MLE value.  These are not always as informative (the chosen
+values may be well off the edges of the curve) but they are quick to
+compute.</P>
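+<P>As an illustration of the difference (a sketch only; the multipliers and
+the MLE value below are hypothetical rather than LAMARC's exact internal
+list), fixed profiling simply evaluates the parameter at preset multiples of
+its MLE, whereas percentile profiling must search for the parameter values
+sitting at chosen likelihood percentiles:</P>
+<PRE>
+# Illustrative only: fixed profiling holds a parameter at preset multiples
+# of its MLE; percentile profiling instead searches for the values that
+# fall at chosen percentiles of the likelihood curve.
+theta1_mle = 0.005                        # hypothetical MLE for Theta1
+multipliers = [0.1, 0.5, 1.0, 2.0, 10.0]  # hypothetical fixed-profile multipliers
+fixed_points = [m * theta1_mle for m in multipliers]
+print(fixed_points)   # the values at which Theta1 would be held constant
+</PRE>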
+
+<P> The "verbose" form of the output report gives profiles for every region
+as well as for the overall results.  Lesser levels of output reporting
+give only the overall profiles.  If you have many parameters and regions, you
+will probably want to avoid "verbose" as the output can be overwhelming. 
+The "normal" form of the output report gives only the overall profiles.  The
+"concise" form of the output report gives only the overall profiles, and
+also only calculates the two percentiles that correspond to a 95% support
+interval, or, in the case of fixed profiles, only the 1/10X and 10X
+multipliers.</P>
+
+<P>Profiles are very time-consuming, and if you don't want them it's best
+to turn them off.  Be aware, however, that if you don't do any profiling
+there will be no confidence-interval information either.  (The confidence
+intervals presented with the MLEs are, in fact, slices through the profile
+likelihood tables.)</P>
+
+<P> If speed is an issue (and profiling can take up the majority of a LAMARC
+run), one option is to turn on summary file reading and writing (see <A
+HREF="menu.html#io">Input and
+Output related tasks</A>).  Once a summary file has been written, you can
+change the profiling options, read it in again, and get mathematically
+identical results (to a certain degree of precision), but with percentile
+profiling instead of fixed, or normal output instead of concise.</P>
+
+<H3><A NAME="user"> User Specified Options </A></H3>
+
+<P> This section lists the settings under which the program was run.  It is
+useful as a record of what you are doing, and to verify that your
+instructions were interpreted correctly.</P>
+
+<H4> Data summary </H4>
+
+<P> This section summarizes details of the input data.  If there are
+multiple linked segments in the data, it provides a table listing all of
+the segments grouped by region.  Each segment shows its type of data and
+the relative mutation rate used for that segment.  The next section
+is a table showing for each region the number of variable markers found
+in that region, the relative Ne and mu of the region,
+and a pairwise estimate of Theta based
+on the method of Watterson or the FST estimator.  It also gives the
+number of individuals sampled for that region in each population.</P>
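+<P>For orientation, a minimal sketch of Watterson's per-site estimate (not
+LAMARC's internal code; the numbers are made up) divides the count of
+variable sites by the harmonic number of the sample size minus one and by
+the sequence length:</P>
+<PRE>
+# Illustrative only: Watterson's per-site estimate of Theta from the number
+# of variable (segregating) sites S, the number of sequences n, and the
+# sequence length L (the example numbers below are made up).
+def watterson_theta(S, n, L):
+    a_n = sum(1.0 / i for i in range(1, n))   # harmonic number H(n-1)
+    return S / (a_n * L)
+
+print(watterson_theta(S=25, n=20, L=1000))    # a rough per-site Theta
+</PRE>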
+
+<P> This section may provide a useful warning.  If the values of the
+pairwise-estimator Thetas are widely variable and their support intervals do
+not overlap, you could be combining regions that should not be combined, or
+your relative Ne or mu values may be mistaken (or omitted).  The
+per-region estimates are still valid, but the combined estimate should be
+regarded with suspicion.</P>
+
+<P> You may also see that, for example, it is hopeless to estimate
+recombination in a region because there are no variable sites.</P>
+
+<P> Following the region summary is a summary of the data model(s) used and
+their parameters.  For DNA, the Felsenstein '84 model reports four base
+frequencies and the transition/transversion ratio. The GTR model reports
+the four base frequencies and the six base-base mutation parameters.  For
+microsatellite data, the type of model is listed (Brownian, Stepwise or
+K-Allele).  For Stepwise and K-Allele the number of allele bins 
+is reported, and for the Mixed-KS model the value of the percent_stepwise
+parameter is also reported.</P>
+
+<P> We also report on the use of multiple mutation rate categories (method
+of Churchill and Felsenstein).  If multiple categories are in use, we
+report the number of categories, the (normalized) rate and probability
+of each, and the mean length of autocorrelated regions.</P>
+
+<H4> Input Genetic Data </H4>
+
+<P> If you request normal or verbose output, this section will contain
+a copy of your input data, formatted like PHYLIP's "interleaved" format
+with 60 bases per line.  This is useful to check that your data are
+properly aligned.  It can also be cut and pasted into other programs.</P>
+
+<H3><A NAME="runreport"> Run Reports by Region </A></H3>
+
+<P> If you requested verbose or normal output, the reports normally printed to
+screen during the program run will be repeated here (even if their
+screen printing was suppressed).  This is useful in diagnosing problems
+such as too-short chains.</P>
+
+<P> The reports are organized by region, and within regions by chain.
+They give the following: </P>
+
+<P> "Accepted" indicates the proportion of proposed changes that were
+accepted.  The search is in trouble if this dips below 5% and probably
+not working if it dips below 1%.  Consider heating to remedy this.</P>
+
+<P> "Prior lnL" compares how well the genealogies of this chain fit their
+ending parameter estimates as opposed to their starting parameter
+estimates.  DO NOT use this number in likelihood ratio tests; it
+is a relative likelihood and has no meaning outside of context.  It
+is provided only because very high prior lnLs are a symptom of having too
+few chains, or chains which are too short.  As a rule of thumb, by the
+final chains the prior lnL should be no greater than 2x-3x the number
+of parameters being estimated.</P>
+
+<P> "Data lnL" indicates the likelihood of the genetic data on the last
+genealogy in this chain.  It is in the same units as the values that DNAMLK
+from PHYLIP would produce.  If the Data lnL is improving rapidly from
+chain to chain all the way to the end of the program, you are not
+running the program long enough--it is still finding much better
+trees than it ever found before.</P>
+
+<P> If you are using the Brownian motion approximation of the
+microsatellite likelihood methods, "Data lnL" of zero indicates that your
+population is small enough for the approximation to break down.  One zero,
+in an initial chain, may not be cause for concern but multiple zeros or
+zeros in the final chains suggest that the Brownian method should not be
+used.</P>
+
+<P> For more information on fine-tuning your search, see the documentation
+article on <A HREF="search.html">"Search Strategies."</A> </P>
+
+<P> If genealogies were discarded due to too many migrations or
+recombinations, a line will be printed giving the number of bad
+genealogies.  If this number remains high into the final chains, it
+is cause for concern.</P>
+
+<P> If more than one arrangement strategy was in use (for example, you
+were searching over genotype reconstructions), there will be a summary
+of acceptance rates for each strategy.  It is important to keep an eye
+on these and not simply look at the overall acceptance rates (see
+<A HREF="genotype.html">"Genotypic Data."</A>).  </P>
+
+<P> If multiple temperatures were in use, there will be a table showing
+the rates of swapping between adjacent temperatures.  We feel that optimal
+swapping rates are between 10% and 40%; if your rates are not in this
+range you may wish to adjust the number of temperatures or the
+difference between adjacent temperatures.</P>
+
+<P> When adaptive heating is in use, the temperatures shown are 
+averages over the course of the chain.</P>
+
+<P> Finally, there is a summary of the parameter estimates for this
+chain.</P>
+
+<P> The output file runtime reports differ from the ongoing runtime
+reports in that they omit the prognosis of the ending time.</P>
+
+<P>(<A HREF="search.html">Previous</A> | <A HREF="index.html">Contents</A> |
+<A HREF="bayes.html">Next</A>)</P>
+
+<!--
+//$Id: output.html,v 1.30 2011/06/23 21:00:36 jmcgill Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/outsumfile.2reg3rep.html b/doc/html/outsumfile.2reg3rep.html
new file mode 100644
index 0000000..f69c1fc
--- /dev/null
+++ b/doc/html/outsumfile.2reg3rep.html
@@ -0,0 +1,3341 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+&ltXML-summary-file&gt
+&lt!-- Lamarc v. 2.0
+     Please do not modify. --&gt
+&lt!--  This summary file should match the input summary file insumfile.2reg3rep.txt,
+      up until that file's end. --&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.195000000000000007 &lt/accrate&gt
+		&ltllikemle&gt 4.50058419396642684 &lt/llikemle&gt
+		&ltllikedata&gt -3620.90996263017314 &lt/llikedata&gt
+		&ltstarttime&gt 1113933890 &lt/starttime&gt
+		&ltendtime&gt 1113933911 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 39 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00832313455063505084 0.00734484914891693919 &lt/thetas&gt
+			&ltmigrates&gt 0 96.6630952829531083 46.6299496864950527 0 &lt/migrates&gt
+			&ltgrowthrates&gt 2365.3441048272216 184.41163985471303 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.160000000000000003 &lt/accrate&gt
+		&ltllikemle&gt 1.18053638385976001 &lt/llikemle&gt
+		&ltllikedata&gt -3516.0909920893173 &lt/llikedata&gt
+		&ltstarttime&gt 1113933912 &lt/starttime&gt
+		&ltendtime&gt 1113933934 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 32 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00513756085360574273 0.0082400193242912461 &lt/thetas&gt
+			&ltmigrates&gt 0 131.185343491258408 42.1077506833302877 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1821.55597636667744 149.760781972604775 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.130000000000000004 &lt/accrate&gt
+		&ltllikemle&gt 1.6599393589321001 &lt/llikemle&gt
+		&ltllikedata&gt -3271.75316866892263 &lt/llikedata&gt
+		&ltstarttime&gt 1113933934 &lt/starttime&gt
+		&ltendtime&gt 1113933960 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 26 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00505700949907143815 0.00613707484094671175 &lt/thetas&gt
+			&ltmigrates&gt 0 124.679362905292081 34.8937725369221496 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1761.27593665903237 -84.0195656658781189 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 1 &lt/tinytrees&gt
+		&ltaccrate&gt 0.149999999999999994 &lt/accrate&gt
+		&ltllikemle&gt 0.596513557975481556 &lt/llikemle&gt
+		&ltllikedata&gt -3269.61178480345779 &lt/llikedata&gt
+		&ltstarttime&gt 1113933961 &lt/starttime&gt
+		&ltendtime&gt 1113933971 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 6 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0051461021148077107 0.00589555271016268308 &lt/thetas&gt
+			&ltmigrates&gt 0 125.68905479949639 48.865939986393137 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1835.77616186834098 82.0043403675645095 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 0 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0629273638426249143 0.105548312215536552 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0079654795218820091 0.0206451375053703819 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.09657490636763079e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2102355624254017e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.33961400471318623e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.77695915387843852e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10446887053792051e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 19 &lt/xpartlines&gt
+			&ltpartlines&gt 17 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.27457247455224152e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.44016581525568737e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.63431475801879578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.55098716603418459e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.39112816930027905e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.18973368575778886e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.3802045298549127e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00010629432546176642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000111507477845693503 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000150975907355033011 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000165638034054806815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219061924145474876 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219707138094496452 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 12 &lt/xpartlines&gt
+			&ltpartlines&gt 11 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000258213852917746063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 11 &lt/xpartlines&gt
+			&ltpartlines&gt 11 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000273652113921858643 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 10 &lt/xpartlines&gt
+			&ltpartlines&gt 11 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000280583868742954141 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 10 &lt/xpartlines&gt
+			&ltpartlines&gt 10 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000331106053916933646 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 9 &lt/xpartlines&gt
+			&ltpartlines&gt 10 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000344952279767115331 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 8 &lt/xpartlines&gt
+			&ltpartlines&gt 10 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345874033941911106 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 8 &lt/xpartlines&gt
+			&ltpartlines&gt 9 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000348861238031590646 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 7 &lt/xpartlines&gt
+			&ltpartlines&gt 9 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000424599242926313077 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000439947186235675422 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000481070344401667829 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000528930654355221855 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000592184315630871314 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 6 &lt/xpartlines&gt
+			&ltpartlines&gt 5 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000857591267505941114 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000866354211828237845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00091353575187592918 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 3 5 &lt/xpartlines&gt
+			&ltpartlines&gt 3 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000919185689826623655 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000955912424407952743 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118557134547170773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00153425735560147494 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0019246211065550127 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00206538502262878197 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00305508566675113469 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00533986494186010525 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0575370553514418445 0.104633534940785464 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00794664505963699544 0.0202802222566785852 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.09657490636763079e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2102355624254017e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.25411707266569827e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.33961400471318623e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.77695915387843852e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10446887053792051e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.27457247455224152e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.44016581525568737e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.63431475801879578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.55098716603418459e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.39112816930027905e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.3802045298549127e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00010617490035119024 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000111507477845693503 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000150975907355033011 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 15 &lt/xpartlines&gt
+			&ltpartlines&gt 11 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000151946997256986488 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000165638034054806815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000191522903992015264 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219707138094496452 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000258213852917746063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000273652113921858643 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 10 &lt/xpartlines&gt
+			&ltpartlines&gt 10 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000280583868742954141 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 10 &lt/xpartlines&gt
+			&ltpartlines&gt 9 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000331106053916933646 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 9 &lt/xpartlines&gt
+			&ltpartlines&gt 9 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345874033941911106 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 8 &lt/xpartlines&gt
+			&ltpartlines&gt 9 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000348861238031590646 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 7 &lt/xpartlines&gt
+			&ltpartlines&gt 9 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000424599242926313077 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000439947186235675422 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000481070344401667829 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000528930654355221855 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000592184315630871314 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 6 &lt/xpartlines&gt
+			&ltpartlines&gt 5 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000866354211828237845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00091353575187592918 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 4 5 &lt/xpartlines&gt
+			&ltpartlines&gt 4 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000919185689826623655 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000955912424407952743 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00114689035080777412 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118557134547170773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00153425735560147494 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0019246211065550127 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00206538502262878197 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00267205676641757645 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00533986494186010525 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00505700949907143815 0.00613707484094671175 &lt/thetas&gt
+			&ltmigrates&gt 0 124.679362905292081 34.8937725369221496 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1761.27593665903237 -84.0195656658781189 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.265000000000000013 &lt/accrate&gt
+		&ltllikemle&gt 8.03638549246886313 &lt/llikemle&gt
+		&ltllikedata&gt -3574.6386248622166 &lt/llikedata&gt
+		&ltstarttime&gt 1113933971 &lt/starttime&gt
+		&ltendtime&gt 1113933986 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 53 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00238967973895868897 0.00857314189478221474 &lt/thetas&gt
+			&ltmigrates&gt 0 178.594354360432533 201.711534027463927 0 &lt/migrates&gt
+			&ltgrowthrates&gt 286.265570187478374 522.584629259533926 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.174999999999999989 &lt/accrate&gt
+		&ltllikemle&gt 2.24292366391492948 &lt/llikemle&gt
+		&ltllikedata&gt -3366.95583651043171 &lt/llikedata&gt
+		&ltstarttime&gt 1113933986 &lt/starttime&gt
+		&ltendtime&gt 1113934003 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 35 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00342828243898930232 0.00938815584810562438 &lt/thetas&gt
+			&ltmigrates&gt 0 359.333313010233837 170.709781451344668 0 &lt/migrates&gt
+			&ltgrowthrates&gt 376.258762176827872 608.980866754854333 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.115000000000000005 &lt/accrate&gt
+		&ltllikemle&gt 0.331232649867971241 &lt/llikemle&gt
+		&ltllikedata&gt -3277.07186443434921 &lt/llikedata&gt
+		&ltstarttime&gt 1113934003 &lt/starttime&gt
+		&ltendtime&gt 1113934021 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 23 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00382351077572252833 0.0101148980666503786 &lt/thetas&gt
+			&ltmigrates&gt 0 266.63302494001374 180.090955280637019 0 &lt/migrates&gt
+			&ltgrowthrates&gt 589.048754968938624 732.840714912730732 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.100000000000000006 &lt/accrate&gt
+		&ltllikemle&gt 0.712638597764832182 &lt/llikemle&gt
+		&ltllikedata&gt -3264.58251001428516 &lt/llikedata&gt
+		&ltstarttime&gt 1113934021 &lt/starttime&gt
+		&ltendtime&gt 1113934029 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 4 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.004074651441941083 0.00837181988651936049 &lt/thetas&gt
+			&ltmigrates&gt 0 118.942422560571757 182.899111430931441 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1023.10652786136313 621.693756977592329 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 1 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0539318434248482123 0.124884165154835192 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 3 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00820356565115269894 0.0167862026647611975 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.86113155575850924e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.14780854199569499e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2291023672518374e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.66635848884044525e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 18 &lt/xpartlines&gt
+			&ltpartlines&gt 19 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.22961141141907646e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.48673742353794826e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.91186994294289797e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.82376276028000035e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.19910298629359333e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.45549455970045656e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 16 &lt/xpartlines&gt
+			&ltpartlines&gt 15 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.48515305769414904e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.63938283360383356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109224715491037562 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125328787711941807 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134238691874153575 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 15 &lt/xpartlines&gt
+			&ltpartlines&gt 11 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015730396931299724 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208869889075072063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000215094766862807343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000225539801544489551 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00024281367284408395 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 13 &lt/xpartlines&gt
+			&ltpartlines&gt 8 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000339783045919729956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 12 &lt/xpartlines&gt
+			&ltpartlines&gt 8 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000349763100889471208 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 12 &lt/xpartlines&gt
+			&ltpartlines&gt 7 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000380891571910665103 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 12 &lt/xpartlines&gt
+			&ltpartlines&gt 6 12 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000430449683486230515 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 6 11 &lt/xpartlines&gt
+			&ltpartlines&gt 6 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000457714375841571639 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477876206426440411 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000491278080421270061 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000618305099660766818 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 9 &lt/xpartlines&gt
+			&ltpartlines&gt 5 9 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000711694454938392642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 5 8 &lt/xpartlines&gt
+			&ltpartlines&gt 5 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000735873160967268547 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000752775920475186756 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 8 &lt/xpartlines&gt
+			&ltpartlines&gt 4 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000767134447803857842 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 7 &lt/xpartlines&gt
+			&ltpartlines&gt 4 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000783650082327605664 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000848841654435085577 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000957093185831530183 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00113398823503352373 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 5 &lt/xpartlines&gt
+			&ltpartlines&gt 2 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00120225473233144119 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125272233077632815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 5 &lt/xpartlines&gt
+			&ltpartlines&gt 1 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00148623295494489864 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00243994510677220068 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00250266179027173021 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00288283320062619926 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00290053205202245312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0609748580652498956 0.11278304974572527 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 3 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0085322498443588142 0.0161685184391479192 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.86113155575850924e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.14780854199569499e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2291023672518374e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.22961141141907646e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 18 &lt/xpartlines&gt
+			&ltpartlines&gt 19 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.48673742353794826e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.91186994294289797e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.82376276028000035e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.19910298629359333e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.45549455970045656e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 16 &lt/xpartlines&gt
+			&ltpartlines&gt 16 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.48515305769414904e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 16 &lt/xpartlines&gt
+			&ltpartlines&gt 15 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.63938283360383356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109224715491037562 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 15 &lt/xpartlines&gt
+			&ltpartlines&gt 14 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000118188935353991798 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125328787711941807 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134238691874153575 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015730396931299724 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208869889075072063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000215094766862807343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 12 &lt/xpartlines&gt
+			&ltpartlines&gt 11 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000225539801544489551 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00024281367284408395 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000339783045919729956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345347778094520265 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000349763100889471208 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000380891571910665103 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 11 &lt/xpartlines&gt
+			&ltpartlines&gt 6 11 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000430449683486230515 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000457714375841571639 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 9 &lt/xpartlines&gt
+			&ltpartlines&gt 7 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477876206426440411 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 9 &lt/xpartlines&gt
+			&ltpartlines&gt 6 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000491278080421270061 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 9 &lt/xpartlines&gt
+			&ltpartlines&gt 5 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000618305099660766818 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 8 &lt/xpartlines&gt
+			&ltpartlines&gt 5 8 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000711694454938392642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000752775920475186756 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 8 &lt/xpartlines&gt
+			&ltpartlines&gt 4 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000767134447803857842 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 7 &lt/xpartlines&gt
+			&ltpartlines&gt 4 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000783650082327605664 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000848841654435085577 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000957093185831530183 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00113398823503352373 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 5 &lt/xpartlines&gt
+			&ltpartlines&gt 2 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00120225473233144119 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125272233077632815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 5 &lt/xpartlines&gt
+			&ltpartlines&gt 1 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00148623295494489864 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00243994510677220068 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00250266179027173021 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00288283320062619926 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00290053205202245312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00382351077572252833 0.0101148980666503786 &lt/thetas&gt
+			&ltmigrates&gt 0 266.63302494001374 180.090955280637019 0 &lt/migrates&gt
+			&ltgrowthrates&gt 589.048754968938624 732.840714912730732 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.234999999999999987 &lt/accrate&gt
+		&ltllikemle&gt 4.42696635227687985 &lt/llikemle&gt
+		&ltllikedata&gt -3492.15130892703792 &lt/llikedata&gt
+		&ltstarttime&gt 1113934029 &lt/starttime&gt
+		&ltendtime&gt 1113934045 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 47 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0051932689740616959 0.0095267210917029492 &lt/thetas&gt
+			&ltmigrates&gt 0 0.0030934444336862682 46.6542500393172119 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1401.00175791445054 338.376238311546331 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.149999999999999994 &lt/accrate&gt
+		&ltllikemle&gt 11.6317746234713901 &lt/llikemle&gt
+		&ltllikedata&gt -3382.65609923619468 &lt/llikedata&gt
+		&ltstarttime&gt 1113934046 &lt/starttime&gt
+		&ltendtime&gt 1113934062 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 30 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00363910230419641195 0.00949364169215391289 &lt/thetas&gt
+			&ltmigrates&gt 0 113.28617225286888 88.1277594808196767 0 &lt/migrates&gt
+			&ltgrowthrates&gt 411.557560954860037 233.86142612045623 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.110000000000000001 &lt/accrate&gt
+		&ltllikemle&gt 0.636063492994214719 &lt/llikemle&gt
+		&ltllikedata&gt -3299.08922066821424 &lt/llikedata&gt
+		&ltstarttime&gt 1113934062 &lt/starttime&gt
+		&ltendtime&gt 1113934079 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 22 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00344721894934559969 0.0137468371177546927 &lt/thetas&gt
+			&ltmigrates&gt 0 113.720626268037961 82.7670768811458402 0 &lt/migrates&gt
+			&ltgrowthrates&gt 320.377044363307391 443.960643777887299 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.174999999999999989 &lt/accrate&gt
+		&ltllikemle&gt 0.411652243146974017 &lt/llikemle&gt
+		&ltllikedata&gt -3301.112857149506 &lt/llikedata&gt
+		&ltstarttime&gt 1113934079 &lt/starttime&gt
+		&ltendtime&gt 1113934087 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 7 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00274284032250226347 0.0117803366876250235 &lt/thetas&gt
+			&ltmigrates&gt 0 120.893070720992441 85.9918539079791771 0 &lt/migrates&gt
+			&ltgrowthrates&gt 26.2473005315673831 368.198849330621101 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 2 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 19 20 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0517692740770155577 0.168835716039738043 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00827170676388711348 0.0236086927070103779 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.65136899805835341e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.04215566305798666e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.43604566212563868e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.84336441146756059e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.24324521605382133e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.02727378794709678e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.66779769093841711e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.70209786261414327e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.7820260710313902e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.28621094428023613e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107706203473348862 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 18 &lt/xpartlines&gt
+			&ltpartlines&gt 12 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000114816939483791875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000126163485711554112 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0001343202142583523 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157502040375572878 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000158198275534539187 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000185924618990777787 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195139416618541169 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000195144856593906958 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208892591389243015 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000221729134487027137 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000231540140137400703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000286748264076826459 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000296568169067036559 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00032780787636151992 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00040351349954208406 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000469019333789370591 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000501156369385404206 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 10 &lt/xpartlines&gt
+			&ltpartlines&gt 4 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00051564770169723541 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000720822165058714829 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 9 &lt/xpartlines&gt
+			&ltpartlines&gt 3 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00096916231842889629 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 8 &lt/xpartlines&gt
+			&ltpartlines&gt 3 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00117293932612708303 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 7 &lt/xpartlines&gt
+			&ltpartlines&gt 3 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00150380041638617967 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00173696782656019471 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00185054353054030468 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 7 &lt/xpartlines&gt
+			&ltpartlines&gt 1 7 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00191839064111927254 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00194182526794987008 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 7 &lt/xpartlines&gt
+			&ltpartlines&gt 0 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00225255911291697688 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 6 &lt/xpartlines&gt
+			&ltpartlines&gt 0 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00258123599239381586 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00295407111169069311 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00324495225912871431 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00382175977738545905 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 19 20 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0517692740770155577 0.159664105752183261 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00827170676388711348 0.0229534802929423874 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.65136899805835341e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.04215566305798666e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.43604566212563868e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.84336441146756059e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.24324521605382133e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.02727378794709678e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.66779769093841711e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.70209786261414327e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.7820260710313902e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.28621094428023613e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107706203473348862 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 18 &lt/xpartlines&gt
+			&ltpartlines&gt 12 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000114816939483791875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000126163485711554112 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0001343202142583523 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157502040375572878 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000158198275534539187 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000185924618990777787 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195139416618541169 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000195144856593906958 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208892591389243015 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000221729134487027137 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000231540140137400703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000286748264076826459 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000296568169067036559 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00032780787636151992 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00040351349954208406 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000469019333789370591 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000501156369385404206 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 10 &lt/xpartlines&gt
+			&ltpartlines&gt 4 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00051564770169723541 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000517726912059090507 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 9 &lt/xpartlines&gt
+			&ltpartlines&gt 3 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000720822165058714829 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 8 &lt/xpartlines&gt
+			&ltpartlines&gt 3 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00096916231842889629 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 7 &lt/xpartlines&gt
+			&ltpartlines&gt 3 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00150380041638617967 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00173696782656019471 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00185054353054030468 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 7 &lt/xpartlines&gt
+			&ltpartlines&gt 1 7 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00191839064111927254 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00194182526794987008 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 7 &lt/xpartlines&gt
+			&ltpartlines&gt 0 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00225255911291697688 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 6 &lt/xpartlines&gt
+			&ltpartlines&gt 0 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00258123599239381586 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00295407111169069311 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00324495225912871431 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00382175977738545905 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00344721894934559969 0.0137468371177546927 &lt/thetas&gt
+			&ltmigrates&gt 0 113.720626268037961 82.7670768811458402 0 &lt/migrates&gt
+			&ltgrowthrates&gt 320.377044363307391 443.960643777887299 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltreplicate-summary&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00356219464250582358 0.00792721479150828613 &lt/thetas&gt
+			&ltmigrates&gt 0 121.904055450476534 100.015915512533283 0 &lt/migrates&gt
+			&ltgrowthrates&gt 722.893455415187532 257.932768127086547 &lt/growthrates&gt
+		&lt/estimates&gt
+		&ltmaxlike&gt -4.6390765097207467 &lt/maxlike&gt
+&lt/replicate-summary&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 0 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.195000000000000007 &lt/accrate&gt
+		&ltllikemle&gt 2.70726265416057954 &lt/llikemle&gt
+		&ltllikedata&gt -3922.87305464996825 &lt/llikedata&gt
+		&ltstarttime&gt 1113934177 &lt/starttime&gt
+		&ltendtime&gt 1113934192 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 39 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00380011726327808552 0.00713604248963370554 &lt/thetas&gt
+			&ltmigrates&gt 0 0.0014665879208418085 76.5973069163458717 0 &lt/migrates&gt
+			&ltgrowthrates&gt 125.906070220065573 10.9556147174487499 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 0 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.200000000000000011 &lt/accrate&gt
+		&ltllikemle&gt 19.4577047358918414 &lt/llikemle&gt
+		&ltllikedata&gt -3507.94370195006422 &lt/llikedata&gt
+		&ltstarttime&gt 1113934192 &lt/starttime&gt
+		&ltendtime&gt 1113934207 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 40 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00557328352740996819 0.00767691676407612593 &lt/thetas&gt
+			&ltmigrates&gt 0 72.5171669311285285 50.3899810789623572 0 &lt/migrates&gt
+			&ltgrowthrates&gt 239.320949207652802 15.0158998451551309 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 0 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.195000000000000007 &lt/accrate&gt
+		&ltllikemle&gt 1.41077196606614153 &lt/llikemle&gt
+		&ltllikedata&gt -3388.85530318494739 &lt/llikedata&gt
+		&ltstarttime&gt 1113934207 &lt/starttime&gt
+		&ltendtime&gt 1113934221 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 39 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00526638488263829647 0.00636010206007683442 &lt/thetas&gt
+			&ltmigrates&gt 0 145.033451919934123 78.9828742546580571 0 &lt/migrates&gt
+			&ltgrowthrates&gt 101.354431821265237 42.7824995714557517 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 0 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.0500000000000000028 &lt/accrate&gt
+		&ltllikemle&gt 0.106842797016683602 &lt/llikemle&gt
+		&ltllikedata&gt -3387.66439523280769 &lt/llikedata&gt
+		&ltstarttime&gt 1113934222 &lt/starttime&gt
+		&ltendtime&gt 1113934229 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 2 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0048297296701252353 0.00632472336672777573 &lt/thetas&gt
+			&ltmigrates&gt 0 158.359159064075584 79.436028487943048 0 &lt/migrates&gt
+			&ltgrowthrates&gt 48.5740506820224667 12.3249628941104206 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 1 0 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 21 18 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0985596947596808493 0.112500144905919378 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 3 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0189681695385016322 0.0251774919104673957 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.75402443608963525e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.67126558256159515e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 19 &lt/xpartlines&gt
+			&ltpartlines&gt 20 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.24882792998111863e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.28842625068443356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 19 &lt/xpartlines&gt
+			&ltpartlines&gt 18 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.34298732185740874e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 19 &lt/xpartlines&gt
+			&ltpartlines&gt 17 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.46656309975548847e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.81601036755640491e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.9553870703353184e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 18 &lt/xpartlines&gt
+			&ltpartlines&gt 15 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.65634921089525913e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.53160440134389236e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.84400137091464748e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000104908914799906663 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107378906806900444 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134386781925198401 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 16 &lt/xpartlines&gt
+			&ltpartlines&gt 11 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015095815823508343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 16 &lt/xpartlines&gt
+			&ltpartlines&gt 10 16 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000187294338352160755 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 16 &lt/xpartlines&gt
+			&ltpartlines&gt 9 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000194311967981214922 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000207082475182713985 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000216519914862317283 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000240917252117856754 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000275177350551769888 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000277961519944129815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000327184596530212871 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 10 &lt/xpartlines&gt
+			&ltpartlines&gt 9 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000375434817240545321 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000421956732764341578 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 9 &lt/xpartlines&gt
+			&ltpartlines&gt 8 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000454327709213169504 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 8 &lt/xpartlines&gt
+			&ltpartlines&gt 8 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000455301780358049633 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 8 &lt/xpartlines&gt
+			&ltpartlines&gt 7 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477060867071809822 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000495771892855436606 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 6 &lt/xpartlines&gt
+			&ltpartlines&gt 7 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000577304059643193 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 5 &lt/xpartlines&gt
+			&ltpartlines&gt 7 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000728271605769075263 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000928286291063316091 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 4 &lt/xpartlines&gt
+			&ltpartlines&gt 6 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00136280706740197761 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00157448633617017866 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00170993270529683282 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 3 &lt/xpartlines&gt
+			&ltpartlines&gt 4 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00199676444517621777 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00200728073872961498 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00213154474570202875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00229629270831637703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 3 2 &lt/xpartlines&gt
+			&ltpartlines&gt 3 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00279444689547627594 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 1 &lt/xpartlines&gt
+			&ltpartlines&gt 4 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00362185244963840556 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 1 &lt/xpartlines&gt
+			&ltpartlines&gt 3 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00370319681038052849 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 1 &lt/xpartlines&gt
+			&ltpartlines&gt 2 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00482422222342331845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00968056818678340696 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 21 18 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0968437428145017959 0.112500144905919378 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 3 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.018921333345451638 0.0251774919104673957 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.78294379475588584e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.75402443608963525e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.67126558256159515e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.24882792998111863e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 19 &lt/xpartlines&gt
+			&ltpartlines&gt 18 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.28842625068443356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 19 &lt/xpartlines&gt
+			&ltpartlines&gt 17 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.34298732185740874e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.81601036755640491e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.9553870703353184e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 18 &lt/xpartlines&gt
+			&ltpartlines&gt 15 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.65634921089525913e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.53160440134389236e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.84400137091464748e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000104908914799906663 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107378906806900444 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134386781925198401 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 16 &lt/xpartlines&gt
+			&ltpartlines&gt 11 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015095815823508343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 16 &lt/xpartlines&gt
+			&ltpartlines&gt 10 16 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000187294338352160755 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 16 &lt/xpartlines&gt
+			&ltpartlines&gt 9 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000194311967981214922 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000207082475182713985 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000216519914862317283 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000240917252117856754 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000275177350551769888 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000277961519944129815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000327184596530212871 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 10 &lt/xpartlines&gt
+			&ltpartlines&gt 9 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000375434817240545321 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000421956732764341578 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 9 &lt/xpartlines&gt
+			&ltpartlines&gt 8 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000454327709213169504 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 8 &lt/xpartlines&gt
+			&ltpartlines&gt 8 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000455301780358049633 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 8 &lt/xpartlines&gt
+			&ltpartlines&gt 7 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477060867071809822 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000495771892855436606 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 6 &lt/xpartlines&gt
+			&ltpartlines&gt 7 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000577304059643193 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 5 &lt/xpartlines&gt
+			&ltpartlines&gt 7 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000728271605769075263 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000928286291063316091 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 4 &lt/xpartlines&gt
+			&ltpartlines&gt 6 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00136280706740197761 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00157448633617017866 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00170993270529683282 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 3 &lt/xpartlines&gt
+			&ltpartlines&gt 4 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00199676444517621777 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00200728073872961498 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00213154474570202875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00229629270831637703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 3 2 &lt/xpartlines&gt
+			&ltpartlines&gt 3 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00279444689547627594 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 1 &lt/xpartlines&gt
+			&ltpartlines&gt 4 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00362185244963840556 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 1 &lt/xpartlines&gt
+			&ltpartlines&gt 3 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00370319681038052849 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 1 &lt/xpartlines&gt
+			&ltpartlines&gt 2 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00482422222342331845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00968056818678340696 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00526638488263829647 0.00636010206007683442 &lt/thetas&gt
+			&ltmigrates&gt 0 145.033451919934123 78.9828742546580571 0 &lt/migrates&gt
+			&ltgrowthrates&gt 101.354431821265237 42.7824995714557517 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 1 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.239999999999999991 &lt/accrate&gt
+		&ltllikemle&gt 2.34750730772453364 &lt/llikemle&gt
+		&ltllikedata&gt -3525.30915507333066 &lt/llikedata&gt
+		&ltstarttime&gt 1113934229 &lt/starttime&gt
+		&ltendtime&gt 1113934243 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 48 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00633684542395633252 0.00414801483484614902 &lt/thetas&gt
+			&ltmigrates&gt 0 189.840646683791618 111.21338721037425 0 &lt/migrates&gt
+			&ltgrowthrates&gt 134.29937737766619 -261.501604057297811 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 1 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 2 &lt/tinytrees&gt
+		&ltaccrate&gt 0.135000000000000009 &lt/accrate&gt
+		&ltllikemle&gt 1.87776764509904415 &lt/llikemle&gt
+		&ltllikedata&gt -3404.56028299746959 &lt/llikedata&gt
+		&ltstarttime&gt 1113934244 &lt/starttime&gt
+		&ltendtime&gt 1113934259 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 27 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0053726478023596964 0.0034495368114013979 &lt/thetas&gt
+			&ltmigrates&gt 0 175.867668937235891 153.579579990926845 0 &lt/migrates&gt
+			&ltgrowthrates&gt 37.0453100154095694 -121.00495810761069 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 1 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.104999999999999996 &lt/accrate&gt
+		&ltllikemle&gt 0.449286917034601607 &lt/llikemle&gt
+		&ltllikedata&gt -3321.70936524524359 &lt/llikedata&gt
+		&ltstarttime&gt 1113934259 &lt/starttime&gt
+		&ltendtime&gt 1113934274 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 21 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00422505900112751364 0.00395290224643087169 &lt/thetas&gt
+			&ltmigrates&gt 0 160.037181408148427 147.594989310094263 0 &lt/migrates&gt
+			&ltgrowthrates&gt -173.121613861979398 -52.9903878137056665 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 1 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 1 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.0749999999999999972 &lt/accrate&gt
+		&ltllikemle&gt 4.54069966505540101 &lt/llikemle&gt
+		&ltllikedata&gt -3318.52699208625927 &lt/llikedata&gt
+		&ltstarttime&gt 1113934274 &lt/starttime&gt
+		&ltendtime&gt 1113934304 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 3 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00295615018924887769 0.00491936629532284831 &lt/thetas&gt
+			&ltmigrates&gt 0 235.929972373142618 308.986288378157326 0 &lt/migrates&gt
+			&ltgrowthrates&gt -631.659188775292023 226.807009056767953 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 1 1 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0648405565130086708 0.0877252307909633239 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 4 6 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0169381769238374467 0.0194182320038754526 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.30550527315558047e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.162795826446426e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.26737235869386312e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.55014581129354168e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.40932985043456985e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.90702713266381754e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 20 &lt/xpartlines&gt
+			&ltpartlines&gt 15 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.10411853492643601e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.12290248204213804e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.67472162385812578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.65590413200949371e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000103869320417860665 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107286941773631806 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107550821053514076 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109709891069030956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000114694502065424022 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00012158499114137082 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 13 &lt/xpartlines&gt
+			&ltpartlines&gt 13 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000122718853104433789 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 12 &lt/xpartlines&gt
+			&ltpartlines&gt 13 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125375411254551923 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 11 &lt/xpartlines&gt
+			&ltpartlines&gt 13 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000142189216663119957 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 11 &lt/xpartlines&gt
+			&ltpartlines&gt 12 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000142843686193008286 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 11 &lt/xpartlines&gt
+			&ltpartlines&gt 11 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000171515317423868074 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000218046136466306875 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0002790329859263787 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00030767075159027072 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000326602024127066991 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000336138739569854291 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 9 &lt/xpartlines&gt
+			&ltpartlines&gt 7 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000411029437867106761 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 8 &lt/xpartlines&gt
+			&ltpartlines&gt 7 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000455284747279604488 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000539893327729701073 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000560189187505523802 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 6 &lt/xpartlines&gt
+			&ltpartlines&gt 6 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000724982991669875874 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000800823245136290159 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 5 &lt/xpartlines&gt
+			&ltpartlines&gt 5 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00101663478186505727 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00117786220475134804 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118517172371627641 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 5 &lt/xpartlines&gt
+			&ltpartlines&gt 3 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125529723210753312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00128846095940146773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00172659403406359149 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 3 &lt/xpartlines&gt
+			&ltpartlines&gt 2 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00213157854076952711 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 2 &lt/xpartlines&gt
+			&ltpartlines&gt 2 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00256248379887218016 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00262033009215670038 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 2 &lt/xpartlines&gt
+			&ltpartlines&gt 1 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00262735858334228268 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.0031959883447954078 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 0 &lt/xpartlines&gt
+			&ltpartlines&gt 2 0 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00551501377964465876 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00596028250037733409 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00803817495345396371 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00803938998428121825 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.0086849234925416869 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00905185738010444788 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0659711430564781931 0.0877252307909633239 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 4 6 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0169723921539429272 0.0194182320038754526 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.30550527315558047e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.26737235869386312e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.55014581129354168e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.93780259319326687e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.40932985043456985e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.90702713266381754e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 20 &lt/xpartlines&gt
+			&ltpartlines&gt 15 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.10411853492643601e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.12290248204213804e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.67472162385812578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.65590413200949371e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000103869320417860665 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107286941773631806 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107550821053514076 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109709891069030956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000114694502065424022 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00012158499114137082 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 13 &lt/xpartlines&gt
+			&ltpartlines&gt 13 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000122718853104433789 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 12 &lt/xpartlines&gt
+			&ltpartlines&gt 13 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125375411254551923 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 11 &lt/xpartlines&gt
+			&ltpartlines&gt 13 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000142189216663119957 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 11 &lt/xpartlines&gt
+			&ltpartlines&gt 12 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000142843686193008286 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 11 &lt/xpartlines&gt
+			&ltpartlines&gt 11 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000171515317423868074 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000218046136466306875 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0002790329859263787 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00030767075159027072 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000326602024127066991 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000336138739569854291 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 9 &lt/xpartlines&gt
+			&ltpartlines&gt 7 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000411029437867106761 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 8 &lt/xpartlines&gt
+			&ltpartlines&gt 7 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000455284747279604488 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000539893327729701073 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000560189187505523802 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 6 &lt/xpartlines&gt
+			&ltpartlines&gt 6 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000724982991669875874 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000800823245136290159 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 5 &lt/xpartlines&gt
+			&ltpartlines&gt 5 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00101663478186505727 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00117786220475134804 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118517172371627641 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 5 &lt/xpartlines&gt
+			&ltpartlines&gt 3 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125529723210753312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00128846095940146773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00172659403406359149 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 3 &lt/xpartlines&gt
+			&ltpartlines&gt 2 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00213157854076952711 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 2 &lt/xpartlines&gt
+			&ltpartlines&gt 2 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00256248379887218016 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00262033009215670038 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 2 &lt/xpartlines&gt
+			&ltpartlines&gt 1 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00262735858334228268 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.0031959883447954078 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 0 &lt/xpartlines&gt
+			&ltpartlines&gt 2 0 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00551501377964465876 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00596028250037733409 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00803817495345396371 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00803938998428121825 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.0086849234925416869 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00905185738010444788 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00422505900112751364 0.00395290224643087169 &lt/thetas&gt
+			&ltmigrates&gt 0 160.037181408148427 147.594989310094263 0 &lt/migrates&gt
+			&ltgrowthrates&gt -173.121613861979398 -52.9903878137056665 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 2 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.260000000000000009 &lt/accrate&gt
+		&ltllikemle&gt 2.99420902451789361 &lt/llikemle&gt
+		&ltllikedata&gt -4299.50526781021654 &lt/llikedata&gt
+		&ltstarttime&gt 1113934304 &lt/starttime&gt
+		&ltendtime&gt 1113934319 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 52 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00756617832740799981 0.00659856362452725716 &lt/thetas&gt
+			&ltmigrates&gt 0 111.176155934682811 69.5707103933484632 0 &lt/migrates&gt
+			&ltgrowthrates&gt 409.88939497805228 543.16021718378704 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 2 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.179999999999999993 &lt/accrate&gt
+		&ltllikemle&gt 1.07089522228415079 &lt/llikemle&gt
+		&ltllikedata&gt -3490.4063004547229 &lt/llikedata&gt
+		&ltstarttime&gt 1113934320 &lt/starttime&gt
+		&ltendtime&gt 1113934335 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 36 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00656792579496304294 0.00584459175444660209 &lt/thetas&gt
+			&ltmigrates&gt 0 57.9007335206334091 156.694294591854742 0 &lt/migrates&gt
+			&ltgrowthrates&gt 451.288616099099272 497.009379113105979 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 2 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.140000000000000013 &lt/accrate&gt
+		&ltllikemle&gt 1.41480621744143908 &lt/llikemle&gt
+		&ltllikedata&gt -3434.66438504787766 &lt/llikedata&gt
+		&ltstarttime&gt 1113934336 &lt/starttime&gt
+		&ltendtime&gt 1113934351 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 28 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00550575271147208094 0.00696470662214750645 &lt/thetas&gt
+			&ltmigrates&gt 0 121.568711323335009 63.7627893168077549 0 &lt/migrates&gt
+			&ltgrowthrates&gt 354.601203542231758 606.129048483006613 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 1 2 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.100000000000000006 &lt/accrate&gt
+		&ltllikemle&gt 0.109493731119181953 &lt/llikemle&gt
+		&ltllikedata&gt -3423.70440531923623 &lt/llikedata&gt
+		&ltstarttime&gt 1113934352 &lt/starttime&gt
+		&ltendtime&gt 1113934359 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 4 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00514192935091333826 0.0062288990703713602 &lt/thetas&gt
+			&ltmigrates&gt 0 124.55408129628016 61.5396353290182461 0 &lt/migrates&gt
+			&ltgrowthrates&gt 335.346278823775265 527.046201636540445 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 1 2 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0861184261675420754 0.0989240815983647992 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 2 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0161108601645228097 0.0163539778587721008 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10705440289478283e-06 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.25399383339123163e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 19 &lt/xpartlines&gt
+			&ltpartlines&gt 20 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.24885254542245494e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.11189546645828539e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 19 &lt/xpartlines&gt
+			&ltpartlines&gt 18 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.85012116407944336e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.78107834741533388e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 17 &lt/xpartlines&gt
+			&ltpartlines&gt 18 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000118112773039976836 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000127737720313619163 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000129662442835112526 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000131648496800567045 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000131860052917847297 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000140310461463006713 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000155023093005773421 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157657160551731915 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000162080373620438725 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 15 &lt/xpartlines&gt
+			&ltpartlines&gt 11 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195611028967059892 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000233034957421893385 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00023544280363578128 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 14 &lt/xpartlines&gt
+			&ltpartlines&gt 9 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000262195006109537632 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000262906955612288607 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000299377625930490846 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 12 &lt/xpartlines&gt
+			&ltpartlines&gt 8 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000304231828846860254 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00035306417392301292 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000385710705381445032 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 9 &lt/xpartlines&gt
+			&ltpartlines&gt 8 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00043664221284098057 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 8 &lt/xpartlines&gt
+			&ltpartlines&gt 8 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000458570971503667059 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000462182592971902917 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000529375689534849147 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 6 &lt/xpartlines&gt
+			&ltpartlines&gt 7 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000531151851086229484 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 5 &lt/xpartlines&gt
+			&ltpartlines&gt 7 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000570644372201619386 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000628012110491958956 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 5 &lt/xpartlines&gt
+			&ltpartlines&gt 5 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00126002496707287252 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00133172776547203667 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00138378313545306345 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 4 3 &lt/xpartlines&gt
+			&ltpartlines&gt 4 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00139440804359624269 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0015125657858844324 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00151492720306570521 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 3 &lt/xpartlines&gt
+			&ltpartlines&gt 2 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00180363775393179489 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00272922260577865804 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 2 &lt/xpartlines&gt
+			&ltpartlines&gt 1 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00303636024375119338 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 1 &lt/xpartlines&gt
+			&ltpartlines&gt 2 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00598338892698212308 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0064621861657712101 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 0 &lt/xpartlines&gt
+			&ltpartlines&gt 2 0 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0838664731195002539 0.0940195460747020734 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 2 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0160107008550887178 0.0161577536563222747 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10705440289478283e-06 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.25399383339123163e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 19 &lt/xpartlines&gt
+			&ltpartlines&gt 20 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.24885254542245494e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.11189546645828539e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 19 &lt/xpartlines&gt
+			&ltpartlines&gt 18 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.85012116407944336e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.20971519398864496e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 17 &lt/xpartlines&gt
+			&ltpartlines&gt 18 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.78107834741533388e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000127737720313619163 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000129662442835112526 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000131648496800567045 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000131860052917847297 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000137097093409470431 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000140310461463006713 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000155023093005773421 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157657160551731915 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000158891269087890025 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000162080373620438725 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195611028967059892 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000206353336623343831 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000262195006109537632 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000262906955612288607 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000299377625930490846 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00035306417392301292 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 10 &lt/xpartlines&gt
+			&ltpartlines&gt 8 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000385710705381445032 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 9 &lt/xpartlines&gt
+			&ltpartlines&gt 8 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00043664221284098057 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 8 &lt/xpartlines&gt
+			&ltpartlines&gt 8 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000458570971503667059 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000462182592971902917 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000529375689534849147 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 6 &lt/xpartlines&gt
+			&ltpartlines&gt 7 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000531151851086229484 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 5 &lt/xpartlines&gt
+			&ltpartlines&gt 7 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000570644372201619386 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 5 &lt/xpartlines&gt
+			&ltpartlines&gt 6 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000628012110491958956 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 5 &lt/xpartlines&gt
+			&ltpartlines&gt 5 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00126002496707287252 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00133172776547203667 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00138378313545306345 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 4 3 &lt/xpartlines&gt
+			&ltpartlines&gt 4 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00139440804359624269 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0015125657858844324 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 3 &lt/xpartlines&gt
+			&ltpartlines&gt 3 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00151492720306570521 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 3 &lt/xpartlines&gt
+			&ltpartlines&gt 2 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00180363775393179489 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00272922260577865804 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 2 &lt/xpartlines&gt
+			&ltpartlines&gt 1 2 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00303636024375119338 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 1 &lt/xpartlines&gt
+			&ltpartlines&gt 2 1 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00598338892698212308 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 1 1 &lt/xpartlines&gt
+			&ltpartlines&gt 1 1 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0064621861657712101 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 0 &lt/xpartlines&gt
+			&ltpartlines&gt 2 0 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00550575271147208094 0.00696470662214750645 &lt/thetas&gt
+			&ltmigrates&gt 0 121.568711323335009 63.7627893168077549 0 &lt/migrates&gt
+			&ltgrowthrates&gt 354.601203542231758 606.129048483006613 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltreplicate-summary&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00459610289535911792 0.00543752715647888643 &lt/thetas&gt
+			&ltmigrates&gt 0 173.235122299289941 147.938467429627138 0 &lt/migrates&gt
+			&ltgrowthrates&gt 151.703172633675905 86.8286879559442326 &lt/growthrates&gt
+		&lt/estimates&gt
+		&ltmaxlike&gt -3.86601975441648626 &lt/maxlike&gt
+&lt/replicate-summary&gt
+&lt!--  New information past this point. --&gt
+&ltregion-summary&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00380528895655514584 0.00634260268782250135 &lt/thetas&gt
+			&ltmigrates&gt 0 156.653872902522551 124.266112790422568 0 &lt/migrates&gt
+			&ltgrowthrates&gt 128.113540536591103 125.014283970374336 &lt/growthrates&gt
+		&lt/estimates&gt
+		&ltmaxlike&gt -11.1852019504600744 &lt/maxlike&gt
+&lt/region-summary&gt
+&lt!-- End summary file
+	 Generated from run that started at: Tue Apr 19 11:04:50 2005
+	 and ended at: Tue Apr 19 11:12:39 2005 --&gt
+&lt/XML-summary-file&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/outsumfile.2reg3rep.xml b/doc/html/outsumfile.2reg3rep.xml
new file mode 100644
index 0000000..20cfb02
--- /dev/null
+++ b/doc/html/outsumfile.2reg3rep.xml
@@ -0,0 +1,3334 @@
+<XML-summary-file>
+<!-- Lamarc v. 2.0
+     Please do not modify. -->
+<!--  This summary file should match the input summary file insumfile.2reg3rep.txt,
+      up until that file's end. -->
+<chainpack>
+	<number> 0 0 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.195000000000000007 </accrate>
+		<llikemle> 4.50058419396642684 </llikemle>
+		<llikedata> -3620.90996263017314 </llikedata>
+		<starttime> 1113933890 </starttime>
+		<endtime> 1113933911 </endtime>
+		<rates> <map> Tree-Arranger 39 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00832313455063505084 0.00734484914891693919 </thetas>
+			<migrates> 0 96.6630952829531083 46.6299496864950527 0 </migrates>
+			<growthrates> 2365.3441048272216 184.41163985471303 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.160000000000000003 </accrate>
+		<llikemle> 1.18053638385976001 </llikemle>
+		<llikedata> -3516.0909920893173 </llikedata>
+		<starttime> 1113933912 </starttime>
+		<endtime> 1113933934 </endtime>
+		<rates> <map> Tree-Arranger 32 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00513756085360574273 0.0082400193242912461 </thetas>
+			<migrates> 0 131.185343491258408 42.1077506833302877 0 </migrates>
+			<growthrates> 1821.55597636667744 149.760781972604775 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.130000000000000004 </accrate>
+		<llikemle> 1.6599393589321001 </llikemle>
+		<llikedata> -3271.75316866892263 </llikedata>
+		<starttime> 1113933934 </starttime>
+		<endtime> 1113933960 </endtime>
+		<rates> <map> Tree-Arranger 26 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00505700949907143815 0.00613707484094671175 </thetas>
+			<migrates> 0 124.679362905292081 34.8937725369221496 0 </migrates>
+			<growthrates> 1761.27593665903237 -84.0195656658781189 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 1 </tinytrees>
+		<accrate> 0.149999999999999994 </accrate>
+		<llikemle> 0.596513557975481556 </llikemle>
+		<llikedata> -3269.61178480345779 </llikedata>
+		<starttime> 1113933961 </starttime>
+		<endtime> 1113933971 </endtime>
+		<rates> <map> Tree-Arranger 6 40 </map> </rates>
+		<estimates>
+			<thetas> 0.0051461021148077107 0.00589555271016268308 </thetas>
+			<migrates> 0 125.68905479949639 48.865939986393137 0 </migrates>
+			<growthrates> 1835.77616186834098 82.0043403675645095 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 0 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0629273638426249143 0.105548312215536552 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 1 0 </shortpoint>
+			<shortwait> 0.0079654795218820091 0.0206451375053703819 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 5.09657490636763079e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2102355624254017e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.33961400471318623e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.77695915387843852e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.10446887053792051e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 19 </xpartlines>
+			<partlines> 17 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.27457247455224152e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.44016581525568737e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.63431475801879578e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.55098716603418459e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.39112816930027905e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.18973368575778886e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.3802045298549127e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00010629432546176642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000111507477845693503 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000150975907355033011 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000165638034054806815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219061924145474876 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219707138094496452 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 12 </xpartlines>
+			<partlines> 11 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000258213852917746063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 11 </xpartlines>
+			<partlines> 11 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000273652113921858643 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 10 </xpartlines>
+			<partlines> 11 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000280583868742954141 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 10 </xpartlines>
+			<partlines> 10 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000331106053916933646 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 9 </xpartlines>
+			<partlines> 10 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000344952279767115331 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 8 </xpartlines>
+			<partlines> 10 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345874033941911106 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 8 </xpartlines>
+			<partlines> 9 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000348861238031590646 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 7 </xpartlines>
+			<partlines> 9 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000424599242926313077 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000439947186235675422 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000481070344401667829 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000528930654355221855 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000592184315630871314 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 6 </xpartlines>
+			<partlines> 5 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000857591267505941114 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000866354211828237845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00091353575187592918 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 3 5 </xpartlines>
+			<partlines> 3 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000919185689826623655 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000955912424407952743 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118557134547170773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00153425735560147494 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0019246211065550127 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00206538502262878197 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00305508566675113469 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00533986494186010525 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0575370553514418445 0.104633534940785464 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 1 0 </shortpoint>
+			<shortwait> 0.00794664505963699544 0.0202802222566785852 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 5.09657490636763079e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2102355624254017e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.25411707266569827e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.33961400471318623e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.77695915387843852e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.10446887053792051e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.27457247455224152e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.44016581525568737e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.63431475801879578e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.55098716603418459e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.39112816930027905e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.3802045298549127e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00010617490035119024 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000111507477845693503 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000150975907355033011 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 15 </xpartlines>
+			<partlines> 11 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000151946997256986488 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000165638034054806815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000191522903992015264 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219707138094496452 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000258213852917746063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000273652113921858643 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 10 </xpartlines>
+			<partlines> 10 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000280583868742954141 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 10 </xpartlines>
+			<partlines> 9 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000331106053916933646 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 9 </xpartlines>
+			<partlines> 9 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345874033941911106 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 8 </xpartlines>
+			<partlines> 9 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000348861238031590646 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 7 </xpartlines>
+			<partlines> 9 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000424599242926313077 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000439947186235675422 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000481070344401667829 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000528930654355221855 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000592184315630871314 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 6 </xpartlines>
+			<partlines> 5 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000866354211828237845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00091353575187592918 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 4 5 </xpartlines>
+			<partlines> 4 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000919185689826623655 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000955912424407952743 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00114689035080777412 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118557134547170773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00153425735560147494 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0019246211065550127 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00206538502262878197 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00267205676641757645 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00533986494186010525 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00505700949907143815 0.00613707484094671175 </thetas>
+			<migrates> 0 124.679362905292081 34.8937725369221496 0 </migrates>
+			<growthrates> 1761.27593665903237 -84.0195656658781189 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 0 1 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.265000000000000013 </accrate>
+		<llikemle> 8.03638549246886313 </llikemle>
+		<llikedata> -3574.6386248622166 </llikedata>
+		<starttime> 1113933971 </starttime>
+		<endtime> 1113933986 </endtime>
+		<rates> <map> Tree-Arranger 53 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00238967973895868897 0.00857314189478221474 </thetas>
+			<migrates> 0 178.594354360432533 201.711534027463927 0 </migrates>
+			<growthrates> 286.265570187478374 522.584629259533926 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.174999999999999989 </accrate>
+		<llikemle> 2.24292366391492948 </llikemle>
+		<llikedata> -3366.95583651043171 </llikedata>
+		<starttime> 1113933986 </starttime>
+		<endtime> 1113934003 </endtime>
+		<rates> <map> Tree-Arranger 35 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00342828243898930232 0.00938815584810562438 </thetas>
+			<migrates> 0 359.333313010233837 170.709781451344668 0 </migrates>
+			<growthrates> 376.258762176827872 608.980866754854333 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.115000000000000005 </accrate>
+		<llikemle> 0.331232649867971241 </llikemle>
+		<llikedata> -3277.07186443434921 </llikedata>
+		<starttime> 1113934003 </starttime>
+		<endtime> 1113934021 </endtime>
+		<rates> <map> Tree-Arranger 23 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00382351077572252833 0.0101148980666503786 </thetas>
+			<migrates> 0 266.63302494001374 180.090955280637019 0 </migrates>
+			<growthrates> 589.048754968938624 732.840714912730732 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.100000000000000006 </accrate>
+		<llikemle> 0.712638597764832182 </llikemle>
+		<llikedata> -3264.58251001428516 </llikedata>
+		<starttime> 1113934021 </starttime>
+		<endtime> 1113934029 </endtime>
+		<rates> <map> Tree-Arranger 4 40 </map> </rates>
+		<estimates>
+			<thetas> 0.004074651441941083 0.00837181988651936049 </thetas>
+			<migrates> 0 118.942422560571757 182.899111430931441 0 </migrates>
+			<growthrates> 1023.10652786136313 621.693756977592329 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 1 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0539318434248482123 0.124884165154835192 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 3 0 </shortpoint>
+			<shortwait> 0.00820356565115269894 0.0167862026647611975 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 9.86113155575850924e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.14780854199569499e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2291023672518374e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.66635848884044525e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 18 </xpartlines>
+			<partlines> 19 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.22961141141907646e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.48673742353794826e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.91186994294289797e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.82376276028000035e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.19910298629359333e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.45549455970045656e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 16 </xpartlines>
+			<partlines> 15 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.48515305769414904e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.63938283360383356e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109224715491037562 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125328787711941807 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134238691874153575 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 15 </xpartlines>
+			<partlines> 11 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015730396931299724 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208869889075072063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000215094766862807343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000225539801544489551 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00024281367284408395 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 13 </xpartlines>
+			<partlines> 8 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000339783045919729956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 12 </xpartlines>
+			<partlines> 8 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000349763100889471208 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 12 </xpartlines>
+			<partlines> 7 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000380891571910665103 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 12 </xpartlines>
+			<partlines> 6 12 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000430449683486230515 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 6 11 </xpartlines>
+			<partlines> 6 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000457714375841571639 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477876206426440411 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000491278080421270061 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000618305099660766818 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 9 </xpartlines>
+			<partlines> 5 9 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000711694454938392642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 5 8 </xpartlines>
+			<partlines> 5 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000735873160967268547 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000752775920475186756 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 8 </xpartlines>
+			<partlines> 4 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000767134447803857842 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 7 </xpartlines>
+			<partlines> 4 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000783650082327605664 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000848841654435085577 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000957093185831530183 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00113398823503352373 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 5 </xpartlines>
+			<partlines> 2 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00120225473233144119 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125272233077632815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 5 </xpartlines>
+			<partlines> 1 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00148623295494489864 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00243994510677220068 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00250266179027173021 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00288283320062619926 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00290053205202245312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0609748580652498956 0.11278304974572527 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 3 0 </shortpoint>
+			<shortwait> 0.0085322498443588142 0.0161685184391479192 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 9.86113155575850924e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.14780854199569499e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2291023672518374e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.22961141141907646e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 18 </xpartlines>
+			<partlines> 19 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.48673742353794826e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.91186994294289797e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.82376276028000035e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.19910298629359333e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.45549455970045656e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 16 </xpartlines>
+			<partlines> 16 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.48515305769414904e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 16 </xpartlines>
+			<partlines> 15 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.63938283360383356e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109224715491037562 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 15 </xpartlines>
+			<partlines> 14 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000118188935353991798 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125328787711941807 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134238691874153575 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015730396931299724 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208869889075072063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000215094766862807343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 12 </xpartlines>
+			<partlines> 11 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000225539801544489551 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00024281367284408395 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000339783045919729956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345347778094520265 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000349763100889471208 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000380891571910665103 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 11 </xpartlines>
+			<partlines> 6 11 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000430449683486230515 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000457714375841571639 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 9 </xpartlines>
+			<partlines> 7 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477876206426440411 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 9 </xpartlines>
+			<partlines> 6 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000491278080421270061 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 9 </xpartlines>
+			<partlines> 5 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000618305099660766818 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 8 </xpartlines>
+			<partlines> 5 8 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000711694454938392642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000752775920475186756 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 8 </xpartlines>
+			<partlines> 4 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000767134447803857842 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 7 </xpartlines>
+			<partlines> 4 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000783650082327605664 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000848841654435085577 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000957093185831530183 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00113398823503352373 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 5 </xpartlines>
+			<partlines> 2 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00120225473233144119 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125272233077632815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 5 </xpartlines>
+			<partlines> 1 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00148623295494489864 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00243994510677220068 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00250266179027173021 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00288283320062619926 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00290053205202245312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00382351077572252833 0.0101148980666503786 </thetas>
+			<migrates> 0 266.63302494001374 180.090955280637019 0 </migrates>
+			<growthrates> 589.048754968938624 732.840714912730732 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 0 2 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.234999999999999987 </accrate>
+		<llikemle> 4.42696635227687985 </llikemle>
+		<llikedata> -3492.15130892703792 </llikedata>
+		<starttime> 1113934029 </starttime>
+		<endtime> 1113934045 </endtime>
+		<rates> <map> Tree-Arranger 47 200 </map> </rates>
+		<estimates>
+			<thetas> 0.0051932689740616959 0.0095267210917029492 </thetas>
+			<migrates> 0 0.0030934444336862682 46.6542500393172119 0 </migrates>
+			<growthrates> 1401.00175791445054 338.376238311546331 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.149999999999999994 </accrate>
+		<llikemle> 11.6317746234713901 </llikemle>
+		<llikedata> -3382.65609923619468 </llikedata>
+		<starttime> 1113934046 </starttime>
+		<endtime> 1113934062 </endtime>
+		<rates> <map> Tree-Arranger 30 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00363910230419641195 0.00949364169215391289 </thetas>
+			<migrates> 0 113.28617225286888 88.1277594808196767 0 </migrates>
+			<growthrates> 411.557560954860037 233.86142612045623 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.110000000000000001 </accrate>
+		<llikemle> 0.636063492994214719 </llikemle>
+		<llikedata> -3299.08922066821424 </llikedata>
+		<starttime> 1113934062 </starttime>
+		<endtime> 1113934079 </endtime>
+		<rates> <map> Tree-Arranger 22 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00344721894934559969 0.0137468371177546927 </thetas>
+			<migrates> 0 113.720626268037961 82.7670768811458402 0 </migrates>
+			<growthrates> 320.377044363307391 443.960643777887299 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.174999999999999989 </accrate>
+		<llikemle> 0.411652243146974017 </llikemle>
+		<llikedata> -3301.112857149506 </llikedata>
+		<starttime> 1113934079 </starttime>
+		<endtime> 1113934087 </endtime>
+		<rates> <map> Tree-Arranger 7 40 </map> </rates>
+		<estimates>
+			<thetas> 0.00274284032250226347 0.0117803366876250235 </thetas>
+			<migrates> 0 120.893070720992441 85.9918539079791771 0 </migrates>
+			<growthrates> 26.2473005315673831 368.198849330621101 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 2 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 19 20 </shortpoint>
+			<shortwait> 0.0517692740770155577 0.168835716039738043 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 2 0 </shortpoint>
+			<shortwait> 0.00827170676388711348 0.0236086927070103779 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 4.65136899805835341e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.04215566305798666e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.43604566212563868e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.84336441146756059e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.24324521605382133e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.02727378794709678e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.66779769093841711e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.70209786261414327e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.7820260710313902e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.28621094428023613e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107706203473348862 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 18 </xpartlines>
+			<partlines> 12 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000114816939483791875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000126163485711554112 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0001343202142583523 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157502040375572878 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000158198275534539187 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000185924618990777787 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195139416618541169 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000195144856593906958 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208892591389243015 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000221729134487027137 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000231540140137400703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000286748264076826459 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000296568169067036559 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00032780787636151992 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00040351349954208406 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000469019333789370591 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000501156369385404206 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 10 </xpartlines>
+			<partlines> 4 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00051564770169723541 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000720822165058714829 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 9 </xpartlines>
+			<partlines> 3 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00096916231842889629 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 8 </xpartlines>
+			<partlines> 3 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00117293932612708303 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 7 </xpartlines>
+			<partlines> 3 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00150380041638617967 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00173696782656019471 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00185054353054030468 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 7 </xpartlines>
+			<partlines> 1 7 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00191839064111927254 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00194182526794987008 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 7 </xpartlines>
+			<partlines> 0 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00225255911291697688 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 6 </xpartlines>
+			<partlines> 0 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00258123599239381586 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00295407111169069311 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00324495225912871431 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00382175977738545905 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 19 20 </shortpoint>
+			<shortwait> 0.0517692740770155577 0.159664105752183261 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 2 0 </shortpoint>
+			<shortwait> 0.00827170676388711348 0.0229534802929423874 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 4.65136899805835341e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.04215566305798666e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.43604566212563868e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.84336441146756059e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.24324521605382133e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.02727378794709678e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.66779769093841711e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.70209786261414327e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.7820260710313902e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.28621094428023613e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107706203473348862 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 18 </xpartlines>
+			<partlines> 12 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000114816939483791875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000126163485711554112 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0001343202142583523 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157502040375572878 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000158198275534539187 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000185924618990777787 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195139416618541169 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000195144856593906958 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208892591389243015 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000221729134487027137 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000231540140137400703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000286748264076826459 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000296568169067036559 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00032780787636151992 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00040351349954208406 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000469019333789370591 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000501156369385404206 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 10 </xpartlines>
+			<partlines> 4 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00051564770169723541 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000517726912059090507 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 9 </xpartlines>
+			<partlines> 3 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000720822165058714829 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 8 </xpartlines>
+			<partlines> 3 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00096916231842889629 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 7 </xpartlines>
+			<partlines> 3 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00150380041638617967 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00173696782656019471 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00185054353054030468 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 7 </xpartlines>
+			<partlines> 1 7 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00191839064111927254 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00194182526794987008 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 7 </xpartlines>
+			<partlines> 0 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00225255911291697688 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 6 </xpartlines>
+			<partlines> 0 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00258123599239381586 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00295407111169069311 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00324495225912871431 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00382175977738545905 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00344721894934559969 0.0137468371177546927 </thetas>
+			<migrates> 0 113.720626268037961 82.7670768811458402 0 </migrates>
+			<growthrates> 320.377044363307391 443.960643777887299 </growthrates>
+		</estimates>
+</chainsum>
+<replicate-summary>
+		<estimates>
+			<thetas> 0.00356219464250582358 0.00792721479150828613 </thetas>
+			<migrates> 0 121.904055450476534 100.015915512533283 0 </migrates>
+			<growthrates> 722.893455415187532 257.932768127086547 </growthrates>
+		</estimates>
+		<maxlike> -4.6390765097207467 </maxlike>
+</replicate-summary>
+<chainpack>
+	<number> 1 0 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.195000000000000007 </accrate>
+		<llikemle> 2.70726265416057954 </llikemle>
+		<llikedata> -3922.87305464996825 </llikedata>
+		<starttime> 1113934177 </starttime>
+		<endtime> 1113934192 </endtime>
+		<rates> <map> Tree-Arranger 39 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00380011726327808552 0.00713604248963370554 </thetas>
+			<migrates> 0 0.0014665879208418085 76.5973069163458717 0 </migrates>
+			<growthrates> 125.906070220065573 10.9556147174487499 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 0 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.200000000000000011 </accrate>
+		<llikemle> 19.4577047358918414 </llikemle>
+		<llikedata> -3507.94370195006422 </llikedata>
+		<starttime> 1113934192 </starttime>
+		<endtime> 1113934207 </endtime>
+		<rates> <map> Tree-Arranger 40 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00557328352740996819 0.00767691676407612593 </thetas>
+			<migrates> 0 72.5171669311285285 50.3899810789623572 0 </migrates>
+			<growthrates> 239.320949207652802 15.0158998451551309 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 0 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.195000000000000007 </accrate>
+		<llikemle> 1.41077196606614153 </llikemle>
+		<llikedata> -3388.85530318494739 </llikedata>
+		<starttime> 1113934207 </starttime>
+		<endtime> 1113934221 </endtime>
+		<rates> <map> Tree-Arranger 39 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00526638488263829647 0.00636010206007683442 </thetas>
+			<migrates> 0 145.033451919934123 78.9828742546580571 0 </migrates>
+			<growthrates> 101.354431821265237 42.7824995714557517 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 0 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.0500000000000000028 </accrate>
+		<llikemle> 0.106842797016683602 </llikemle>
+		<llikedata> -3387.66439523280769 </llikedata>
+		<starttime> 1113934222 </starttime>
+		<endtime> 1113934229 </endtime>
+		<rates> <map> Tree-Arranger 2 40 </map> </rates>
+		<estimates>
+			<thetas> 0.0048297296701252353 0.00632472336672777573 </thetas>
+			<migrates> 0 158.359159064075584 79.436028487943048 0 </migrates>
+			<growthrates> 48.5740506820224667 12.3249628941104206 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 1 0 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 21 18 </shortpoint>
+			<shortwait> 0.0985596947596808493 0.112500144905919378 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 3 2 0 </shortpoint>
+			<shortwait> 0.0189681695385016322 0.0251774919104673957 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 2.75402443608963525e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.67126558256159515e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 19 </xpartlines>
+			<partlines> 20 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.24882792998111863e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.28842625068443356e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 19 </xpartlines>
+			<partlines> 18 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.34298732185740874e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 19 </xpartlines>
+			<partlines> 17 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.46656309975548847e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.81601036755640491e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.9553870703353184e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 18 </xpartlines>
+			<partlines> 15 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.65634921089525913e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.53160440134389236e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.84400137091464748e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000104908914799906663 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107378906806900444 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134386781925198401 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 16 </xpartlines>
+			<partlines> 11 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015095815823508343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 16 </xpartlines>
+			<partlines> 10 16 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000187294338352160755 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 16 </xpartlines>
+			<partlines> 9 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000194311967981214922 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000207082475182713985 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000216519914862317283 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000240917252117856754 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000275177350551769888 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000277961519944129815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000327184596530212871 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 10 </xpartlines>
+			<partlines> 9 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000375434817240545321 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000421956732764341578 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 9 </xpartlines>
+			<partlines> 8 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000454327709213169504 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 8 </xpartlines>
+			<partlines> 8 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000455301780358049633 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 8 </xpartlines>
+			<partlines> 7 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477060867071809822 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000495771892855436606 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 6 </xpartlines>
+			<partlines> 7 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000577304059643193 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 5 </xpartlines>
+			<partlines> 7 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000728271605769075263 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000928286291063316091 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 4 </xpartlines>
+			<partlines> 6 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00136280706740197761 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00157448633617017866 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00170993270529683282 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 3 </xpartlines>
+			<partlines> 4 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00199676444517621777 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00200728073872961498 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00213154474570202875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00229629270831637703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 3 2 </xpartlines>
+			<partlines> 3 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00279444689547627594 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 1 </xpartlines>
+			<partlines> 4 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00362185244963840556 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 1 </xpartlines>
+			<partlines> 3 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00370319681038052849 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 1 </xpartlines>
+			<partlines> 2 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00482422222342331845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00968056818678340696 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 21 18 </shortpoint>
+			<shortwait> 0.0968437428145017959 0.112500144905919378 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 3 2 0 </shortpoint>
+			<shortwait> 0.018921333345451638 0.0251774919104673957 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 1.78294379475588584e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.75402443608963525e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.67126558256159515e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.24882792998111863e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 19 </xpartlines>
+			<partlines> 18 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.28842625068443356e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 19 </xpartlines>
+			<partlines> 17 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.34298732185740874e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.81601036755640491e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.9553870703353184e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 18 </xpartlines>
+			<partlines> 15 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.65634921089525913e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.53160440134389236e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.84400137091464748e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000104908914799906663 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107378906806900444 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134386781925198401 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 16 </xpartlines>
+			<partlines> 11 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015095815823508343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 16 </xpartlines>
+			<partlines> 10 16 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000187294338352160755 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 16 </xpartlines>
+			<partlines> 9 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000194311967981214922 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000207082475182713985 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000216519914862317283 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000240917252117856754 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000275177350551769888 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000277961519944129815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000327184596530212871 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 10 </xpartlines>
+			<partlines> 9 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000375434817240545321 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000421956732764341578 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 9 </xpartlines>
+			<partlines> 8 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000454327709213169504 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 8 </xpartlines>
+			<partlines> 8 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000455301780358049633 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 8 </xpartlines>
+			<partlines> 7 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477060867071809822 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000495771892855436606 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 6 </xpartlines>
+			<partlines> 7 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000577304059643193 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 5 </xpartlines>
+			<partlines> 7 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000728271605769075263 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000928286291063316091 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 4 </xpartlines>
+			<partlines> 6 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00136280706740197761 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00157448633617017866 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00170993270529683282 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 3 </xpartlines>
+			<partlines> 4 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00199676444517621777 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00200728073872961498 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00213154474570202875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00229629270831637703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 3 2 </xpartlines>
+			<partlines> 3 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00279444689547627594 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 1 </xpartlines>
+			<partlines> 4 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00362185244963840556 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 1 </xpartlines>
+			<partlines> 3 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00370319681038052849 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 1 </xpartlines>
+			<partlines> 2 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00482422222342331845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00968056818678340696 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00526638488263829647 0.00636010206007683442 </thetas>
+			<migrates> 0 145.033451919934123 78.9828742546580571 0 </migrates>
+			<growthrates> 101.354431821265237 42.7824995714557517 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 1 1 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.239999999999999991 </accrate>
+		<llikemle> 2.34750730772453364 </llikemle>
+		<llikedata> -3525.30915507333066 </llikedata>
+		<starttime> 1113934229 </starttime>
+		<endtime> 1113934243 </endtime>
+		<rates> <map> Tree-Arranger 48 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00633684542395633252 0.00414801483484614902 </thetas>
+			<migrates> 0 189.840646683791618 111.21338721037425 0 </migrates>
+			<growthrates> 134.29937737766619 -261.501604057297811 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 1 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 2 </tinytrees>
+		<accrate> 0.135000000000000009 </accrate>
+		<llikemle> 1.87776764509904415 </llikemle>
+		<llikedata> -3404.56028299746959 </llikedata>
+		<starttime> 1113934244 </starttime>
+		<endtime> 1113934259 </endtime>
+		<rates> <map> Tree-Arranger 27 200 </map> </rates>
+		<estimates>
+			<thetas> 0.0053726478023596964 0.0034495368114013979 </thetas>
+			<migrates> 0 175.867668937235891 153.579579990926845 0 </migrates>
+			<growthrates> 37.0453100154095694 -121.00495810761069 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 1 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.104999999999999996 </accrate>
+		<llikemle> 0.449286917034601607 </llikemle>
+		<llikedata> -3321.70936524524359 </llikedata>
+		<starttime> 1113934259 </starttime>
+		<endtime> 1113934274 </endtime>
+		<rates> <map> Tree-Arranger 21 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00422505900112751364 0.00395290224643087169 </thetas>
+			<migrates> 0 160.037181408148427 147.594989310094263 0 </migrates>
+			<growthrates> -173.121613861979398 -52.9903878137056665 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 1 3 </number>
+	<chainout>
+		<badtrees> 1 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.0749999999999999972 </accrate>
+		<llikemle> 4.54069966505540101 </llikemle>
+		<llikedata> -3318.52699208625927 </llikedata>
+		<starttime> 1113934274 </starttime>
+		<endtime> 1113934304 </endtime>
+		<rates> <map> Tree-Arranger 3 40 </map> </rates>
+		<estimates>
+			<thetas> 0.00295615018924887769 0.00491936629532284831 </thetas>
+			<migrates> 0 235.929972373142618 308.986288378157326 0 </migrates>
+			<growthrates> -631.659188775292023 226.807009056767953 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 1 1 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0648405565130086708 0.0877252307909633239 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 4 6 0 </shortpoint>
+			<shortwait> 0.0169381769238374467 0.0194182320038754526 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 1.30550527315558047e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.162795826446426e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.26737235869386312e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.55014581129354168e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.40932985043456985e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.90702713266381754e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 15 20 </xpartlines>
+			<partlines> 15 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.10411853492643601e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.12290248204213804e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.67472162385812578e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.65590413200949371e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000103869320417860665 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107286941773631806 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107550821053514076 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109709891069030956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000114694502065424022 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00012158499114137082 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 13 </xpartlines>
+			<partlines> 13 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000122718853104433789 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 12 </xpartlines>
+			<partlines> 13 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125375411254551923 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 11 </xpartlines>
+			<partlines> 13 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000142189216663119957 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 11 </xpartlines>
+			<partlines> 12 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000142843686193008286 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 11 </xpartlines>
+			<partlines> 11 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000171515317423868074 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000218046136466306875 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0002790329859263787 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00030767075159027072 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000326602024127066991 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000336138739569854291 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 9 </xpartlines>
+			<partlines> 7 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000411029437867106761 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 8 </xpartlines>
+			<partlines> 7 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000455284747279604488 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000539893327729701073 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000560189187505523802 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 6 </xpartlines>
+			<partlines> 6 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000724982991669875874 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000800823245136290159 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 5 </xpartlines>
+			<partlines> 5 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00101663478186505727 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00117786220475134804 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118517172371627641 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 5 </xpartlines>
+			<partlines> 3 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125529723210753312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00128846095940146773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00172659403406359149 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 3 </xpartlines>
+			<partlines> 2 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00213157854076952711 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 2 </xpartlines>
+			<partlines> 2 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00256248379887218016 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00262033009215670038 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 2 </xpartlines>
+			<partlines> 1 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00262735858334228268 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.0031959883447954078 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 0 </xpartlines>
+			<partlines> 2 0 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00551501377964465876 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00596028250037733409 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00803817495345396371 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00803938998428121825 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.0086849234925416869 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00905185738010444788 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0659711430564781931 0.0877252307909633239 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 4 6 0 </shortpoint>
+			<shortwait> 0.0169723921539429272 0.0194182320038754526 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 1.30550527315558047e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.26737235869386312e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.55014581129354168e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.93780259319326687e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.40932985043456985e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.90702713266381754e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 15 20 </xpartlines>
+			<partlines> 15 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.10411853492643601e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.12290248204213804e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.67472162385812578e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.65590413200949371e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000103869320417860665 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107286941773631806 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107550821053514076 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109709891069030956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000114694502065424022 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00012158499114137082 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 13 </xpartlines>
+			<partlines> 13 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000122718853104433789 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 12 </xpartlines>
+			<partlines> 13 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125375411254551923 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 11 </xpartlines>
+			<partlines> 13 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000142189216663119957 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 11 </xpartlines>
+			<partlines> 12 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000142843686193008286 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 11 </xpartlines>
+			<partlines> 11 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000171515317423868074 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000218046136466306875 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0002790329859263787 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00030767075159027072 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000326602024127066991 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000336138739569854291 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 9 </xpartlines>
+			<partlines> 7 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000411029437867106761 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 8 </xpartlines>
+			<partlines> 7 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000455284747279604488 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000539893327729701073 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000560189187505523802 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 6 </xpartlines>
+			<partlines> 6 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000724982991669875874 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000800823245136290159 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 5 </xpartlines>
+			<partlines> 5 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00101663478186505727 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00117786220475134804 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118517172371627641 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 5 </xpartlines>
+			<partlines> 3 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125529723210753312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00128846095940146773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00172659403406359149 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 3 </xpartlines>
+			<partlines> 2 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00213157854076952711 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 2 </xpartlines>
+			<partlines> 2 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00256248379887218016 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00262033009215670038 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 2 </xpartlines>
+			<partlines> 1 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00262735858334228268 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.0031959883447954078 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 0 </xpartlines>
+			<partlines> 2 0 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00551501377964465876 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00596028250037733409 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00803817495345396371 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00803938998428121825 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.0086849234925416869 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00905185738010444788 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00422505900112751364 0.00395290224643087169 </thetas>
+			<migrates> 0 160.037181408148427 147.594989310094263 0 </migrates>
+			<growthrates> -173.121613861979398 -52.9903878137056665 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 1 2 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.260000000000000009 </accrate>
+		<llikemle> 2.99420902451789361 </llikemle>
+		<llikedata> -4299.50526781021654 </llikedata>
+		<starttime> 1113934304 </starttime>
+		<endtime> 1113934319 </endtime>
+		<rates> <map> Tree-Arranger 52 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00756617832740799981 0.00659856362452725716 </thetas>
+			<migrates> 0 111.176155934682811 69.5707103933484632 0 </migrates>
+			<growthrates> 409.88939497805228 543.16021718378704 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 2 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.179999999999999993 </accrate>
+		<llikemle> 1.07089522228415079 </llikemle>
+		<llikedata> -3490.4063004547229 </llikedata>
+		<starttime> 1113934320 </starttime>
+		<endtime> 1113934335 </endtime>
+		<rates> <map> Tree-Arranger 36 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00656792579496304294 0.00584459175444660209 </thetas>
+			<migrates> 0 57.9007335206334091 156.694294591854742 0 </migrates>
+			<growthrates> 451.288616099099272 497.009379113105979 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 2 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.140000000000000013 </accrate>
+		<llikemle> 1.41480621744143908 </llikemle>
+		<llikedata> -3434.66438504787766 </llikedata>
+		<starttime> 1113934336 </starttime>
+		<endtime> 1113934351 </endtime>
+		<rates> <map> Tree-Arranger 28 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00550575271147208094 0.00696470662214750645 </thetas>
+			<migrates> 0 121.568711323335009 63.7627893168077549 0 </migrates>
+			<growthrates> 354.601203542231758 606.129048483006613 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 1 2 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.100000000000000006 </accrate>
+		<llikemle> 0.109493731119181953 </llikemle>
+		<llikedata> -3423.70440531923623 </llikedata>
+		<starttime> 1113934352 </starttime>
+		<endtime> 1113934359 </endtime>
+		<rates> <map> Tree-Arranger 4 40 </map> </rates>
+		<estimates>
+			<thetas> 0.00514192935091333826 0.0062288990703713602 </thetas>
+			<migrates> 0 124.55408129628016 61.5396353290182461 0 </migrates>
+			<growthrates> 335.346278823775265 527.046201636540445 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 1 2 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0861184261675420754 0.0989240815983647992 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 2 1 0 </shortpoint>
+			<shortwait> 0.0161108601645228097 0.0163539778587721008 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 2.10705440289478283e-06 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.25399383339123163e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 19 </xpartlines>
+			<partlines> 20 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.24885254542245494e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.11189546645828539e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 18 19 </xpartlines>
+			<partlines> 18 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.85012116407944336e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.78107834741533388e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 17 </xpartlines>
+			<partlines> 18 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000118112773039976836 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000127737720313619163 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000129662442835112526 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000131648496800567045 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000131860052917847297 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000140310461463006713 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000155023093005773421 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157657160551731915 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000162080373620438725 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 15 </xpartlines>
+			<partlines> 11 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195611028967059892 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000233034957421893385 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00023544280363578128 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 14 </xpartlines>
+			<partlines> 9 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000262195006109537632 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000262906955612288607 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000299377625930490846 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 12 </xpartlines>
+			<partlines> 8 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000304231828846860254 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00035306417392301292 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000385710705381445032 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 9 </xpartlines>
+			<partlines> 8 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00043664221284098057 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 8 </xpartlines>
+			<partlines> 8 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000458570971503667059 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000462182592971902917 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000529375689534849147 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 6 </xpartlines>
+			<partlines> 7 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000531151851086229484 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 5 </xpartlines>
+			<partlines> 7 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000570644372201619386 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000628012110491958956 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 5 </xpartlines>
+			<partlines> 5 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00126002496707287252 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00133172776547203667 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00138378313545306345 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 4 3 </xpartlines>
+			<partlines> 4 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00139440804359624269 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0015125657858844324 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00151492720306570521 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 3 </xpartlines>
+			<partlines> 2 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00180363775393179489 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00272922260577865804 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 2 </xpartlines>
+			<partlines> 1 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00303636024375119338 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 1 </xpartlines>
+			<partlines> 2 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00598338892698212308 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0064621861657712101 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 0 </xpartlines>
+			<partlines> 2 0 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0838664731195002539 0.0940195460747020734 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 2 1 0 </shortpoint>
+			<shortwait> 0.0160107008550887178 0.0161577536563222747 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 2.10705440289478283e-06 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.25399383339123163e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 19 </xpartlines>
+			<partlines> 20 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.24885254542245494e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.11189546645828539e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 18 19 </xpartlines>
+			<partlines> 18 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.85012116407944336e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.20971519398864496e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 17 </xpartlines>
+			<partlines> 18 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.78107834741533388e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000127737720313619163 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000129662442835112526 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000131648496800567045 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000131860052917847297 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000137097093409470431 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000140310461463006713 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000155023093005773421 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157657160551731915 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000158891269087890025 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000162080373620438725 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195611028967059892 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000206353336623343831 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000262195006109537632 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000262906955612288607 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000299377625930490846 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00035306417392301292 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 10 </xpartlines>
+			<partlines> 8 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000385710705381445032 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 9 </xpartlines>
+			<partlines> 8 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00043664221284098057 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 8 </xpartlines>
+			<partlines> 8 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000458570971503667059 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000462182592971902917 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000529375689534849147 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 6 </xpartlines>
+			<partlines> 7 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000531151851086229484 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 5 </xpartlines>
+			<partlines> 7 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000570644372201619386 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 5 </xpartlines>
+			<partlines> 6 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000628012110491958956 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 5 </xpartlines>
+			<partlines> 5 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00126002496707287252 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00133172776547203667 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00138378313545306345 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 4 3 </xpartlines>
+			<partlines> 4 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00139440804359624269 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0015125657858844324 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 3 </xpartlines>
+			<partlines> 3 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00151492720306570521 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 3 </xpartlines>
+			<partlines> 2 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00180363775393179489 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00272922260577865804 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 2 </xpartlines>
+			<partlines> 1 2 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00303636024375119338 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 1 </xpartlines>
+			<partlines> 2 1 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00598338892698212308 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 1 1 </xpartlines>
+			<partlines> 1 1 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0064621861657712101 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 0 </xpartlines>
+			<partlines> 2 0 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00550575271147208094 0.00696470662214750645 </thetas>
+			<migrates> 0 121.568711323335009 63.7627893168077549 0 </migrates>
+			<growthrates> 354.601203542231758 606.129048483006613 </growthrates>
+		</estimates>
+</chainsum>
+<replicate-summary>
+		<estimates>
+			<thetas> 0.00459610289535911792 0.00543752715647888643 </thetas>
+			<migrates> 0 173.235122299289941 147.938467429627138 0 </migrates>
+			<growthrates> 151.703172633675905 86.8286879559442326 </growthrates>
+		</estimates>
+		<maxlike> -3.86601975441648626 </maxlike>
+</replicate-summary>
+<!--  New information past this point. -->
+<region-summary>
+		<estimates>
+			<thetas> 0.00380528895655514584 0.00634260268782250135 </thetas>
+			<migrates> 0 156.653872902522551 124.266112790422568 0 </migrates>
+			<growthrates> 128.113540536591103 125.014283970374336 </growthrates>
+		</estimates>
+		<maxlike> -11.1852019504600744 </maxlike>
+</region-summary>
+<!-- End summary file
+	 Generated from run that started at: Tue Apr 19 11:04:50 2005
+	 and ended at: Tue Apr 19 11:12:39 2005 -->
+</XML-summary-file>
diff --git a/doc/html/outsumfile.3rep.html b/doc/html/outsumfile.3rep.html
new file mode 100644
index 0000000..6613427
--- /dev/null
+++ b/doc/html/outsumfile.3rep.html
@@ -0,0 +1,1621 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+&ltXML-summary-file&gt
+&lt!-- Lamarc v. 2.0
+     Please do not modify. --&gt
+&lt!--  This summary file should match the input summary file insumfile.3rep.txt,
+      up until that file's end. --&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.195000000000000007 &lt/accrate&gt
+		&ltllikemle&gt 4.50058419396642684 &lt/llikemle&gt
+		&ltllikedata&gt -3620.90996263017314 &lt/llikedata&gt
+		&ltstarttime&gt 1113933890 &lt/starttime&gt
+		&ltendtime&gt 1113933911 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 39 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00832313455063505084 0.00734484914891693919 &lt/thetas&gt
+			&ltmigrates&gt 0 96.6630952829531083 46.6299496864950527 0 &lt/migrates&gt
+			&ltgrowthrates&gt 2365.3441048272216 184.41163985471303 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.160000000000000003 &lt/accrate&gt
+		&ltllikemle&gt 1.18053638385976001 &lt/llikemle&gt
+		&ltllikedata&gt -3516.0909920893173 &lt/llikedata&gt
+		&ltstarttime&gt 1113933912 &lt/starttime&gt
+		&ltendtime&gt 1113933934 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 32 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00513756085360574273 0.0082400193242912461 &lt/thetas&gt
+			&ltmigrates&gt 0 131.185343491258408 42.1077506833302877 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1821.55597636667744 149.760781972604775 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.130000000000000004 &lt/accrate&gt
+		&ltllikemle&gt 1.6599393589321001 &lt/llikemle&gt
+		&ltllikedata&gt -3271.75316866892263 &lt/llikedata&gt
+		&ltstarttime&gt 1113933934 &lt/starttime&gt
+		&ltendtime&gt 1113933960 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 26 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00505700949907143815 0.00613707484094671175 &lt/thetas&gt
+			&ltmigrates&gt 0 124.679362905292081 34.8937725369221496 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1761.27593665903237 -84.0195656658781189 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 0 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 1 &lt/tinytrees&gt
+		&ltaccrate&gt 0.149999999999999994 &lt/accrate&gt
+		&ltllikemle&gt 0.596513557975481556 &lt/llikemle&gt
+		&ltllikedata&gt -3269.61178480345779 &lt/llikedata&gt
+		&ltstarttime&gt 1113933961 &lt/starttime&gt
+		&ltendtime&gt 1113933971 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 6 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0051461021148077107 0.00589555271016268308 &lt/thetas&gt
+			&ltmigrates&gt 0 125.68905479949639 48.865939986393137 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1835.77616186834098 82.0043403675645095 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 0 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0629273638426249143 0.105548312215536552 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0079654795218820091 0.0206451375053703819 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.09657490636763079e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2102355624254017e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.33961400471318623e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.77695915387843852e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10446887053792051e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 19 &lt/xpartlines&gt
+			&ltpartlines&gt 17 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.27457247455224152e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.44016581525568737e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.63431475801879578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.55098716603418459e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.39112816930027905e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.18973368575778886e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.3802045298549127e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00010629432546176642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000111507477845693503 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000150975907355033011 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000165638034054806815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219061924145474876 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219707138094496452 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 12 &lt/xpartlines&gt
+			&ltpartlines&gt 11 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000258213852917746063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 11 &lt/xpartlines&gt
+			&ltpartlines&gt 11 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000273652113921858643 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 10 &lt/xpartlines&gt
+			&ltpartlines&gt 11 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000280583868742954141 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 10 &lt/xpartlines&gt
+			&ltpartlines&gt 10 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000331106053916933646 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 9 &lt/xpartlines&gt
+			&ltpartlines&gt 10 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000344952279767115331 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 8 &lt/xpartlines&gt
+			&ltpartlines&gt 10 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345874033941911106 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 8 &lt/xpartlines&gt
+			&ltpartlines&gt 9 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000348861238031590646 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 7 &lt/xpartlines&gt
+			&ltpartlines&gt 9 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000424599242926313077 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000439947186235675422 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000481070344401667829 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000528930654355221855 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000592184315630871314 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 6 &lt/xpartlines&gt
+			&ltpartlines&gt 5 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000857591267505941114 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000866354211828237845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00091353575187592918 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 3 5 &lt/xpartlines&gt
+			&ltpartlines&gt 3 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000919185689826623655 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000955912424407952743 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118557134547170773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00153425735560147494 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0019246211065550127 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00206538502262878197 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00305508566675113469 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00533986494186010525 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 20 19 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0575370553514418445 0.104633534940785464 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 1 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00794664505963699544 0.0202802222566785852 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.09657490636763079e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2102355624254017e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.25411707266569827e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.33961400471318623e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.77695915387843852e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.10446887053792051e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.27457247455224152e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.44016581525568737e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.63431475801879578e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.55098716603418459e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 17 &lt/xpartlines&gt
+			&ltpartlines&gt 14 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.39112816930027905e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 17 &lt/xpartlines&gt
+			&ltpartlines&gt 13 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.3802045298549127e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00010617490035119024 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000111507477845693503 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000150975907355033011 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 15 &lt/xpartlines&gt
+			&ltpartlines&gt 11 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000151946997256986488 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000165638034054806815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000191522903992015264 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000219707138094496452 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000258213852917746063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000273652113921858643 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 10 &lt/xpartlines&gt
+			&ltpartlines&gt 10 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000280583868742954141 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 10 &lt/xpartlines&gt
+			&ltpartlines&gt 9 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000331106053916933646 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 9 &lt/xpartlines&gt
+			&ltpartlines&gt 9 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345874033941911106 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 8 &lt/xpartlines&gt
+			&ltpartlines&gt 9 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000348861238031590646 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 7 &lt/xpartlines&gt
+			&ltpartlines&gt 9 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000424599242926313077 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 7 &lt/xpartlines&gt
+			&ltpartlines&gt 8 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000439947186235675422 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 7 &lt/xpartlines&gt
+			&ltpartlines&gt 7 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000481070344401667829 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 7 &lt/xpartlines&gt
+			&ltpartlines&gt 6 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000528930654355221855 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000592184315630871314 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 6 &lt/xpartlines&gt
+			&ltpartlines&gt 5 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000866354211828237845 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00091353575187592918 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 4 5 &lt/xpartlines&gt
+			&ltpartlines&gt 4 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000919185689826623655 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 4 &lt/xpartlines&gt
+			&ltpartlines&gt 5 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000955912424407952743 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 4 &lt/xpartlines&gt
+			&ltpartlines&gt 4 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00114689035080777412 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 4 &lt/xpartlines&gt
+			&ltpartlines&gt 3 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00118557134547170773 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 4 &lt/xpartlines&gt
+			&ltpartlines&gt 2 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00153425735560147494 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0019246211065550127 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00206538502262878197 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00267205676641757645 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00533986494186010525 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00505700949907143815 0.00613707484094671175 &lt/thetas&gt
+			&ltmigrates&gt 0 124.679362905292081 34.8937725369221496 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1761.27593665903237 -84.0195656658781189 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.265000000000000013 &lt/accrate&gt
+		&ltllikemle&gt 8.03638549246886313 &lt/llikemle&gt
+		&ltllikedata&gt -3574.6386248622166 &lt/llikedata&gt
+		&ltstarttime&gt 1113933971 &lt/starttime&gt
+		&ltendtime&gt 1113933986 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 53 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00238967973895868897 0.00857314189478221474 &lt/thetas&gt
+			&ltmigrates&gt 0 178.594354360432533 201.711534027463927 0 &lt/migrates&gt
+			&ltgrowthrates&gt 286.265570187478374 522.584629259533926 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.174999999999999989 &lt/accrate&gt
+		&ltllikemle&gt 2.24292366391492948 &lt/llikemle&gt
+		&ltllikedata&gt -3366.95583651043171 &lt/llikedata&gt
+		&ltstarttime&gt 1113933986 &lt/starttime&gt
+		&ltendtime&gt 1113934003 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 35 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00342828243898930232 0.00938815584810562438 &lt/thetas&gt
+			&ltmigrates&gt 0 359.333313010233837 170.709781451344668 0 &lt/migrates&gt
+			&ltgrowthrates&gt 376.258762176827872 608.980866754854333 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.115000000000000005 &lt/accrate&gt
+		&ltllikemle&gt 0.331232649867971241 &lt/llikemle&gt
+		&ltllikedata&gt -3277.07186443434921 &lt/llikedata&gt
+		&ltstarttime&gt 1113934003 &lt/starttime&gt
+		&ltendtime&gt 1113934021 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 23 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00382351077572252833 0.0101148980666503786 &lt/thetas&gt
+			&ltmigrates&gt 0 266.63302494001374 180.090955280637019 0 &lt/migrates&gt
+			&ltgrowthrates&gt 589.048754968938624 732.840714912730732 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 1 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.100000000000000006 &lt/accrate&gt
+		&ltllikemle&gt 0.712638597764832182 &lt/llikemle&gt
+		&ltllikedata&gt -3264.58251001428516 &lt/llikedata&gt
+		&ltstarttime&gt 1113934021 &lt/starttime&gt
+		&ltendtime&gt 1113934029 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 4 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.004074651441941083 0.00837181988651936049 &lt/thetas&gt
+			&ltmigrates&gt 0 118.942422560571757 182.899111430931441 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1023.10652786136313 621.693756977592329 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 1 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0539318434248482123 0.124884165154835192 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 3 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00820356565115269894 0.0167862026647611975 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.86113155575850924e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.14780854199569499e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2291023672518374e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.66635848884044525e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 18 &lt/xpartlines&gt
+			&ltpartlines&gt 19 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.22961141141907646e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.48673742353794826e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.91186994294289797e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 18 &lt/xpartlines&gt
+			&ltpartlines&gt 16 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.82376276028000035e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.19910298629359333e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 17 &lt/xpartlines&gt
+			&ltpartlines&gt 15 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.45549455970045656e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 16 &lt/xpartlines&gt
+			&ltpartlines&gt 15 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.48515305769414904e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.63938283360383356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 16 &lt/xpartlines&gt
+			&ltpartlines&gt 13 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109224715491037562 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125328787711941807 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134238691874153575 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 15 &lt/xpartlines&gt
+			&ltpartlines&gt 11 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015730396931299724 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 15 &lt/xpartlines&gt
+			&ltpartlines&gt 10 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208869889075072063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 14 &lt/xpartlines&gt
+			&ltpartlines&gt 10 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000215094766862807343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000225539801544489551 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00024281367284408395 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 13 &lt/xpartlines&gt
+			&ltpartlines&gt 8 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000339783045919729956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 12 &lt/xpartlines&gt
+			&ltpartlines&gt 8 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000349763100889471208 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 12 &lt/xpartlines&gt
+			&ltpartlines&gt 7 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000380891571910665103 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 12 &lt/xpartlines&gt
+			&ltpartlines&gt 6 12 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000430449683486230515 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 6 11 &lt/xpartlines&gt
+			&ltpartlines&gt 6 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000457714375841571639 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477876206426440411 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000491278080421270061 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000618305099660766818 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 9 &lt/xpartlines&gt
+			&ltpartlines&gt 5 9 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000711694454938392642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 5 8 &lt/xpartlines&gt
+			&ltpartlines&gt 5 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000735873160967268547 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000752775920475186756 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 8 &lt/xpartlines&gt
+			&ltpartlines&gt 4 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000767134447803857842 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 7 &lt/xpartlines&gt
+			&ltpartlines&gt 4 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000783650082327605664 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000848841654435085577 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000957093185831530183 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00113398823503352373 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 5 &lt/xpartlines&gt
+			&ltpartlines&gt 2 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00120225473233144119 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125272233077632815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 5 &lt/xpartlines&gt
+			&ltpartlines&gt 1 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00148623295494489864 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00243994510677220068 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00250266179027173021 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00288283320062619926 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00290053205202245312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 18 21 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0609748580652498956 0.11278304974572527 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 3 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0085322498443588142 0.0161685184391479192 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.86113155575850924e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.14780854199569499e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.2291023672518374e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 19 &lt/xpartlines&gt
+			&ltpartlines&gt 19 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.22961141141907646e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 18 &lt/xpartlines&gt
+			&ltpartlines&gt 19 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.48673742353794826e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 18 &lt/xpartlines&gt
+			&ltpartlines&gt 18 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 3.91186994294289797e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 18 &lt/xpartlines&gt
+			&ltpartlines&gt 17 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.82376276028000035e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 17 &lt/xpartlines&gt
+			&ltpartlines&gt 17 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 5.19910298629359333e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 17 &lt/xpartlines&gt
+			&ltpartlines&gt 16 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 7.45549455970045656e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 16 &lt/xpartlines&gt
+			&ltpartlines&gt 16 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.48515305769414904e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 16 &lt/xpartlines&gt
+			&ltpartlines&gt 15 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.63938283360383356e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 16 &lt/xpartlines&gt
+			&ltpartlines&gt 14 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000109224715491037562 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 15 &lt/xpartlines&gt
+			&ltpartlines&gt 14 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000118188935353991798 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 15 &lt/xpartlines&gt
+			&ltpartlines&gt 13 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000125328787711941807 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 14 &lt/xpartlines&gt
+			&ltpartlines&gt 13 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000134238691874153575 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00015730396931299724 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208869889075072063 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000215094766862807343 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 12 &lt/xpartlines&gt
+			&ltpartlines&gt 11 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000225539801544489551 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00024281367284408395 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 12 &lt/xpartlines&gt
+			&ltpartlines&gt 9 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000339783045919729956 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000345347778094520265 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000349763100889471208 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000380891571910665103 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 11 &lt/xpartlines&gt
+			&ltpartlines&gt 6 11 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000430449683486230515 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000457714375841571639 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 9 &lt/xpartlines&gt
+			&ltpartlines&gt 7 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000477876206426440411 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 9 &lt/xpartlines&gt
+			&ltpartlines&gt 6 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000491278080421270061 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 9 &lt/xpartlines&gt
+			&ltpartlines&gt 5 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000618305099660766818 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 8 &lt/xpartlines&gt
+			&ltpartlines&gt 5 8 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000711694454938392642 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 5 7 &lt/xpartlines&gt
+			&ltpartlines&gt 5 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000752775920475186756 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 8 &lt/xpartlines&gt
+			&ltpartlines&gt 4 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000767134447803857842 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 7 &lt/xpartlines&gt
+			&ltpartlines&gt 4 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000783650082327605664 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 6 &lt/xpartlines&gt
+			&ltpartlines&gt 4 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000848841654435085577 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000957093185831530183 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00113398823503352373 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 5 &lt/xpartlines&gt
+			&ltpartlines&gt 2 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00120225473233144119 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00125272233077632815 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 5 &lt/xpartlines&gt
+			&ltpartlines&gt 1 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00148623295494489864 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 4 &lt/xpartlines&gt
+			&ltpartlines&gt 1 4 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00243994510677220068 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 3 &lt/xpartlines&gt
+			&ltpartlines&gt 1 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00250266179027173021 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00288283320062619926 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00290053205202245312 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00382351077572252833 0.0101148980666503786 &lt/thetas&gt
+			&ltmigrates&gt 0 266.63302494001374 180.090955280637019 0 &lt/migrates&gt
+			&ltgrowthrates&gt 589.048754968938624 732.840714912730732 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 0 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.234999999999999987 &lt/accrate&gt
+		&ltllikemle&gt 4.42696635227687985 &lt/llikemle&gt
+		&ltllikedata&gt -3492.15130892703792 &lt/llikedata&gt
+		&ltstarttime&gt 1113934029 &lt/starttime&gt
+		&ltendtime&gt 1113934045 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 47 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.0051932689740616959 0.0095267210917029492 &lt/thetas&gt
+			&ltmigrates&gt 0 0.0030934444336862682 46.6542500393172119 0 &lt/migrates&gt
+			&ltgrowthrates&gt 1401.00175791445054 338.376238311546331 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 1 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.149999999999999994 &lt/accrate&gt
+		&ltllikemle&gt 11.6317746234713901 &lt/llikemle&gt
+		&ltllikedata&gt -3382.65609923619468 &lt/llikedata&gt
+		&ltstarttime&gt 1113934046 &lt/starttime&gt
+		&ltendtime&gt 1113934062 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 30 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00363910230419641195 0.00949364169215391289 &lt/thetas&gt
+			&ltmigrates&gt 0 113.28617225286888 88.1277594808196767 0 &lt/migrates&gt
+			&ltgrowthrates&gt 411.557560954860037 233.86142612045623 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 2 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.110000000000000001 &lt/accrate&gt
+		&ltllikemle&gt 0.636063492994214719 &lt/llikemle&gt
+		&ltllikedata&gt -3299.08922066821424 &lt/llikedata&gt
+		&ltstarttime&gt 1113934062 &lt/starttime&gt
+		&ltendtime&gt 1113934079 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 22 200 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00344721894934559969 0.0137468371177546927 &lt/thetas&gt
+			&ltmigrates&gt 0 113.720626268037961 82.7670768811458402 0 &lt/migrates&gt
+			&ltgrowthrates&gt 320.377044363307391 443.960643777887299 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainpack&gt
+	&ltnumber&gt 0 2 3 &lt/number&gt
+	&ltchainout&gt
+		&ltbadtrees&gt 0 &lt/badtrees&gt
+		&lttinytrees&gt 0 &lt/tinytrees&gt
+		&ltaccrate&gt 0.174999999999999989 &lt/accrate&gt
+		&ltllikemle&gt 0.411652243146974017 &lt/llikemle&gt
+		&ltllikedata&gt -3301.112857149506 &lt/llikedata&gt
+		&ltstarttime&gt 1113934079 &lt/starttime&gt
+		&ltendtime&gt 1113934087 &lt/endtime&gt
+		&ltrates&gt &ltmap&gt Tree-Arranger 7 40 &lt/map&gt &lt/rates&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00274284032250226347 0.0117803366876250235 &lt/thetas&gt
+			&ltmigrates&gt 0 120.893070720992441 85.9918539079791771 0 &lt/migrates&gt
+			&ltgrowthrates&gt 26.2473005315673831 368.198849330621101 &lt/growthrates&gt
+		&lt/estimates&gt
+	&lt/chainout&gt
+&lt/chainpack&gt
+&ltchainsum&gt
+	&ltreg_rep&gt 0 2 &lt/reg_rep&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 19 20 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0517692740770155577 0.168835716039738043 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00827170676388711348 0.0236086927070103779 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.65136899805835341e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.04215566305798666e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.43604566212563868e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.84336441146756059e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.24324521605382133e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.02727378794709678e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.66779769093841711e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.70209786261414327e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.7820260710313902e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.28621094428023613e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107706203473348862 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 18 &lt/xpartlines&gt
+			&ltpartlines&gt 12 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000114816939483791875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000126163485711554112 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0001343202142583523 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157502040375572878 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000158198275534539187 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000185924618990777787 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195139416618541169 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000195144856593906958 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208892591389243015 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000221729134487027137 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000231540140137400703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000286748264076826459 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000296568169067036559 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00032780787636151992 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00040351349954208406 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000469019333789370591 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000501156369385404206 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 10 &lt/xpartlines&gt
+			&ltpartlines&gt 4 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00051564770169723541 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000720822165058714829 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 9 &lt/xpartlines&gt
+			&ltpartlines&gt 3 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00096916231842889629 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 8 &lt/xpartlines&gt
+			&ltpartlines&gt 3 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00117293932612708303 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 7 &lt/xpartlines&gt
+			&ltpartlines&gt 3 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00150380041638617967 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00173696782656019471 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00185054353054030468 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 7 &lt/xpartlines&gt
+			&ltpartlines&gt 1 7 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00191839064111927254 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00194182526794987008 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 7 &lt/xpartlines&gt
+			&ltpartlines&gt 0 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00225255911291697688 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 6 &lt/xpartlines&gt
+			&ltpartlines&gt 0 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00258123599239381586 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00295407111169069311 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00324495225912871431 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00382175977738545905 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+	&lttreesum&gt
+		&ltncopy&gt 1 &lt/ncopy&gt
+		&ltshortforce&gt coalesce long
+			&ltshortpoint&gt 19 20 &lt/shortpoint&gt
+			&ltshortwait&gt 0.0517692740770155577 0.159664105752183261 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltshortforce&gt migrate long
+			&ltshortpoint&gt 0 1 2 0 &lt/shortpoint&gt
+			&ltshortwait&gt 0.00827170676388711348 0.0229534802929423874 &lt/shortwait&gt
+		&lt/shortforce&gt
+		&ltintervals&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.65136899805835341e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 20 20 &lt/xpartlines&gt
+			&ltpartlines&gt 20 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.04215566305798666e-06 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 19 20 &lt/xpartlines&gt
+			&ltpartlines&gt 19 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.43604566212563868e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 18 20 &lt/xpartlines&gt
+			&ltpartlines&gt 18 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 1.84336441146756059e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 17 20 &lt/xpartlines&gt
+			&ltpartlines&gt 17 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 2.24324521605382133e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 20 &lt/xpartlines&gt
+			&ltpartlines&gt 16 20 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.02727378794709678e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 16 19 &lt/xpartlines&gt
+			&ltpartlines&gt 16 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 4.66779769093841711e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 15 19 &lt/xpartlines&gt
+			&ltpartlines&gt 15 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 6.70209786261414327e-05 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 19 &lt/xpartlines&gt
+			&ltpartlines&gt 14 19 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 8.7820260710313902e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 14 18 &lt/xpartlines&gt
+			&ltpartlines&gt 14 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 9.28621094428023613e-05 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 13 18 &lt/xpartlines&gt
+			&ltpartlines&gt 13 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000107706203473348862 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 18 &lt/xpartlines&gt
+			&ltpartlines&gt 12 18 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000114816939483791875 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 17 &lt/xpartlines&gt
+			&ltpartlines&gt 12 17 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000126163485711554112 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 16 &lt/xpartlines&gt
+			&ltpartlines&gt 12 16 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.0001343202142583523 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 15 &lt/xpartlines&gt
+			&ltpartlines&gt 12 15 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000157502040375572878 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 12 14 &lt/xpartlines&gt
+			&ltpartlines&gt 12 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000158198275534539187 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 14 &lt/xpartlines&gt
+			&ltpartlines&gt 11 14 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000185924618990777787 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 11 13 &lt/xpartlines&gt
+			&ltpartlines&gt 11 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000195139416618541169 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 13 &lt/xpartlines&gt
+			&ltpartlines&gt 10 13 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.000195144856593906958 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltnewstatus&gt 1 &lt/newstatus&gt
+			&ltxpartlines&gt 9 13 &lt/xpartlines&gt
+			&ltpartlines&gt 9 13 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000208892591389243015 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 12 &lt/xpartlines&gt
+			&ltpartlines&gt 10 12 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000221729134487027137 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 10 11 &lt/xpartlines&gt
+			&ltpartlines&gt 10 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000231540140137400703 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 9 11 &lt/xpartlines&gt
+			&ltpartlines&gt 9 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000286748264076826459 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 8 11 &lt/xpartlines&gt
+			&ltpartlines&gt 8 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000296568169067036559 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 11 &lt/xpartlines&gt
+			&ltpartlines&gt 7 11 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00032780787636151992 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 7 10 &lt/xpartlines&gt
+			&ltpartlines&gt 7 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00040351349954208406 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 6 10 &lt/xpartlines&gt
+			&ltpartlines&gt 6 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000469019333789370591 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 5 10 &lt/xpartlines&gt
+			&ltpartlines&gt 5 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000501156369385404206 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 10 &lt/xpartlines&gt
+			&ltpartlines&gt 4 10 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00051564770169723541 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 4 9 &lt/xpartlines&gt
+			&ltpartlines&gt 4 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000517726912059090507 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 9 &lt/xpartlines&gt
+			&ltpartlines&gt 3 9 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.000720822165058714829 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 8 &lt/xpartlines&gt
+			&ltpartlines&gt 3 8 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00096916231842889629 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 7 &lt/xpartlines&gt
+			&ltpartlines&gt 3 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00150380041638617967 &lt/endtime&gt
+			&ltoldstatus&gt 0 &lt/oldstatus&gt
+			&ltxpartlines&gt 3 6 &lt/xpartlines&gt
+			&ltpartlines&gt 3 6 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00173696782656019471 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 2 6 &lt/xpartlines&gt
+			&ltpartlines&gt 2 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00185054353054030468 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 1 7 &lt/xpartlines&gt
+			&ltpartlines&gt 1 7 . &lt/partlines&gt
+		&ltforce&gt Migration &lt/force&gt
+			&ltendtime&gt 0.00191839064111927254 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltnewstatus&gt 0 &lt/newstatus&gt
+			&ltxpartlines&gt 1 6 &lt/xpartlines&gt
+			&ltpartlines&gt 1 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00194182526794987008 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 7 &lt/xpartlines&gt
+			&ltpartlines&gt 0 7 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00225255911291697688 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 6 &lt/xpartlines&gt
+			&ltpartlines&gt 0 6 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00258123599239381586 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 5 &lt/xpartlines&gt
+			&ltpartlines&gt 0 5 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00295407111169069311 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 4 &lt/xpartlines&gt
+			&ltpartlines&gt 0 4 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00324495225912871431 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 3 &lt/xpartlines&gt
+			&ltpartlines&gt 0 3 . &lt/partlines&gt
+		&ltforce&gt Coalescence &lt/force&gt
+			&ltendtime&gt 0.00382175977738545905 &lt/endtime&gt
+			&ltoldstatus&gt 1 &lt/oldstatus&gt
+			&ltxpartlines&gt 0 2 &lt/xpartlines&gt
+			&ltpartlines&gt 0 2 . &lt/partlines&gt
+		&lt/intervals&gt
+	&lt/treesum&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00344721894934559969 0.0137468371177546927 &lt/thetas&gt
+			&ltmigrates&gt 0 113.720626268037961 82.7670768811458402 0 &lt/migrates&gt
+			&ltgrowthrates&gt 320.377044363307391 443.960643777887299 &lt/growthrates&gt
+		&lt/estimates&gt
+&lt/chainsum&gt
+&lt!--  New information past this point. --&gt
+&ltreplicate-summary&gt
+		&ltestimates&gt
+			&ltthetas&gt 0.00356219464250582358 0.00792721479150828613 &lt/thetas&gt
+			&ltmigrates&gt 0 121.904055450476534 100.015915512533283 0 &lt/migrates&gt
+			&ltgrowthrates&gt 722.893455415187532 257.932768127086547 &lt/growthrates&gt
+		&lt/estimates&gt
+		&ltmaxlike&gt -4.6390765097207467 &lt/maxlike&gt
+&lt/replicate-summary&gt
+&lt!-- End summary file
+	 Generated from run that started at: Tue Apr 19 11:04:50 2005
+	 and ended at: Tue Apr 19 11:08:07 2005 --&gt
+&lt/XML-summary-file&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/outsumfile.3rep.xml b/doc/html/outsumfile.3rep.xml
new file mode 100644
index 0000000..03b8ab9
--- /dev/null
+++ b/doc/html/outsumfile.3rep.xml
@@ -0,0 +1,1614 @@
+<XML-summary-file>
+<!-- Lamarc v. 2.0
+     Please do not modify. -->
+<!--  This summary file should match the input summary file insumfile.3rep.txt,
+      up until that file's end. -->
+<chainpack>
+	<number> 0 0 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.195000000000000007 </accrate>
+		<llikemle> 4.50058419396642684 </llikemle>
+		<llikedata> -3620.90996263017314 </llikedata>
+		<starttime> 1113933890 </starttime>
+		<endtime> 1113933911 </endtime>
+		<rates> <map> Tree-Arranger 39 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00832313455063505084 0.00734484914891693919 </thetas>
+			<migrates> 0 96.6630952829531083 46.6299496864950527 0 </migrates>
+			<growthrates> 2365.3441048272216 184.41163985471303 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.160000000000000003 </accrate>
+		<llikemle> 1.18053638385976001 </llikemle>
+		<llikedata> -3516.0909920893173 </llikedata>
+		<starttime> 1113933912 </starttime>
+		<endtime> 1113933934 </endtime>
+		<rates> <map> Tree-Arranger 32 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00513756085360574273 0.0082400193242912461 </thetas>
+			<migrates> 0 131.185343491258408 42.1077506833302877 0 </migrates>
+			<growthrates> 1821.55597636667744 149.760781972604775 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.130000000000000004 </accrate>
+		<llikemle> 1.6599393589321001 </llikemle>
+		<llikedata> -3271.75316866892263 </llikedata>
+		<starttime> 1113933934 </starttime>
+		<endtime> 1113933960 </endtime>
+		<rates> <map> Tree-Arranger 26 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00505700949907143815 0.00613707484094671175 </thetas>
+			<migrates> 0 124.679362905292081 34.8937725369221496 0 </migrates>
+			<growthrates> 1761.27593665903237 -84.0195656658781189 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 1 </tinytrees>
+		<accrate> 0.149999999999999994 </accrate>
+		<llikemle> 0.596513557975481556 </llikemle>
+		<llikedata> -3269.61178480345779 </llikedata>
+		<starttime> 1113933961 </starttime>
+		<endtime> 1113933971 </endtime>
+		<rates> <map> Tree-Arranger 6 40 </map> </rates>
+		<estimates>
+			<thetas> 0.0051461021148077107 0.00589555271016268308 </thetas>
+			<migrates> 0 125.68905479949639 48.865939986393137 0 </migrates>
+			<growthrates> 1835.77616186834098 82.0043403675645095 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 0 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0629273638426249143 0.105548312215536552 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 1 0 </shortpoint>
+			<shortwait> 0.0079654795218820091 0.0206451375053703819 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 5.09657490636763079e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2102355624254017e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.33961400471318623e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.77695915387843852e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.10446887053792051e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 19 </xpartlines>
+			<partlines> 17 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.27457247455224152e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.44016581525568737e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.63431475801879578e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.55098716603418459e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.39112816930027905e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.18973368575778886e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.3802045298549127e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00010629432546176642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000111507477845693503 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000150975907355033011 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000165638034054806815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219061924145474876 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219707138094496452 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 12 </xpartlines>
+			<partlines> 11 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000258213852917746063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 11 </xpartlines>
+			<partlines> 11 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000273652113921858643 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 10 </xpartlines>
+			<partlines> 11 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000280583868742954141 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 10 </xpartlines>
+			<partlines> 10 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000331106053916933646 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 9 </xpartlines>
+			<partlines> 10 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000344952279767115331 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 8 </xpartlines>
+			<partlines> 10 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345874033941911106 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 8 </xpartlines>
+			<partlines> 9 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000348861238031590646 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 7 </xpartlines>
+			<partlines> 9 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000424599242926313077 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000439947186235675422 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000481070344401667829 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000528930654355221855 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000592184315630871314 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 6 </xpartlines>
+			<partlines> 5 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000857591267505941114 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000866354211828237845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00091353575187592918 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 3 5 </xpartlines>
+			<partlines> 3 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000919185689826623655 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000955912424407952743 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118557134547170773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00153425735560147494 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0019246211065550127 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00206538502262878197 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00305508566675113469 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00533986494186010525 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 20 19 </shortpoint>
+			<shortwait> 0.0575370553514418445 0.104633534940785464 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 1 0 </shortpoint>
+			<shortwait> 0.00794664505963699544 0.0202802222566785852 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 5.09657490636763079e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2102355624254017e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.25411707266569827e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.33961400471318623e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.77695915387843852e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.10446887053792051e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.27457247455224152e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.44016581525568737e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.63431475801879578e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.55098716603418459e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 17 </xpartlines>
+			<partlines> 14 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.39112816930027905e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 17 </xpartlines>
+			<partlines> 13 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.3802045298549127e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00010617490035119024 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000111507477845693503 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000150975907355033011 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 15 </xpartlines>
+			<partlines> 11 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000151946997256986488 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000165638034054806815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000191522903992015264 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000219707138094496452 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000258213852917746063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000273652113921858643 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 10 </xpartlines>
+			<partlines> 10 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000280583868742954141 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 10 </xpartlines>
+			<partlines> 9 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000331106053916933646 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 9 </xpartlines>
+			<partlines> 9 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345874033941911106 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 8 </xpartlines>
+			<partlines> 9 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000348861238031590646 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 7 </xpartlines>
+			<partlines> 9 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000424599242926313077 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 7 </xpartlines>
+			<partlines> 8 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000439947186235675422 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 7 </xpartlines>
+			<partlines> 7 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000481070344401667829 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 7 </xpartlines>
+			<partlines> 6 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000528930654355221855 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000592184315630871314 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 6 </xpartlines>
+			<partlines> 5 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000866354211828237845 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00091353575187592918 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 4 5 </xpartlines>
+			<partlines> 4 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000919185689826623655 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 4 </xpartlines>
+			<partlines> 5 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000955912424407952743 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 4 </xpartlines>
+			<partlines> 4 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00114689035080777412 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 4 </xpartlines>
+			<partlines> 3 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00118557134547170773 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 2 4 </xpartlines>
+			<partlines> 2 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00153425735560147494 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0019246211065550127 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00206538502262878197 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00267205676641757645 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00533986494186010525 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00505700949907143815 0.00613707484094671175 </thetas>
+			<migrates> 0 124.679362905292081 34.8937725369221496 0 </migrates>
+			<growthrates> 1761.27593665903237 -84.0195656658781189 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 0 1 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.265000000000000013 </accrate>
+		<llikemle> 8.03638549246886313 </llikemle>
+		<llikedata> -3574.6386248622166 </llikedata>
+		<starttime> 1113933971 </starttime>
+		<endtime> 1113933986 </endtime>
+		<rates> <map> Tree-Arranger 53 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00238967973895868897 0.00857314189478221474 </thetas>
+			<migrates> 0 178.594354360432533 201.711534027463927 0 </migrates>
+			<growthrates> 286.265570187478374 522.584629259533926 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.174999999999999989 </accrate>
+		<llikemle> 2.24292366391492948 </llikemle>
+		<llikedata> -3366.95583651043171 </llikedata>
+		<starttime> 1113933986 </starttime>
+		<endtime> 1113934003 </endtime>
+		<rates> <map> Tree-Arranger 35 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00342828243898930232 0.00938815584810562438 </thetas>
+			<migrates> 0 359.333313010233837 170.709781451344668 0 </migrates>
+			<growthrates> 376.258762176827872 608.980866754854333 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.115000000000000005 </accrate>
+		<llikemle> 0.331232649867971241 </llikemle>
+		<llikedata> -3277.07186443434921 </llikedata>
+		<starttime> 1113934003 </starttime>
+		<endtime> 1113934021 </endtime>
+		<rates> <map> Tree-Arranger 23 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00382351077572252833 0.0101148980666503786 </thetas>
+			<migrates> 0 266.63302494001374 180.090955280637019 0 </migrates>
+			<growthrates> 589.048754968938624 732.840714912730732 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 1 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.100000000000000006 </accrate>
+		<llikemle> 0.712638597764832182 </llikemle>
+		<llikedata> -3264.58251001428516 </llikedata>
+		<starttime> 1113934021 </starttime>
+		<endtime> 1113934029 </endtime>
+		<rates> <map> Tree-Arranger 4 40 </map> </rates>
+		<estimates>
+			<thetas> 0.004074651441941083 0.00837181988651936049 </thetas>
+			<migrates> 0 118.942422560571757 182.899111430931441 0 </migrates>
+			<growthrates> 1023.10652786136313 621.693756977592329 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 1 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0539318434248482123 0.124884165154835192 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 3 0 </shortpoint>
+			<shortwait> 0.00820356565115269894 0.0167862026647611975 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 9.86113155575850924e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.14780854199569499e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2291023672518374e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.66635848884044525e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 18 </xpartlines>
+			<partlines> 19 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.22961141141907646e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.48673742353794826e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.91186994294289797e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 18 </xpartlines>
+			<partlines> 16 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.82376276028000035e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.19910298629359333e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 15 17 </xpartlines>
+			<partlines> 15 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.45549455970045656e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 16 </xpartlines>
+			<partlines> 15 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.48515305769414904e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.63938283360383356e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 16 </xpartlines>
+			<partlines> 13 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109224715491037562 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125328787711941807 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134238691874153575 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 15 </xpartlines>
+			<partlines> 11 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015730396931299724 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 15 </xpartlines>
+			<partlines> 10 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208869889075072063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 14 </xpartlines>
+			<partlines> 10 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000215094766862807343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000225539801544489551 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00024281367284408395 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 8 13 </xpartlines>
+			<partlines> 8 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000339783045919729956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 12 </xpartlines>
+			<partlines> 8 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000349763100889471208 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 12 </xpartlines>
+			<partlines> 7 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000380891571910665103 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 12 </xpartlines>
+			<partlines> 6 12 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000430449683486230515 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 6 11 </xpartlines>
+			<partlines> 6 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000457714375841571639 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477876206426440411 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000491278080421270061 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000618305099660766818 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 9 </xpartlines>
+			<partlines> 5 9 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000711694454938392642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 5 8 </xpartlines>
+			<partlines> 5 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000735873160967268547 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000752775920475186756 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 8 </xpartlines>
+			<partlines> 4 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000767134447803857842 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 7 </xpartlines>
+			<partlines> 4 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000783650082327605664 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000848841654435085577 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000957093185831530183 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00113398823503352373 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 5 </xpartlines>
+			<partlines> 2 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00120225473233144119 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125272233077632815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 5 </xpartlines>
+			<partlines> 1 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00148623295494489864 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00243994510677220068 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00250266179027173021 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00288283320062619926 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00290053205202245312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 18 21 </shortpoint>
+			<shortwait> 0.0609748580652498956 0.11278304974572527 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 3 0 </shortpoint>
+			<shortwait> 0.0085322498443588142 0.0161685184391479192 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 9.86113155575850924e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.14780854199569499e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.2291023672518374e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 19 19 </xpartlines>
+			<partlines> 19 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.22961141141907646e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 18 </xpartlines>
+			<partlines> 19 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.48673742353794826e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 18 </xpartlines>
+			<partlines> 18 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 3.91186994294289797e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 17 18 </xpartlines>
+			<partlines> 17 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.82376276028000035e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 17 </xpartlines>
+			<partlines> 17 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 5.19910298629359333e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 17 </xpartlines>
+			<partlines> 16 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 7.45549455970045656e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 16 </xpartlines>
+			<partlines> 16 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.48515305769414904e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 16 </xpartlines>
+			<partlines> 15 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.63938283360383356e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 16 </xpartlines>
+			<partlines> 14 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000109224715491037562 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 15 </xpartlines>
+			<partlines> 14 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000118188935353991798 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 13 15 </xpartlines>
+			<partlines> 13 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000125328787711941807 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 14 </xpartlines>
+			<partlines> 13 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000134238691874153575 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00015730396931299724 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208869889075072063 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000215094766862807343 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 12 </xpartlines>
+			<partlines> 11 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000225539801544489551 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00024281367284408395 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 9 12 </xpartlines>
+			<partlines> 9 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000339783045919729956 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000345347778094520265 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000349763100889471208 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000380891571910665103 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 6 11 </xpartlines>
+			<partlines> 6 11 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000430449683486230515 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000457714375841571639 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 9 </xpartlines>
+			<partlines> 7 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000477876206426440411 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 9 </xpartlines>
+			<partlines> 6 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000491278080421270061 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 9 </xpartlines>
+			<partlines> 5 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000618305099660766818 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 5 8 </xpartlines>
+			<partlines> 5 8 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000711694454938392642 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 5 7 </xpartlines>
+			<partlines> 5 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000752775920475186756 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 8 </xpartlines>
+			<partlines> 4 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000767134447803857842 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 7 </xpartlines>
+			<partlines> 4 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000783650082327605664 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 6 </xpartlines>
+			<partlines> 4 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000848841654435085577 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000957093185831530183 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00113398823503352373 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 5 </xpartlines>
+			<partlines> 2 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00120225473233144119 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00125272233077632815 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 5 </xpartlines>
+			<partlines> 1 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00148623295494489864 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 4 </xpartlines>
+			<partlines> 1 4 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00243994510677220068 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 3 </xpartlines>
+			<partlines> 1 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00250266179027173021 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00288283320062619926 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00290053205202245312 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00382351077572252833 0.0101148980666503786 </thetas>
+			<migrates> 0 266.63302494001374 180.090955280637019 0 </migrates>
+			<growthrates> 589.048754968938624 732.840714912730732 </growthrates>
+		</estimates>
+</chainsum>
+<chainpack>
+	<number> 0 2 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.234999999999999987 </accrate>
+		<llikemle> 4.42696635227687985 </llikemle>
+		<llikedata> -3492.15130892703792 </llikedata>
+		<starttime> 1113934029 </starttime>
+		<endtime> 1113934045 </endtime>
+		<rates> <map> Tree-Arranger 47 200 </map> </rates>
+		<estimates>
+			<thetas> 0.0051932689740616959 0.0095267210917029492 </thetas>
+			<migrates> 0 0.0030934444336862682 46.6542500393172119 0 </migrates>
+			<growthrates> 1401.00175791445054 338.376238311546331 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.149999999999999994 </accrate>
+		<llikemle> 11.6317746234713901 </llikemle>
+		<llikedata> -3382.65609923619468 </llikedata>
+		<starttime> 1113934046 </starttime>
+		<endtime> 1113934062 </endtime>
+		<rates> <map> Tree-Arranger 30 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00363910230419641195 0.00949364169215391289 </thetas>
+			<migrates> 0 113.28617225286888 88.1277594808196767 0 </migrates>
+			<growthrates> 411.557560954860037 233.86142612045623 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.110000000000000001 </accrate>
+		<llikemle> 0.636063492994214719 </llikemle>
+		<llikedata> -3299.08922066821424 </llikedata>
+		<starttime> 1113934062 </starttime>
+		<endtime> 1113934079 </endtime>
+		<rates> <map> Tree-Arranger 22 200 </map> </rates>
+		<estimates>
+			<thetas> 0.00344721894934559969 0.0137468371177546927 </thetas>
+			<migrates> 0 113.720626268037961 82.7670768811458402 0 </migrates>
+			<growthrates> 320.377044363307391 443.960643777887299 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 2 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<accrate> 0.174999999999999989 </accrate>
+		<llikemle> 0.411652243146974017 </llikemle>
+		<llikedata> -3301.112857149506 </llikedata>
+		<starttime> 1113934079 </starttime>
+		<endtime> 1113934087 </endtime>
+		<rates> <map> Tree-Arranger 7 40 </map> </rates>
+		<estimates>
+			<thetas> 0.00274284032250226347 0.0117803366876250235 </thetas>
+			<migrates> 0 120.893070720992441 85.9918539079791771 0 </migrates>
+			<growthrates> 26.2473005315673831 368.198849330621101 </growthrates>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 2 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 19 20 </shortpoint>
+			<shortwait> 0.0517692740770155577 0.168835716039738043 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 2 0 </shortpoint>
+			<shortwait> 0.00827170676388711348 0.0236086927070103779 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 4.65136899805835341e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.04215566305798666e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.43604566212563868e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.84336441146756059e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.24324521605382133e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.02727378794709678e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.66779769093841711e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.70209786261414327e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.7820260710313902e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.28621094428023613e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107706203473348862 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 18 </xpartlines>
+			<partlines> 12 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000114816939483791875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000126163485711554112 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0001343202142583523 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157502040375572878 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000158198275534539187 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000185924618990777787 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195139416618541169 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000195144856593906958 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208892591389243015 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000221729134487027137 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000231540140137400703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000286748264076826459 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000296568169067036559 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00032780787636151992 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00040351349954208406 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000469019333789370591 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000501156369385404206 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 10 </xpartlines>
+			<partlines> 4 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00051564770169723541 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000720822165058714829 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 9 </xpartlines>
+			<partlines> 3 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00096916231842889629 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 8 </xpartlines>
+			<partlines> 3 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00117293932612708303 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 7 </xpartlines>
+			<partlines> 3 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00150380041638617967 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00173696782656019471 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00185054353054030468 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 7 </xpartlines>
+			<partlines> 1 7 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00191839064111927254 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00194182526794987008 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 7 </xpartlines>
+			<partlines> 0 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00225255911291697688 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 6 </xpartlines>
+			<partlines> 0 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00258123599239381586 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00295407111169069311 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00324495225912871431 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00382175977738545905 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce long
+			<shortpoint> 19 20 </shortpoint>
+			<shortwait> 0.0517692740770155577 0.159664105752183261 </shortwait>
+		</shortforce>
+		<shortforce> migrate long
+			<shortpoint> 0 1 2 0 </shortpoint>
+			<shortwait> 0.00827170676388711348 0.0229534802929423874 </shortwait>
+		</shortforce>
+		<intervals>
+		<force> Coalescence </force>
+			<endtime> 4.65136899805835341e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 20 20 </xpartlines>
+			<partlines> 20 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.04215566305798666e-06 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 19 20 </xpartlines>
+			<partlines> 19 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.43604566212563868e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 18 20 </xpartlines>
+			<partlines> 18 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 1.84336441146756059e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 17 20 </xpartlines>
+			<partlines> 17 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 2.24324521605382133e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 16 20 </xpartlines>
+			<partlines> 16 20 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.02727378794709678e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 16 19 </xpartlines>
+			<partlines> 16 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 4.66779769093841711e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 15 19 </xpartlines>
+			<partlines> 15 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 6.70209786261414327e-05 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 14 19 </xpartlines>
+			<partlines> 14 19 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 8.7820260710313902e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 14 18 </xpartlines>
+			<partlines> 14 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 9.28621094428023613e-05 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 13 18 </xpartlines>
+			<partlines> 13 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000107706203473348862 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 18 </xpartlines>
+			<partlines> 12 18 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000114816939483791875 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 17 </xpartlines>
+			<partlines> 12 17 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000126163485711554112 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 16 </xpartlines>
+			<partlines> 12 16 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.0001343202142583523 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 12 15 </xpartlines>
+			<partlines> 12 15 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000157502040375572878 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 12 14 </xpartlines>
+			<partlines> 12 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000158198275534539187 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 11 14 </xpartlines>
+			<partlines> 11 14 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000185924618990777787 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 11 13 </xpartlines>
+			<partlines> 11 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000195139416618541169 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 13 </xpartlines>
+			<partlines> 10 13 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.000195144856593906958 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<newstatus> 1 </newstatus>
+			<xpartlines> 9 13 </xpartlines>
+			<partlines> 9 13 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000208892591389243015 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 10 12 </xpartlines>
+			<partlines> 10 12 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000221729134487027137 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 10 11 </xpartlines>
+			<partlines> 10 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000231540140137400703 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 9 11 </xpartlines>
+			<partlines> 9 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000286748264076826459 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 8 11 </xpartlines>
+			<partlines> 8 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000296568169067036559 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 7 11 </xpartlines>
+			<partlines> 7 11 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00032780787636151992 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 7 10 </xpartlines>
+			<partlines> 7 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00040351349954208406 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 6 10 </xpartlines>
+			<partlines> 6 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000469019333789370591 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 5 10 </xpartlines>
+			<partlines> 5 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000501156369385404206 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 4 10 </xpartlines>
+			<partlines> 4 10 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00051564770169723541 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 4 9 </xpartlines>
+			<partlines> 4 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000517726912059090507 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 9 </xpartlines>
+			<partlines> 3 9 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.000720822165058714829 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 8 </xpartlines>
+			<partlines> 3 8 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00096916231842889629 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 3 7 </xpartlines>
+			<partlines> 3 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00150380041638617967 </endtime>
+			<oldstatus> 0 </oldstatus>
+			<xpartlines> 3 6 </xpartlines>
+			<partlines> 3 6 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00173696782656019471 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 2 6 </xpartlines>
+			<partlines> 2 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00185054353054030468 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 1 7 </xpartlines>
+			<partlines> 1 7 . </partlines>
+		<force> Migration </force>
+			<endtime> 0.00191839064111927254 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<newstatus> 0 </newstatus>
+			<xpartlines> 1 6 </xpartlines>
+			<partlines> 1 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00194182526794987008 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 7 </xpartlines>
+			<partlines> 0 7 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00225255911291697688 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 6 </xpartlines>
+			<partlines> 0 6 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00258123599239381586 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 5 </xpartlines>
+			<partlines> 0 5 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00295407111169069311 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 4 </xpartlines>
+			<partlines> 0 4 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00324495225912871431 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 3 </xpartlines>
+			<partlines> 0 3 . </partlines>
+		<force> Coalescence </force>
+			<endtime> 0.00382175977738545905 </endtime>
+			<oldstatus> 1 </oldstatus>
+			<xpartlines> 0 2 </xpartlines>
+			<partlines> 0 2 . </partlines>
+		</intervals>
+	</treesum>
+		<estimates>
+			<thetas> 0.00344721894934559969 0.0137468371177546927 </thetas>
+			<migrates> 0 113.720626268037961 82.7670768811458402 0 </migrates>
+			<growthrates> 320.377044363307391 443.960643777887299 </growthrates>
+		</estimates>
+</chainsum>
+<!--  New information past this point. -->
+<replicate-summary>
+		<estimates>
+			<thetas> 0.00356219464250582358 0.00792721479150828613 </thetas>
+			<migrates> 0 121.904055450476534 100.015915512533283 0 </migrates>
+			<growthrates> 722.893455415187532 257.932768127086547 </growthrates>
+		</estimates>
+		<maxlike> -4.6390765097207467 </maxlike>
+</replicate-summary>
+<!-- End summary file
+	 Generated from run that started at: Tue Apr 19 11:04:50 2005
+	 and ended at: Tue Apr 19 11:08:07 2005 -->
+</XML-summary-file>
diff --git a/doc/html/overview.html b/doc/html/overview.html
new file mode 100644
index 0000000..d078e98
--- /dev/null
+++ b/doc/html/overview.html
@@ -0,0 +1,130 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Overview</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Metropolis-Hastings, Markov chain Monte Carlo
+ simulation, migration rate, effective population size, recombination rate,
+ maximum likelihood -->
+
+
+(<A HREF="index.html">Contents</A> | <A HREF="changes.html">Next</A>) 
+
+<H2>Overview of LAMARC </H2>
+
+<p> LAMARC is a large, complex, and powerful set of data analysis tools for coalescent analysis of populations. It can estimate a wide variety of forces and use a large number of different methods. Because of all its options it is not easy to learn. We have done our best to cover the topics that most people need on this web site, but you need to both read the documents and be willing to experiment with various methods to find what works for your data. We recommend that you read about LAMARC's str [...]
+
+<H2>Family tree</H2>
+
+<P> All versions of LAMARC combine many of the capabilities of the previous
+programs MIGRATE, RECOMBINE, and FLUCTUATE, and each has added new
+functionality as well.  Using DNA or RNA sequence data, SNPs,
+microsatellites, or K-Allele data such as electrophoretic alleles, LAMARC
+can estimate Theta, recombination rate, migration rates, and growth (or
+decline) rates for the population(s) from which the data were drawn.  These
+four forces can be estimated all together or with any sub-combination that
+includes Theta (i.e. Theta plus up to three other forces).  It can also make
+estimates using genotyped (or unphased) data.</P>
+
+<P>As of version 2.1, LAMARC can now be used for fine-scale mapping of trait
+data.  Recombination is required for this capability, which can be performed
+in the presence or absence of migration or growth.</P>
+
+<P>The older programs RECOMBINE and FLUCTUATE are no longer being actively
+supported, as their capabilities have been incorporated into LAMARC.  <A
+HREF="http://popgen.sc.fsu.edu/">MIGRATE</a> is now being maintained by
+Peter Beerli at Florida State University.  He has
+taken the program in a slightly different direction than LAMARC, and it now
+offers unique features such as the ability to run the program in parallel on
+a cluster, and a variety of migration models.  </P>
+
+<P>The primary advantage of LAMARC over the older programs is its ability to
+simultaneously estimate what the other programs estimated separately.  Even
+if you are primarily interested in only one of these, their simultaneous
+estimation means that your estimates will not be biased by the
+unacknowledged presence of the others.</P>
+
+<P>LAMARC is written in C++.  Each release includes executables which 
+should run on current versions of Linux, OS X, and MS Windows.
+For more information see
+<a href="compiling.html">Compiling Lamarc</a>.</p>
+
+<P>The program is free to download and use.  We would appreciate
+hearing about any publications resulting from it. To cite LAMARC,
+you can reference our announcement paper:</P>
+
+<P><A
+HREF="http://bioinformatics.oxfordjournals.org/cgi/content/abstract/22/6/768">
+Kuhner, M. K., 2006  <i>"LAMARC 2.0: maximum likelihood and Bayesian estimation  of
+population parameters."</i>  Bioinformatics 22(6): 768-770.</a> </P>
+
+<P>For more information about the Bayesian aspects of the program, see:</P>
+
+<P><A HREF="http://www.genetics.org/cgi/content/abstract/175/1/155">Kuhner,
+M. K. and L. P. Smith, 2007  <i>"Comparing Likelihood and Bayesian Coalescent
+Estimation of Population Parameters"</i> Genetics 175: 155-165.</a></P>
+
+<P> Bug reports, comments, critiques, and notices of papers can be
+sent to <A HREF="mailto:lamarc at u.washington.edu">lamarc at u.washington.edu</A>. </P>
+
+<P>The program can be found for download on our Web site:</P>
+
+<A HREF="http://evolution.gs.washington.edu/lamarc/">
+http://evolution.gs.washington.edu/lamarc/</A>
+
+<H4>About the Authors:</H4>
+
+<P>The LAMARC program is currently being developed at the University of
+Washington in the Felsenstein/Kuhner lab.  The list of contributors
+includes:</P>
+
+    <UL>
+      <LI> Mary K. Kuhner  (UW, current development team)
+      <LI> Jon Yamato (UW, current)
+      <LI> Bob Giansiracusa (UW, current)
+      <LI> Jim McGill (UW, current)
+      <LI> Elizabeth Walkup (UW, current)
+      <LI> Peter Beerli (FSU, former contributor, now working on MIGRATE)
+      <LI> Patrick Colacurcio (UW, former contributor)
+      <LI> Chia-Chi Li (UW, former)
+      <LI> Eric Rynes (UW, former)
+      <LI> Jim Sloan (UW, former)
+      <LI> Lucian Smith (UW, former)
+      <LI> Wang Yi (UW, former)
+    </UL>
+
+<P> Other people and organizations who have provided essential infrastructure
+support for this project:
+<ul>
+<LI> <A HREF="http://www.boost.org/libs/smart_ptr/smart_ptr.htm">Boost</a> for providing the smart_ptr implementation
+<LI> <A HREF="http://www.gnu.org/">GNU</a> for the g++ compiler, gdb debugger, and CVS source control 
+system used to develop this program.  
+<LI> Lee Thomason and Yves Berquin for their <A
+HREF="http://sourceforge.net/projects/tinyxml">TinyXML parser</A>.
+<LI> <A HREF="http://www.wxwidgets.org/">wxWidgets</a> for the tools to make
+the new GUI for our file converter.
+<LI> Matthew Austern in particular and the denizens of the
+usenet newsgroup <A
+HREF="http://groups.google.com/group/comp.lang.c++.moderated/">
+comp.lang.c++.moderated</a> in general for helpful advice.
+</ul>
+</P>
+<P>Funding for this project was provided by the National Institutes
+of Health grants GM 51929-02 and HG 01989-02, both to Joseph
+Felsenstein, and the NIH grant GM 51929-10 to Mary K. Kuhner.</P>
+
+(<A HREF="index.html">Contents</A> | <A HREF="changes.html">Next</A>)
+<!--
+//$Id: overview.html,v 1.35 2012/05/14 19:55:38 ewalkup Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/panels.html b/doc/html/panels.html
new file mode 100644
index 0000000..1ab11bf
--- /dev/null
+++ b/doc/html/panels.html
@@ -0,0 +1,163 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using
+genetic data using a maximum likelihood approach with Metropolis-Hastings
+Monte Carlo Markov chain importance sampling">  
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo,
+Metropolis-Hastings, population, parameters, migration rate, population
+size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Panel Corrections</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF"> <!-- coalescent, coalescence, Markov chain Monte
+Carlo simulation, migration rate, effective population size, recombination
+rate, maximum likelihood -->
+
+
+<P>(<A HREF="divergence.html">Back</A> | <A HREF="index.html">Contents</A> |
+<A HREF="converter_cmd.html">Next</A>)</P>
+<H2>Panel Corrections</H2>
+<UL>
+<LI><A HREF="panels.html#overview">Overview </A></LI>
+<LI><A HREF="panels.html#must">Must I Use Panel Correction? </A></LI>
+<LI> <A HREF="panels.html#converter">Defining Panel Counts via Converter</a></LI>
+<LI> <A HREF="panels.html#xmlsyntax">Defining Panel Counts via XML</a></LI>
+</UL>
+
+<H3><A NAME="overview">Overview</H3>
+
+<p>
+SNP panel data is an increasingly popular input to sequence analysis programs.
+Unlike fully sequenced data, this input type is the result of a two-step 
+process:
+
+<ul>
+<li> a <u>panel</u> of samples is chosen to span the potential variation in a given organism or population,
+and these samples are fully sequenced, after which
+<li> a <u>SNP chip</u> is created to quickly assay the variation in additional samples, but
+only at those positions at which the original panel members varied.
+</ul>
+</p>
+
+<p>
+This process is popular because it can be used to inexpensively gather precise estimates of the
+rates of variation at the measured SNPs without the cost of exhaustively sequencing all study samples.
+While this type of data can be used to measure patterns of common variation in genetic data,
+it is not ideal for coalescent analysis.
+</p>
+
+<p>
+Like other coalescent programs, LAMARC searches the space of coalescent trees, concentrating
+on those matching the input sequence data best.
+The SNP panel process removes most of the information available about recent variation,
+which is where most of the tree structure exists.
+LAMARC's panel correction feature compensates for this loss of data.
+If it is not used when it should be, estimates of Theta will be artificially low.
+</p>
+
+<H3><A NAME="must">Must I Use Panel Correction?</H3>
+
+<p> If your sequence data came from a SNP chip, you must use panel correction. 
+Otherwise you will underestimate Theta, and if you turn on estimation of growth
+you will badly underestimate the growth rate.  The strength of this bias
+is worst for small panels, but we do not recommend uncorrected use of panel SNP
+data from panels of any size.</p>
+
+<p> Additional information beyond the produced sequences is required. You 
+need to know:
+<ul>
+<li> how many (haplotype) sequences were used to create the panel, and
+<li> (if the organism has population structure) which sub-populations
+the panel members came from.
+</ul>
+</p>
+
+<p>
+This information is not always readily available.
+If you are using a commercial SNP chip, you may need to contact
+the manufacturer for this information.
+</p>
+
+<p> If you designed and fabricated a SNP chip from your own panel
+members, you should consider performing your coalescent analysis on
+the original, completely sequenced set of panel members.
+The SNP chip will be useful for measuring allele frequencies, but searching
+coalescent trees is much more accurate on data that is not missing rare
+and low-frequency SNPs.
+</p>
+
+<p> Additionally, you should determine whether your SNP chip was constructed with
+a minor allele frequency cutoff, in which case alleles occurring at low frequency
+in the panel are omitted from the chip.  It is critical to know what cutoff was
+used.</p>
+
+<H3>Possible Panel Correction Gotchas</H3>
+
+<P>
+<font color="#FF0000">Caution:</font>
+results are highly dependent on having accurate panel size information. Therefore,
+</P>
+
+<ul>
+<li><p>If you don't know how many panel sequences were used to create the SNP chip,
+<b>Do Not Estimate The Count!</b> You cannot perform an accurate coalescent
+analysis on your data.</p>
+<li><p>If your panel SNPs are limited to positions which met a Minor Allele Frequency 
+(MAF) cutoff, your panel size must be adjusted to compensate for this effect. Please
+email us at <A HREF="mailto:lamarc at u.washington.edu">lamarc at u.washington.edu</A>
+for help with this calculation.
+</p>
+</ul>
+
+<H3><A NAME="converter">Defining Panel Counts via the Converter</H3>
+
+<p> By default, panel corrections are not turned on in the converter.</p>
+
+<p><img src="batch_converter/images/DataPartitionsMigTab.png" alt="Converter before panels are turned on"/></p>
+
+<p> To turn panels on, double click inside the box labeled "Use Panels".
+The initial panel size will be zero, which is equivalent to no panel correction.</P>
+
+<p><img src="batch_converter/images/PanelCorrectionOn.png" alt="Panel Correction button on so Panel data is visible"/></p>
+
+<p>To edit the count and optionally name the panel, double click inside the box labeled "panel". It will look like this:</p>
+
+<p><img src="batch_converter/images/EditPanelCorrection.png" alt="Panel Correction edit window"/></p>
+
+
+
+<p>One possible source of confusion is that Panels are associated with Region / Population pairs, not with Contiguous Segments 
+(loci close enough to each other to be in linkage disequilibrium).
+If you combine two or more Regions in the Converter Editor, the Loci remain separate, but they are now associated with the same Region. 
+The other selected Regions are eliminated by the combine process. This means that Panel information associated with any 
+Population that spanned the initial Region set must be consolidated. As there is no way to know which member count is correct, the 
+largest value becomes the value for the combined Region. The resulting screen (created by separating the original input file into two separate files, 
+reading them both in, and combining the Regions) looks like this:</p>
+
+<p><img src="batch_converter/images/CombinedPanels.png" alt="Panel definition spanning two Loci"/></p>
+
+<p>The other side of this is that if you decide to separate multiple Loci that are associated with a single Region (as above), multiple new Regions are produced. The Panel member count associated with the original Region will be associated with the Panels now attached to each of the new Regions.</p>
+
+
+<H3><A NAME="xmlsyntax">Defining Panel Counts via XML</H3>
+
+<p>You can add a panel definition directly into an existing LAMARC XML file via hand-editing.
+See the <a href="xmlinput.html#panel-xml">XML Input Format documentation on panels</a>
+for an example.
+</p>
+
+
+<P>(<A HREF="divergence.html">Back</A> | <A HREF="index.html">Contents</A> |
+<A HREF="converter_cmd.html">Next</A>)</P>
+
+<!--
+//$Id: panels.html,v 1.7 2012/05/29 18:59:45 ewalkup Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/parallel.html b/doc/html/parallel.html
new file mode 100644
index 0000000..2f71fab
--- /dev/null
+++ b/doc/html/parallel.html
@@ -0,0 +1,298 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Parallelizing</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Metropolis-Hastings, Markov chain Monte Carlo
+ simulation, migration rate, effective population size, recombination rate,
+ maximum likelihood -->
+
+(<A HREF="tutorial2.html">Previous</A> | <A HREF="index.html">Contents</A> | 
+<A HREF="mapping.html">Next</A>)
+<H2>Parallelizing LAMARC</H2>
+
+<P>LAMARC does not come in a parallel version.  However, if you wish, you
+can do 'poor man's parallelization' by running subsets of your analysis
+on different machines and then consolidating the results.  This is done by
+making separate input files, each asking LAMARC to perform a different
+set of independent calculations and to write out a summary file. 
+Those summary files can then be concatenated together for a final LAMARC
+run that will perform the overall analysis.
+</P>
+
+<P> You can parallelize LAMARC by spreading out the calculations for
+different <A HREF="glossary.html#region">genetic regions</a> over different
+computers (one per genetic region), and/or by spreading the calculation of
+different 'replicates' over different computers.</P>
+
+<p> This is complex enough that we highly recommend you keep good notes, since it is easy to get confused. 
+Checklists and careful naming conventions help, but keeping track of a bunch of asynchronous software 
+jobs on multiple machines is inherently difficult.</p> 
+
+<p>There are three steps to the process:</p>
+<UL>
+<LI><A HREF="parallel.html#divide">Dividing up your data</A></LI>
+<LI> <A HREF="parallel.html#running">Running the programs and collecting the data</a></LI>
+<LI> <A HREF="parallel.html#combine">Concatenating your data and running the joint analysis</a></LI>
+</UL>
+
+<h3><A NAME="pyParallel">Scripts to run "Poor Man's Parallelization"</a></h3>
+
+<p>
+We have released a set of Python scripts in the <tt>scripts/pyParallel</tt> directory of
+the LAMARC distribution. They can be used to perform the steps below. However, since
+this is a difficult topic and the scripts are not thoroughly tested, you may wish to
+read the information in the sections below.
+</p>
+
+<p>
+To run the scripts, do the following steps from a terminal/command window:
+<ul>
+<li>prepare a single LAMARC file describing all regions and setting the full 
+number of replicates you want. We'll assume that file is called <tt>infile.xml</tt>
+<li> put <tt>infile.xml</tt> in its own, otherwise empty directory
+<li> from that directory, issue the command: <tt>python <path/to/>pyParallel/divide_data.py -l infile.xml -o divdir</tt>
+</ul>
+The program will produce output telling you which files to run LAMARC on next and 
+which python program to run after the LAMARC runs are completed.
+If you have both multiple regions and multiple replicates, there will be
+a second such step of multiple LAMARC runs followed by a python command.
+The last step is always a single LAMARC run to produce a final file.
+</p>
+
+<h3><A NAME="divide">Dividing up your data</h3>
+
+<h4>Regions:</h4>
+<P> If you have more than one genetic region, the first step is to divide
+up your data into its component regions.  You can do this by modifying your
+original data files and feeding them into the converter one set at a time,
+or you can, within the converter, delete the extra regions.  In the end, you
+will want one LAMARC input file for each of your regions.
+</P>
+
+<P> Do note that different genetic regions can take different amounts of
+time for LAMARC to analyze, depending on the amount of data in each.  If
+one of your regions is a 100 kbase stretch of DNA, and another region is a
+single microsatellite segment, the DNA is going to take longer to analyze
+than the microsat.  You should almost always be running preliminary LAMARC
+runs on your data anyway so you know what type and amount of analysis best
+suits your data, and these amounts will be different for every region.  In
+any event, plan to put the regions that will take a long time on your
+faster computers, and your less-data-rich regions on the slower computers.
+</P>
+
+<P> You must also be sure that the same number of chains is used for
+each analysis (e.g., 10 initial chains and 2 final chains).  </P>
+
+<h4>Replicates:</h4>
+<P> When running multiple replicates on different machines, the same input
+files can be used with two VERY IMPORTANT caveats.
+<UL>
+
+<LI>First, if your parallel machines have shared file space, you will
+overwrite your output and summary files if the runs are started from the same directory. This is a 
+bad idea anyway, because you will sometimes want to investigate runs from months ago. It's 
+best to give every run its own directory, for example <tt>snail/p2/s3/recomb</tt> for the run of 
+snail data with 2 populations, SNP segment 3, and recombination turned on. It's a bit of a 
+hassle to type, but it pays off when you discover 6 months from now that something very strange happened in that run. 
+<LI>Second, and this is particularly important: you must be <b>sure</b> that
+your parallel runs have different random number seeds.  There are two ways
+of setting the random number seed, and both have their pitfalls:
+<UL>
+
+<LI> <b>If you set the random number seed with the <seed> tag in the
+input file, you must use different input files with different seeds for
+each replicate.  What's more, the numbers must be more than four apart from
+each other.</b>  LAMARC uses as a seed the closest integer of the form 4n+1
+where n is an integer, so, for example, 1000, 1001, 1002, and 1003 will all
+be 'rounded' to 1001 before being used as a seed (see the short sketch after this list).
+
+<LI> Conversely, if there is no <seed> tag in the input file, LAMARC
+will use the system clock, so you can use the same input file for each
+replicate.  However,  <b>runs must be started at least 5 seconds apart from
+each other if you do this</b>.  LAMARC will use the current time to the
+second to get an integer, then again 'round' that integer to the closest
+integer of the form 4n+1.  Effectively, that means that every four-second
+window of time gets a single random number seed.  So, if you have a script
+that starts LAMARC runs on different computers, any it starts up in the
+same 4-second window will be computationally identical to each other, and
+therefore useless as separate analyses.  Occasionally, different
+architectures will produce different results with LAMARC even with the same
+seed, but it is, shall we say, unwise to rely on this behavior.
+
+</ul>
+</ul>
+</P>
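+
+<p>To illustrate the seed 'rounding' described above, here is a minimal Python
+sketch.  It is not part of LAMARC; it simply mimics the documented
+"closest integer of the form 4n+1" rule so you can check whether two candidate
+seeds would collapse to the same effective seed:</p>
+
+<pre>
+# Hypothetical helper mimicking the documented seed rounding (4n + 1).
+def effective_seed(seed):
+    # Round to the nearest integer of the form 4n + 1.
+    n = round((seed - 1) / 4)
+    return 4 * n + 1
+
+for s in (1000, 1001, 1002, 1003, 1005):
+    print(s, "->", effective_seed(s))
+# 1000, 1001, 1002 (and, per the text above, 1003) all map to 1001, while
+# 1005 maps to 1005; seeds for parallel runs should therefore differ by
+# enough to land in different 4-wide windows.
+</pre>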
+
+<h3><A NAME="running">Running the programs and collecting the data.</h3>
+
+<P> In order to get LAMARC to do a joint analysis over all replicates and/or
+regions, it must be given a set of trees and the numbers used to obtain
+those trees.  Each run, therefore, must write this information out by using
+summary files.
+</P>
+
+<P> Summary file writing can either be turned on from the menu (I for
+Input and Output related tasks, then W for Writing of tree summary file), or
+from the XML, using the <use-out-summary> tag (set true) and the
+<out-summary-file> tag (for the name of the file) (see the <A
+href="menu.html#io">menu</A> section of the documentation).  Be sure to save
+each summary file with a different name, or at least within a different
+directory, so you can tell where it came from later.  Nothing within the
+summary files will enable you to distinguish them later, apart from the
+different values they contain.
+</P>
+
+<P> Don't forget that if you are doing multiple replicates, each region
+needs the same number of replicates. </P>
+
+<P> Also, since the purpose of this exercise is speed, be sure to turn off
+profiling for all your parameters for these individual runs.  You can turn
+on profiling again in your final LAMARC run, but having it on here just
+wastes time--this information is not saved to the output summary files.
+</P>
+
+<P>When you have run these analyses, you should have summary files for all
+of your LAMARC runs.</P>
+
+<h3><A NAME="combine">Concatenating your data and running the joint analysis</h3>
+
+<P> You now need to engage in a bit of chicanery.  You must fool LAMARC into
+thinking that it wants to run one huge analysis with all of your regions and
+replicates, then create a summary file that pretends to be the result of
+that analysis.  If you feed that summary file to LAMARC, it will then see
+that the only analysis that is left to be done is the joint analysis, and
+skip to that step.
+</P>
+
+<h4> Combining over replicates </h4>
+
+<P> First, if you have used multiple replicates from separate runs, you must
+create a new input file for each region that claims to want to run the
+number of replicates you have already created.  This is as simple as copying
+any of the input files used to create the individual replicates, and
+changing the value of the <replicates> tag (the random number seed
+does not matter for this step).  Then you must create one summary file out
+of your collected set of summary files.  This requires a bit of editing, but
+not much.   </P>
+
+<P> Each summary file will have one <number> tag for each chain in
+that analysis, numbered 0 0 0, 0 0 1, 0 0 2, etc.  The first number is for
+the region, the second for the replicate, and the third for the chain.  For
+now, regardless of which genomic region you are dealing with, just change
+the replicate number.  So, the first summary file you can leave as-is, the
+second summary file you change to 0 1 0, 0 1 1, 0 1 2, etc., the third you
+change to 0 2 0, etc., and so on.  In addition, there will be a
+<reg_rep> tag at the beginning of the <chainsum> section, which
+will read '0 0' at first.  Again, the first number is for the region, and
+the second number is for the replicate.  You should change the second number
+(corresponding to the replicate number) to match.  Finally, delete the
+<XML-summary-file> tag from the beginning of all files but the first,
+the </XML-summary-file> tag from the end of all files but the last,
+and the <end-region> </end-region> pair of tags from the end of
+all files. </P>
+
+<P>Then, concatenate your replicate summary files, in order.  Nothing need
+be interleaved; each file should simply follow the next, in whatever order
+you gave them replicate numbers.
+</P>
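+
+<p>The renumbering described above is mechanical enough to script.  The
+following is a minimal, hypothetical Python sketch (it is not part of LAMARC
+or the pyParallel scripts) that rewrites the replicate index in the
+<number> and <reg_rep> tags of a single summary file.  It assumes those tags
+look like <number> 0 0 1 </number> and <reg_rep> 0 0 </reg_rep>; check one of
+your own summary files and adjust the patterns if they differ.  Removing the
+<XML-summary-file> and <end-region> tags and concatenating the files must
+still be done as described above.</p>
+
+<pre>
+import re
+import sys
+
+def renumber_replicate(text, rep):
+    # Set the replicate index (the middle number) in each <number> tag.
+    def fix_number(match):
+        region, chain = match.group(1), match.group(3)
+        return "<number> %s %d %s </number>" % (region, rep, chain)
+    text = re.sub(r"<number>\s*(\d+)\s+(\d+)\s+(\d+)\s*</number>",
+                  fix_number, text)
+    # Set the replicate index (the second number) in each <reg_rep> tag.
+    def fix_regrep(match):
+        return "<reg_rep> %s %d </reg_rep>" % (match.group(1), rep)
+    text = re.sub(r"<reg_rep>\s*(\d+)\s+(\d+)\s*</reg_rep>",
+                  fix_regrep, text)
+    return text
+
+# Usage (hypothetical): python renumber.py summary_rep2.xml 2 > fixed_rep2.xml
+if __name__ == "__main__":
+    with open(sys.argv[1]) as infile:
+        sys.stdout.write(renumber_replicate(infile.read(), int(sys.argv[2])))
+</pre>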
+
+<P>When you're done, you should have one large summary file which should
+match the format of <A HREF="insumfile.3rep.html"> insumfile.3rep.xml</a>
+(actual xml is <A HREF="insumfile.3rep.xml">here</a>), though it should be much, much larger (the
+example file has only two trees per replicate).</p>
+
+<P>Now you need to run LAMARC again.  Use your new input file, telling it
+to use the summary file you have created.  If you are doing this step on
+your way to concatenating your data from multiple genomic regions, you must
+again turn on summary file <B>writing</B>.  LAMARC will read in your data, and
+should skip right to the multi-replicate analysis.  If it does not,
+something has gone awry, and you should check your input summary file.  One
+trick is to compare the new output summary file to your input summary file,
+and see where they diverge--that point is presumably the source of your
+error.</P>
+
+<P>If all goes well, you should now have a new output summary file that is
+identical to your input summary file with the exception that it now has a
+<replicate-summary> tag in it containing the estimates of your
+various parameters, along with the reported maximum likelihood.  These
+differences can be seen in <a HREF="outsumfile.3rep.html">
+outsumfile.3rep.xml</a> (actual xml is <A HREF="outsumfile.3rep.xml">here</a>).  If you had more than one region, you must then
+repeat this process for each of your regions. </P>
+
+<h4>Combining over regions</h4>
+
+<P>Now you need to repeat essentially the same process to combine your
+regions together to get one overall parameter estimate.  First, you need to
+go create one input file that wants to do a full multi-region and
+multi-replicate (if necessary) analysis.  You'll need to go back to the
+converter for this, and feed it all of your data this time, instead of just
+the data from one genomic region at a time.
+</P>
+
+<P>Next, take the resulting LAMARC input file and, either in the XML or in
+the menu, set up the correct number of chains you used for
+your individual analyses, as well as the correct number of replicates.  You
+also need to tell it to read from a summary file, which we now need to
+create.  Finally, if you want error analysis, turn on profiling in this step
+(and not before, since the profiling information is not saved).
+</P>
+
+<P>To combine your region-based summary files, you'll need to again change
+the <number> tags to correctly match the region number in your data. 
+Each file should start with a set of <number> tags containing 0 0 0,
+then 0 0 1, 0 0 2, etc.  If you have multiple replicates, much further down
+you will find a new set of tags containing 0 1 0, 0 1 1, 0 1 2, etc.  Again,
+the first number is the region, the second number is the replicate, and the
+third number is the chain.  Leave these tags alone for your first region
+(the numbering starts at zero), but for each subsequent region, you'll need
+to increase the first number:  1 0 0 , 1 0 1, 1 0 2, etc. (with 1 1 0, 1 1
+1, 1 1 2, etc. following if you have multiple replicates).  Also, each
+replicate will have a <reg_rep> tag, containing 0 0, 0 1, 0 2, etc.
+(If you didn't do replicates, that still counts as 'one replicate', numbered
+0 0).  The first number in that tag (corresponding to the region) also needs
+to be changed to the appropriate region number (1 0, 1 1, 1 2, etc. for the
+summary file for your second region if it has multiple replicates, or just
+the first "1 0" if there are none. </P>
+
+<P>Finally, you need to delete the initial <XML-summary-file> tag
+from all files but the first, and the final </XML-summary-file> from
+all files but the last.  Do <b>not</b> delete any <end-region>
+</end-region> tag pairs you might have at this point (which you will
+have if you did not use replicates), and certainly do not delete the
+information in the <replicate-summary> tags. </P>
+
+<P>Now, concatenate all your files, in order.  In other words, make one big
+file that starts off with region '0', has region '1' next, and so on.  Save
+that as a unique input summary file, and tell your LAMARC input file to
+read from that.
+</p>
+
+<P>Once again, you're ready to run LAMARC for the final time. 
+If all goes well, it will cruise through the data, pausing at the end of
+each replicate to recalculate some internal modifiers (the Geyer weights,
+if you know what those are) as well as the profiles (if you have turned
+those on again), and then at the very end, it will calculate a summary over
+regions.  Again, if you have LAMARC write to a summary file, that file
+should be identical to your cobbled-together version, with the exception
+that at the very end, you will get a <region-summary> tag with your
+final estimates therein.  And you're done! </P>
+
+(<A HREF="tutorial2.html">Previous</A> | <A HREF="index.html">Contents</A> | 
+<A HREF="mapping.html">Next</A>)
+<!--
+//$Id: parallel.html,v 1.17 2011/08/18 22:02:19 ewalkup Exp $
+-->
+</BODY>
+</HTML>
+
+
diff --git a/doc/html/parameters.html b/doc/html/parameters.html
new file mode 100644
index 0000000..1ef400e
--- /dev/null
+++ b/doc/html/parameters.html
@@ -0,0 +1,182 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Population Parameters</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Metropolis-Hastings, Markov chain Monte Carlo
+ simulation, migration rate, effective population size, recombination rate,
+ maximum likelihood -->
+
+
+(<A HREF="forces.html">Previous</A> | <A HREF="index.html">Contents</A> | <A HREF="search.html">Next</A>)
+<H2>What do the population parameters mean?</H2>
+
+<P> This page explains what each of the population parameters
+estimated by LAMARC means, and what its units are.  The same information
+is available elsewhere in the documentation but it is collected here for
+convenience.</P>
+<UL>
+<LI><A HREF="parameters.html#theta">Theta</A></LI>
+<LI> <A HREF="parameters.html#m">M (migration rate)</a></LI>
+<LI> <A HREF="parameters.html#e">Epoch boundary time </a></LI>
+<LI> <A HREF="parameters.html#r">r (recombination rate)</a></LI>
+<LI> <A HREF="parameters.html#g">g (exponential growth rate)</a></LI>
+<LI> <A HREF="parameters.html#alpha">alpha (shape parameter of gamma distribution)</a></LI>
+</UL>
+
+<H3><A NAME="theta"> Theta </H3>
+
+<P> Theta is two times the mutation rate (per site per generation) times the
+number of heritable units in the population.  Thus, if you are considering
+diploid individuals (with two heritable units each) Theta is 4Nmu.  If
+you have haploids, it is 2Nmu.  For mitochondrial DNA, which is inherited
+only through females, it is Nmu (or 2N(f)mu, where N(f) is the number of females).</P>
+
+<P> If you wish to combine genes with different copy numbers, be sure to
+set the relative N appropriately or you will get a confused result.  For
+example, if you are combining mitochondrial DNA and nuclear DNA, either
+set the relative N as 1 for mtDNA and 4 for nuclear (you will then estimate
+Theta on the mtDNA scale) or as 0.25 for mtDNA and 1 for nuclear (you will
+then estimate Theta on the nuclear scale).</P>
+
+<P> Similarly, if you are combining data with different mutation rates, be
+sure to set the relative mu appropriately.  Theta will be scaled by whichever
+mutation rate you select to be 1.  So if you believe your microsatellites
+mutate 100 times faster than your DNA, either set the msat rate to 100 and
+the DNA rate to 1 (you will then estimate Theta in the DNA scale) or
+set the msat rate to 1 and the DNA rate to 0.01 (you will then estimate
+Theta in the msat scale).</P>
+
+<P> If you wish to convert Theta to a headcount of individuals, you will
+need both an external estimate of the mutation rate, and an idea of 
+the ratio between headcount population size and effective (breeding)
+population size.  LAMARC cannot help you with either of these issues.</P>
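+
+<p>As a worked illustration of that conversion, here is a small Python sketch
+for diploid nuclear data (Theta = 4Nmu).  The numbers are invented, and both
+the mutation rate and the effective-to-census ratio are external assumptions,
+not LAMARC outputs:</p>
+
+<pre>
+# Hypothetical example: convert a per-site Theta estimate to a rough census size.
+theta = 0.0034                 # Theta reported by LAMARC (example value)
+mu = 1e-8                      # assumed mutations per site per generation
+effective_over_census = 0.1    # assumed ratio of effective to census size
+
+n_effective = theta / (4.0 * mu)           # Theta = 4 * Ne * mu for diploids
+n_census = n_effective / effective_over_census
+print(n_effective, n_census)               # 85000.0 850000.0
+</pre>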
+
+<H3><A NAME="m"> M (migration rate) </H3>
+
+<P> LAMARC measures migration rate as M=m/mu, where m is the chance
+for a lineage to immigrate per generation, and mu is the mutation rate
+per site per generation.  An M of 1 means that it is as likely for
+the sequence to migrate as it is for a site on the sequence to mutate.
+If there are different mutation rates for different genes, M will be
+given relative to the gene (if any) assigned a mutation rate of 1.0.</P>
+
+<P> Biologists often want to measure migration rate as 4Nm. 
+This is useful because when 4Nm is higher than one,
+the force of migration becomes strong enough to compete with
+genetic drift.
+To get 4Nm, multiply LAMARC's estimate of
+M by its estimate of Theta for the <B> recipient</B> population.  </P>
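+
+<p>A minimal sketch of that multiplication, using invented numbers purely for
+illustration:</p>
+
+<pre>
+# Hypothetical example: 4Nm for migration into a recipient population is
+# LAMARC's M for that route times LAMARC's Theta for the recipient.
+M_into_recipient = 113.7       # example M estimate
+theta_recipient = 0.0137       # example Theta estimate for the recipient
+four_Nm = M_into_recipient * theta_recipient
+print(four_Nm)                 # about 1.56, so migration competes with drift here
+</pre>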
+
+<P> LAMARC is the wrong tool to estimate migration rates if 4Nm
+is larger than about 5, as it will go crazy trying to keep track of
+so many migration events.  If the program STRUCTURE sees no structure
+in your populations, there is probably too much migration for LAMARC
+to succeed, and you may need to pool your populations together.</P>
+
+<H3><A NAME="e"> Epoch boundary time </H3>
+
+<P> Epoch boundary times are estimated as the number of generations 
+back from the present (the time at which the data were collected) to
+the population splitting time, scaled by the mutation rate mu per
+site per generation.  Thus, an epoch boundary time of 1 means
+that on average each site will have accumulated one mutation since
+the populations split, and would represent a very ancient split.  If
+you want to know how many generations or years ago the populations
+split, you will need external information about mu.</P>
+
+<P> Note that not all scenarios will allow inference of epoch boundary
+times, migration rates, and population sizes.  For example, if the
+populations split yesterday there will be no information about their
+post-split sizes, and if they split very long ago there will be
+no information about their pre-split sizes.  If migration between them
+is very high there will be no information about the split time.  It
+is important to compare the Bayesian posteriors with their priors; a
+posterior that closely resembles its prior indicates a parameter about
+which the data can say nothing.</P>
+
+<H3><A NAME="r"> r (recombination rate) </H3>
+
+<P> LAMARC measures recombination rate as r=C/mu, where C is the
+rate of recombination per inter-site link per generation, and mu
+is the mutation rate per site per generation.  An r of 1 means that
+it is as likely for a recombination to occur next to a site as it
+is for that site to mutate, and is a rather high rate of recombination.
+If there are different mutation rates for different genes, r will be
+given relative to the gene (if any) assigned a mutation rate of 1.0.</P>
+
+<P> LAMARC does not allow r to vary among populations.</P>
+
+<P> For many comparative purposes you will want the recombination rate
+per locus rather than per site; you can obtain this by multiplying
+LAMARC's r by the number of sites minus 1 (as no meaningful recombination 
+can occur rightward of the final site).</P>
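+
+<p>A one-line sketch of that conversion, with invented numbers:</p>
+
+<pre>
+# Hypothetical example: per-locus recombination rate from LAMARC's per-site r.
+r_per_site = 0.02          # example LAMARC estimate of r
+nsites = 10000             # number of sites in the locus
+r_per_locus = r_per_site * (nsites - 1)
+print(r_per_locus)         # 199.98
+</pre>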
+
+<H3><A NAME="g"> g (exponential growth rate)</H3>
+
+<P>  This is the hard one!</P>
+
+<P> The parameter g is the exponent of an exponential growth rate
+formula which gives the Theta at a time t in terms of the modern-day
+Theta and the growth rate:</P>
+
+<P> Theta(t) = Theta(modern) exp(-gt) </P>
+
+<P> Time is measured in mutational units; that is, one unit of time is the
+time needed for each site to experience, on average, one mutation.
+The units of our mutation rate are mutations per generations, so the
+units of g are 1/generations.  Almost no one finds this intuitive.</P>
+
+<P> We define time as increasing into the past (the present is time 0)
+and as a result, a negative value of g indicates that the population has
+been shrinking (it was bigger in the past than it is now) and a positive
+value indicates that it has been growing (it was smaller in the past than
+it is now).  A g of zero indicates constant size.  This much we know
+even without knowing the mutation rate, but to say anything more we
+need to either know the mutation rate, or be comparing two populations
+with the same mutation rate (in which case, the one with higher g is
+growing faster).  If we know the mutation rate, we can plug it into the
+equation above to get a feeling for what this implies in terms of
+Theta or effective population size.</P>
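+
+<p>A short sketch of plugging numbers into the formula above.  The values are
+invented, and the conversion from generations to mutational time units relies
+on an assumed external mutation rate:</p>
+
+<pre>
+import math
+
+# Hypothetical example of Theta(t) = Theta(modern) * exp(-g * t), with time t
+# measured in mutational units (expected mutations per site).
+theta_modern = 0.01
+g = 300.0                      # example growth rate estimate
+mu = 1e-8                      # assumed mutation rate per site per generation
+
+generations_ago = 10000
+t = generations_ago * mu       # convert generations to mutational time units
+theta_then = theta_modern * math.exp(-g * t)
+print(theta_then)              # about 0.0097: smaller in the past, i.e. growth
+</pre>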
+
+<P> Be aware that LAMARC's estimates of g are biased upwards (due to
+the shape of the likelihood surface) especially with only one or a few
+genes.  If the estimate of g is positive and big, but the confidence
+intervals include zero, it's quite likely that there is in fact little or no
+growth.  If the intervals exclude zero, that finding is generally
+reliable.</P>
+
+<H3><A NAME="alpha"> alpha (shape parameter of gamma distribution) </H3>
+
+<P>  LAMARC can use the gamma distribution to represent the unknown
+variation of mutation rate among genes.  Gamma was chosen because it is
+a simple distribution that ranges from looking rather like an exponential
+(most genes mutate very little, a few mutate much more rapidly) to
+looking rather like a bell curve (there is an average mutation rate and
+genes are spread nearly symmetrically around it).</P>
+
+<P> The gamma distribution has two parameters, but we fix the mean of
+the gamma to 1, which determines one parameter.  This leaves only the shape parameter
+alpha (α).  Low values of alpha, below 1, describe a gamma distribution
+which looks somewhat exponential.  High values describe a gamma which looks somewhat like
+a normal.  So if you estimate alpha as 0.3, this means that most of your
+genes have a low mutation rate but a few are much higher; if you estimate
+alpha as 20, this means that your genes are tightly clustered around
+their mean mutation rate.  If all your regions mutate at exactly the same
+rate, the proper estimate of alpha would be infinite, but LAMARC will 
+estimate an arbitrary high value that is practically equivalent to infinity.</P>
+
+(<A HREF="forces.html">Previous</A> | <A HREF="index.html">Contents</A> | <A HREF="search.html">Next</A>)
+<!--
+//$Id: parameters.html,v 1.7 2012/05/16 17:14:01 mkkuhner Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/regions.html b/doc/html/regions.html
new file mode 100644
index 0000000..a3b0f82
--- /dev/null
+++ b/doc/html/regions.html
@@ -0,0 +1,95 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Arranging your data into regions and segments</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+
+<P>(<A HREF="menu.html">Previous</A> | <A HREF="index.html">Contents</A> | <A HREF="data_models.html">Next</A>)</P>
+
+<H2> Regions and segments </H2>
+
+<P> If anyone can suggest better terms here than "regions" and "segments" we will
+mail them a fine chocolate bar or other confection of their choice.  These are 
+unsatisfactory terms but we have failed to find better ones.</P>
+
+<P> LAMARC can handle either pieces of genetic data which are unlinked, or pieces
+which are pretty tightly linked but perhaps have some recombination.  It does
+not deal well with intermediate cases, several centimorgans apart but still
+linked.  Generally these are better treated as unlinked than as
+linked, but neither approach is perfect.  If you are able to choose the location
+of your samples, choose either definitely linked or definitely unlinked ones.</P>
+
+<P> We call unlinked areas "regions".  Each region represents an independent
+path through evolutionary history, and will be treated as such.  Adding more
+regions is the best way to improve accuracy of your estimates, except for the
+estimate of recombination (a single long region is best for recombination).</P>
+
+<P> If your organism does not have sexual reassortment of its genome, it has
+only one region no matter how many chromosomes it may have.  All parts of the
+genome are inherited as a block.  An example would be tracing the history of
+somatically dividing cells within an organism.</P>
+
+<P>  Some regions need to be treated as containing multiple "contiguous segments" of
+data, often because they need to be modelled differently; for example, a region
+could contain a stretch of DNA and two microsatellites.  These must be treated
+differently as their mutational process and rates are very different; but if they
+are adjacent and linked, they are still in the same region.  We call subunits
+within a region "contiguous segments".  They may be genetic loci, or simply arbitrary bits
+of sequence.  They do not need to be strictly contiguous (there may be gaps between
+them) but they should be close enough together to be fairly tightly linked.</P>
+
+<P> For segments to be included in the same region, they should have been
+sampled from the same haplotypes.  If a few haplotypes have missing data
+for one or more segments, you can supply the missing data as ambiguity
+symbols ('N' for nucleotide data, '?' for allelic data).  The program will
+reject attempts to put segments into the same region if the names of the
+individuals or haplotypes do not correspond.</P>
+
+<P> You should model a collection of sequences as a single region if they cover
+an area no more than a centimorgan or so in length, and you know their relative
+locations.  Each group of sequences which fits this definition should be its
+own region.</P>
+
+<P> If you are going to estimate recombination, you <b> must </b> know how far
+apart the segments within each region are, and what order they appear in.  If
+there is no recombination, this information is not necessary (though providing
+it will do no harm).  Distances should be expressed in base pairs.  Pinpoint
+accuracy is probably not necessary, and you do not have to worry about the fact
+that microsatellites with different repeat numbers are different lengths.
+It's mainly important to get the scale of the map approximately correct.  Clearly
+you will come to different conclusions about the per-site recombination rate
+if you think your two segments are separated by a gap of 100 bp or a gap of
+10,000 bp.</P>
+
+<P> Within a region, you should model as separate segments any sections which are
+definitely evolving in different ways.  This includes not only DNA versus
+SNP versus microsatellite data, but possibly also genic versus intergenic
+stretches of DNA.  Also, if a stretch of DNA is interrupted by a microsatellite,
+it is best to cut the stretch of DNA into a segment before and a segment after
+the microsatellite, rather than trying to embed one segment in another.  (Frankly,
+we are not sure the program can handle embedded segments.)  Breaking a sequence
+into additional segments slows the program a tiny bit, but has no other bad
+consequences as far as we know, so when in doubt, subdivide.  One pitfall to
+watch for, however, is that if your nucleotide segments become very short it
+is unwise to attempt to estimate base frequencies from the data--you may not have
+a large enough sample, and should substitute an overall estimate of your
+organism's base frequencies.</P>
+
+<P>(<A HREF="menu.html">Previous</A> | <A HREF="index.html">Contents</A> | <A HREF="data_models.html">Next</A>)</P>
+<!--
+//$Id: regions.html,v 1.2 2007/02/21 17:53:19 mkkuhner Exp $
+-->
+</BODY>
+</HTML>
+
diff --git a/doc/html/search.html b/doc/html/search.html
new file mode 100644
index 0000000..264f959
--- /dev/null
+++ b/doc/html/search.html
@@ -0,0 +1,375 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Search strategy</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+
+<P>(<A HREF="parameters.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="output.html">Next</A>)</P>
+
+<H2>Search Strategies for LAMARC</H2>
+
+<P> To use LAMARC effectively you must decide on a search strategy.
+This article outlines the tools that are available for fine-tuning your
+searches, and provides some practical advice on how to use them.</P>
+
+<P> Your first choice is whether to do a likelihood-based search and
+analysis or a Bayesian-based search and analysis.  The Bayesian search is a
+new option in version 2.0, and comes with its <A HREF="bayes.html">own set
+of issues</a>, but setting the search parameters for a Bayesian run is
+much simpler.  These are discussed <A HREF="search.html#bayes">below</A>,
+after a discussion of search strategies for a likelihood analysis.</p>
+
+<P> The basic unit of the LAMARC search is a "chain", a sequence of
+genealogies made using the same working values of the parameters. <A
+HREF="search.html#basic"> Basic chain control </A> involves setting the
+length, number, and kind of chains that are run.  Two <A
+HREF="search.html#advanced"> advanced techniques,</A> heating and
+replication, are available to improve results in difficult cases.</P>
+
+<P> For each set of techniques, we provide an overview of how they
+work, followed by concrete advice on what values to use.</P>
+
+
+<h2>Likelihood-based Search Strategies</h2>
+
+<H3> <A NAME="basic"> Basic Chain Control </A></H3>
+
+<P> Basic chain control involves setting the number, length,
+sampling interval, and initial discards of each chain.</P>
+
+<P> The fundamental reason for running more than one chain is
+that the Metropolis-Hastings sampling algorithm is inefficient and possibly
+biased if its driving (starting) values are too far from the true values.
+We can try to pick good driving values, but it is also
+useful to let the program itself improve its driving values.
+We do this by running multiple chains, starting each one
+with the results of the previous one.</P>
+
+<P> LAMARC provides two kinds of chains, "initial" and "final," to
+support a strategy of several brief initial chains to get
+driving values, and then one or two much lengthier
+final chains to narrow in on the final estimate.</P>
+
+<P> The length of a chain controls how much it will be able to refine
+its estimate.  The number of chains controls how many chances
+the program has to change its starting values.</P>
+
+<P> We do not normally use every genealogy from a chain to construct
+the parameter estimates.  Since successive genealogies are very
+similar, using all of them would waste time and memory.  Instead,
+we sample at intervals.  The larger the sampling interval, the
+more information each sample contains (because they are more
+independent).</P>
+
+<P> To reduce the influence of the starting genealogy, it is possible
+to discard, without sampling, the first few genealogies of each
+chain (also called "burn-in").  We recommend doing this.</P>
+
+<H4>Advice on Basic Chain Control </H4>
+
+<P> In both the initial and final chains, a reasonable sampling interval is
+one that has at least one accepted genealogy per sample.  Thus,
+if your run accepts only 5% of proposed genealogies, your sampling
+interval may as well be 20.  Sampling more often will only lead to
+sampling the same genealogy over and over.  (Recall that each Markov chain 
+evolves by rearranging the latest genealogy and deciding whether to accept 
+this rearrangement or retain the old genealogy.)
+We tend to use
+20 as a standard value for sampling interval, but if your acceptance
+rate is very low, a longer interval may be preferable.</P>
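+
+<P> The rule of thumb above amounts to a small calculation.  The following is
+a minimal Python sketch (not LAMARC code; the helper names are ours) that
+picks an interval giving roughly one accepted genealogy per recorded sample
+and then applies burn-in and thinning to a list of sampled values:</P>
+
+<pre>
+import math
+
+def sampling_interval(acceptance_rate, minimum=20):
+    # Aim for roughly one accepted genealogy per recorded sample.
+    return max(minimum, int(math.ceil(1.0 / acceptance_rate)))
+
+def thin(samples, interval, burnin_fraction=0.05):
+    # Discard the burn-in portion, then keep every interval-th sample.
+    start = int(len(samples) * burnin_fraction)
+    return samples[start::interval]
+
+print(sampling_interval(0.05))   # 5% acceptance rate -> interval of 20
+</pre>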
+
+<P> It is probably wise to set burn-in (discard) to discard the first
+5% of each chain, especially in cases with many populations.
+The early genealogies can be very unreasonable.  Burn-in is more
+important for initial than for final chains.</P>
+
+<P> We have found that a good general strategy is to run 5-10
+fairly short initial chains.  It is not worthwhile to make them
+very long, as they are only being used to get a rough idea of
+the parameters.  Once the initial chains have established
+good starting parameters, one can run 1 or 2 final chains
+which are 10x or 100x longer, and can therefore give a more
+precise estimate with more accurate error bars.</P>
+
+<P> How many initial chains are needed?  Since their purpose is to
+reach good starting values of the parameters, there should be
+enough of them that the parameter estimates have stabilized.
+A symptom of too-few chains is parameter estimates that are
+still changing directionally at the end of the initial chains:</P>
+
+<P>chain 1:  Theta = 0.0100</P>
+<P>chain 2:  Theta = 0.0157</P>
+<P>chain 3:  Theta = 0.0210</P>
+<P>chain 4:  Theta = 0.0248</P>
+
+<P> It seems likely that if more chains were run, the estimate would
+continue to increase.  You should run enough chains that the estimates
+appear to be varying around a point, rather than continually
+increasing or decreasing.</P>
+
+<P> How long does an initial chain need to be?  If it is too short, it
+will not be any help in finding better parameter estimates.
+One symptom of too-short chains is estimates that leap wildly
+from chain to chain:</P>
+
+<P>chain 1:  Theta = 0.0100</P>
+<P>chain 2:  Theta = 0.2301</P>
+<P>chain 3:  Theta = 0.0047</P>
+<P>chain 4:  Theta = 0.0599</P>
+
+<P> Estimates of recombination and migration rates do jump around more
+than estimates of Theta, but you should be suspicious of estimates
+that change by orders of magnitude.  This probably means that the
+chains sample so few trees that they get a lopsided view of the
+likelihood surface.</P>
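+
+<P> Both symptoms are easy to screen for mechanically.  Here is an
+illustrative Python sketch (not part of LAMARC) that flags a directional
+trend or order-of-magnitude jumps in a series of per-chain estimates:</P>
+
+<pre>
+def diagnose(estimates):
+    # Illustrative checks on per-chain estimates of a single parameter.
+    diffs = [b - a for a, b in zip(estimates, estimates[1:])]
+    if all(d > 0 for d in diffs) or all(d < 0 for d in diffs):
+        print("still trending -- run more initial chains")
+    if any(max(a, b) / min(a, b) > 10 for a, b in zip(estimates, estimates[1:])):
+        print("order-of-magnitude jumps -- make each chain longer")
+
+diagnose([0.0100, 0.0157, 0.0210, 0.0248])   # trending upward
+diagnose([0.0100, 0.2301, 0.0047, 0.0599])   # leaping wildly between chains
+</pre>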
+
+<P> The actual number of steps needed per chain will depend
+on your data set; data sets with few individuals, highly variable
+sequences, and few parameters will stabilize more quickly than
+others.  Please note that the default values in LAMARC are definitely
+on the short side.  We did this to avoid anxious email from
+new users saying "Nothing is happening," but you should probably
+increase the settings, and should certainly not decrease them.</P>
+
+<P> You can also use the posterior log-likelihood value ("Posterior lnL") given by the
+LAMARC progress reports (and repeated in the output report if you ask
+for "verbose" output) to diagnose too-short or too-few chains.
+This value should be no more than 2-3 times the number
+of parameters you are estimating.  For example, in a 3-population
+case with migration and recombination you are estimating 10
+parameters (3 Thetas, 6 migration rates, 1 recombination rate).
+If the posterior log-likelihood is much greater than 20 or 30
+even in your final chains, you should try increasing the number
+or length of chains.  (In complex migration cases you may never
+succeed in getting the posterior log likelihood to decrease, so this
+is not an absolute rule, but extremely high numbers--10 times
+the number of parameters or more--are definitely cause for concern.)</P>
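+
+<P> As a sketch of that rule of thumb (a hypothetical helper, not part of
+LAMARC), compare the posterior lnL to the parameter count:</P>
+
+<pre>
+def check_posterior_lnl(posterior_lnl, n_parameters):
+    # Rule of thumb: roughly 2-3 times the parameter count is acceptable;
+    # 10 times or more is definite cause for concern.
+    if posterior_lnl > 10 * n_parameters:
+        return "definitely increase the number or length of chains"
+    if posterior_lnl > 3 * n_parameters:
+        return "consider increasing the number or length of chains"
+    return "looks reasonable"
+
+# 3 populations: 3 Thetas + 6 migration rates + 1 recombination rate = 10
+print(check_posterior_lnl(45.0, 10))
+</pre>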
+
+<P> Please do <B>not</B> compare posterior log-likelihood values between runs,
+or use them for likelihood ratio tests.  They do not have any
+absolute meaning; they only show how much better the ending
+values were than the starting values.  The particular value has <i>no</i> bearing on
+whether the model is a good one.  For information about the probable
+error of your estimates, you should consult the confidence intervals
+and the profile likelihood tables in the output report.</P>
+
+<P> One final number to observe in setting your chain lengths is
+the acceptance rate -- the proportion of proposed trees which are
+retained rather than discarded.  If this is very low, your chains
+are not moving around in the search space (they are "stuck"),
+and you will need to use much longer chains to get good results.  It is
+also a good idea to consider heating, a tactic discussed under
+Advanced Chain Control.  An acceptance rate below 1% is certainly
+a problem, and below 5% is worrisome.</P>
+
+<H3><A NAME="advanced"> Advanced Chain Control </A></H3>
+
+<P> Two additional tools are available for improving your estimates,
+especially in difficult cases such as migration models with
+many populations.  Replication creates several
+replicates of each chain, using different random starting points,
+and can help when the estimates are
+highly variable from chain to chain.  Heating supplements
+the search process with additional, more adventurous searchers
+who can report back on good genealogies that they find, and
+can help when the search tends to remain "stuck" near
+its starting value or when the acceptance rate is low.</P>
+
+<P> Replication involves repeating an entire set of chains several
+times using different starting genealogies, and then combining
+the results (using the algorithm of Geyer 1991).  If you run
+the program several times and its answers are not consistent,
+replication can help.  It is also useful when estimates vary
+wildly from chain to chain.  Running N replicates will slow
+the program N-fold, plus some additional slowdown to construct
+the combined result.  (In the long run we hope to allow multiple
+replicates to be run on different processors of a multi-processor
+machine.)</P>
+
+<P> One advantage of replication is that it produces more accurate
+error bars.  The LAMARC algorithm assesses the likelihood curve very
+precisely near the values where the chain is run, but less
+accurately elsewhere.  Error bars based on a single chain may
+therefore be inaccurate (usually too narrow), since they are
+based on the curvature far from the maximum.  Combining several
+replicates gives a broader region of accuracy for the curve.</P>
+
+<P> Heating, or MCMCMC (Metropolis-Coupled MCMC or "MC cubed"), is a
+more radical change in the search strategy.  It involves
+splitting each chain into several searches running at different
+"temperatures."  One search, run at a "cold" temperature,
+explores the normal likelihood surface.  The others, run at
+"hot" temperatures, explore flattened-out, "melted" versions of the
+surface.  This enables them to search more adventurously, but
+would produce distorted parameter estimates, so we do not use
+the hot results directly for estimation.  Instead, we allow
+them to swap good genealogies into the cold search.  In this
+way, the cold search has access to possibly-good genealogies that it
+might otherwise never find.  If you run more than one "hot"
+search, these "hot" searches can be run at different temperatures,
+and these can swap genealogies among themselves as well.</P>
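+
+<P> Conceptually, each heated search accepts rearrangements against a
+flattened surface, and neighbouring searches occasionally propose to exchange
+their current genealogies.  The following is a generic Metropolis-coupled
+MCMC sketch in Python (an illustration of the general technique, not
+LAMARC's actual implementation):</P>
+
+<pre>
+import math, random
+
+def heated_accept(log_l_new, log_l_old, temperature):
+    # A hotter search flattens the surface by dividing log-likelihoods by T;
+    # T = 1.0 is the ordinary "cold" Metropolis-Hastings acceptance rule.
+    return math.log(random.random()) < (log_l_new - log_l_old) / temperature
+
+def accept_swap(log_l_cold, temp_cold, log_l_hot, temp_hot):
+    # Standard Metropolis-coupled swap test between two temperatures.
+    log_ratio = (log_l_cold - log_l_hot) * (1.0 / temp_hot - 1.0 / temp_cold)
+    return math.log(random.random()) < log_ratio
+</pre>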
+
+<P> If you use N different temperatures, the program will slow
+down approximately N-fold.  (Someday we hope to allow multiple
+replicates to be run on different processors of a multi-processor
+machine.)  However, in cases where the search
+is performing badly, we find that a heated run with
+three temperatures is often much more successful at getting good
+estimates than an unheated run of triple length.</P>
+
+<P> You can set the "static" option and determine the temperatures of the
+heated chains yourself, or use the "adaptive" option which allows the
+program to adjust the temperatures as it runs.  The adaptive scheme adjusts
+the upper temperature downwards if the swapping rate falls below 10% and
+adjusts it upwards if the swapping rate exceeds 40%.  The rationale is that
+too little swapping means that the search is stuck, whereas too much
+swapping indicates that the chains are all searching the same space and
+therefore not contributing usefully.  Temperatures are re-evaluated 
+after each chain has had approximately one chance to swap 
+(which may be too often; we don't have much experience
+with adaptive heating yet).</P>
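+
+<P> The adaptive rule described above can be sketched in a few lines of
+Python (the adjustment factor here is our own assumption, not the value
+LAMARC uses):</P>
+
+<pre>
+def adapt_temperature(temperature, swap_rate, factor=1.2):
+    # Raise the temperature if swapping exceeds 40%, lower it if swapping
+    # falls below 10%; never let a heated search drop below the cold
+    # search's temperature of 1.0.
+    if swap_rate > 0.40:
+        return temperature * factor
+    if swap_rate < 0.10:
+        return max(1.0, temperature / factor)
+    return temperature
+
+print(adapt_temperature(3.0, 0.05))   # too little swapping -> cool it down
+</pre>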
+
+<H4>Practical Advice on Advanced Chain Control</H4>
+
+<P> Consider replication if your estimates vary wildly from one
+run of the program to another or from one chain to the next,
+and if lengthening the chains is not helping much.  Replication
+is also strongly advised if you need your error bars to be
+highly accurate; results based on a single set of chains may
+have error bars that are narrower than they should be.</P>
+
+<P> Consider heating if your parameter estimates do not move
+away from their starting values, and if this is true for several
+different sets of starting values.  Also consider heating if
+your acceptance rate is very low (definitely if it is below
+1%, and probably if it is below 5%).  Heating may also
+help with the same problems that replication does.</P>
+
+<P> Consider both replication and heating if both of these are
+true; the program will take a long time to run, but it's your
+best chance of getting good estimates.  It never hurts to
+try replication and/or heating if you have enough computer time.
+They should never make the estimates worse.</P>
+
+<P> Three replicates seem to work well unless the estimate is
+terribly unstable.  Each chain will still have to be adequately
+long, although you can be a little more tolerant of values that
+leap around from chain to chain.  Replication does not
+decrease the number of initial and final chains required to
+get a good estimate.</P>
+
+<P> For heating, our limited empirical experience recommends three or more
+temperatures.  The optimal temperatures seem to depend heavily on the data
+and force model.  Temperatures of 1, 1.1, and 1.3 may be good first choices, but it might be
+necessary to include really hot temperatures if the run still gets stuck.
+It is important to check whether the chains are able to interact with each
+other; the chains will stop interacting when the temperature differences are
+too large. For difficult problems one might need to insert many heated
+chains, for example with temperatures: 1.0, 1.1, 1.3, 1.6, 3.0, 6.0,
+12.0</P>
+
+<P> Note that the final estimate is based on results from the
+cold search, so the cold search must be at a temperature of
+1.0 for correct results.  Temperatures below 1.0 are
+not allowed.  No two temperatures should be the same, as the
+duplicate is wasted.</P>
+
+<P> Adaptive heating is a new and experimental idea.  Try it if you
+have plenty of time to experiment; stick to static heating if
+you need results quickly.</P>
+
+<P> The LAMARC progress reports include a table of swapping rates which 
+indicate how often genealogies are exchanged between searches.  Try to keep
+this number in a moderate range (10% to 40%).  If more than half of the
+genealogies are swapped, your hot searches are not hot enough--they are
+exploring the same areas as the cold search.  If almost none are swapped,
+the temperature differences between the chains are too large, and the
+significant computational effort spent on the heated searches is wasted.</P>
+
+<P> Heating may allow you to decrease the length of each chain.
+You will probably still need as many initial and final chains
+as before.</P>
+
+<h2><A NAME="bayes">Bayesian-based Search Strategies</a></h2>
+
+<P> Unlike a likelihood-based search, a Bayesian search is not dependent on
+driving values during its search.  As a result, the strategy of using
+multiple chains to get the best driving values is not nearly as helpful.  In
+fact, we recommend that you perform only one long chain, instead of the
+several we recommend if you are doing a likelihood run.</P>
+
+<P>The easiest way to do this is to set the number of initial chains to
+zero, and the number of final chains to one.  Then increase the number of
+samples, the sampling interval, and the number of samples to discard by
+about a factor of two each.  The increase to the sampling interval is
+useful because LAMARC is now dividing its time between re-sampling trees and
+re-sampling the parameters, and so needs roughly twice as long to
+visit the same number of trees.  The number of samples may not need to be
+increased by quite as much as a factor of two, but doubling it will
+approximate the amount of time that would otherwise be spent on all the initial chains in the
+likelihood setup.  Increasing the number of discarded samples gives you a
+little more leeway in getting away from the initial de-novo genealogy to
+ones that better fit the data.</P>
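+
+<P>As a concrete (and purely hypothetical) example of that bookkeeping,
+converting likelihood-style settings into Bayesian-style settings might look
+like the following Python sketch; the dictionary keys are ours, not LAMARC's
+menu or file format:</P>
+
+<pre>
+likelihood_setup = {"initial_chains": 10, "final_chains": 2,
+                    "samples": 10000, "interval": 20, "discard": 1000}
+
+bayesian_setup = dict(likelihood_setup,
+                      initial_chains=0, final_chains=1,
+                      samples=likelihood_setup["samples"] * 2,
+                      interval=likelihood_setup["interval"] * 2,
+                      discard=likelihood_setup["discard"] * 2)
+print(bayesian_setup)
+</pre>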
+
+<P>Examining the <A HREF="bayes.html#curvefiles">curvefiles</a> produced by
+a Bayesian run should tell you more about whether the search parameters you
+have chosen need to be longer, or if you can get by with shorter searches.  A
+curve with multiple peaks probably needs to be run longer, and might also
+need larger intervals between samples, particularly in LAMARC runs
+that attempt to estimate many parameters.</P>
+
+<P>Both of the <A HREF="search.html#advanced">advanced techniques</a> above
+(replication and heating) can be used in a Bayesian run, but heating is
+likely to be much more helpful than replication.  Heating decreases your
+chance of getting 'stuck' in particular areas of tree-space, but keeps the
+ability to compare different areas of tree-space directly.  Replication in a
+likelihood run is designed to mitigate the effects of particular driving
+values, but a Bayesian run doesn't have analogous driving values, so a search
+here merely investigates tree-space starting from a different initial
+position, without the advantage of being able to compare trees directly. 
+Heating is therefore directly advantageous in a Bayesian run, whereas
+the only practical use of replication is to give you more feedback on
+whether your search was adequate.  A generously adequate search will
+be similar among replicates; if the replicates are very dissimilar, either
+the run is too short or the data are not informative.</P>
+
+<P>The <A
+HREF="http://tree.bio.ed.ac.uk/software/tracer/">Tracer</a> program
+of Drummond and Rambaut can be very useful in assessing whether your
+Bayesian search has run long enough, when used in conjunction with Tracer
+output from LAMARC (available as of version 2.0.3).   A trace, for any
+parameter, which is still rising or falling systematically at the end of the
+run indicates a too-short run.  Ideally, the rising or falling portion of
+the graph should be over before the end of burn-in, and by far the majority
+of the run should be exploring a plateau.</P>
+
+<P>Unfortunately, good-looking Tracer results do not guarantee a sufficiently
+long run, as the program may be exploring a plateau which does not contain
+the maximum.  There is probably no way, even in principle, to prove that
+the search has been sufficiently comprehensive, other than to use an
+exhaustive search (impractical for any but the smallest data sets).  Bad-looking
+Tracer results, on the other hand, are reliable indicators of a bad run.</P>
+
+<P>(<A HREF="parameters.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="output.html">Next</A>)</P>
+
+<!--
+//$Id: search.html,v 1.23 2007/10/04 20:54:36 lpsmith Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/tracer.html b/doc/html/tracer.html
new file mode 100644
index 0000000..721b27b
--- /dev/null
+++ b/doc/html/tracer.html
@@ -0,0 +1,132 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Using Tracer</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+<P>(<A HREF="bayes.html">Previous</A> | <A HREF="index.html">Contents</A>
+| <A HREF="genotype.html">Next</A>)</P>
+
+<H2>Using Tracer with LAMARC</H2>
+
+<P>LAMARC can produce output readable by the utility program Tracer,
+written by Andrew Rambaut and Alexei Drummond.  We do not distribute
+Tracer.  It can be found at:
+</P>
+
+<A HREF="http://tree.bio.ed.ac.uk/software/tracer/">
+http://tree.bio.ed.ac.uk/software/tracer/</A>
+
+<P>We thank the authors for producing this useful program.  It is
+written in Java and runs on most systems as long as a Java runtime
+environment is available.  Tracer is mainly intended for Bayesian
+runs.  For each parameter being estimated, it can display summary
+statistics (mean, standard deviation, etc) and a graph of the
+change in that parameter as the run progresses.  It can also
+show the correlation between pairs of parameters.  Finally, it
+produces a statistic, the Effective Sample Size (ESS), meant to
+indicate the size of a set of independent data points with the
+same information as our correlated data points.  A low ESS means
+that even though we may have sampled many data points, due to strong
+correlation between them we have not obtained much real information.
+This can happen if our search is rejecting most of its proposals,
+or if the proposals it accepts are all very close together so that
+it is not moving freely across the surface.</P>
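+
+<P>ESS is a standard quantity.  One common way of estimating it, shown in
+the Python sketch below, divides the number of samples by an
+autocorrelation-based inflation factor (this is a generic textbook estimator,
+not necessarily the exact formula Tracer uses):</P>
+
+<pre>
+def effective_sample_size(samples, max_lag=100):
+    # ESS is roughly N / (1 + 2 * sum of positive-lag autocorrelations).
+    n = len(samples)
+    mean = sum(samples) / n
+    var = sum((x - mean) ** 2 for x in samples) / n
+    if var == 0.0:
+        return float(n)
+    rho_sum = 0.0
+    for lag in range(1, min(max_lag, n - 1)):
+        cov = sum((samples[i] - mean) * (samples[i + lag] - mean)
+                  for i in range(n - lag)) / n
+        rho = cov / var
+        if rho <= 0.0:        # truncate once the correlations die out
+            break
+        rho_sum += rho
+    return n / (1.0 + 2.0 * rho_sum)
+</pre>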
+
+<P>The files that LAMARC outputs for Tracer (as of version 2.1.2) show all
+values ever sampled by the program, including any 
+chains used prior to the last final chain. 
+This means that only the final swath of what is shown was actually used for
+parameter estimation; nothing before that point contributed to the estimates.  We
+include it in the Tracer output so you can visualize the entire LAMARC run
+to better see the parameters as they move from their starting values to
+their final values, and then (hopefully) level off.  If you use Tracer to
+give you estimates, be sure to tell it to not use the initial section, as it
+will bias your estimates towards the starting values.  Other than that, the
+estimates Tracer produces should be very similar to the estimates LAMARC
+produces--the two programs use different curve-smoothing algorithms
+(LAMARC's is described <A HREF="curve-smoothing.html">in this article</a>),
+but should be drawn from the same underlying data.
+</P>
+
+<P>In both the Bayesian and Likelihood runs of LAMARC, one of the reported
+values is not an estimated parameter, but the data likelihood
+of the trees (in the Likelihood run, this is the sole output value,
+as such a run does not sample among other parameter values).  These data likelihoods
+are the log of the probability that the data would be produced on the given
+tree, and will increase from the start of the run and should eventually
+level off, just like all other parameters.</P>
+
+<P>Tracer is
+very useful in diagnosing too-short runs.  If the trace
+graph shows a rising line, rather than a line which plateaus and
+varies around a particular value, the run is too short.  Be aware,
+however, that while a bad-looking trace nearly guarantees a bad
+run, a good-looking trace cannot guarantee a good run.  If there
+is a favorable region of parameter space which was never found, no
+examination of the regions which were found can reveal this.</P>
+
+<P> Here is an example of a Bayesian run whose results for the
+parameter shown (Theta for the first population) are not satisfactory.
+Note the systematic upward trend through most of the run length.
+Even though the trace dips back down at the end, the run has
+clearly not reached a stable state yet, and should be redone with
+many more steps.</P>
+
+<p><IMG SRC="images/tracer_trend.png" ALT="upward trending trace"/></P>
+
+<P>  The authors of Tracer red-flag an Effective Sample Size (ESS)
+statistic under 100 as indicating a run which is definitely too
+short, but in some of their discussion express doubt about runs
+with ESS < 200.  We distrust runs with ESS < 200 for any
+parameter.  Unfortunately higher ESS is not a guarantee of good
+results; note that the ESS of the displayed example is 245.</P>
+<P>You should check both trace shape and ESS for each parameter,
+and doubt a run in which either one is unsatisfactory.</P>
+
+<P>Do note that the values reported from within a LAMARC run labelled
+"Number of unique sampled values for each parameter" are <b>not</b> ESS
+values, but rather exactly what that title would suggest.  In a sense, they
+could be termed the 'actual sample size', but in addition to this not being
+a useful statistical measure, nobody wants to make an acronym out of it.</P>
+
+<P> Tracer cannot be used to track parameter estimates in a likelihood
+run of LAMARC, as likelihood runs don't make running estimates of
+their parameters (it would be too expensive).  The only
+use of Tracer in a likelihood run is to monitor the fit of the data
+to the genealogy (the "data likelihood").
+Again, a rising line which does not plateau definitely indicates
+a too-short run, whereas a nice plateau suggests but does
+not guarantee a good run.</P>
+
+<P>LAMARC automatically writes a file suitable for Tracer for
+each chromosomal region it analyzes.  The files are called
+"tracefile_regionname_replicate.txt".  So the file containing
+information on the mtDNA region, first replicate, would be
+named "tracefile_mtDNA_1.txt".  This file can be provided to
+Tracer as-is using the "Import" option in Tracer's menu.</P>
+
+<P>We welcome any input on how to make Tracer more useful with
+LAMARC, and vice versa.</P>
+
+<P>(<A HREF="bayes.html">Previous</A> | <A HREF="index.html">Contents</A>
+| <A HREF="genotype.html">Next</A>)</P>
+
+<!--
+//$Id: tracer.html,v 1.9 2007/10/04 21:02:27 jay Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/trait_mapping/README.txt b/doc/html/trait_mapping/README.txt
new file mode 100644
index 0000000..15314e2
--- /dev/null
+++ b/doc/html/trait_mapping/README.txt
@@ -0,0 +1,13 @@
+Copying 'lam_conv' (or lam_conv.exe, depending on your system) into this 
+directory, and running:
+
+    ./lam_conv -b -c traitCmd.xml
+
+should produce the file 'lamarc-trait-input.xml'.  You can also omit the
+'-b' option to pull the data into the converter, and further manipulate it
+with the GUI.
+
+You can then copy 'lamarc' (or lamarc.exe) into this directory and run it on
+the lamarc-trait-input.xml file.  The result should be similar to the file
+'outfile.txt' (though not identical, due to lamarc's use of a random number
+seed).
diff --git a/doc/html/trait_mapping/lamarc-trait-input.html b/doc/html/trait_mapping/lamarc-trait-input.html
new file mode 100644
index 0000000..7eefdcc
--- /dev/null
+++ b/doc/html/trait_mapping/lamarc-trait-input.html
@@ -0,0 +1,164 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+&lt?xml version="1.0" ?&gt
+&lt!--traitExample: generated with theta=0.1 rec=0.1 trait at position 906--&gt
+&ltlamarc version="2.1"&gt
+    &ltformat&gt
+        &ltconvert-output-to-eliminate-zero&gttrue&lt/convert-output-to-eliminate-zero&gt
+    &lt/format&gt
+    &ltdata&gt
+        &ltregion name="region1"&gt
+            &lteffective-popsize&gt1.000000&lt/effective-popsize&gt
+            &ltspacing&gt
+                &ltblock name="locus_1"&gt
+                    &ltmap-position&gt 1 &lt/map-position&gt
+                    &ltoffset&gt 1 &lt/offset&gt
+                &lt/block&gt
+            &lt/spacing&gt
+            &lttraits&gt
+                &lttrait&gt
+                    &ltname&gtfunny-nose&lt/name&gt
+                &lt/trait&gt
+            &lt/traits&gt
+            &ltpopulation name="pop1"&gt
+                &ltindividual name="s0_s1"&gt
+                    &ltgenotype-resolutions&gt
+                        &lttrait-name&gtfunny-nose&lt/trait-name&gt
+                        &lthaplotypes&gt
+                            &ltpenetrance&gt1.000000&lt/penetrance&gt
+                            &ltalleles&gt affected affected &lt/alleles&gt
+                        &lt/haplotypes&gt
+                    &lt/genotype-resolutions&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s0"&gt
+                        &ltdatablock type="DNA"&gt CGTTGTACACATCATTCGTTGTACACATCATTACTTCATACTATACGTCGCACCGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCGATAATCTGCCCTCCTTTTGGTCCGGACTAAACTATTTCGTACAGCGGGTACGGTCAATACCTTGATACACGTTGTACACATCATTACTAGTTCCTGCGATGTCCGTACGGACCGCTTCCAGCGCCTTCATTGAGGTATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCAGCCGGCGCGGTTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGTTACCGTTCAATAGCCATG [...]
+                    &lt/sample&gt
+                    &ltsample name="s1"&gt
+                        &ltdatablock type="DNA"&gt CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTTTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAATACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTTCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTTCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATG [...]
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s2_s3"&gt
+                    &ltgenotype-resolutions&gt
+                        &lttrait-name&gtfunny-nose&lt/trait-name&gt
+                        &lthaplotypes&gt
+                            &ltpenetrance&gt1.000000&lt/penetrance&gt
+                            &ltalleles&gt affected affected &lt/alleles&gt
+                        &lt/haplotypes&gt
+                    &lt/genotype-resolutions&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s2"&gt
+                        &ltdatablock type="DNA"&gt CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACGGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAATACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGCCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGCCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATG [...]
+                    &lt/sample&gt
+                    &ltsample name="s3"&gt
+                        &ltdatablock type="DNA"&gt CCTTGTACAGATCATTCCTTGTACAGATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACAGTTAATACCTAGATAGACCTTGTACAGATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAATGTGCGCCAGATCAATTGTCTAGCGGACGCTCTTACGTGTTCCCCATACTTCATACTATAAACACATGGTAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATG [...]
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s4_s5"&gt
+                    &ltgenotype-resolutions&gt
+                        &lttrait-name&gtfunny-nose&lt/trait-name&gt
+                        &lthaplotypes&gt
+                            &ltpenetrance&gt1.000000&lt/penetrance&gt
+                            &ltalleles&gt affected affected &lt/alleles&gt
+                        &lt/haplotypes&gt
+                    &lt/genotype-resolutions&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s4"&gt
+                        &ltdatablock type="DNA"&gt CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACAACTTCGGACAGCGGGTACGGTTAATACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATG [...]
+                    &lt/sample&gt
+                    &ltsample name="s5"&gt
+                        &ltdatablock type="DNA"&gt CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAACACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACCACCCCCGGCGGCGCGGCTGCCCTCGGCTTATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATCATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATG [...]
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s6_s7"&gt
+                    &ltgenotype-resolutions&gt
+                        &lttrait-name&gtfunny-nose&lt/trait-name&gt
+                        &lthaplotypes&gt
+                            &ltpenetrance&gt1.000000&lt/penetrance&gt
+                            &ltalleles&gt normal affected &lt/alleles&gt
+                        &lt/haplotypes&gt
+                        &lthaplotypes&gt
+                            &ltpenetrance&gt1.000000&lt/penetrance&gt
+                            &ltalleles&gt affected normal &lt/alleles&gt
+                        &lt/haplotypes&gt
+                    &lt/genotype-resolutions&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s6"&gt
+                        &ltdatablock type="DNA"&gt CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAACACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCTTATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATCATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATG [...]
+                    &lt/sample&gt
+                    &ltsample name="s7"&gt
+                        &ltdatablock type="DNA"&gt TGTTGTACACATCATTTGTTGTACACATCATTAATTCATACTAAATGTCGCACAGATAGAGTGCCTCTGGGCCTGGTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGACTAAACTATTTCGTACAGCGGGTACGACCCATACCTAGATACACGTTGTACACATCATTAATAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGTATAATACACGGCCCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCCCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATCATCAGTATTTGCGCAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATG [...]
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s8_s9"&gt
+                    &ltgenotype-resolutions&gt
+                        &lttrait-name&gtfunny-nose&lt/trait-name&gt
+                        &lthaplotypes&gt
+                            &ltpenetrance&gt1.000000&lt/penetrance&gt
+                            &ltalleles&gt normal normal &lt/alleles&gt
+                        &lt/haplotypes&gt
+                    &lt/genotype-resolutions&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s8"&gt
+                        &ltdatablock type="DNA"&gt TGTTGTACACATCATTTGTTGTACACATCATTAATTCATACTAAATGTCGCAAAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGTCTAAACTATTTCGTACAGCGGGTACGATCCATACCTAGATACACGTTGTACACATCATTAATAGATCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTAGGAGGTATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTTTAGCGCATGCTCTTACGTGCACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATG [...]
+                    &lt/sample&gt
+                    &ltsample name="s9"&gt
+                        &ltdatablock type="DNA"&gt CGTTGTACGCATCATTCGTTGTACGCATCATTAATTCATACTAAATGTCGCACAGCTAGAGTGCCTTTGGGCCTAGTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGACTAAACTATTTCGTACAGCGGGTACGATCCATACCTAGATACACGTTGTACACATCATTAAAAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGTATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGTCACCGTTCAATAGCCATG [...]
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s10_s11"&gt
+                    &ltgenotype-resolutions&gt
+                        &lttrait-name&gtfunny-nose&lt/trait-name&gt
+                        &lthaplotypes&gt
+                            &ltpenetrance&gt1.000000&lt/penetrance&gt
+                            &ltalleles&gt normal normal &lt/alleles&gt
+                        &lt/haplotypes&gt
+                    &lt/genotype-resolutions&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s10"&gt
+                        &ltdatablock type="DNA"&gt CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTAAATGTCGCACAGCTAGAGTGCCTTTGGGCCTGTTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGACTAAACTATTTCGTACAGCGGGTACGATCCATACCTAGATACACGTTGTACACATCATTAATAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGAATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTCTTACGTGTACCCCATACTTCATACCATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATG [...]
+                    &lt/sample&gt
+                    &ltsample name="s11"&gt
+                        &ltdatablock type="DNA"&gt CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCATAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTACGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACACATCATTAATAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGAATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTGTTACGTGTACCCCATACTTCATACCATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATG [...]
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s12_s13"&gt
+                    &ltgenotype-resolutions&gt
+                        &lttrait-name&gtfunny-nose&lt/trait-name&gt
+                        &lthaplotypes&gt
+                            &ltpenetrance&gt1.000000&lt/penetrance&gt
+                            &ltalleles&gt normal normal &lt/alleles&gt
+                        &lt/haplotypes&gt
+                    &lt/genotype-resolutions&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s12"&gt
+                        &ltdatablock type="DNA"&gt CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTATCGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACATGTTGTACACATCATTAATTGTTCCTGAGATGTCCGCACAGACCGCTTCCGACGCCTTCTTTGAGGAATAATACACGGCGCGGTTGGATGTCCGCACAGACCGCTTCCATTTGTACAACCCCAGACGGCGCGGTTCCCCTCGGCATATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGAGCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTTCAATAGCCATG [...]
+                    &lt/sample&gt
+                    &ltsample name="s13"&gt
+                        &ltdatablock type="DNA"&gt CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTATCGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACACATCATTAATTGTTCCTGAGATGTCCGCACAGACCGCTTCCGACGCCTTGTTTGAGGTATAATACACGGCGCGGTTGGATGTCCGCACAGACCGCTTCCATTTGTACAACCCCAGACGGCGCGGTTCCCCTCGGCACATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCGCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTTCAATAGCCATG [...]
+                    &lt/sample&gt
+                &lt/individual&gt
+                &ltindividual name="s14_s15"&gt
+                    &ltgenotype-resolutions&gt
+                        &lttrait-name&gtfunny-nose&lt/trait-name&gt
+                        &lthaplotypes&gt
+                            &ltpenetrance&gt1.000000&lt/penetrance&gt
+                            &ltalleles&gt normal normal &lt/alleles&gt
+                        &lt/haplotypes&gt
+                    &lt/genotype-resolutions&gt
+                    &ltphase type="unknown"&gt&lt/phase&gt
+                    &ltsample name="s14"&gt
+                        &ltdatablock type="DNA"&gt CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTAACGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACGCATCATTAATTGTTCCTGAGATGTCCGCACAGACCGCTTCCGACGCCTTCTTTGAGGAATAATACACGGCGCGGTTGGATGTCCGCACAGACCGCTTCCATTTGTACAACCCCAGACGGCGCGGTTCCCCTCGGCCTATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCGCTAACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTACAATAGCCATG [...]
+                    &lt/sample&gt
+                    &ltsample name="s15"&gt
+                        &ltdatablock type="DNA"&gt CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTAACGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACACATCATTAATTGTTCCTGAGATGTCCGTACAGACCGCTTCCGACGCCTTCTTTGAGGAATAATACACGGCGCGGTTGGATGTCCGTACAGACCGCTTCCATTTGTACATCCCCAGACGGCGCGGTTCCCCTCGGCCTATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCGCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTTCAATAGCCATG [...]
+                    &lt/sample&gt
+                &lt/individual&gt
+            &lt/population&gt
+        &lt/region&gt
+    &lt/data&gt
+&lt/lamarc&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/trait_mapping/lamarc-trait-input.xml b/doc/html/trait_mapping/lamarc-trait-input.xml
new file mode 100644
index 0000000..c5efaf5
--- /dev/null
+++ b/doc/html/trait_mapping/lamarc-trait-input.xml
@@ -0,0 +1,157 @@
+<?xml version="1.0" ?>
+<!--traitExample: generated with theta=0.1 rec=0.1 trait at position 906-->
+<lamarc version="2.1">
+    <format>
+        <convert-output-to-eliminate-zero>true</convert-output-to-eliminate-zero>
+    </format>
+    <data>
+        <region name="region1">
+            <effective-popsize>1.000000</effective-popsize>
+            <spacing>
+                <block name="locus_1">
+                    <map-position> 1 </map-position>
+                    <offset> 1 </offset>
+                </block>
+            </spacing>
+            <traits>
+                <trait>
+                    <name>funny-nose</name>
+                </trait>
+            </traits>
+            <population name="pop1">
+                <individual name="s0_s1">
+                    <genotype-resolutions>
+                        <trait-name>funny-nose</trait-name>
+                        <haplotypes>
+                            <penetrance>1.000000</penetrance>
+                            <alleles> affected affected </alleles>
+                        </haplotypes>
+                    </genotype-resolutions>
+                    <phase type="unknown"></phase>
+                    <sample name="s0">
+                        <datablock type="DNA"> CGTTGTACACATCATTCGTTGTACACATCATTACTTCATACTATACGTCGCACCGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCGATAATCTGCCCTCCTTTTGGTCCGGACTAAACTATTTCGTACAGCGGGTACGGTCAATACCTTGATACACGTTGTACACATCATTACTAGTTCCTGCGATGTCCGTACGGACCGCTTCCAGCGCCTTCATTGAGGTATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCAGCCGGCGCGGTTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGTTACCGTTCAATAGCCATGAGAA [...]
+                    </sample>
+                    <sample name="s1">
+                        <datablock type="DNA"> CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTTTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAATACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTTCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTTCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAA [...]
+                    </sample>
+                </individual>
+                <individual name="s2_s3">
+                    <genotype-resolutions>
+                        <trait-name>funny-nose</trait-name>
+                        <haplotypes>
+                            <penetrance>1.000000</penetrance>
+                            <alleles> affected affected </alleles>
+                        </haplotypes>
+                    </genotype-resolutions>
+                    <phase type="unknown"></phase>
+                    <sample name="s2">
+                        <datablock type="DNA"> CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACGGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAATACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGCCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGCCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAA [...]
+                    </sample>
+                    <sample name="s3">
+                        <datablock type="DNA"> CCTTGTACAGATCATTCCTTGTACAGATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACAGTTAATACCTAGATAGACCTTGTACAGATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAATGTGCGCCAGATCAATTGTCTAGCGGACGCTCTTACGTGTTCCCCATACTTCATACTATAAACACATGGTAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATGAGAA [...]
+                    </sample>
+                </individual>
+                <individual name="s4_s5">
+                    <genotype-resolutions>
+                        <trait-name>funny-nose</trait-name>
+                        <haplotypes>
+                            <penetrance>1.000000</penetrance>
+                            <alleles> affected affected </alleles>
+                        </haplotypes>
+                    </genotype-resolutions>
+                    <phase type="unknown"></phase>
+                    <sample name="s4">
+                        <datablock type="DNA"> CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACAACTTCGGACAGCGGGTACGGTTAATACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATGAGAA [...]
+                    </sample>
+                    <sample name="s5">
+                        <datablock type="DNA"> CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAACACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACCACCCCCGGCGGCGCGGCTGCCCTCGGCTTATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATCATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATGAGAA [...]
+                    </sample>
+                </individual>
+                <individual name="s6_s7">
+                    <genotype-resolutions>
+                        <trait-name>funny-nose</trait-name>
+                        <haplotypes>
+                            <penetrance>1.000000</penetrance>
+                            <alleles> normal affected </alleles>
+                        </haplotypes>
+                        <haplotypes>
+                            <penetrance>1.000000</penetrance>
+                            <alleles> affected normal </alleles>
+                        </haplotypes>
+                    </genotype-resolutions>
+                    <phase type="unknown"></phase>
+                    <sample name="s6">
+                        <datablock type="DNA"> CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAACACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCTTATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATCATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATGAGAA [...]
+                    </sample>
+                    <sample name="s7">
+                        <datablock type="DNA"> TGTTGTACACATCATTTGTTGTACACATCATTAATTCATACTAAATGTCGCACAGATAGAGTGCCTCTGGGCCTGGTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGACTAAACTATTTCGTACAGCGGGTACGACCCATACCTAGATACACGTTGTACACATCATTAATAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGTATAATACACGGCCCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCCCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATCATCAGTATTTGCGCAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAA [...]
+                    </sample>
+                </individual>
+                <individual name="s8_s9">
+                    <genotype-resolutions>
+                        <trait-name>funny-nose</trait-name>
+                        <haplotypes>
+                            <penetrance>1.000000</penetrance>
+                            <alleles> normal normal </alleles>
+                        </haplotypes>
+                    </genotype-resolutions>
+                    <phase type="unknown"></phase>
+                    <sample name="s8">
+                        <datablock type="DNA"> TGTTGTACACATCATTTGTTGTACACATCATTAATTCATACTAAATGTCGCAAAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGTCTAAACTATTTCGTACAGCGGGTACGATCCATACCTAGATACACGTTGTACACATCATTAATAGATCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTAGGAGGTATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTTTAGCGCATGCTCTTACGTGCACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAA [...]
+                    </sample>
+                    <sample name="s9">
+                        <datablock type="DNA"> CGTTGTACGCATCATTCGTTGTACGCATCATTAATTCATACTAAATGTCGCACAGCTAGAGTGCCTTTGGGCCTAGTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGACTAAACTATTTCGTACAGCGGGTACGATCCATACCTAGATACACGTTGTACACATCATTAAAAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGTATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGTCACCGTTCAATAGCCATGAGAA [...]
+                    </sample>
+                </individual>
+                <individual name="s10_s11">
+                    <genotype-resolutions>
+                        <trait-name>funny-nose</trait-name>
+                        <haplotypes>
+                            <penetrance>1.000000</penetrance>
+                            <alleles> normal normal </alleles>
+                        </haplotypes>
+                    </genotype-resolutions>
+                    <phase type="unknown"></phase>
+                    <sample name="s10">
+                        <datablock type="DNA"> CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTAAATGTCGCACAGCTAGAGTGCCTTTGGGCCTGTTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGACTAAACTATTTCGTACAGCGGGTACGATCCATACCTAGATACACGTTGTACACATCATTAATAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGAATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTCTTACGTGTACCCCATACTTCATACCATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAA [...]
+                    </sample>
+                    <sample name="s11">
+                        <datablock type="DNA"> CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCATAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTACGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACACATCATTAATAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGAATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTGTTACGTGTACCCCATACTTCATACCATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAA [...]
+                    </sample>
+                </individual>
+                <individual name="s12_s13">
+                    <genotype-resolutions>
+                        <trait-name>funny-nose</trait-name>
+                        <haplotypes>
+                            <penetrance>1.000000</penetrance>
+                            <alleles> normal normal </alleles>
+                        </haplotypes>
+                    </genotype-resolutions>
+                    <phase type="unknown"></phase>
+                    <sample name="s12">
+                        <datablock type="DNA"> CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTATCGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACATGTTGTACACATCATTAATTGTTCCTGAGATGTCCGCACAGACCGCTTCCGACGCCTTCTTTGAGGAATAATACACGGCGCGGTTGGATGTCCGCACAGACCGCTTCCATTTGTACAACCCCAGACGGCGCGGTTCCCCTCGGCATATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGAGCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTTCAATAGCCATGAGAA [...]
+                    </sample>
+                    <sample name="s13">
+                        <datablock type="DNA"> CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTATCGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACACATCATTAATTGTTCCTGAGATGTCCGCACAGACCGCTTCCGACGCCTTGTTTGAGGTATAATACACGGCGCGGTTGGATGTCCGCACAGACCGCTTCCATTTGTACAACCCCAGACGGCGCGGTTCCCCTCGGCACATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCGCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTTCAATAGCCATGAGAA [...]
+                    </sample>
+                </individual>
+                <individual name="s14_s15">
+                    <genotype-resolutions>
+                        <trait-name>funny-nose</trait-name>
+                        <haplotypes>
+                            <penetrance>1.000000</penetrance>
+                            <alleles> normal normal </alleles>
+                        </haplotypes>
+                    </genotype-resolutions>
+                    <phase type="unknown"></phase>
+                    <sample name="s14">
+                        <datablock type="DNA"> CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTAACGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACGCATCATTAATTGTTCCTGAGATGTCCGCACAGACCGCTTCCGACGCCTTCTTTGAGGAATAATACACGGCGCGGTTGGATGTCCGCACAGACCGCTTCCATTTGTACAACCCCAGACGGCGCGGTTCCCCTCGGCCTATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCGCTAACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTACAATAGCCATGAGAA [...]
+                    </sample>
+                    <sample name="s15">
+                        <datablock type="DNA"> CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTAACGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACACATCATTAATTGTTCCTGAGATGTCCGTACAGACCGCTTCCGACGCCTTCTTTGAGGAATAATACACGGCGCGGTTGGATGTCCGTACAGACCGCTTCCATTTGTACATCCCCAGACGGCGCGGTTCCCCTCGGCCTATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCGCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTTCAATAGCCATGAGAA [...]
+                    </sample>
+                </individual>
+            </population>
+        </region>
+    </data>
+</lamarc>
diff --git a/doc/html/trait_mapping/mapfile_funny-nose.txt b/doc/html/trait_mapping/mapfile_funny-nose.txt
new file mode 100644
index 0000000..26db6a9
--- /dev/null
+++ b/doc/html/trait_mapping/mapfile_funny-nose.txt
@@ -0,0 +1,1009 @@
+Mapping results for funny-nose from the region "region1".
+This analysis for this trait was performed by collecting trees, then calculating the data likelihood of the trait marker at all allowed sites on those trees, and then averaging.
+Most likely site(s) for funny-nose:  919:923.  Relative data likelihood = 0.0025891
+The top 5% of all sites in this region:  919:936, 946:947
+The top 50% of all sites in this region:  678:709, 776:803, 811:953
+The top 95% of all sites in this region:  1:212, 214:215, 217:220, 641:953
+You have a total of 531 sites in your 95% range.
+
+Site	Data likelihood
+1	0.00090735
+2	0.00090871
+3	0.00090877
+4	0.00091575
+5	0.00091603
+6	0.00091603
+7	0.00091603
+8	0.00091756
+9	0.00092021
+10	0.00092455
+11	0.00098743
+12	0.00098762
+13	0.00099016
+14	0.00099081
+15	0.00099097
+16	0.00099295
+17	0.00099319
+18	0.00099319
+19	0.00099528
+20	0.00099638
+21	0.00100073
+22	0.00100046
+23	0.00100568
+24	0.00100568
+25	0.00100593
+26	0.00100593
+27	0.00100621
+28	0.00100657
+29	0.00100218
+30	0.00100218
+31	0.00100350
+32	0.00100350
+33	0.00100350
+34	0.00100601
+35	0.00099806
+36	0.00099806
+37	0.00099806
+38	0.00099806
+39	0.00099783
+40	0.00099639
+41	0.00099662
+42	0.00099601
+43	0.00099598
+44	0.00099598
+45	0.00099598
+46	0.00099400
+47	0.00099400
+48	0.00099400
+49	0.00099402
+50	0.00099273
+51	0.00099273
+52	0.00099273
+53	0.00099270
+54	0.00099270
+55	0.00099508
+56	0.00099508
+57	0.00099508
+58	0.00099506
+59	0.00099506
+60	0.00099506
+61	0.00099506
+62	0.00099503
+63	0.00099503
+64	0.00099416
+65	0.00099412
+66	0.00099403
+67	0.00099403
+68	0.00099403
+69	0.00099306
+70	0.00099265
+71	0.00099265
+72	0.00099240
+73	0.00099369
+74	0.00099391
+75	0.00099394
+76	0.00099198
+77	0.00099200
+78	0.00099173
+79	0.00099173
+80	0.00099254
+81	0.00099250
+82	0.00099419
+83	0.00099112
+84	0.00099112
+85	0.00099112
+86	0.00099112
+87	0.00099007
+88	0.00098821
+89	0.00098744
+90	0.00099688
+91	0.00099688
+92	0.00099689
+93	0.00099689
+94	0.00099695
+95	0.00099695
+96	0.00099695
+97	0.00099647
+98	0.00099626
+99	0.00099616
+100	0.00099683
+101	0.00099684
+102	0.00099684
+103	0.00099502
+104	0.00099521
+105	0.00099573
+106	0.00099508
+107	0.00099508
+108	0.00099508
+109	0.00099508
+110	0.00099422
+111	0.00099422
+112	0.00099423
+113	0.00099428
+114	0.00099437
+115	0.00099437
+116	0.00099437
+117	0.00099451
+118	0.00099451
+119	0.00099451
+120	0.00099451
+121	0.00099471
+122	0.00099366
+123	0.00099366
+124	0.00099377
+125	0.00099377
+126	0.00099377
+127	0.00099292
+128	0.00099292
+129	0.00099292
+130	0.00099290
+131	0.00098943
+132	0.00098943
+133	0.00098489
+134	0.00098489
+135	0.00098483
+136	0.00098483
+137	0.00098315
+138	0.00098243
+139	0.00098295
+140	0.00098223
+141	0.00098223
+142	0.00098178
+143	0.00098178
+144	0.00098187
+145	0.00098185
+146	0.00098185
+147	0.00098185
+148	0.00098185
+149	0.00098140
+150	0.00098140
+151	0.00098157
+152	0.00096886
+153	0.00096876
+154	0.00096870
+155	0.00097485
+156	0.00098667
+157	0.00098667
+158	0.00098667
+159	0.00098667
+160	0.00098666
+161	0.00098666
+162	0.00098627
+163	0.00098627
+164	0.00098134
+165	0.00098134
+166	0.00098152
+167	0.00098135
+168	0.00098135
+169	0.00098747
+170	0.00098749
+171	0.00098682
+172	0.00098682
+173	0.00098682
+174	0.00102132
+175	0.00101695
+176	0.00101697
+177	0.00101770
+178	0.00101770
+179	0.00101545
+180	0.00101545
+181	0.00101035
+182	0.00100915
+183	0.00104897
+184	0.00104897
+185	0.00104876
+186	0.00104820
+187	0.00104823
+188	0.00104791
+189	0.00068163
+190	0.00068162
+191	0.00067945
+192	0.00067945
+193	0.00067747
+194	0.00067747
+195	0.00067747
+196	0.00066959
+197	0.00066959
+198	0.00066959
+199	0.00066959
+200	0.00062615
+201	0.00062574
+202	0.00062574
+203	0.00062574
+204	0.00062640
+205	0.00062643
+206	0.00062643
+207	0.00062643
+208	0.00062673
+209	0.00062673
+210	0.00062673
+211	0.00062178
+212	0.00062178
+213	0.00062166
+214	0.00062166
+215	0.00062167
+216	0.00062155
+217	0.00062181
+218	0.00062181
+219	0.00062181
+220	0.00062181
+221	0.00062095
+222	0.00062092
+223	0.00062090
+224	0.00062090
+225	0.00062051
+226	0.00062051
+227	0.00062051
+228	0.00062058
+229	0.00061890
+230	0.00061890
+231	0.00061890
+232	0.00061890
+233	0.00061890
+234	0.00061890
+235	0.00061890
+236	0.00061561
+237	0.00061561
+238	0.00061561
+239	0.00061563
+240	0.00061440
+241	0.00061440
+242	0.00061440
+243	0.00061440
+244	0.00061440
+245	0.00061440
+246	0.00061447
+247	0.00061436
+248	0.00061436
+249	0.00061437
+250	0.00061047
+251	0.00061047
+252	0.00061047
+253	0.00061047
+254	0.00061047
+255	0.00061047
+256	0.00061047
+257	0.00061047
+258	0.00061047
+259	0.00061047
+260	0.00061047
+261	0.00061047
+262	0.00061082
+263	0.00061082
+264	0.00061074
+265	0.00061506
+266	0.00061505
+267	0.00061486
+268	0.00061486
+269	0.00005039
+270	0.00005039
+271	0.00005039
+272	0.00005039
+273	0.00005039
+274	0.00005039
+275	0.00005039
+276	0.00005039
+277	0.00005039
+278	0.00005039
+279	0.00005039
+280	0.00005039
+281	0.00005034
+282	0.00005034
+283	0.00005034
+284	0.00004841
+285	0.00004841
+286	0.00004841
+287	0.00004842
+288	0.00004842
+289	0.00004842
+290	0.00004842
+291	0.00004842
+292	0.00004842
+293	0.00004842
+294	0.00004842
+295	0.00004842
+296	0.00004842
+297	0.00004840
+298	0.00004840
+299	0.00004818
+300	0.00004818
+301	0.00004818
+302	0.00004818
+303	0.00004818
+304	0.00004819
+305	0.00004819
+306	0.00004819
+307	0.00004819
+308	0.00004819
+309	0.00004819
+310	0.00004819
+311	0.00004819
+312	0.00004819
+313	0.00004819
+314	0.00004819
+315	0.00004819
+316	0.00004819
+317	0.00004819
+318	0.00004819
+319	0.00004819
+320	0.00004819
+321	0.00004819
+322	0.00004819
+323	0.00004839
+324	0.00004839
+325	0.00004839
+326	0.00004839
+327	0.00004839
+328	0.00004839
+329	0.00004839
+330	0.00004839
+331	0.00004842
+332	0.00004842
+333	0.00004842
+334	0.00004842
+335	0.00004842
+336	0.00004842
+337	0.00004842
+338	0.00004842
+339	0.00004842
+340	0.00004842
+341	0.00004842
+342	0.00004842
+343	0.00004842
+344	0.00004842
+345	0.00004842
+346	0.00004842
+347	0.00004842
+348	0.00004842
+349	0.00004850
+350	0.00004850
+351	0.00004850
+352	0.00004850
+353	0.00004850
+354	0.00004850
+355	0.00004850
+356	0.00004850
+357	0.00004853
+358	0.00004853
+359	0.00004853
+360	0.00004853
+361	0.00004853
+362	0.00004853
+363	0.00004853
+364	0.00004853
+365	0.00004853
+366	0.00004853
+367	0.00004853
+368	0.00004853
+369	0.00004853
+370	0.00004853
+371	0.00004853
+372	0.00004853
+373	0.00004421
+374	0.00002939
+375	0.00002939
+376	0.00002939
+377	0.00002939
+378	0.00002939
+379	0.00002939
+380	0.00002939
+381	0.00002939
+382	0.00002939
+383	0.00002939
+384	0.00002939
+385	0.00002939
+386	0.00002939
+387	0.00002939
+388	0.00002939
+389	0.00002939
+390	0.00002939
+391	0.00002939
+392	0.00002941
+393	0.00002941
+394	0.00002941
+395	0.00002941
+396	0.00002941
+397	0.00002941
+398	0.00002938
+399	0.00002930
+400	0.00002930
+401	0.00002930
+402	0.00002910
+403	0.00002910
+404	0.00002910
+405	0.00002910
+406	0.00002910
+407	0.00002910
+408	0.00002910
+409	0.00002910
+410	0.00002910
+411	0.00002910
+412	0.00002910
+413	0.00002910
+414	0.00002910
+415	0.00001097
+416	0.00001097
+417	0.00001095
+418	0.00001095
+419	0.00001095
+420	0.00001095
+421	0.00001095
+422	0.00001095
+423	0.00001095
+424	0.00001095
+425	0.00001095
+426	0.00001095
+427	0.00001095
+428	0.00001095
+429	0.00001095
+430	0.00001095
+431	0.00001095
+432	0.00001095
+433	0.00001095
+434	0.00001095
+435	0.00001095
+436	0.00001096
+437	0.00001096
+438	0.00001096
+439	0.00001096
+440	0.00001096
+441	0.00001096
+442	0.00001096
+443	0.00001096
+444	0.00001096
+445	0.00001099
+446	0.00001099
+447	0.00001099
+448	0.00001099
+449	0.00001099
+450	0.00001094
+451	0.00001094
+452	0.00001094
+453	0.00001093
+454	0.00001093
+455	0.00001093
+456	0.00001093
+457	0.00001093
+458	0.00001093
+459	0.00001093
+460	0.00001093
+461	0.00001093
+462	0.00001093
+463	0.00001093
+464	0.00001093
+465	0.00001093
+466	0.00001093
+467	0.00001093
+468	0.00001093
+469	0.00001093
+470	0.00001093
+471	0.00001093
+472	0.00001093
+473	0.00001093
+474	0.00001093
+475	0.00001093
+476	0.00001092
+477	0.00001092
+478	0.00001092
+479	0.00001092
+480	0.00001092
+481	0.00001092
+482	0.00001093
+483	0.00001093
+484	0.00001093
+485	0.00001093
+486	0.00001093
+487	0.00001093
+488	0.00001093
+489	0.00001093
+490	0.00001093
+491	0.00001093
+492	0.00001093
+493	0.00001093
+494	0.00001093
+495	0.00001093
+496	0.00001093
+497	0.00001093
+498	0.00001093
+499	0.00001093
+500	0.00001093
+501	0.00001093
+502	0.00001093
+503	0.00001093
+504	0.00001094
+505	0.00001094
+506	0.00001094
+507	0.00001094
+508	0.00001094
+509	0.00001094
+510	0.00001094
+511	0.00001094
+512	0.00001094
+513	0.00001094
+514	0.00001094
+515	0.00001094
+516	0.00001094
+517	0.00001094
+518	0.00001094
+519	0.00001094
+520	0.00001095
+521	0.00001095
+522	0.00001095
+523	0.00001095
+524	0.00001095
+525	0.00001095
+526	0.00001088
+527	0.00001088
+528	0.00001088
+529	0.00001088
+530	0.00001088
+531	0.00001088
+532	0.00001088
+533	0.00001088
+534	0.00001088
+535	0.00001088
+536	0.00001088
+537	0.00001088
+538	0.00001088
+539	0.00001088
+540	0.00001088
+541	0.00001088
+542	0.00001088
+543	0.00001088
+544	0.00001088
+545	0.00001088
+546	0.00001088
+547	0.00001088
+548	0.00001087
+549	0.00001087
+550	0.00001087
+551	0.00001087
+552	0.00001087
+553	0.00001087
+554	0.00001087
+555	0.00001087
+556	0.00001087
+557	0.00001087
+558	0.00001087
+559	0.00001087
+560	0.00001087
+561	0.00001087
+562	0.00001087
+563	0.00001087
+564	0.00001097
+565	0.00001097
+566	0.00001200
+567	0.00001200
+568	0.00001200
+569	0.00001200
+570	0.00001200
+571	0.00001200
+572	0.00001220
+573	0.00001220
+574	0.00001220
+575	0.00001235
+576	0.00001234
+577	0.00001234
+578	0.00001234
+579	0.00001234
+580	0.00001234
+581	0.00001234
+582	0.00001234
+583	0.00003992
+584	0.00003992
+585	0.00003993
+586	0.00003993
+587	0.00003994
+588	0.00003989
+589	0.00003989
+590	0.00003989
+591	0.00003989
+592	0.00003989
+593	0.00003988
+594	0.00003989
+595	0.00003989
+596	0.00004000
+597	0.00004000
+598	0.00004000
+599	0.00004000
+600	0.00004001
+601	0.00004006
+602	0.00004006
+603	0.00004006
+604	0.00004006
+605	0.00004008
+606	0.00004008
+607	0.00004007
+608	0.00004007
+609	0.00004009
+610	0.00004009
+611	0.00004009
+612	0.00004009
+613	0.00004008
+614	0.00004008
+615	0.00004009
+616	0.00003933
+617	0.00003933
+618	0.00003933
+619	0.00003933
+620	0.00003930
+621	0.00003930
+622	0.00003930
+623	0.00003930
+624	0.00003757
+625	0.00003773
+626	0.00003773
+627	0.00003779
+628	0.00003779
+629	0.00003779
+630	0.00003779
+631	0.00003779
+632	0.00003779
+633	0.00003779
+634	0.00003780
+635	0.00003780
+636	0.00003780
+637	0.00003780
+638	0.00003780
+639	0.00003780
+640	0.00003780
+641	0.00143050
+642	0.00143050
+643	0.00199863
+644	0.00199909
+645	0.00199909
+646	0.00199909
+647	0.00199913
+648	0.00199913
+649	0.00199913
+650	0.00200189
+651	0.00200524
+652	0.00200524
+653	0.00200499
+654	0.00200497
+655	0.00200497
+656	0.00200497
+657	0.00200497
+658	0.00200510
+659	0.00200510
+660	0.00206220
+661	0.00206220
+662	0.00206220
+663	0.00206220
+664	0.00206220
+665	0.00206220
+666	0.00206220
+667	0.00206578
+668	0.00206578
+669	0.00206686
+670	0.00206686
+671	0.00206686
+672	0.00206687
+673	0.00206687
+674	0.00206854
+675	0.00206951
+676	0.00206910
+677	0.00206950
+678	0.00233942
+679	0.00233942
+680	0.00233945
+681	0.00233966
+682	0.00233963
+683	0.00233865
+684	0.00233864
+685	0.00233864
+686	0.00233874
+687	0.00233874
+688	0.00233874
+689	0.00233876
+690	0.00233856
+691	0.00233856
+692	0.00233856
+693	0.00233856
+694	0.00233856
+695	0.00233859
+696	0.00233850
+697	0.00233850
+698	0.00233850
+699	0.00233850
+700	0.00233849
+701	0.00233806
+702	0.00233806
+703	0.00233806
+704	0.00234071
+705	0.00234121
+706	0.00234121
+707	0.00234121
+708	0.00234121
+709	0.00234121
+710	0.00233015
+711	0.00233015
+712	0.00233091
+713	0.00233104
+714	0.00233104
+715	0.00233104
+716	0.00233104
+717	0.00233104
+718	0.00233089
+719	0.00230795
+720	0.00230795
+721	0.00230795
+722	0.00230629
+723	0.00230629
+724	0.00230629
+725	0.00230638
+726	0.00230638
+727	0.00230650
+728	0.00228429
+729	0.00228429
+730	0.00228448
+731	0.00228017
+732	0.00228071
+733	0.00231557
+734	0.00231552
+735	0.00231552
+736	0.00231551
+737	0.00231551
+738	0.00231551
+739	0.00231908
+740	0.00231908
+741	0.00231908
+742	0.00231908
+743	0.00231909
+744	0.00231909
+745	0.00231909
+746	0.00231909
+747	0.00231909
+748	0.00231909
+749	0.00231909
+750	0.00231712
+751	0.00231834
+752	0.00231834
+753	0.00231749
+754	0.00231750
+755	0.00231750
+756	0.00231750
+757	0.00231753
+758	0.00231406
+759	0.00231410
+760	0.00231874
+761	0.00231959
+762	0.00231959
+763	0.00232174
+764	0.00232204
+765	0.00232204
+766	0.00232679
+767	0.00232679
+768	0.00232679
+769	0.00232679
+770	0.00232679
+771	0.00232679
+772	0.00232640
+773	0.00232640
+774	0.00232640
+775	0.00232813
+776	0.00233179
+777	0.00233346
+778	0.00233442
+779	0.00233442
+780	0.00233444
+781	0.00233443
+782	0.00233528
+783	0.00233528
+784	0.00233528
+785	0.00233528
+786	0.00233528
+787	0.00233525
+788	0.00233525
+789	0.00233525
+790	0.00233528
+791	0.00233571
+792	0.00233341
+793	0.00233307
+794	0.00233324
+795	0.00233331
+796	0.00233331
+797	0.00233331
+798	0.00233462
+799	0.00233249
+800	0.00233249
+801	0.00233249
+802	0.00233249
+803	0.00233249
+804	0.00233127
+805	0.00233127
+806	0.00233101
+807	0.00233101
+808	0.00233028
+809	0.00232963
+810	0.00232963
+811	0.00251261
+812	0.00251261
+813	0.00251439
+814	0.00251439
+815	0.00251439
+816	0.00251439
+817	0.00251439
+818	0.00251320
+819	0.00251320
+820	0.00251320
+821	0.00251320
+822	0.00251311
+823	0.00251311
+824	0.00251361
+825	0.00251261
+826	0.00251261
+827	0.00251261
+828	0.00251087
+829	0.00251087
+830	0.00251092
+831	0.00251092
+832	0.00251092
+833	0.00250843
+834	0.00250843
+835	0.00250843
+836	0.00250913
+837	0.00251042
+838	0.00249511
+839	0.00249555
+840	0.00249555
+841	0.00249555
+842	0.00249555
+843	0.00249555
+844	0.00249555
+845	0.00248404
+846	0.00248404
+847	0.00248442
+848	0.00248442
+849	0.00248443
+850	0.00248409
+851	0.00248409
+852	0.00248409
+853	0.00248409
+854	0.00248472
+855	0.00248472
+856	0.00248472
+857	0.00248472
+858	0.00248472
+859	0.00248472
+860	0.00248550
+861	0.00248550
+862	0.00248673
+863	0.00248491
+864	0.00248491
+865	0.00248491
+866	0.00248746
+867	0.00248746
+868	0.00248746
+869	0.00248746
+870	0.00248746
+871	0.00248747
+872	0.00248747
+873	0.00249386
+874	0.00249386
+875	0.00249385
+876	0.00249379
+877	0.00249371
+878	0.00249371
+879	0.00249417
+880	0.00249417
+881	0.00249417
+882	0.00249738
+883	0.00249738
+884	0.00249765
+885	0.00249765
+886	0.00249765
+887	0.00249765
+888	0.00249765
+889	0.00249765
+890	0.00249544
+891	0.00249382
+892	0.00249382
+893	0.00249382
+894	0.00249391
+895	0.00249391
+896	0.00249391
+897	0.00249391
+898	0.00249355
+899	0.00249371
+900	0.00249356
+901	0.00249356
+902	0.00249336
+903	0.00249336
+904	0.00258377
+905	0.00258377
+906	0.00258375
+907	0.00257633
+908	0.00257425
+909	0.00257425
+910	0.00257425
+911	0.00257420
+912	0.00257617
+913	0.00257617
+914	0.00256608
+915	0.00256608
+916	0.00256608
+917	0.00256608
+918	0.00256860
+919	0.00258910
+920	0.00258910
+921	0.00258910
+922	0.00258910
+923	0.00258910
+924	0.00258870
+925	0.00258870
+926	0.00258870
+927	0.00258870
+928	0.00258865
+929	0.00258877
+930	0.00258877
+931	0.00258855
+932	0.00258855
+933	0.00258855
+934	0.00258855
+935	0.00258856
+936	0.00258856
+937	0.00258808
+938	0.00258808
+939	0.00258797
+940	0.00258803
+941	0.00258803
+942	0.00258803
+943	0.00258803
+944	0.00258790
+945	0.00258790
+946	0.00258833
+947	0.00258833
+948	0.00258802
+949	0.00258802
+950	0.00258802
+951	0.00258802
+952	0.00258385
+953	0.00258330
+954	0.00031511
+955	0.00031463
+956	0.00031463
+957	0.00031463
+958	0.00031536
+959	0.00031536
+960	0.00031473
+961	0.00031473
+962	0.00031323
+963	0.00031312
+964	0.00031312
+965	0.00031081
+966	0.00013327
+967	0.00013327
+968	0.00013326
+969	0.00013307
+970	0.00013307
+971	0.00013311
+972	0.00013311
+973	0.00013311
+974	0.00013301
+975	0.00012873
+976	0.00012873
+977	0.00012793
+978	0.00012793
+979	0.00012191
+980	0.00012191
+981	0.00012190
+982	0.00012195
+983	0.00012540
+984	0.00012540
+985	0.00012540
+986	0.00012695
+987	0.00012694
+988	0.00012694
+989	0.00012691
+990	0.00012681
+991	0.00012681
+992	0.00013026
+993	0.00013025
+994	0.00013013
+995	0.00013013
+996	0.00013015
+997	0.00013015
+998	0.00013015
+999	0.00013005
+1000	0.00012872
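The header of the mapfile above summarizes the mapping algorithm: collect sampled trees, compute the data likelihood of the trait marker at every allowed site on each tree, and average over trees; each row of the table is one such averaged value.  As a quick check on the header's site count, the 95% ranges 1:212, 214:215, 217:220 and 641:953 contain 212 + 2 + 4 + 313 = 531 sites, matching the stated total.  The C++ sketch below is a hypothetical illustration of the averaging step only; it is not LAMARC source code, and the names (AverageOverTrees, perTree) are invented.

#include <cstddef>
#include <iostream>
#include <vector>

// Average the per-site data likelihoods of a trait marker over a set of
// sampled trees.  perTree[t][s] is the data likelihood of the trait at
// allowed site s scored on tree t; in a real run these values would come
// from evaluating the marker on each collected tree.
std::vector<double> AverageOverTrees(const std::vector<std::vector<double> >& perTree)
{
    std::vector<double> avg(perTree.front().size(), 0.0);
    for (std::size_t t = 0; t < perTree.size(); ++t)
        for (std::size_t s = 0; s < avg.size(); ++s)
            avg[s] += perTree[t][s];
    for (std::size_t s = 0; s < avg.size(); ++s)
        avg[s] /= static_cast<double>(perTree.size());
    return avg;
}

int main()
{
    // Two imaginary trees scored at three allowed sites.
    std::vector<std::vector<double> > perTree;
    double tree1[] = { 0.0010, 0.0026, 0.0003 };
    double tree2[] = { 0.0008, 0.0024, 0.0005 };
    perTree.push_back(std::vector<double>(tree1, tree1 + 3));
    perTree.push_back(std::vector<double>(tree2, tree2 + 3));

    std::vector<double> avg = AverageOverTrees(perTree);
    for (std::size_t s = 0; s < avg.size(); ++s)
        std::cout << "site " << (s + 1) << "\t" << avg[s] << "\n";
    // The "most likely site(s)" are those with the largest averaged value.
    return 0;
}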
diff --git a/doc/html/trait_mapping/outfile.txt b/doc/html/trait_mapping/outfile.txt
new file mode 100644
index 0000000..c2de3e4
--- /dev/null
+++ b/doc/html/trait_mapping/outfile.txt
@@ -0,0 +1,376 @@
+************************************************************
+     LAMARC:  Maximum Likelihood Parameter Estimation
+    using Hastings-Metropolis Markov Chain Monte Carlo
+************************************************************
+version 2.1.3
+
+
+        Program started on Fri Jul 11 15:04:05 2008
+                finished on Fri Jul 11 16:20:32 2008
+
+===========================================================================
+Maximum Likelihood Estimates (MLEs) of Parameters
+===========================================================================
+
+
+                 Theta   
+Population       Theta1  
+Best Val (MLE)  0.120621 
+    Percentile           
+   99%   0.005  0.071116 
+   95%   0.025  0.080300 
+   90%   0.050  0.085565 
+   75%   0.125  0.094684 
+   50%   0.250  0.104548 
+           MLE  0.120621 
+   50%   0.750  0.139607 
+   75%   0.875  0.155109 
+   90%   0.950  0.173419 
+   95%   0.975  0.186433 
+   99%   0.995  0.215416 
+Theta1:  Theta for pop1
+
+
+                  Rec    
+                RecRate  
+Best Val (MLE)  0.054609 
+    Percentile           
+   99%   0.005  0.022539 
+   95%   0.025  0.028421 
+   90%   0.050  0.031831 
+   75%   0.125  0.037767 
+   50%   0.250  0.044197 
+           MLE  0.054609 
+   50%   0.750  0.066650 
+   75%   0.875  0.076151 
+   90%   0.950  0.086889 
+   95%   0.975  0.094172 
+   99%   0.995  0.109338 
+Rec:  Recombination Rate
+
+
+

+===========================================================================
+Profile Likelihoods
+===========================================================================
+
+The first listed parameter is the parameter held constant.
+
+===========================================================================
+                           Overall Profile Tables                          
+===========================================================================
+
+Overall: Theta for pop1 (Theta1):
+---------------------------------
+
+Percentile profile: Points shown indicate approximate confidence
+  intervals.
+
+Log Likelihoods:
+
+Percentile  Theta1   |   Ln(L)   
+  0.005    0.071116  | -3.219535 
+  0.025    0.080300  | -1.822809 
+  0.050    0.085565  | -1.254813 
+  0.125    0.094684  | -0.563671 
+  0.250    0.104548  | -0.129503 
+   MLE     0.120621  |  0.097962 
+  0.750    0.139607  | -0.129520 
+  0.875    0.155109  | -0.563700 
+  0.950    0.173419  | -1.254805 
+  0.975    0.186433  | -1.822791 
+  0.995    0.215416  | -3.219533 
+
+Best fit parameters with Theta1 held constant:
+
+Percentile  Theta1   |    Rec    
+  0.005    0.071116  |  0.060394 
+  0.025    0.080300  |  0.059115 
+  0.050    0.085565  |  0.058371 
+  0.125    0.094684  |  0.057177 
+  0.250    0.104548  |  0.056067 
+   MLE     0.120621  |  0.054609 
+  0.750    0.139607  |  0.053246 
+  0.875    0.155109  |  0.052318 
+  0.950    0.173419  |  0.051384 
+  0.975    0.186433  |  0.050810 
+  0.995    0.215416  |  0.049755 
+
+
+

+===========================================================================
+Profile Likelihoods	(cont.)
+(overall profile tables)
+===========================================================================
+
+Overall: RecRate (Rec):
+-----------------------
+
+Percentile profile: Points shown indicate approximate confidence
+  intervals.
+
+Log Likelihoods:
+
+Percentile   Rec     |   Ln(L)   
+  0.005    0.022539  | -3.219546 
+  0.025    0.028421  | -1.822793 
+  0.050    0.031831  | -1.254816 
+  0.125    0.037767  | -0.563699 
+  0.250    0.044197  | -0.129513 
+   MLE     0.054609  |  0.097962 
+  0.750    0.066650  | -0.129509 
+  0.875    0.076151  | -0.563680 
+  0.950    0.086889  | -1.254805 
+  0.975    0.094172  | -1.822802 
+  0.995    0.109338  | -3.219510 
+
+Best fit parameters with Rec held constant:
+
+Percentile   Rec     |   Theta1  
+  0.005    0.022539  |  0.126765 
+  0.025    0.028421  |  0.125720 
+  0.050    0.031831  |  0.125087 
+  0.125    0.037767  |  0.123945 
+  0.250    0.044197  |  0.122676 
+   MLE     0.054609  |  0.120621 
+  0.750    0.066650  |  0.118366 
+  0.875    0.076151  |  0.116735 
+  0.950    0.086889  |  0.115070 
+  0.975    0.094172  |  0.114053 
+  0.995    0.109338  |  0.112237 
+
+
+

+===========================================================================
+Mapping results
+===========================================================================
+
+This analysis for this trait was performed by collecting trees, then
+  calculating the data likelihood of the trait marker at all allowed sites
+  on those trees, and then averaging.
+
+Mapping results for funny-nose from the region "region1".
+Most likely site(s) for funny-nose:  919:923.  Relative data likelihood =
+  0.0025891
+The top 5% of all sites in this region:  919:936, 946:947
+The top 50% of all sites in this region:  678:709, 776:803, 811:953
+The top 95% of all sites in this region:  1:212, 214:215, 217:220,
+  641:953
+You have a total of 531 sites in your 95% range.
+
+

+===========================================================================
+User Specified Options
+===========================================================================
+
+Force specific options:
+-----------------------
+
+Starting Parameters:
+
+
+
+
+
+Search Strategy:
+----------------
+
+Type of analysis:  Likelihood
+
+Number of replicates:  1
+
+Markov Chain Parameters:
+                           Initial           Final
+  Number of Chains              10               2
+     Trees Sampled             500           10000
+Sampling Increment              20              20
+   Trees Discarded            1000            1000
+
+Random number seed             1215813845                              
+
+
+File options:
+-------------
+
+Read data from file:           lamarc-trait-input.xml                  
+Wrote to mapping file(s):      mapfile_funny-nose.txt                  
+Wrote to Tracer file(s):       tracefile_region1_1.txt                 
+
+
+Output summary options:
+-----------------------
+
+Calculate profile likelihoods? Yes                                     
+

+===========================================================================
+Data summary
+===========================================================================
+
+Number of populations:                                      1
+Number of regions:                                          1
+Total number of samples in all regions                     16
+
+
+Linked-segments by region:
+--------------------------
+
+                              Datatype       MuRate      
+region1          |
+                 |    #1 |      DNA            1         
+                 |funny-nose  K-Allele         1         
+---------------------------------------------------------
+
+
+Region summary:
+---------------
+
+Population      Variable  Relative  Relative  Pairwise   Sample  
+   Region       markers      Ne     rec rate   theta      size   
+1 pop1
+   1 region1      232        1        1.0    0.0699168     16    
+-----------------------------------------------------------------
+
+
+Summary of Data Model Parameters:
+---------------------------------
+
+
+---------------------------------------------------------------------------
+
+Parameters of a Felsenstein '84 model for the #1 segment of the region1 region
+Base frequencies: 0.255625, 0.254875, 0.217562, 0.271937
+Transition/transversion ratio: 2
+---------------------------------------------------------------------------
+
+Parameters of a K Allele model for the funny-nose segment of the region1 region
+Maximum number of bins:  2
+---------------------------------------------------------------------------
+

+===========================================================================
+Run Reports by Region
+===========================================================================
+
+"Accepted" is the observed rate at which any change to the proposal trees
+  was accepted.
+
+15:04:07  Beginning region: region1
+Initial Chain 1 of 10:
+15:04:37  Predicted end of chains for this region:  Fri Jul 11 15:27:23 2008
+
+15:04:37  Accepted  4.94% | Posterior lnL 32.1234746 | Data lnL -3410.96008
+No trees discarded due to limit violations.
+Tree-Arranger accepted            427/8302 proposals
+Tree-Size-Arranger accepted        67/1698 proposals
+  Theta       0.040525  Rec         0.034930  
+  
+Initial Chain 2 of 10:
+15:05:12  Predicted end of chains for this region:  Fri Jul 11 15:29:19 2008
+
+15:05:12  Accepted 10.73% | Posterior lnL 8.30673609 | Data lnL -3253.77947
+No trees discarded due to limit violations.
+Tree-Arranger accepted            755/8335 proposals
+Tree-Size-Arranger accepted       318/1665 proposals
+  Theta       0.094397  Rec         0.036964  
+  
+Initial Chain 3 of 10:
+15:05:51  Predicted end of chains for this region:  Fri Jul 11 15:31:00 2008
+
+15:05:51  Accepted  8.28% | Posterior lnL 0.48837554 | Data lnL -3210.75409
+No trees discarded due to limit violations.
+Tree-Arranger accepted            621/8313 proposals
+Tree-Size-Arranger accepted       207/1687 proposals
+  Theta       0.116520  Rec         0.031698  
+  
+Initial Chain 4 of 10:
+15:06:33  Predicted end of chains for this region:  Fri Jul 11 15:32:25 2008
+
+15:06:33  Accepted   7.4% | Posterior lnL 0.11244487 | Data lnL -3212.79690
+No trees discarded due to limit violations.
+Tree-Arranger accepted            600/8328 proposals
+Tree-Size-Arranger accepted       140/1672 proposals
+  Theta       0.117805  Rec         0.037398  
+  
+Initial Chain 5 of 10:
+15:07:13  Predicted end of chains for this region:  Fri Jul 11 15:32:58 2008
+
+15:07:13  Accepted  7.88% | Posterior lnL 0.07669565 | Data lnL -3209.77447
+No trees discarded due to limit violations.
+Tree-Arranger accepted            650/8414 proposals
+Tree-Size-Arranger accepted       138/1586 proposals
+  Theta       0.114143  Rec         0.042701  
+  
+Initial Chain 6 of 10:
+15:07:56  Predicted end of chains for this region:  Fri Jul 11 15:33:43 2008
+
+15:07:56  Accepted  7.15% | Posterior lnL 0.13397531 | Data lnL -3188.94408
+No trees discarded due to limit violations.
+Tree-Arranger accepted            510/8330 proposals
+Tree-Size-Arranger accepted       205/1670 proposals
+  Theta       0.125161  Rec         0.037918  
+  
+Initial Chain 7 of 10:
+15:08:48  Predicted end of chains for this region:  Fri Jul 11 15:35:15 2008
+
+15:08:48  Accepted  7.19% | Posterior lnL 1.32304194 | Data lnL -3158.82103
+No trees discarded due to limit violations.
+Tree-Arranger accepted            588/8350 proposals
+Tree-Size-Arranger accepted       131/1650 proposals
+  Theta       0.117245  Rec         0.061214  
+  
+Initial Chain 8 of 10:
+15:09:43  Predicted end of chains for this region:  Fri Jul 11 15:36:41 2008
+
+15:09:43  Accepted  8.92% | Posterior lnL 0.12144198 | Data lnL -3155.91152
+No trees discarded due to limit violations.
+Tree-Arranger accepted            682/8318 proposals
+Tree-Size-Arranger accepted       210/1682 proposals
+  Theta       0.112106  Rec         0.069393  
+  
+Initial Chain 9 of 10:
+15:10:39  Predicted end of chains for this region:  Fri Jul 11 15:37:54 2008
+
+15:10:39  Accepted  9.88% | Posterior lnL 0.05952281 | Data lnL -3149.13796
+No trees discarded due to limit violations.
+Tree-Arranger accepted            662/8316 proposals
+Tree-Size-Arranger accepted       326/1684 proposals
+  Theta       0.120211  Rec         0.068023  
+  
+Initial Chain 10 of 10:
+15:11:33  Predicted end of chains for this region:  Fri Jul 11 15:38:42 2008
+
+15:11:33  Accepted  9.41% | Posterior lnL 0.27065307 | Data lnL -3139.63750
+No trees discarded due to limit violations.
+Tree-Arranger accepted            616/8323 proposals
+Tree-Size-Arranger accepted       325/1677 proposals
+  Theta       0.105426  Rec         0.076304  
+  
+Final Chain 1 of 2:
+15:28:09  Predicted end of chains for this region:  Fri Jul 11 15:43:40 2008
+
+15:28:09  Accepted 9.998% | Posterior lnL 0.33009443 | Data lnL -3125.44391
+No trees discarded due to limit violations.
+Tree-Arranger accepted          13481/166785 proposals
+Tree-Size-Arranger accepted      6516/33215 proposals
+  Theta       0.113947  Rec         0.061273  
+  
+Final Chain 2 of 2:
+15:44:03  Accepted  8.22% | Posterior lnL 0.09796165 | Data lnL -3121.52779
+No trees discarded due to limit violations.
+Tree-Arranger accepted          11251/166512 proposals
+Tree-Size-Arranger accepted      5189/33488 proposals
+  Theta       0.120621  Rec         0.054609  
+  
+15:44:03  Most likely site(s) for funny-nose:  919:923.  Relative data
+  likelihood = 0.0025891
+          The top 5% of all sites in this region:  919:936, 946:947
+          The top 50% of all sites in this region:  678:709, 776:803, 811:953
+          The top 95% of all sites in this region:  1:212, 214:215, 217:220,
+  641:953
+          You have a total of 531 sites in your 95% range.
+15:44:03  Beginning profiling, please be patient
+16:01:59  Finished profile 1 of 2.  Predicted end of this set of profiles: 
+  Fri Jul 11 16:19:55 2008
+
+16:20:32  Finished profile 2 of 2.
+
+
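Two small arithmetic notes on reading the outfile above, using only numbers already printed in it.  First, each chain's "Accepted" percentage is simply accepted proposals over total proposals: for Initial Chain 1, (427 + 67) / (8302 + 1698) = 494 / 10000 = 4.94%, and the 10000 proposals are the 500 sampled trees times the sampling increment of 20.  Second, the percentile rows in the profile tables appear to line up with likelihood-ratio confidence bounds for a single free parameter; the drop in Ln(L) from the MLE to a pair of percentile points is roughly half the corresponding chi-square quantile with 1 degree of freedom:

    Ln L(MLE) - Ln L(0.025/0.975 points) = 0.097962 - (-1.822809) ≈ 1.92 ≈ 3.841 / 2
    Ln L(MLE) - Ln L(0.005/0.995 points) = 0.097962 - (-3.219535) ≈ 3.32 ≈ 6.635 / 2

Here 3.841 and 6.635 are the standard 95% and 99% chi-square critical values for 1 degree of freedom, so those rows bracket approximate 95% and 99% intervals, consistent with the tables' own note that the points shown indicate approximate confidence intervals.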

diff --git a/doc/html/trait_mapping/traitCmd.html b/doc/html/trait_mapping/traitCmd.html
new file mode 100644
index 0000000..37c82cb
--- /dev/null
+++ b/doc/html/trait_mapping/traitCmd.html
@@ -0,0 +1,129 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<BODY>
+<pre>
+&ltlamarc-converter-cmd&gt
+    &ltlamarc-header-comment&gttraitExample: generated with theta=0.1 rec=0.1 trait at position 906&lt/lamarc-header-comment&gt
+    &ltoutfile&gtlamarc-trait-input.xml&lt/outfile&gt
+    &lttraits&gt
+        &lttrait-info&gt
+            &ltname&gtfunny-nose&lt/name&gt
+            &ltallele&gtnormal&lt/allele&gt
+            &ltallele&gtaffected&lt/allele&gt
+        &lt/trait-info&gt
+        &ltphenotype&gt
+            &ltname&gtstraight&lt/name&gt
+            &ltgenotype-resolutions&gt
+                &lttrait-name&gtfunny-nose&lt/trait-name&gt
+                &lthaplotypes&gt
+                    &ltalleles&gt normal normal &lt/alleles&gt
+                    &ltpenetrance&gt 1.0 &lt/penetrance&gt
+                &lt/haplotypes&gt
+            &lt/genotype-resolutions&gt
+        &lt/phenotype&gt
+        &ltphenotype&gt
+            &ltname&gtbent&lt/name&gt
+            &ltgenotype-resolutions&gt
+                &lttrait-name&gtfunny-nose&lt/trait-name&gt
+                &lthaplotypes&gt
+                    &ltalleles&gt normal affected &lt/alleles&gt
+                    &ltpenetrance&gt 1.0 &lt/penetrance&gt
+                &lt/haplotypes&gt
+                &lthaplotypes&gt
+                    &ltalleles&gt affected normal &lt/alleles&gt
+                    &ltpenetrance&gt 1.0 &lt/penetrance&gt
+                &lt/haplotypes&gt
+            &lt/genotype-resolutions&gt
+        &lt/phenotype&gt
+        &ltphenotype&gt
+            &ltname&gtbroken&lt/name&gt
+            &ltgenotype-resolutions&gt
+                &lttrait-name&gtfunny-nose&lt/trait-name&gt
+                &lthaplotypes&gt
+                    &ltalleles&gt affected affected &lt/alleles&gt
+                    &ltpenetrance&gt 1.0 &lt/penetrance&gt
+                &lt/haplotypes&gt
+            &lt/genotype-resolutions&gt
+        &lt/phenotype&gt
+    &lt/traits&gt
+    &ltregions&gt
+        &ltregion&gt
+            &ltname&gtregion1&lt/name&gt
+            &lteffective-popsize&gt1&lt/effective-popsize&gt
+            &lttrait-location&gt
+                &lttrait-name&gtfunny-nose&lt/trait-name&gt
+            &lt/trait-location&gt
+            &ltsegments&gt
+                &ltsegment datatype="dna"&gt
+                    &ltname&gtlocus_1&lt/name&gt
+                    &ltmarkers&gt1000&lt/markers&gt
+                    &ltfirst-position-scanned&gt1&lt/first-position-scanned&gt
+                    &ltmap-position&gt1&lt/map-position&gt
+                &lt/segment&gt
+            &lt/segments&gt
+        &lt/region&gt
+    &lt/regions&gt
+    &ltpopulations&gt
+        &ltpopulation&gtpop1&lt/population&gt
+    &lt/populations&gt
+    &ltindividuals&gt
+        &ltindividual&gt
+            &ltname&gts0_s1&lt/name&gt
+            &ltsample&gt&ltname&gts0&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts1&lt/name&gt&lt/sample&gt
+            &lthas-phenotype&gtbroken&lt/has-phenotype&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gts2_s3&lt/name&gt
+            &ltsample&gt&ltname&gts2&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts3&lt/name&gt&lt/sample&gt
+            &lthas-phenotype&gtbroken&lt/has-phenotype&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gts4_s5&lt/name&gt
+            &ltsample&gt&ltname&gts4&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts5&lt/name&gt&lt/sample&gt
+            &lthas-phenotype&gtbroken&lt/has-phenotype&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gts6_s7&lt/name&gt
+            &ltsample&gt&ltname&gts6&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts7&lt/name&gt&lt/sample&gt
+            &lthas-phenotype&gtbent&lt/has-phenotype&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gts10_s11&lt/name&gt
+            &ltsample&gt&ltname&gts10&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts11&lt/name&gt&lt/sample&gt
+            &lthas-phenotype&gtstraight&lt/has-phenotype&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gts12_s13&lt/name&gt
+            &ltsample&gt&ltname&gts12&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts13&lt/name&gt&lt/sample&gt
+            &lthas-phenotype&gtstraight&lt/has-phenotype&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gts14_s15&lt/name&gt
+            &ltsample&gt&ltname&gts14&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts15&lt/name&gt&lt/sample&gt
+            &lthas-phenotype&gtstraight&lt/has-phenotype&gt
+        &lt/individual&gt
+        &ltindividual&gt
+            &ltname&gts8_s9&lt/name&gt
+            &ltsample&gt&ltname&gts8&lt/name&gt&lt/sample&gt
+            &ltsample&gt&ltname&gts9&lt/name&gt&lt/sample&gt
+            &lthas-phenotype&gtstraight&lt/has-phenotype&gt
+        &lt/individual&gt
+    &lt/individuals&gt
+    &ltinfiles&gt
+        &ltinfile format="migrate" datatype="dna" sequence-alignment="sequential"&gt
+            &ltname&gttraitData.mig&lt/name&gt
+            &ltpopulation-matching type="single"&gtpop1&lt/population-matching&gt
+            &ltsegments-matching type="single"&gtlocus_1&lt/segments-matching&gt
+        &lt/infile&gt
+    &lt/infiles&gt
+&lt/lamarc-converter-cmd&gt
+</pre>
+</BODY>
+</HTML>
diff --git a/doc/html/trait_mapping/traitCmd.xml b/doc/html/trait_mapping/traitCmd.xml
new file mode 100644
index 0000000..005d4b2
--- /dev/null
+++ b/doc/html/trait_mapping/traitCmd.xml
@@ -0,0 +1,122 @@
+<lamarc-converter-cmd>
+    <lamarc-header-comment>traitExample: generated with theta=0.1 rec=0.1 trait at position 906</lamarc-header-comment>
+    <outfile>lamarc-trait-input.xml</outfile>
+    <traits>
+        <trait-info>
+            <name>funny-nose</name>
+            <allele>normal</allele>
+            <allele>affected</allele>
+        </trait-info>
+        <phenotype>
+            <name>straight</name>
+            <genotype-resolutions>
+                <trait-name>funny-nose</trait-name>
+                <haplotypes>
+                    <alleles> normal normal </alleles>
+                    <penetrance> 1.0 </penetrance>
+                </haplotypes>
+            </genotype-resolutions>
+        </phenotype>
+        <phenotype>
+            <name>bent</name>
+            <genotype-resolutions>
+                <trait-name>funny-nose</trait-name>
+                <haplotypes>
+                    <alleles> normal affected </alleles>
+                    <penetrance> 1.0 </penetrance>
+                </haplotypes>
+                <haplotypes>
+                    <alleles> affected normal </alleles>
+                    <penetrance> 1.0 </penetrance>
+                </haplotypes>
+            </genotype-resolutions>
+        </phenotype>
+        <phenotype>
+            <name>broken</name>
+            <genotype-resolutions>
+                <trait-name>funny-nose</trait-name>
+                <haplotypes>
+                    <alleles> affected affected </alleles>
+                    <penetrance> 1.0 </penetrance>
+                </haplotypes>
+            </genotype-resolutions>
+        </phenotype>
+    </traits>
+    <regions>
+        <region>
+            <name>region1</name>
+            <effective-popsize>1</effective-popsize>
+            <trait-location>
+                <trait-name>funny-nose</trait-name>
+            </trait-location>
+            <segments>
+                <segment datatype="dna">
+                    <name>locus_1</name>
+                    <markers>1000</markers>
+                    <first-position-scanned>1</first-position-scanned>
+                    <map-position>1</map-position>
+                </segment>
+            </segments>
+        </region>
+    </regions>
+    <populations>
+        <population>pop1</population>
+    </populations>
+    <individuals>
+        <individual>
+            <name>s0_s1</name>
+            <sample><name>s0</name></sample>
+            <sample><name>s1</name></sample>
+            <has-phenotype>broken</has-phenotype>
+        </individual>
+        <individual>
+            <name>s2_s3</name>
+            <sample><name>s2</name></sample>
+            <sample><name>s3</name></sample>
+            <has-phenotype>broken</has-phenotype>
+        </individual>
+        <individual>
+            <name>s4_s5</name>
+            <sample><name>s4</name></sample>
+            <sample><name>s5</name></sample>
+            <has-phenotype>broken</has-phenotype>
+        </individual>
+        <individual>
+            <name>s6_s7</name>
+            <sample><name>s6</name></sample>
+            <sample><name>s7</name></sample>
+            <has-phenotype>bent</has-phenotype>
+        </individual>
+        <individual>
+            <name>s10_s11</name>
+            <sample><name>s10</name></sample>
+            <sample><name>s11</name></sample>
+            <has-phenotype>straight</has-phenotype>
+        </individual>
+        <individual>
+            <name>s12_s13</name>
+            <sample><name>s12</name></sample>
+            <sample><name>s13</name></sample>
+            <has-phenotype>straight</has-phenotype>
+        </individual>
+        <individual>
+            <name>s14_s15</name>
+            <sample><name>s14</name></sample>
+            <sample><name>s15</name></sample>
+            <has-phenotype>straight</has-phenotype>
+        </individual>
+        <individual>
+            <name>s8_s9</name>
+            <sample><name>s8</name></sample>
+            <sample><name>s9</name></sample>
+            <has-phenotype>straight</has-phenotype>
+        </individual>
+    </individuals>
+    <infiles>
+        <infile format="migrate" datatype="dna" sequence-alignment="sequential">
+            <name>traitData.mig</name>
+            <population-matching type="single">pop1</population-matching>
+            <segments-matching type="single">locus_1</segments-matching>
+        </infile>
+    </infiles>
+</lamarc-converter-cmd>
diff --git a/doc/html/trait_mapping/traitCmd.xml.txt b/doc/html/trait_mapping/traitCmd.xml.txt
new file mode 100644
index 0000000..005d4b2
--- /dev/null
+++ b/doc/html/trait_mapping/traitCmd.xml.txt
@@ -0,0 +1,122 @@
+<lamarc-converter-cmd>
+    <lamarc-header-comment>traitExample: generated with theta=0.1 rec=0.1 trait at position 906</lamarc-header-comment>
+    <outfile>lamarc-trait-input.xml</outfile>
+    <traits>
+        <trait-info>
+            <name>funny-nose</name>
+            <allele>normal</allele>
+            <allele>affected</allele>
+        </trait-info>
+        <phenotype>
+            <name>straight</name>
+            <genotype-resolutions>
+                <trait-name>funny-nose</trait-name>
+                <haplotypes>
+                    <alleles> normal normal </alleles>
+                    <penetrance> 1.0 </penetrance>
+                </haplotypes>
+            </genotype-resolutions>
+        </phenotype>
+        <phenotype>
+            <name>bent</name>
+            <genotype-resolutions>
+                <trait-name>funny-nose</trait-name>
+                <haplotypes>
+                    <alleles> normal affected </alleles>
+                    <penetrance> 1.0 </penetrance>
+                </haplotypes>
+                <haplotypes>
+                    <alleles> affected normal </alleles>
+                    <penetrance> 1.0 </penetrance>
+                </haplotypes>
+            </genotype-resolutions>
+        </phenotype>
+        <phenotype>
+            <name>broken</name>
+            <genotype-resolutions>
+                <trait-name>funny-nose</trait-name>
+                <haplotypes>
+                    <alleles> affected affected </alleles>
+                    <penetrance> 1.0 </penetrance>
+                </haplotypes>
+            </genotype-resolutions>
+        </phenotype>
+    </traits>
+    <regions>
+        <region>
+            <name>region1</name>
+            <effective-popsize>1</effective-popsize>
+            <trait-location>
+                <trait-name>funny-nose</trait-name>
+            </trait-location>
+            <segments>
+                <segment datatype="dna">
+                    <name>locus_1</name>
+                    <markers>1000</markers>
+                    <first-position-scanned>1</first-position-scanned>
+                    <map-position>1</map-position>
+                </segment>
+            </segments>
+        </region>
+    </regions>
+    <populations>
+        <population>pop1</population>
+    </populations>
+    <individuals>
+        <individual>
+            <name>s0_s1</name>
+            <sample><name>s0</name></sample>
+            <sample><name>s1</name></sample>
+            <has-phenotype>broken</has-phenotype>
+        </individual>
+        <individual>
+            <name>s2_s3</name>
+            <sample><name>s2</name></sample>
+            <sample><name>s3</name></sample>
+            <has-phenotype>broken</has-phenotype>
+        </individual>
+        <individual>
+            <name>s4_s5</name>
+            <sample><name>s4</name></sample>
+            <sample><name>s5</name></sample>
+            <has-phenotype>broken</has-phenotype>
+        </individual>
+        <individual>
+            <name>s6_s7</name>
+            <sample><name>s6</name></sample>
+            <sample><name>s7</name></sample>
+            <has-phenotype>bent</has-phenotype>
+        </individual>
+        <individual>
+            <name>s10_s11</name>
+            <sample><name>s10</name></sample>
+            <sample><name>s11</name></sample>
+            <has-phenotype>straight</has-phenotype>
+        </individual>
+        <individual>
+            <name>s12_s13</name>
+            <sample><name>s12</name></sample>
+            <sample><name>s13</name></sample>
+            <has-phenotype>straight</has-phenotype>
+        </individual>
+        <individual>
+            <name>s14_s15</name>
+            <sample><name>s14</name></sample>
+            <sample><name>s15</name></sample>
+            <has-phenotype>straight</has-phenotype>
+        </individual>
+        <individual>
+            <name>s8_s9</name>
+            <sample><name>s8</name></sample>
+            <sample><name>s9</name></sample>
+            <has-phenotype>straight</has-phenotype>
+        </individual>
+    </individuals>
+    <infiles>
+        <infile format="migrate" datatype="dna" sequence-alignment="sequential">
+            <name>traitData.mig</name>
+            <population-matching type="single">pop1</population-matching>
+            <segments-matching type="single">locus_1</segments-matching>
+        </infile>
+    </infiles>
+</lamarc-converter-cmd>
diff --git a/doc/html/trait_mapping/traitData.mig b/doc/html/trait_mapping/traitData.mig
new file mode 100644
index 0000000..2a20001
--- /dev/null
+++ b/doc/html/trait_mapping/traitData.mig
@@ -0,0 +1,19 @@
+   1 1
+1000
+16
+s0        CGTTGTACACATCATTCGTTGTACACATCATTACTTCATACTATACGTCGCACCGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCGATAATCTGCCCTCCTTTTGGTCCGGACTAAACTATTTCGTACAGCGGGTACGGTCAATACCTTGATACACGTTGTACACATCATTACTAGTTCCTGCGATGTCCGTACGGACCGCTTCCAGCGCCTTCATTGAGGTATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCAGCCGGCGCGGTTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGTTACCGTTCAATAGCCATGAGAATGGGAGGGCGCTTACGTGTACCCCATACTTCATACTA [...]
+s1        CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTTTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAATACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTTCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTTCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAATGGGAGGGCTCTTACGTGTACCCCATACTTCATACTA [...]
+s2        CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACGGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAATACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGCCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGCCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAATGTGAGGGCTCTTACGTGTACCCCATACTTCATACTA [...]
+s3        CCTTGTACAGATCATTCCTTGTACAGATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACAGTTAATACCTAGATAGACCTTGTACAGATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAATGTGCGCCAGATCAATTGTCTAGCGGACGCTCTTACGTGTTCCCCATACTTCATACTATAAACACATGGTAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATGAGAATGTGAGGGCTCTTACGTGTTCCCCATACTTCATACTA [...]
+s4        CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACAACTTCGGACAGCGGGTACGGTTAATACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCATATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATGAGAATGTGAGGGCTCTTACGTGTACCCCATACTTCATACTA [...]
+s5        CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAACACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACCACCCCCGGCGGCGCGGCTGCCCTCGGCTTATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATCATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATGAGAATGTGAGGGCTCTTACGTGTACCCCATACTTCATACTA [...]
+s6        CCTTGTACACATCATTCCTTGTACACATCATTACTTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTTCCCTCCTTTTGGTCCGGACTAAACTACTTCGGACAGCGGGTACGGTTAACACCTAGATAGACCTTGTACACATCATTACTAGTTCCTGAGATGTCCGTCCGTACCACTTCCAGCGCCTTCTTTGAGGTGTAATACACGGCGCGGTTGGATGTCCGTCCGTACCACTTCCCTTTGTACAACCCCCGGCGGCGCGGCTGCCCTCGGCTTATCGCTACCAAATGTGCGCCCGATCAATTGTCTAGCGGACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATCATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCACTAGCCATGAGAATGTGAGGGCTCTTACGTGTACCCCATACTTCATACTA [...]
+s7        TGTTGTACACATCATTTGTTGTACACATCATTAATTCATACTAAATGTCGCACAGATAGAGTGCCTCTGGGCCTGGTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGACTAAACTATTTCGTACAGCGGGTACGACCCATACCTAGATACACGTTGTACACATCATTAATAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGTATAATACACGGCCCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCCCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATCATCAGTATTTGCGCAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAATGGGAGGGCGCTTACGTGTACCCCATACTTCATACTA [...]
+s8        TGTTGTACACATCATTTGTTGTACACATCATTAATTCATACTAAATGTCGCAAAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGTCTAAACTATTTCGTACAGCGGGTACGATCCATACCTAGATACACGTTGTACACATCATTAATAGATCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTAGGAGGTATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTTTAGCGCATGCTCTTACGTGCACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAATGGGAGGGTGCTTACGTGCACCCCATACTTCATACTA [...]
+s9        CGTTGTACGCATCATTCGTTGTACGCATCATTAATTCATACTAAATGTCGCACAGCTAGAGTGCCTTTGGGCCTAGTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGACTAAACTATTTCGTACAGCGGGTACGATCCATACCTAGATACACGTTGTACACATCATTAAAAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGTATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGTCACCGTTCAATAGCCATGAGAATGGGAGAGCGCTTACGTGTACCCCATACTTCATACTA [...]
+s10       CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTAAATGTCGCACAGCTAGAGTGCCTTTGGGCCTGTTTCAGCGTATCGAGTAATCTATAATCTGCCCTCTTTTTCCTGCGGACTAAACTATTTCGTACAGCGGGTACGATCCATACCTAGATACACGTTGTACACATCATTAATAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGAATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTCTTACGTGTACCCCATACTTCATACCATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAATGGGAGGGCGCTTACGTGTACCCCATACTTCATACCA [...]
+s11       CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCATAGCTAGAGTGCCTTTGGGCCTGGTTCAGCGTATCGAGTACTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTACGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACACATCATTAATAGTTCCTGAGATGTCCGTACGGACCGCTTCCAGCGCCTTCTTGGAGGAATAATACACGGCGCGGTTGGATGTCCGTACGGACCGCTTCCCTTTGTACAACCCCCGCCGGCGCGGCTGCCCTCGGCATATCGCTACCAAACGTGCGCCCGATCAATTGTCTAGCGCACGCTGTTACGTGTACCCCATACTTCATACCATAAACACATGATAAGTATTTGCGTAAAGGGTGGTTTGCCCGGCACCGTTCAATAGCCATGAGAATGGGAGGGCGGTTACGTGTACCCCATACTTCATACCA [...]
+s12       CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTATCGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACATGTTGTACACATCATTAATTGTTCCTGAGATGTCCGCACAGACCGCTTCCGACGCCTTCTTTGAGGAATAATACACGGCGCGGTTGGATGTCCGCACAGACCGCTTCCATTTGTACAACCCCAGACGGCGCGGTTCCCCTCGGCATATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGAGCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTTCAATAGCCATGAGAATGGGAGGGCGCTTACGTGTACCCCATACTTCATACTA [...]
+s13       CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTATCGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACACATCATTAATTGTTCCTGAGATGTCCGCACAGACCGCTTCCGACGCCTTGTTTGAGGTATAATACACGGCGCGGTTGGATGTCCGCACAGACCGCTTCCATTTGTACAACCCCAGACGGCGCGGTTCCCCTCGGCACATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCGCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTTCAATAGCCATGAGAATGGGAGGGCGCTTACGTGTACCCCATACTTCATACTA [...]
+s14       CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTAACGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACGCATCATTAATTGTTCCTGAGATGTCCGCACAGACCGCTTCCGACGCCTTCTTTGAGGAATAATACACGGCGCGGTTGGATGTCCGCACAGACCGCTTCCATTTGTACAACCCCAGACGGCGCGGTTCCCCTCGGCCTATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCGCTAACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTACAATAGCCATGAGAATGGGAGGGCGCTAACGTGTACCCCATACTTCATACTA [...]
+s15       CGTTGTACACATCATTCGTTGTACACATCATTAATTCATACTATATGTCGCACAGCTAGAGTGCCTTTGGGCCTCGTTCAGCGTAACGAGTGCTCTATAATCTGCCCTCTTTTTGGTGCGGACTAAACTATTTCGTACAGCGGGTGCGGTCAATACCTAGATACACGTTGTACACATCATTAATTGTTCCTGAGATGTCCGTACAGACCGCTTCCGACGCCTTCTTTGAGGAATAATACACGGCGCGGTTGGATGTCCGTACAGACCGCTTCCATTTGTACATCCCCAGACGGCGCGGTTCCCCTCGGCCTATGGCTGCCAAACGTGCGCCCGATCAATTGTCTAGCGGACGCGCTTACGTGTACCCCATACTTCATACTATAAACACATGATAAGTATTTGCGTAAAGGGTATTTTGCCCGGCACCGTTCAATAGCCATGAGAATGGGAGGGCGCTTACGTGTACCCCATACTTCATACTA [...]
diff --git a/doc/html/troubleshooting.html b/doc/html/troubleshooting.html
new file mode 100644
index 0000000..030ea49
--- /dev/null
+++ b/doc/html/troubleshooting.html
@@ -0,0 +1,575 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Frequently Asked Questions and Answers</TITLE>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF" TEXT="#000000">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+
+<P>(<A HREF="upcoming.html">Previous</A> | <A HREF="index.html">Contents</A>
+| <A HREF="messages.html">Next</A>)</P>
+
+<H2>Troubleshooting LAMARC </H2>
+
+<P> This article lists some common sources of trouble and offers
+suggestions on how to fix them.</P>
+
+<H3> LIST OF FAQS: </H3>
+<OL>
+<LI> <A HREF="troubleshooting.html#Q1"> The program will not compile on my machine.</A></LI>
+<LI> <A HREF="troubleshooting.html#Q2"> The program says it can't find my data file, but it's right here.</A></LI>
+<LI> <A HREF="troubleshooting.html#Q3"> My data file can't be read at all.</A></LI>
+<LI> <A HREF="troubleshooting.html#Q3.1"> The data converter mangles my file.
+</A></LI>
+<LI> <A HREF="troubleshooting.html#Q4"> The program crashes early or complains about lack of memory.</A></LI>
+<LI> <A HREF="troubleshooting.html#Q5"> The program runs much too slowly.</A></LI>
+<LI> <A HREF="troubleshooting.html#Q6"> How can I tell if I've run the program long enough?</A></LI>
+<LI> <A HREF="troubleshooting.html#Q7"> Some of my parameter estimates are ridiculously high--ten or twenty digits.  This can't be right.</A></LI>
+<LI> <A HREF="troubleshooting.html#Q8"> My estimates have enormous error bars.</A></LI>
+<LI> <A HREF="troubleshooting.html#Q9"> What does Theta mean if I have mtDNA (mitochondrial DNA) instead of nuclear DNA?  Do I need to divide it by four?</A></LI>
+<LI> <A HREF="troubleshooting.html#Q10"> The program works when I use a small or low-polymorphism data set, but crashes on a larger or higher-polymorphism data set.</A></LI>
+<LI> <A HREF="troubleshooting.html#Q10.0.1"> I get a long warning message stating that my 'data may be difficult to model'--what does this mean?</A></LI>
+<LI> <A HREF="troubleshooting.html#Q10.0.2"> My profile likelihood tables look ragged rather than smoothly curved--is something wrong?</A></LI>
+<LI> <A HREF="troubleshooting.html#Q10.1"> The program stops with a message 'Unable to create initial tree. Starting parameter values may be too extreme;'--what does this mean?</A></LI>
+<LI><A HREF="troubleshooting.html#Q10.2"> Which microsatellite model should I use?</A></LI>
+<LI> <A HREF="troubleshooting.html#Q11"> How can I do a likelihood ratio test using LAMARC?</A></LI>
+<LI> <A HREF="troubleshooting.html#Q12"> Why can't the program use other data types, other data models, or other evolutionary forces?</A></LI>
+<LI> <A HREF="troubleshooting.html#Q13"> What happened to the 'Normalize' option in previous versions of LAMARC?</A></LI>
+<LI> <A HREF="troubleshooting.html#Q14"> Does LAMARC use 'site 0'? Do I?</A></LI>
+<LI> <A HREF="troubleshooting.html#QLAST"> How can I report a bug or inadequacy of the program or documentation?</A></LI>
+</OL>
+
+<OL>
+<LI> <A NAME="Q1"><B> "The program will not compile on my machine."</B></A> </LI>
+
+<P> This is covered in a separate article, "<A
+HREF="compiling.html">Compiling LAMARC</A>".  You may also want to see if
+one of our <A
+HREF="http://evolution.gs.washington.edu/lamarc/download.html">pre-made
+executables</A> will work for you.</P>
+
+<LI> <A NAME="Q2"><B> "The program says it can't find my data file, but it's right here."
+</B></A></LI>
+
+<P> Check to see if your filename has an invisible extension.  LAMARC
+does not think that "infile.xml" and "infile" are the same.  Also
+check to make sure your file is in the folder or directory you think
+it is.</P>
+
+<LI> <A NAME="Q3"><B> "My data file can't be read at all; the program
+crashes immediately or prints errors that have nothing to do with anything
+in my file."</B></A></LI>
+
+<P> Did you save your input file as a Word document, RTF, or some other
+fancy format?  It needs to be plain unformatted text.</P>
+
+<P> An early crash may also be a symptom of lack of memory; see the
+<A HREF="troubleshooting.html#Q4"> out of memory</A> section.</P>
+
+<LI> <A NAME="Q3.1"><B> "The data converter mangles my file." </B></A> </LI>
+
+<P>Check to see if you are using the correct option for "interleaved"
+versus "sequential" data in conversion.  Interleaved data presents
+the first line of sequence 1, then the first line of sequence 2...
+and eventually the second line of sequence 1, sequence 2, etc.
+Sequential data presents all of sequence 1, then all of sequence
+2, and so forth.  Misrepresenting one as the other will cause your
+sequence names to be treated as nucleotides and vice versa, with
+disastrous results.</P>
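+
+<P>As a toy illustration (a Python sketch, not the converter's actual code,
+and assuming a simplified layout in which only the first block carries the
+sequence names), here is how an interleaved file collapses into its
+sequential equivalent:</P>
+
+<pre>
+interleaved = [
+    "seqA  ACGTA",    # block 1: each sequence's name plus its first chunk
+    "seqB  ACGTT",
+    "CCCGA",          # block 2: the second chunk of seqA, then of seqB
+    "CCCGT",
+]
+
+n = 2                                   # number of sequences
+names  = [line.split()[0] for line in interleaved[:n]]
+chunks = [line.split()[-1] for line in interleaved]
+
+# Sequential form: each name followed by its entire sequence
+for i, name in enumerate(names):
+    print(name, "".join(chunks[i::n]))  # seqA ACGTACCCGA / seqB ACGTTCCCGT
+</pre>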
+
+<LI> <A NAME="Q4"><B> "The program crashes early or complains about lack of
+memory." </B></A></LI>
+
+<P> On many Macintosh systems you can use the Finder to allocate more
+memory to a specific program, and you'll probably need to do this
+for LAMARC; the defaults are too low.</P>
+
+<P> In general, if you suspect that there's not enough memory, try a
+smaller subset of your data for a trial run.  <B>Important:</B>  if you
+decide that you need to produce your final results based on a
+subsample of your data, the subsample <B>must</B> be random.  It is
+allowable to leave out whole genetic regions or populations, but
+if you decide instead to leave out individual sequences or sites,
+choose them randomly.  Leaving out the "boring" invariant sites
+or identical sequences will severely distort the results.  Similarly,
+if you leave out genetic regions, choose them at random; don't
+preferentially choose the least polymorphic ones.</P>
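+
+<P>If you do need to subsample, the simplest way to keep the choice unbiased
+is to let a random number generator do the picking.  A minimal sketch (plain
+Python with made-up sequence names; nothing here is part of LAMARC):</P>
+
+<pre>
+import random
+
+# Hypothetical list of sequence names from one population of your data
+names = ["seq%02d" % i for i in range(1, 51)]   # 50 sampled sequences
+
+random.seed(42)                   # fix the seed so the choice is reproducible
+keep = random.sample(names, 20)   # an unbiased random subset of 20 sequences
+print(sorted(keep))
+</pre>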
+
+<P> Decreasing the number of sampled genealogies will reduce memory
+demands somewhat, at a cost in accuracy.  You will want to
+increase the interval between samples at the same time, so as to
+make each sample as independent (and thus informative) as possible.</P>
+
+<P> LAMARC is a large program and realistic cases will require a computer
+with generous memory.  Our development machines have about 2 gigabytes
+of RAM.  With less than about 500 megabytes, the program
+will probably not work except on toy cases.</P>
+
+<P> You may also want to consider whether you are asking for too many
+populations and parameters; see below.</P>
+
+<LI><A NAME="Q5"><B> "The program runs much too slowly."</B></A></LI>
+
+<P> If you compiled LAMARC yourself from source code, optimization may
+help (though some optimizers produce buggy code, so use at your
+own risk).  The executables we supply are optimized to the best
+of our ability.</P>
+
+<P> Running a smaller case may help.  Please note that you cannot safely
+leave out "boring" data such as invariant sites or identical individuals <A
+HREF="troubleshooting.html#Q4">(details here)</A>.  We find that the
+information value of additional individuals is quite low beyond twenty
+individuals, so if you are running 50 individuals per population you can
+probably cut them randomly down to 20 and get good results.</P>
+
+<P> If the program has barely enough memory it may "thrash", wasting
+a lot of time on memory management.  (You can often tell if
+thrashing is occurring by listening to your computer; many will
+whirr or rattle from the constant hard disc access.)  Adding
+more memory may help.</P>
+
+<P> If you are estimating recombination, and the program runs well
+at first but then slows down, it may be adding more and more
+recombinations to the genealogies.  You can set the
+"maximum number of events" parameter lower, but doing so risks
+a downward bias in your estimate.  It's a good solution to rare
+slow-down periods, but not a good solution to a whole run full
+of highly recombinant trees.  The latter may indicate that
+your sequence spans too long a region and the ends are essentially
+unlinked.  LAMARC is not able to meaningfully estimate the
+recombination rate across unlinked sequences, and will bog down
+if asked to try.  You can diagnose this problem by noticing
+high "dropped" counts in the runtime reports.  (The "runtime 
+reports" are given at the very end of your output file.  These 
+contain information about possibly interesting things that 
+happened while the program was running.)</P>
+
+<P> Similarly, if you are estimating migration and the program bogs
+down, you may have identified two groups as separate populations
+which are really one panmictic population.  LAMARC cannot usefully
+estimate the migration rate in this situation, and will bog down
+trying.  Consolidating the problematic populations together may
+get better results.  The program 
+<a href="http://pritch.bsd.uchicago.edu/software.html">STRUCTURE</a> 
+can be useful for
+detecting non-differentiated populations.</P>
+
+<P> Profiling is expensive, and switching to fixed-point rather than
+percentile profiles, or eliminating profiles for some or all
+parameters, will help considerably.  (But be sure you aren't
+eliminating information that you really need.)  You should also
+be aware that some profiles take longer than others, and the
+estimate of time to finish profiling is very rough--it is not
+unusual for profiling to take two or three times as long as
+predicted, if the prediction happens to come from an easy
+profile and there are several hard profiles in the set.</P>
+
+<P> Setting the output file verbosity to "concise" should drastically
+reduce the amount of time profiling takes, since the number of
+profiles calculated for each parameter is two instead of eleven.  If you 
+are writing a tree summary file, you will be able to re-load that file
+and run with different profiling options later.</P>
+
+<P> LAMARC is a computationally intensive approach and simply won't
+succeed with really complex problems.  For example, if you have
+twenty populations all exchanging migrants, you are trying to
+estimate 400 parameters.  The amount of data required to do this
+would be very high; the amount of computation would be staggering.
+Try breaking your problem into subproblems.  Constraining sets
+of these parameters to be zero, or to be identical, can greatly
+reduce the complexity of the problem and increase your chance of
+a good solution.  </P>
+
+<P> Finally, it's worth asking yourself how long the data took to
+collect.  If they took several years to collect, an analysis which
+takes several weeks shouldn't seem too long.  Run small pilot
+cases first to get an idea of the scale of the problem.</P>
+
+<P> Some useful rules of thumb:</P>
+
+<P> Adding more sequence length slows the program down, but less than
+linearly with the amount of sequence.  This is the best way to
+refine an estimate of recombination rate in a single region.</P>
+
+<P> Adding more individuals slows the program down linearly with the
+number of individuals, and you will also need to run more steps in
+your chains to get equivalently refined results, as the search
+space is bigger.  We find that 20 individuals per population is
+usually enough, and we have never seen a use for more than 100.</P>
+
+<P> Adding more genetic regions (loci) slows the program down linearly
+with the number of regions.  This is far and away the most effective
+at improving estimation of Theta or migration.  If you can choose
+between adding more individuals or adding more regions, always add
+more regions once you have 20 individuals per population.</P>
+
+<P>If you have microsatellite data, the Brownian-motion approximation
+is much faster than the stepwise model.  It is also a very good 
+approximation except when population size is low.  The usual symptom
+of breakdown in the Brownian model is data log-likelihood estimates
+of 0.0.  If you see many of these, especially in the final chains of
+your search, the Brownian approximation is not safe for your data and
+will produce an upwards bias.  In all other cases, however, we
+recommend it.</P>
+
+<LI><A NAME="Q6"><B> "How can I tell if I've run the program long enough?" 
+</B></A></LI>
+
+<P> This is covered in a separate article, <A HREF="search.html">
+"Search Strategy."</A></P>
+
+<LI><A NAME="Q7"><B> "Some of my parameter estimates are ridiculously high--ten
+or twenty digits.  This can't be right." </B></A></LI>
+
+<P> It is possible for a data set to be so uninformative with
+regard to migration (or, more rarely, recombination) that
+the likelihood surface is flat, or almost flat.  This can
+lead to an almost infinite estimate of the
+parameter.</P>
+
+<P> This is particularly common in migration cases where you are
+trying to estimate too many parameters from a small amount of
+data.  Consider a case where you have only 1 individual from
+a certain population, and he turns out to have been a recent
+migrant.  How big is that population?  What are its migration
+rates to other populations?  LAMARC really can't tell, and this
+is reflected by a flat likelihood surface.  You can verify
+this by examining the profiling results.</P>
+
+<P> If you think that some parameter really cannot be estimated,
+holding it fixed at a reasonable value can rescue your ability
+to estimate other parameters.</P>
+
+<P> A second possible explanation is that you've run too few chains
+or chains that are too short.  You can try running longer ones.</P>
+
+<P> A third explanation, particularly for huge estimates of Theta,
+is that your data aren't correctly aligned and so appear
+much more variable than they should.  It can be helpful to
+ask the program to echo back the input data, and examine it
+for alignment problems.</P>
+
+<P> If some of your estimates are huge, the rest may be all right,
+but it is not wise to rely on this.  It's better to reduce
+the problem until all estimates are reasonable.</P>
+
+<P> LAMARC's strange behavior with inadequate data is not a program
+bug; if the likelihood surface for the given data really is
+flat, there's nothing the program can do to get an intelligent
+estimate.  Running LAMARC in Bayesian analysis mode will produce text
+files containing <A HREF="bayes.html#LnLpictures">portraits</A> of the likelihood surface; these files
+can confirm whether the surface is flat.</P>
+
+<LI><A NAME="Q8"> <B>"My estimates have enormous error bars."</B></A></LI>
+
+<P> While this might possibly improve with a longer run, it is usually
+an accurate reflection of your data.  (In fact, a too-short run
+more often produces error bars that are narrower than they should
+be.)  You might also try re-running with multiple replicates or
+heating.</P>
+
+<P> If possible, add more genetic regions.  If you can't do that, add
+additional data to the regions (longer sequences) or more individuals.
+In some cases (e.g. HIV sequences) additional individuals are the
+only possible way to improve your data set, and you'll have to
+be aware that you may never be able to get a really tight estimate.</P>
+
+<P> Please do not ignore the error bars.  They are there for a reason.</P>
+
+<LI> <A NAME="Q9"><B> "What does Theta mean if I have mtDNA (mitochondrial
+DNA) instead of nuclear DNA?  Do I need to divide it by four?"</B></A></LI>
+
+<P> Theta is always "number of heritable copies in the population * 2 * mu".
+If you put in mtDNA, the value that comes out will be 2N<sub>f</sub> * mu,
+where N<sub>f</sub> is the effective number of females.
+You do <B>not</B> need to divide it by four.  A similar argument applies
+to Y chromosome DNA.</P>
+
+<P> If you have both mtDNA and nuclear DNA, be sure to indicate
+to the program that they have different effective population sizes, either
+by setting the effective population size of the mtDNA region to 1 and of the
+nuclear DNA region(s) to 4, or by setting the effective population size of the
+mtDNA region to .25 and of the nuclear DNA region(s) to 1.</P>
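+
+<P>A quick numerical illustration (a sketch with made-up numbers, not LAMARC
+output) of why the usual factor of four falls out of this definition of
+Theta:</P>
+
+<pre>
+Ne = 10000      # hypothetical effective number of individuals
+Nf = 5000       # hypothetical effective number of females
+mu = 1e-8       # hypothetical per-site mutation rate per generation
+
+theta_nuclear = 2 * (2 * Ne) * mu   # 2N heritable copies of an autosome, so 4*Ne*mu
+theta_mtdna   = 2 * Nf * mu         # Nf heritable copies of the mitochondrion, so 2*Nf*mu
+
+# With Nf equal to half of Ne, the mtDNA Theta is one quarter of the nuclear
+# Theta, which is why the relative effective population sizes are set to
+# 0.25 and 1 (or 1 and 4) rather than dividing the estimate itself by four.
+print(theta_nuclear, theta_mtdna, theta_mtdna / theta_nuclear)
+</pre>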
+
+<P>Also note that if you collected data from different sections of the
+mitochondrion, all data should be put in the same genomic region.  If the
+relative mutation rates are different, you can put them in different
+segments, but then put both segments together in the same region.  You will
+seriously underestimate your support intervals if you claim that each
+section is its own region.</P>
+
+<LI><A NAME="Q10"><B> "The program works when I use a small or low-polymorphism data
+set, but crashes on a larger or higher-polymorphism data set." </B></A></LI>
+
+<P> This may be a symptom of running out of memory (see previous
+questions).  You should also check whether your data are aligned correctly;
+improperly aligned data will look like excessive polymorphism.</P>
+
+<LI><A NAME="Q10.0.1"><B> "I get a long warning message stating that my
+'data may be difficult to model'--what does this mean?"</B></A></LI>
+
+<P>Some of the above items have discussed consequences of
+<A HREF="troubleshooting.html#Q7">telling LAMARC that your data comes from
+two populations when it really comes from one</A>,
+<A HREF="troubleshooting.html#Q7">providing LAMARC with an inadequate amount
+of data</A>, and <A HREF="troubleshooting.html#Q10">analyzing highly
+polymorphic data</A>. These high-level, big-concept problems can trigger
+low-level numerical problems which the program cannot relate to the
+big picture; the best it can do is describe the low-level problem.</P>
+
+<P>When performing a maximum-likelihood analysis, 
+LAMARC searches the likelihood surface for its maximum height.
+It does this once after each Markov chain, and several times 
+more if parameter profiles are turned on.  In rare cases,
+two shapes of surface can arise that are intractable and lead to warning
+messages.</P>
+
+<P>One problem case is a flat surface (discussed
+<A HREF="troubleshooting.html#Q7">above</A>), or a surface that continues 
+to rise beyond a reasonable value for one or more parameters.  This 
+implies that your data has insufficient power to accurately estimate
+the population parameters.  The following warning message may appear:
+</P>
+
+<P><PRE>
+Warning!  Encountered a region of the log-likelihood surface in which the
+log-likelihood increases steadily, seemingly without an upper bound.
+This implies that your data is difficult to model.  The problematic
+parameter is parameter <your parameter name>; it has been increased
+or decreased to a value of  <some number>, and the maximum lnL,
+if one exists, seems to lie beyond this value.  The maximization routine
+is terminating....
+</PRE></P>
+
+<P>Another type of problem surface is very spiky with multiple
+peaks and valleys.
+This can result when combinations of
+parameter values exceed some machine-specific threshold; for example, their
+product can become too large to store in the allotted amount of memory, or
+their quotient or difference can become too small to be distinguishable
+from zero.  The following warning message
+may appear:</P>
+
+<P><PRE>
+Warning!  Calculated a log-likelihood of <some number> for the
+parameter vector p = (<some numbers...>), and determined that
+a greater log-likelihood could be found in a certain direction, but
+no greater log-likelihood was found in that direction.  This implies
+that your data may be difficult to model, or that there is a problem
+with lamarc.  The maximization routine is terminating....
+</PRE></P>
+
+<P>(Those interested in the math may like to know that the problem is
+detected when the surface's gradient becomes inconsistent with the
+surface's height.)</P>
+
+<P>If you receive either of these warning messages, or a message very
+similar to these, you may be able to ignore it if it appears only within one or two
+of the earlier Markov chains in your series of chains.  The more reasonable
+the ultimate results are, the safer it is to ignore warnings appearing early
+or infrequently in your run.  If you receive this type of message late or
+frequently in a run, then the ultimate results should be considered
+dubious.</P>
+
+<P>If you receive the "... increases steadily, seemingly without an
+upper bound ..." warning, then you may be able to achieve better
+results by reducing the number of parameters you are estimating,
+or analyzing a subset of your data.  If you receive the "... no greater
+log-likelihood was found in that direction ..." warning, then you can
+try proceeding in the same manner, but troubleshooting is much more
+difficult in this case.  We encourage you to contact us
+and provide us with a copy of your data if you encounter this latter
+warning: doing so would help us as we continue to research ways of
+cleanly coping with these computational challenges.</P>
+
+<LI><A NAME="Q10.0.2"><B> "My profile likelihood tables look ragged rather than smoothly curved--is something wrong?"</B></A></LI>
+
+<P> Occasionally LAMARC, run in likelihood mode, encounters a likelihood
+surface it simply can't maximize reliably, often because it has more
+than one maximum.  One symptom of this is ragged profile tables where
+the values of the parameters jump around from line to line rather
+than increasing or decreasing smoothly.  When you see this, none of
+your estimates, even the MLE, are completely reliable.  Ideas for
+improving the situation include running the program longer (more
+chains, longer chains or both) or reducing the number of parameters
+you are trying to estimate.</P>
+
+<P> The Bayesian mode of LAMARC, which maximizes its parameters one
+at a time rather than jointly, is less prone to this but you may
+see the very similar symptom of curve files with multiple spikes in
+them.  Again, collecting more samples by running LAMARC longer, or
+simplifying the problem so that fewer samples are needed, are your
+best bets.</P> 
+
+<LI><A NAME="Q10.1"><B> "The program stops with a message
+'Unable to create initial tree.  Starting parameter values may be too extreme; try using more conservative ones.'
+--what does this mean?"</B></A></LI>
+
+
+<P>The initial tree for the search (also called the "de novo tree")
+is created based on the starting parameters (either calculated from
+the data or provided by the user).  Attempts to make a de novo
+tree may fail because too many migrations or recombinations are
+put in.  The program will try 100 times to make a de novo tree,
+but if every one of them has too many events it will give up in
+order to avoid an infinite loop.</P>
+
+<P>This error suggests that the current starting values for recombination
+or migration are far too high, given the currently specified upper limits
+on recombination or migration events.  A common cause is breakdown
+of the FST calculation for migration rate.  Check the starting
+values and make sure they are reasonable.  When in doubt, try
+a slightly lower value; the program can adjust it upwards if
+necessary.  However, don't use extremely low values for migration (below 0.001);
+these can cause the program to become stuck at low values for
+a very long time.</P>
+
+<LI><A NAME="Q10.2"><B> "Which microsatellite model should I use?" </B></A></LI>
+
+<P> Try the Brownian-motion model first, since it is much faster.
+Consider switching to the stepwise-mutation model if you see signs,
+in the runtime reports, of failure of the Brownian approximation.
+These take the form of data log-likelihoods of 0.0.  If many of these
+appear, or any appear in the final chains, switch to the stepwise model.
+You may want to start with the stepwise model if you know that your
+population size(s) are very small, since this is the weak point of
+the Brownian approximation.</P>
+
+<LI><A NAME="Q11"><B> "How can I do a likelihood ratio test using LAMARC?" </B></A></LI>
+
+<P> The short answer is that you can't.  The "likelihoods" produced
+by the program are relative likelihoods, and they are meaningful
+only within one run--there is no way to compare them across runs.
+(They represent the answer to the question "How much better do
+the sampled trees fit the maximum likelihood values than the
+values they were sampled from?")</P>
+
+<P> However, approximate confidence intervals based on the shape of
+the curve are possible.  LAMARC presents these in two ways:  as
+the percentile profiling in the MLE tables, and as full
+profile-likelihood tables (if requested).  These should enable you
+to get a picture of the uncertainty in your analysis.</P>
+
+<LI><A NAME="Q12"><B> "Why can't the program use other data types, other data
+models, or other evolutionary forces?" </B></A></LI>
+
+<P> For version 2.0 we have included almost all of the 
+commonly available mutational models.  We do not have provision for
+RFLP or protein sequence data, because the existing maximum 
+likelihood algorithms for these are agonizingly slow, or for 
+AFLP data, because no one has
+yet developed an AFLP maximum likelihood algorithm.  (If you succeed
+in doing so, and it runs at a reasonable speed, we will be happy
+to add it to LAMARC.)  Most other data
+types can be accommodated with the K-Allele model.</P>
+
+<P> New evolutionary forces are more difficult, but we will
+be slowly increasing the number of forces supported.   Our
+next major project is natural selection.  </P>
+
+<P> If you are a programmer, you may also want to consider adding
+new data types or models yourself.  We have tried to write LAMARC
+in a modular fashion that will accommodate additions fairly well.
+Only time will tell if we've succeeded.  Feel free to write
+and ask questions about possible additions.  </P>
+
+<LI><A NAME="Q13"><B> "What happened to the 'Normalize' option in previous
+versions of LAMARC?"</B></A></LI>
+
+<P> The program now automatically checks to see if normalization is needed,
+and turns it on if so.  Normalization will not be needed for the majority of
+data sets, and since it causes a significant decrease in speed if on (and
+because the option was confusing to many of our users), we made control of
+this option automatic.  If "verbose" runtime reports are selected, LAMARC
+will note when this occurs.  If you feel that normalization is necessary
+for your data, the option remains to turn it on in the <A HREF="xmlinput.html#normalize">XML</A>.</P>
+
+<LI><A NAME="Q14"><B> "Does LAMARC use 'site 0'?  Do I?" </B></A></LI>
+
+<P> To our consternation, we recently discovered that the common biological
+naming convention is to call the site that's to the left of site 1, "site
+-1" instead of site 0.  All versions of LAMARC prior to v2.1 do *not* follow
+this convention, so if you claimed that one of your SNPs was at site -5, and
+another SNP was at site 5, LAMARC would assume those SNPs were 11
+nucleotides apart, and not 10.  This probably doesn't make a huge
+difference, but it's worth fixing once you know.</P>
+
+<P>As of version 2.1, the converter program lam_conv examines your data, and
+if you never use a '0' for a 'map position' or a 'first position scanned'
+(aka 'offset'), it assumes that you fall in the majority case, and that all
+your negative sites are one base closer to the positive ones than we
+previously believed.  When it creates a LAMARC input file, it adds one to
+all your negative numbers, so if you tell the converter you have a SNP at
+site -5, and then examine the LAMARC input file, you will see '-4' in the
+list instead.</P>
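+
+<P>The adjustment itself is simple; here is a sketch of the idea (plain
+Python, not the converter's actual code):</P>
+
+<pre>
+def shift_negative(position):
+    """Move a map position from the no-site-0 convention onto a scale that
+    includes 0, by nudging every negative site one base to the right."""
+    if position < 0:
+        return position + 1
+    return position
+
+# Hypothetical SNP map positions exactly as you reported them to the converter
+print([shift_negative(p) for p in [-5, -1, 1, 5]])   # prints [-4, 0, 1, 5]
+</pre>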
+
+<P>Because LAMARC usually doesn't report its results in terms of actual
+sites, this change will likely be invisible to you, and the only difference
+will be that LAMARC will now be a bit more precise.</P>
+
+<P>However, if you're using our 2.1-introduced mapping feature, these
+results are reported in terms of the sites where the trait has been mapped. 
+As such, it's more important to know whether there is a 'site 0' or not,
+assuming you have any negative map positions.  Here, we let you have it both
+ways:  in the XML, under the 'format' tag, the converter writes a
+'convert-output-to-eliminate-zero' tag, which is set to 'true' unless (as
+noted) you ever used a '0' for a map position or first-position-scanned. 
+When this is set 'true', LAMARC will assume you are following traditional
+biologist convention, and convert its values to the 'non-zero' scale before
+displaying them.  This means that if it tells you that your trait might be
+mapped to sites "-1:1", it is talking about two sites, and not three.  It
+also means that the final list of sites in the output file will skip
+directly from -1 to 1:</P>
+
+<pre>
+-3         0.00079395
+-2         0.00079395
+-1         0.00078690
+1          0.00078688
+2          0.00078688
+</pre>
+
+<P>So, how can you tell if you yourself are using a system that includes a 0
+or not?  If all you have are positive numbers, it makes no difference, and
+you can safely ignore it.  If you got your numbers from a genome browser or
+the like, it probably does not include a 0.  In fact, you probably only have
+0's in your site lists if a) you made a mistake, b) you made up your own
+system, or c) you are a tireless crusader for the forces of justice, with a
+penchant for attaching yourself to Sisyphean challenges.  If you fall in the
+latter category, we'd love to hear from you, if only to commiserate.  Which
+brings us to...</P>
+
+
+<LI><A NAME="QLAST"><B> "How can I report a bug or inadequacy of the program
+or documentation?"
+</B></A></LI>
+
+<P> The easiest method is email to <A
+HREF="mailto:lamarc at u.washington.edu">lamarc at u.washington.edu</a>. Please
+tell us the exact symptoms of the bug, the operating system you're using,
+and if possible, send a copy of the data file that produces the problem.  We
+also appreciate hearing about questions the documentation doesn't adequately
+address, or about places where it is unclear or hard to find, as this allows
+us to improve the documentation for the next release.
+</P>
+
+</OL> <P>(<A HREF="upcoming.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="messages.html">Next</A>)</P>
+
+<!--
+//$Id: troubleshooting.html,v 1.34 2012/05/14 19:55:38 ewalkup Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/tutorial.html b/doc/html/tutorial.html
new file mode 100644
index 0000000..5efe92e
--- /dev/null
+++ b/doc/html/tutorial.html
@@ -0,0 +1,603 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data using a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importance sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, population, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Tutorial</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Metropolis-Hastings, Markov chain Monte Carlo
+ simulation, migration rate, effective population size, recombination rate,
+ maximum likelihood -->
+
+
+(<A HREF="data_required.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="bayes_howto.html">Next</A>)
+
+<H2>LAMARC tutorial</H2>
+<P>This tutorial is designed to be read from beginning to end, but if you
+like you can jump straight to:</P>
+
+<UL>
+<LI><A HREF="tutorial.html#why">Why should I use LAMARC?</A></LI>
+<LI><A HREF="tutorial.html#what">What do I need to use LAMARC?</A></LI>
+<LI><A HREF="tutorial.html#nowwhat">Got my data, now what? </A></LI>
+<LI><A HREF="tutorial.html#converter">The converter made a file for me, what next?</A></LI>
+<LI><A HREF="tutorial.html#bayesian">What about Bayesian.  How does that work?</A></LI>
+<LI><A HREF="tutorial.html#hitrun">So... that's it?  Enter the filename and hit 'run'?</A></LI>
+<LI><A HREF="tutorial.html#counting">It's running, can you explain this counting thing?</A></LI>
+<LI><A HREF="tutorial.html#screen_output">It gave me a lot of information and
+started counting again.  What does all this mean?</A></LI>
+<LI><A HREF="tutorial.html#finalchain"> LAMARC got to something called
+'Final Chain 1' and it's taking a lot longer.  What's happening?</A></LI>
+<LI><A HREF="tutorial.html#profile"> It finished Final Chain 2, and now it's stuck.</A></LI>
+<LI><A HREF="tutorial.html#restart"> It finished profiling, but then started up
+again?</A></LI>
+<LI><A HREF="tutorial.html#output"> What can I tell from this 'outfile' it produced?</A></LI>
+<LI><A HREF="tutorial.html#whyagain"> Remind me why am I looking at all this?</A></LI>
+</UL>
+
+<h3><A NAME="why">Why should I use LAMARC?</h3>
+
+<P>
+LAMARC is a tool for studying populations.  It can estimate:
+<UL>
+  <LI> Effective population size
+  <LI> Migration rates between populations
+  <LI> Changes to population size with time (growth rates)
+  <LI> Recombination rates
+  <LI> Variations of mutation rates across genomic regions (gamma)
+  <LI> Fine-scale mapping of trait data
+</UL>
+It can estimate them all at once or hold some of them fixed while estimating
+the rest.  If this is the sort of information you're after, LAMARC can help.
+</P>
+
+<h3><A NAME="what">What do I need to use LAMARC?</h3> 
+
+<p> First off, you need genetic data.  This might be standard genetic data
+like DNA or SNP sequences, or microsatellite counts, or it could (with
+version 2.0) be observations with genetic underpinnings, like phenotypic
+observations or electrophoretic data.  More data will result in more
+accurate results, but it's important to note that the addition of more
+<i>individuals</i>, after a certain point (10-15 per population), is much less helpful than the
+addition of more independent genetic <i>regions</i>.  The history of a known region in
+an additional individual is probably similar to that of its cohorts.  The
+history of a new unlinked region from the same (or new) set of individuals is
+almost certainly very different from that of the old region, and therefore
+more likely to hold new information. </P>
+
+<P> Secondly, you need a computer.  In particular, you probably need a
+computer with a lot of memory.  A good LAMARC work-out can use hundreds of
+megabytes of RAM, and for particularly ambitious analyses with lots of
+data, gigabytes are probably called for.  For learning purposes, LAMARC can
+run on more limited computers, if you're careful to give it a limited data
+set and not allow it to record too many observations. 
+</P>
+
+<P> Finally, you need some time.  The total amount of time can vary from
+data set to data set, but it's not unusual for a complete, solid run of
+LAMARC to take a week or two.  Larger data sets requiring full error
+analysis (profiling) can take a month or longer.  However, it would be a
+mistake to throw all your data at LAMARC in January, then write to us in
+May asking when you could expect results.  The answer would probably be a
+tragic, "LAMARC ran out of memory, and inexplicably failed to crash."  Much
+better, instead, to test LAMARC on subsets of your data and get preliminary
+results in a day, which will then help you determine how best to analyze your
+full data set.  This tutorial is designed to help you do just that.
+</P>
+
+<P> One caveat:  some enterprising souls, having lots of computers and
+little time, may think, 'Aha!  I can solve my problem of having no time by
+running the parallelized version of LAMARC!'  Which would be a great idea,
+if such a thing existed.  There is a parallel version of <A
+href="http://popgen.sc.fsu.edu/Migrate/Migrate-n.html">
+MIGRATE</a>, so if you only want to estimate effective population sizes and
+migration rates, that option is open to you.  
+If your LAMARC run contains multiple <a href="regions.html">regions</a>
+or does multiple <a href="search.html#advanced">replicates</a>, 
+you can separate them into different files, run with
+<a href="menu.html#summary">summary files</a> turned on
+and recombine them using
+<A HREF="parallel.html">poor man's parallelization</a> .
+But if those options
+don't appeal to you, you're stuck waiting for single runs to finish. 
+Sorry. </P>
+
+<h3> <A NAME="nowwhat">OK, I got my data.  Now what? </h3>
+
+<P> Hang on, let's take this slowly.  If this is your first time using
+LAMARC, I strongly recommend using only a subset of your data, to get a
+feel for how the program works, first. </P>
+
+<P> If you have multiple populations, let's select two of them to start
+with.  If you know something about the migration patterns of the populace
+already, pick two populations you know share migrants.  Then select around
+10 individuals from each population.  If you're only studying one
+population, pick 10-15 individuals.  If you want accurate results, it's
+vitally important that you pick these individuals <font color="#FF0000"><u>randomly</u></font> from your data
+set, and not on the basis of them being the 'most different' or 'most
+interesting'.  You can grossly distort your results should you rely on such
+a scheme.
+</P>
+
+<P> Next, pick one region to analyze.  If you have DNA or SNP data, that
+means one continuous stretch of sequence.  If you have microsat data, that
+probably means just one microsatellite, though if you happen to have linked
+microsat data, it could be those linked microsats. </P>
+
+<P> Armed with the knowledge of which data you want to analyze, it's now
+time to assault the converter.  LAMARC 2.1's file conversion program
+can be used in both a batch mode and as a graphical tool.
+You can 
+<A HREF="converter.html"> read about how to use the converter here.</a>
+</p>
+
+<h3><A NAME="converter"> OK, the converter made a file for me.  What next? </h3>
+
+<P> You are ready to run LAMARC.  The very first thing LAMARC asks you for
+is the name of a file, and the converter-created file is what it wants.  You
+can edit the contents of that file if you wish (it's all XML), but it's
+probably much easier to change things in the menu.  If you like, you can
+even enter the name of your file at the command line ('lamarc
+yourfile.xml'), and if you don't want to do anything in the menu, you can
+run LAMARC in 'batch mode' by adding a '-b' flag ('lamarc yourfile.xml -b').
+</P>
+
+<h3> <A NAME="bayesian">What about Bayesian.  How does that work?</h3>
+
+<P> You can read about the Bayesian capabilities of
+LAMARC in <A HREF="bayes.html">why you might want
+to run Bayesian-LAMARC</A> and <A HREF="bayes_howto.html">how you would do
+so.</A>  The latter picks up from right here.   From here on, this tutorial focuses on the Likelihood-LAMARC path.
+</P>
+
+<h3> <A NAME="hitrun">So... that's it?  Enter the filename and hit 'run'? </h3>
+
+<P> For a first crack at your data, yes.  It'll use a lot of default values
+that are perhaps not the best choice for your data set, but they'll work for
+now.  After you see how it runs, we'll get around to changing the defaults.
+</P>
+
+<h3> OK, hang on, I'm going to go try it. </h3>
+
+<P> I'll be here.
+</P>
+
+<h3> <A NAME="counting">It's running!  Um, can you explain this counting thing? </h3>
+
+<P> After creating a tree, LAMARC is rearranging it to see if it can find
+better ones.  Each time it does so, it increments the counter.  Using the
+defaults, it first counts to zero from -1000 without sampling
+anything (this is the phase known as 'burn-in'), then it starts counting up
+to 10,000, sampling every 20th tree.  Those trees will be used to estimate
+parameters, which you'll see in a moment.
+</p>
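+
+<P>If you want to check the arithmetic, here is a minimal sketch (plain
+Python, using only the default values just described; it is not part of
+LAMARC) of how many genealogies each initial chain actually keeps:</P>
+
+<pre>
+burn_in  = 1000    # steps counted from -1000 up to zero; nothing is sampled
+steps    = 10000   # steps in one initial chain (the default described above)
+interval = 20      # every 20th tree is sampled
+
+sampled_trees  = steps // interval   # 500 genealogies kept per initial chain
+rearrangements = burn_in + steps     # 11000 rearrangements proposed in total
+print(sampled_trees, rearrangements)
+</pre>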
+
+<h3> OK, it finished counting and now it's stopped. </h3>
+
+<P> It's busy calculating the max--
+</p>
+
+<A NAME="screen_output"><h3> No, never mind, there we go.  OK, it gave me a lot of information and
+started counting again.  What does all this mean?</h3>
+
+<P> OK, your screen probably looks something like this: </P>
+
+<pre>
+15:39:34  Initial chain   1:  [====================]        10000 steps
+ 
+15:40:09  Predicted end of chains for this region:  Tue Feb  8 16:06:43
+          2005
+ 
+15:40:09  Accepted  7.39% | Posterior lnL 27.0287215 | Data lnL -627.243228
+Trees discarded due to too many events:        2
+Trees discarded due to too small population sizes:        0
+Trees discarded due to an infinitesimal data likelihood:        0
+Trees discarded due to extremely long branch lengths:        0
+Tree-Arranger accepted            284/8268 proposals
+Tree-Size-Arranger accepted       455/1732 proposals
+  Class                  Theta
+  Population 1          0.039795
+  Population 2          0.065072
+   
+  Population                     Mig
+  Population 1            --------  164.6673
+  Population 2            142.1566  --------
+
+15:40:09  Initial chain   2:  [=|                  ]           35
+</pre>
+
+<P>Let's take a look at each piece in turn.  
+<dl>
+
+<P>
+<DT><b>Predicted end of chains</b></dt>
+  <dd> LAMARC's first estimate of when it will be done.  This
+  estimate gets revised at the end of every chain, and hopefully gets more
+  accurate as it goes.
+
+</P><P>
+<DT><b>Accepted  7.39%</b></dt>
+  <dd> The number of proposed changes to the tree that were accepted by
+  LAMARC.  This number is usually in the 10% range--if it's very low, it
+  means that it's latched on to some form of a tree, and is very reluctant
+  to vary from that, which means that it probably is not exploring
+  tree-space very effectively.  Too high, and it's accepting almost
+  everything that comes its way, which probably means it's not
+  proposing radical-enough changes, or else that your data give it
+  no reason to prefer one tree over another.  Such data are likely mostly
+  noise and thus not very informative about population parameters; if you
+  run into this, review your input data for obvious problems like
+  missing data.
+  
+</P><P>
+<DT><b>Posterior lnL 27.0287215</b></dt>
+  <dd> The posterior log likelihood.  This number (which should always be
+  positive) indicates how much better the maximized parameters are than the
+  initial, driving set of parameters.  In early chains (like this one), it
+  may be quite high, but it will drop down to the single digits in later
+  chains.  Please note that this is a relative likelihood, valid only
+  within this specific run; it cannot be used in log-likelihood tests
+  or other statistical applications.
+
+</P><P>
+<dt><b>Data lnL -627.243228</b></dt>
+  <dd> The data log likelihood.  This number will probably be very
+  negative, should increase for the first few chains, then level off for
+  the last few chains.  It represents the log of the probability of
+  the data given the currently accepted tree.  Since a large data set
+  is highly improbable on <b>any</b> tree, it's not a problem that this
+  number is so low.  However, if it is still rising rapidly by the end
+  of your run, you haven't yet found the best trees and your run is too
+  short.<br><br>
+  
+  One other thing you should note is that if you're comparing this run to a
+  new run with more of your data, the data log likelihood will go
+  <b>down</b>.  This is due to the fact that a tree with more tips will
+  generally fit the data at those tips more poorly than a tree with fewer
+  tips.
+
+</P><P>
+<dt><b>Trees discarded due to...</b></dt>
+  <dd>Sometimes, LAMARC will discard trees because the trees themselves are
+   inherently too tricky to deal with.  Almost always, these trees would
+   also be rejected from having too low a likelihood, so you shouldn't worry
+   about this too much unless one of these numbers gets very high (say,
+   larger than 5% of the total number of proposed trees).  If that happens,
+   your starting parameters might be too extreme, or you might be calling
+   two populations different when they are actually genetically identical
+   (rejections due to 'too many events' can have this cause).  Generally,
+   though, it's just LAMARC being efficient.<br><br>
+   
+   If absolutely no trees are rejected, you'll see the message "No trees
+   discarded due to limit violations." which means you're fine.
+   
+  
+</P><P>
+<dt><b>Arranger accepted</b></dt>
+  <dd>This is a more detailed breakdown of the 'Accepted 7.39%', above. 
+  The two arrangers on by default in LAMARC are the Tree-Arranger, which
+  breaks a branch of a tree and then re-attaches it, and the
+  Tree-Size-Arranger, which preserves the topology of the tree but picks
+  new sizes for some or all of the branches.  The Tree-Arranger is
+  absolutely required, since it's the only one to change tree topologies. 
+  The Tree-Size-Arranger is more of a helper function, which is why (by
+  default) it is only attempted 1/5 as often as the Tree-Arranger.<br><br>
+  
+  These numbers can vary fairly widely, but pay attention if they get
+  too large or too small, just as you would to the overall 'Accepted X%'
+  line, above.
+
+</P><P>
+<dt><b>Theta</b> and <b>Mig</b></dt>
+  <dd>These are the parameters LAMARC is trying to estimate.  You will be
+  estimating one theta value for every population in your data, and two
+  migration rates for every pair of populations in your data.  In this case,
+  with two populations, that means a theta for each (0.039795 for population
+  1 and 0.065072 for population 2), and two migration rates (164.6673 for
+  the rate from pop2 to pop1, and 142.1566 for the rate from pop1 to pop2). 
+  These are LAMARC's best estimates of the parameters, as judged from one
+  'chain'-worth of trees.  They will be used as driving values for the next
+  chain, as will the last tree sampled in the chain.  The values reported
+  for the <b>last</b> chain are LAMARC's final estimates of your
+  parameters.
+
+</P><P>
+<dt><b>Initial Chain 2</b></dt>
+  <dd>LAMARC is starting over, essentially.  Except that this time, it
+  hopefully has a halfway-decent tree to start with, and better driving
+  values than it did the first time.  It'll repeat this process (by default)
+  until it's done it 10 times.
+</P>
+</dl>
+
+<h3><A NAME="finalchain">Great!  While you were explaining that, LAMARC got to something called
+'Final Chain 1' and it's taking a lot longer.  What's happening?</h3>
+
+<P>By default, LAMARC runs 10 'Initial' chains and 2 'Final' chains.  The
+initial chains take 10,000 steps, and the final chains take 200,000 steps. 
+This is LAMARC's attempt to solve a basic problem in likelihood analysis:  
+LAMARC does a great search in the area right around the driving values, but
+not so great a search elsewhere.  By using multiple chains, LAMARC moves to
+driving values as close to the true maximum as possible, so that it can
+find that maximum efficiently.  The first 10 chains are an attempt to find
+the right neighborhood, and the first final chain is an attempt to narrow
+it down even further.  The last final chain is used to actually estimate
+your parameters, and to provide support intervals for those estimates.
+</P>
+
+<h3><A NAME="profile">Yay, it finished Final Chain 2!  And, er, now it's stuck.</h3>
+
+<P>If you can see the message "Beginning profiling, please be patient", it
+means that LAMARC has entered a computationally-intense phase where it
+calculates the support intervals for its point estimates.  We call this
+phase 'Profiling', since it provides profiles of the likelihood surface as
+seen from the perspective of each parameter in turn.  After a while, your
+screen will look something like this:
+</P>
+
+<pre>
+03:44:15  Beginning profiling, please be patient
+04:06:09  Finished profile 1 of 7.  Predicted end of this set of profiles:
+          Sat Apr  9 06:17:33 2005
+ 
+04:36:23  Finished profile 2 of 7.  Predicted end of this set of profiles:
+          Sat Apr  9 06:46:43 2005
+ 
+04:37:26  Finished profile 3 of 7.  Predicted end of this set of profiles:
+          Sat Apr  9 05:48:20 2005
+</pre>
+
+<P> It can take an extremely variable amount of time to finish profiling, but a
+good rule of thumb is 'about half again as long as it's taken so far'. 
+LAMARC provides estimates of when it expects to be done as it finishes each
+profile in turn, predicting that each remaining profile will take about
+as long as the average profile so far.  For an exploratory run like
+we've assembled here, a few hours is not unreasonable.
+</P>
+
+<h3><A NAME="restart">[extra credit] Wait a minute--it finished profiling, but then started up
+again?  What's going on?</h3>
+
+<P>If you have more than one genetic region in your data, it first goes
+through the chains for the first region, then calculates the profiles for
+that region, then goes on to the next region.  So what you're seeing is the
+beginning of LAMARC's analysis of the second region.  Back
+when we were getting started, I suggested only using data from one region,
+and this is why--each region goes through the same process, which can be
+rather lengthy.  But, no harm done--it just means a longer wait.
+</p>
+
+<A NAME="output"><h3>OK, it's finally done.  So, what can I tell 
+from this 'outfile' it produced?</h3>
+
+<P> Let's go through the outfile a section at a time.  More information on
+outfiles can be found in <A HREF="output.html">this section</a>.
+</P>
+
+<h4>Maximum Likelihood Estimates (MLEs) of Parameters</h4>
+<P> This first section contains the bulk of the most critical information
+about your parameters:  their best estimates and their support intervals.
+ Your outfile might look something like:
+</P>
+
+<pre>
+                       Theta        |    Migration Rate
+Population       Theta1     Theta2  |   M21        M12
+Best Val (MLE)  0.005697   0.002163 | 716.8337   1478.605
+    Percentile                      |
+   99%   0.005  0.003973   0.001279 | 213.8190   551.3366
+   95%   0.025  0.004314   0.001424 | 301.2110   718.4079
+   90%   0.050  0.004503   0.001518 | 355.3201   816.1701
+   75%   0.125  0.004824   0.001706 | 451.1927   987.5201
+   50%   0.250  0.005162   0.001903 | 553.9211   1174.315
+           MLE  0.005697   0.002163 | 716.8337   1478.605
+   50%   0.750  0.006309   0.002432 | 902.0924   1832.927
+   75%   0.875  0.006793   0.002636 | 1047.330   2115.421
+   90%   0.950  0.007349   0.002867 | 1211.883   2438.987
+   95%   0.975  0.007735   0.003025 | 1324.255   2661.947
+   99%   0.995  0.008568   0.003364 | 1561.403   3136.894
+Theta1:  Theta for Pop1
+Theta2:  Theta for Pop2
+M21:  Migration rate into Pop1 from Pop2
+M12:  Migration rate into Pop2 from Pop1
+</pre>
+
+<P> There are four parameters LAMARC is estimating: two Thetas (population
+sizes) and two migration rates.  The estimate can be read across the first
+line of numbers:  the theta for population one is .005697, and is .002163
+for population two.  The migration rate estimate into population one is 
+716.8337, and is 1478.605 for the rate into population two. </P>
+
+<P> The rest of the table is a result of choosing percentile profiling, and
+is what LAMARC was doing after the end of 'Final Chain 2'.  Each line
+represents a percentile: the value below which the true value of the
+parameter lies with that probability.  So, for example, since the
+.025 percentile for Theta 1 is 0.004314, the true value for Theta 1 is 2.5%
+likely to be lower than that value, and 97.5% likely to be higher than that
+value.  To find the 95% support interval, then, we take the lower 2.5% and
+the upper 2.5% percentiles, and report the spread between them--in this
+case, LAMARC is 95% certain that Theta 1 lies between .004314 and .007735. 
+Similar intervals can be read from this table at the 99%,
+90%, 75%, and 50% levels, and can give you a better idea of
+LAMARC's picture of the data. </P>
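+
+<P>To make the arithmetic concrete, here is a minimal sketch (plain Python,
+not part of LAMARC) that reads the 95% support interval for Theta1 off the
+table above and expresses it as asymmetric error bars around the MLE:</P>
+
+<pre>
+# Theta1 values copied from the table above (percentile -> value)
+theta1 = {0.025: 0.004314, "MLE": 0.005697, 0.975: 0.007735}
+
+# The 95% support interval runs from the 0.025 row up to the 0.975 row
+lower, upper = theta1[0.025], theta1[0.975]
+
+plus  = upper - theta1["MLE"]   # 0.002038 above the point estimate
+minus = theta1["MLE"] - lower   # 0.001383 below the point estimate
+print("Theta1 = %g  (95%% interval %g to %g, i.e. +%g/-%g)"
+      % (theta1["MLE"], lower, upper, plus, minus))
+</pre>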
+
+
+<h4>Profile Likelihoods</h4>
+
+<P> In this section, you are given many more details about the profiles
+listed in the 'Maximum Likelihood Estimates' section.  The first section
+will always be "Overall Profile Tables", and since in this example we only
+used one genetic region, that's all we have.  If we had used more than one
+region, we would start with this "Overall Profile Tables" again, which would
+be followed by "Regional Profile Tables", containing the same sort of
+information considering each genetic region separately in turn.
+</P>
+
+<P> The first swath of data will look something like this: </P>
+
+<pre>
+Log Likelihoods:
+
+Percentile  Theta1   |   Ln(L)   
+  0.005    0.003973  | -3.158082 
+  0.025    0.004314  | -1.761258 
+  0.050    0.004503  | -1.193397 
+  0.125    0.004824  | -0.502098 
+  0.250    0.005162  | -0.068077 
+   MLE     0.005697  |  0.159471 
+  0.750    0.006309  | -0.068013 
+  0.875    0.006793  | -0.502098 
+  0.950    0.007349  | -1.193396 
+  0.975    0.007735  | -1.761353 
+  0.995    0.008568  | -3.157959 
+</pre>
+
+<P> The first two columns we have seen before, in the first section.  Now,
+however, we are given the actual values for the posterior log likelihoods
+when that parameter (Theta 1, in this case) is constrained to be held at
+each particular value.  The log likelihoods here are actually what
+determine the percentiles, instead of the other way around.  If you assume
+a gamma distribution of probability, and know that your point of maximum
+likelihood has a log value of 0.159471 (as it does in the above table), then
+the points at which the likelihood has a log value of approximately -1.761 
+will be your .025 and .975 percentiles.
+</P>
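+
+<P>For those who like to check the arithmetic: the drop in log likelihood
+from the MLE row down to the .025 and .975 rows is about 1.92, which is half
+the 95% critical value of a chi-square distribution with one degree of
+freedom (chi-square being a special case of the gamma family just
+mentioned).  A minimal sketch of the check (plain Python; SciPy is assumed
+here only to supply the quantile, and is not part of LAMARC):</P>
+
+<pre>
+from scipy.stats import chi2   # assumed external library, used only for the quantile
+
+mle_lnl   = 0.159471     # log likelihood at the MLE, from the table above
+bound_lnl = -1.761258    # log likelihood at the 0.025 percentile row
+
+drop      = mle_lnl - bound_lnl          # about 1.92
+half_crit = chi2.ppf(0.95, df=1) / 2.0   # 3.841... / 2 = 1.92
+
+print(round(drop, 3), round(half_crit, 3))   # both print 1.921
+</pre>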
+
+<P> The next section gives us information about parameter correlation:
+</P>
+
+<pre>
+Best fit parameters with Theta1 held constant:
+
+Percentile  Theta1   |   Theta2     M21       M12    
+  0.005    0.003973  |  0.002155  722.4041  1475.974 
+  0.025    0.004314  |  0.002157  721.1656  1476.671 
+  0.050    0.004503  |  0.002158  720.5119  1477.008 
+  0.125    0.004824  |  0.002159  719.4664  1477.540 
+  0.250    0.005162  |  0.002161  718.4165  1478.015 
+   MLE     0.005697  |  0.002163  716.8337  1478.605 
+  0.750    0.006309  |  0.002165  715.0486  1479.142 
+  0.875    0.006793  |  0.002166  713.6108  1479.514 
+  0.950    0.007349  |  0.002167  711.9080  1479.921 
+  0.975    0.007735  |  0.002166  710.6590  1480.219 
+  0.995    0.008568  |  0.002164  707.7885  1480.974 
+</pre>
+
+<P> Once again, the first two columns are repeats of what we've seen before.
+But now, we get to see what happens to the rest of the parameters if we
+modify the first one.  The basic idea here is that we hold Theta1 to a
+particular value, then allow our other parameters to vary as we find a new
+point of joint maximum likelihood (and it is this maximum likelihood that we
+report in the above table).  In other words, LAMARC asks the data, "OK, I
+know that the best value for Theta1 is .005697.  But if I was wrong, and the
+best value was at .003973, what would Theta2, M21, and M12 be?"  In this
+case, the answer is "Not very different."  There's a slight correlation
+between Theta1 and all three other parameters, but it's not very strong. 
+Contrast the above table with this, taken from the same example file:
+</P>
+
+<pre>
+Percentile   M21     |   Theta1    Theta2     M12    
+  0.005    213.8190  |  0.005733  0.001852  1545.118 
+  0.025    301.2110  |  0.005727  0.001907  1525.316 
+  0.050    355.3201  |  0.005721  0.001960  1508.983 
+  0.125    451.1927  |  0.005711  0.002050  1487.323 
+  0.250    553.9211  |  0.005704  0.002113  1479.440 
+   MLE     716.8337  |  0.005697  0.002163  1478.605 
+  0.750    902.0924  |  0.005692  0.002191  1480.951 
+  0.875    1047.330  |  0.005689  0.002203  1482.800 
+  0.950    1211.883  |  0.005685  0.002213  1484.520 
+  0.975    1324.255  |  0.005683  0.002218  1485.435 
+  0.995    1561.403  |  0.005678  0.002226  1486.743 
+</pre>
+
+<P> Here, we see how varying the migration rate M21 affects the estimation
+of the other three parameters.  In contrast with varying Theta1, which only
+caused variation in the third or fourth significant digit, we can see here
+that varying M21 causes variation in the second or third significant digit
+of the other parameters.  Again, not huge, but more noticeable.
+
+<h3><A NAME="whyagain">Wait, wait, wait, I'm lost already.  Why am I looking at all this?</h3>
+
+<p> Sorry.  OK, here's the basic questions these sections answer: </p>
+<dl>
+<dt><b>What are the values for my parameters?</b></dt>
+
+  <dd>The values for your parameters are the first row of the 'Maximum
+  Likelihood Estimates' tables; the ones labeled 'MLE'.
+
+<dt><b>What are the confidence intervals for my parameters?</b></dt>
+  <dd>Technically for this kind of analysis they're called 'support
+  intervals', so the standard 95% support intervals can be found by
+  looking for the rows that start with '95%'.  So, in our Theta1 example
+  above, the lower 95% point is at 0.004314, the upper is at 0.007735, and
+  the MLE is at 0.005697.  One way to write this is: </P> 
+
+  <center>0.005697 +.002038/-.001383 </center>
+
+  <P>Since this is clearly too many digits of precision, you might round off
+  to:</P>
+
+  <center>0.0057 +.0020/-.0014</center>
+
+  <P>If you thought this was confusing and were willing to sacrifice
+  precision, you could average this error to:
+  </P>
+  <center>0.0057 +/-.0017</center>
+
+  <P>even though it's, you know, wrong.  Also keep in mind that results
+  from likelihood LAMARC are believed to estimate too-narrow confidence
+  intervals due to its reliance on the driving values.  Runs with
+  replication get away from this problem.</P> </dd>
+
+<dt><b>What else can I say about my estimated parameters?</b></dt>
+
+  <dd>You can comment on parameter correlations by looking for trends in
+  the 'Profile Likelihoods' section.  For example, you could look at M21 as
+  compared to M12, and if a high M21 correlated with a low M12 and vice
+  versa, you could talk about the total amount of migration between those
+  two populations.</dd>
+
+</dl>
+
+<h3>OK, I think I get it.  Let's go to the next sections.</h3>
+
+<P> Gladly!  Actually the next bits don't need a lot of explanation--the
+next section ("User Specified Options") is a summary of your starting
+values, search strategy, and filenames.  The next section ("Data summary")
+is an overview of what your data looked like, what data models you used, and
+an actual copy of the raw data.  And the final section ("Run Reports by
+Region") is a copy of most of what was sent to the screen during the LAMARC
+run (sans the progress bar, of course).
+</p>
+
+<h3>OK, then!  I'm ready to publish!</h3>
+
+<p> Ha ha.</p>
+
+<h3>Er, right.  So, what do I do next?</h3>
+
+<P>Now you have decisions to make, and since these decisions will be based
+on your data, which I don't have with me just now, you're going to have
+to make the majority of these decisions on your own.  If you're up for
+another question and answer session, though, skip ahead to <A
+HREF="tutorial2.html">Analyzing the Rest of Your Data</a>.  Or if you're keen
+to discover how to do a Bayesian run, <A HREF="bayes_howto.html">read
+on</a>.
+</P>
+
+
+(<A HREF="data_required.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="bayes_howto.html">Next</A>)
+
+<!--
+//$Id: tutorial.html,v 1.23 2012/05/14 19:55:38 ewalkup Exp $
+-->
+</BODY>
+</HTML>
+
+
diff --git a/doc/html/tutorial2.html b/doc/html/tutorial2.html
new file mode 100644
index 0000000..0dba3d2
--- /dev/null
+++ b/doc/html/tutorial2.html
@@ -0,0 +1,277 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using
+genetic data using a maximum likelihood approach with Metropolis-Hastings
+Monte Carlo Markov chain importance sampling"> 
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo,
+Metropolis-Hastings, population, parameters, migration rate, population
+size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Tutorial Conclusion</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Metropolis-Hastings, Markov chain Monte Carlo
+ simulation, migration rate, effective population size, recombination rate,
+ maximum likelihood -->
+
+
+(<A HREF="bayes_howto.html">Previous</A> | <A HREF="index.html">Contents</A> | <A
+HREF="parallel.html">Next</A>)
+<H2>LAMARC Tutorial, Conclusion:  Analyzing the Rest of Your Data</H2>
+
+<P>At this point, you should have been able to coax some of your data
+through the whole LAMARC process, from the converter to actual estimates of
+parameters.  If not, you should read through the <A
+HREF="tutorial.html">basic tutorial</a> so you can do so.  But after having
+done the work for a subset of your data, you now need to figure out what to
+do for <b>all</b> your data.  While the majority of the tutorial so far has
+been fairly universal, this is where it must by necessity diverge, since
+everyone's data are different.  But we'll try to keep the advice as generic
+as possible, and sectioned so that you'll know which bits to skip.</P>
+
+<UL>
+<LI><A HREF="tutorial2.html#data">How much data do I need/want</A></LI>
+<LI><A HREF="tutorial2.html#pops">How many populations do I need/want</A></LI>
+<LI><A HREF="tutorial2.html#bvsl">Should I do a Bayesian or likelihood run? </A></LI>
+<LI><A HREF="tutorial2.html#working">How do I tell if LAMARC is working correctly?</a></li>
+<LI><A HREF="tutorial2.html#curves">How do I view curve files? </A></LI>
+<LI><A HREF="tutorial2.html#notrunning">What do I do if LAMARC is failing?</A></LI>
+<LI><A HREF="tutorial2.html#ref">What is the correct LAMARC reference? </A></LI>
+</ul>
+<h3><A NAME="data">How much data do I need and/or want?</h3>
+
+<p> There are several areas you can focus on to expand your data set.  You
+can collect data from more <b>individuals</b>, you can collect data from
+<b>new genomic regions</b>, and you can <b>extend your sequencing runs off
+the end of your current genomic regions</b>.  All have their advantages and
+disadvantages. </P>
+
+<h4>Individuals</h4>
+<P> We recommend that you collect data from about 15 to 20 individuals per
+population.  Many fewer than that, and you might end up with idiosyncratic
+results, but as you add more individuals, the increase in power gets smaller
+and smaller, while the increase in the volume of tree-space you need to
+search goes up greater than polynomially.  So, as you add more and more
+individuals, LAMARC must work harder and harder for less and less
+payoff.</P>
+
+<h4>New genomic regions</h4>
+<p> If you're bursting at the seams to collect new data, this is probably
+your best bet.  Each new genetic region for which you collect data will
+contain an essentially identical amount of unique information.  In addition,
+the time it takes to analyze a new genetic region is the same as it takes to
+analyze any other region.  The upshot of this is that as you add regions
+linearly, your estimates are informed by a linear amount of new information,
+and it takes a linear amount more time to process.  Also note that
+data collected for new genomic regions need not be from the same individuals
+for which you collected data for your original genomic regions.
+</p>
+
+<h4>Extending the sequences of current genomic regions</h4>
+<P> In general, the problem with adjacent sequences is that they are very
+likely to have the same genetic heritage as the sequence you already had. 
+So, while you'll increase the resolution of your picture of your current
+set of trees, you won't get a whole new set of trees as you would if you had
+spent the same amount of effort sequencing an entirely new genetic
+region.</P>
+
+<P>The exception to this rule is when you are interested in estimating
+the recombination rate.  In this case, the longer the sequence, the better. 
+The only practical limit is the size of your computer's memory--as more and
+more recombinations get added to the trees, the tree itself takes up more
+and more memory, and your computer might slow down as it has a hard time
+accessing the whole tree at once, and begins to have to store data in less
+accessible places.</P>
+
+<P> Also remember that LAMARC's current recombination model is to have a
+single recombination rate over the entire genome.  If your organism has, in
+truth, recombination 'hot spots', and some of your regions have higher
+recombination rates than others, the reported joint estimate over all
+regions may be suspect.  LAMARC does report the estimated recombination
+rates for each region, which should help confirm or deny your suspicions
+about any such hot spots.  We are working on incorporating a hot spot model
+for recombination into future versions of LAMARC and Recombine.</p>
+
+
+<h3>It looks like I have too many individuals.  How about I just pick
+the interesting ones and use them?</h3>
+
+<P>Remember, LAMARC expects randomly selected individuals. If by 'interesting' you mean 'they have unique sequences', <font color="#FF0000"><u>DON'T DO THAT!!!</u></font>  It will hopelessly bias your
+results.  The identical sequences are <b>crucial</b> for accurately
+determining the population size. If you choose to winnow your data, you must pick the individuals to remove at random (20-sided gamer dice work well, but there are plenty of other sources of random numbers).</P>
+
+<h3><A NAME="pops">How many populations do I need/want</h3>
+
+<P>If you have lots of populations (more than, say, about 5), you will
+probably have so many parameters to estimate that LAMARC cannot estimate
+any individual parameter very well.  If you can constrain your migration
+model or population size estimates so that not as many independent
+parameters need be estimated at once, you can start to relax this
+restriction.  Collecting more data from multiple regions can also help
+(keeping in mind that DNA or SNP data is more informative per region than
+microsatellite data).  As a single data point, we have found that 10 
+unlinked DNA regions generally contain enough information to get reasonable 
+migration estimates from a suite of 5 populations, all exchanging migrants.  
+You can constrain individual migration rates to be constant (and zero, if you wish), 
+or to be equal to one another.  One popular constrained migration
+model is the stepping-stone model, in which only adjacent populations
+exchange migrants.  You can use LAMARC's menu to set these; see the <A
+HREF="menu.html#constraints">'constraints'</a> section of the menu
+documentation for more details. </P>
+
+<h3>Can I have too few populations? </h3>
+
+<P> The risk here is that there may be populations in real life
+for which you have no samples, but which are significant nodes in the overall
+migration pattern of your species.  One way to combat this problem is to
+assign a 'ghost population'--a population which you include in your
+analysis, but which has no individuals.  In the analysis, then, migrants
+are free to move through this otherwise unknown population.  Peter
+Beerli, who has worked on LAMARC and is currently working on MIGRATE, has
+found this technique to have moderate but limited effectiveness (see
+Beerli, P.  "Effect of unsampled populations on the estimation of
+population sizes and migration rates between sampled populations."  Mol
+Ecol. 2004 Apr;13(4):827-36.)  However, we have not had good success with
+it in LAMARC, and currently it is disallowed.  If you feel strongly that
+you need a 'ghost population' analysis in LAMARC, please let us know;
+we can re-enable the capability, and consider whether it is a good addition
+to the program in general.</P>
+
+<h3> What happens if I get assignments wrong? </h3>
+
+<P> The most problematic of misassignments is when you have claimed that
+two groups of individuals come from distinct populations when in reality,
+they come from a single population.  In these cases, the estimated
+migration rates between the two skyrocket, and the trees become a 
+mass of migrations, making the search very inefficient.  If you see LAMARC
+assign very high migration rates between two populations, you may wish to
+revisit your data and determine if it's possible that what you classified
+as two populations is in reality a single interbreeding population. 
+Re-analyzing your data with this assumption may then help your
+analysis.  The program <A HREF="http://pritch.bsd.uchicago.edu/software.html">STRUCTURE</A>
+can also be helpful in diagnosing
+whether two populations are sufficiently distinct or not.  Remember that
+even if two populations are exchanging no migrants today, if they did
+so in the fairly recent past, they may be genetically homogenized.</p>
+
+<h3> <A NAME="bvsl">Should I do a Bayesian or likelihood run?</h3>
+
+<P> That is a difficult question; it depends highly on
+your particular data set.  Some of our simulated data sets have worked well
+on both types of analysis, some data sets work with one and not the other,
+and some don't seem to work well no matter how you try to analyze them.  
+General Bayesian vs. likelihood issues are detailed in the <A
+HREF="bayes.html">Bayes tutorial</a>. </p>
+
+<h3> <A NAME="working">How do I tell if LAMARC is working correctly?</h3>
+
+<P> Your best test is to track run-to-run variability.  One way to 
+do this is to simply run LAMARC more than once and compare the output, 
+making sure to use a different <a href="menu.html#data">random number 
+seed</a> each time.  Another way is to increase the number of <a 
+href="menu.html#chains">replicates</a> you perform.  This has the 
+advantage of allowing you to 'save' the effort LAMARC spent on the 
+individual replicates so it can use it for its joint estimate over 
+replicates.  (Of course, if you have already determined that you needed 
+replicates, the only way to compare your joint estimate over replicates is 
+to run LAMARC more than once.)</P>
+
+<P> In a likelihood run, you will be able to compare the point estimates of
+your parameters, plus their confidence intervals (if you have profiling
+turned on).  If LAMARC is succeeding, 95% of your point estimates should
+fall within each other's confidence intervals.  If you didn't turn on
+profiling, you can at least check the standard deviation of your point
+estimates and see what percentage it is of their average.</P>
+
+<P> In a Bayesian run, the best way to tell if LAMARC is succeeding is by
+looking at the produced curvefiles.  The simplest way to view the
+curvefiles is in a spreadsheet program like Excel or a plotting package like R.  There's discussion of
+what to look for in individual curvefiles in the <A
+HREF="bayes.html#curvefiles">Bayes tutorial</a>, but in general, multiple
+peaks mean that LAMARC may not have been run long enough.</p>
+
+<h3> <A NAME="curves">How do I view these curvefiles, and how would I compare curvefiles from
+different runs?</A></h3>
+
+<P> We have included a sample spreadsheet (<A
+HREF="comparing_curvefiles.xls">comparing_curvefiles.xls</a> for the
+Microsoft Excel version; <A
+HREF="comparing_curvefiles.sxc">comparing_curvefiles.sxc</a> for the
+OpenOffice version) from a run with simulated data.  The first three sheets
+are the original curvefiles,  imported into the document using the 'tab'
+delimiter (seed1005, seed401, and seed17, each named for the random number seed
+used to generate that data).  The fourth sheet ('Lined_up') is that exact
+data, copied and pasted into sequential columns, with inserted cells for
+two of the pairs of columns such that  the data for any given Ln(Theta1)
+are lined up with each other.  Then, in the next sheet ('Combined'), the
+Ln(Theta1) column is filled out with enough values for all three sets of
+data, and the extra Ln(Theta1) columns are deleted.  Finally, the last
+sheet ('Graph') shows a graph that was created by highlighting the first
+four columns, selecting 'add graph', choosing the X-Y Scatter Plot graph
+type, and filling in the axes labels.  As you can see, the resulting graph
+shows that the estimates of Theta 1 are nicely reproducible. </P>
+
+<P>
+You can also generate nice plots of curvefiles using the
+<a href="http://www.r-project.org/">R</a> programming language and environment.
+</P>
+
+<h3> <A NAME="notrunning">What do I do if LAMARC is failing? </h3>
+
+<P> More detail is given in the <A HREF="search.html">Search Strategies for
+LAMARC</a> article, but in brief, your options are:
+</P>
+
+<UL>
+<LI> Increase the number of 'Initial' chains that collect data before the 
+'Final' chain(s).
+<LI> Run LAMARC longer in the 'Final' chain by either increasing the number 
+of samples (collecting more trees) or increasing the sampling interval 
+(allowing the trees to evolve more between successive samples).
+<LI> Tweak the relative amount of time spent on each of the <A
+HREF="menu.html#rearrangers">rearrangers</a>.
+<LI> Turn on Replication.
+<LI> Turn on Heating.
+<LI> Collect more genomic data for your organisms.
+<LI> Switch your analysis mode from likelihood to Bayesian, or vice versa.
+<LI> Do some/all of the above.
+</ul>
+
+<h3> Once LAMARC is succeeding, what do I do? </h3>
+
+<P> If you have the computational resources for it, once you get a good idea
+of how long LAMARC will take to crunch your data, we recommend doing one
+final particularly-long run so you get the best possible estimates.  If
+you've done a likelihood analysis, your estimates will be found at the
+beginning of the output file.  If you've done a Bayesian analysis, your
+point estimates will be found at the beginning of the output file, but
+you'll probably want to look at the produced curvefiles, if only because
+they give you pretty graphs to display.  But in general, you're done!  Go
+publish.  LAMARC's job is complete.  </P>
+
+<h3> <A NAME="ref">What is the correct LAMARC reference? </h3>
+
+<P>The recommended citation is our announcement paper in Bioinformatics for
+version 2.0 of LAMARC:</P>
+
+<P><A
+HREF="http://bioinformatics.oxfordjournals.org/cgi/content/abstract/22/6/768">
+Kuhner, M. K., 2006  <i>"LAMARC 2.0: maximum likelihood and Bayesian estimation  of
+population parameters."</i>  Bioinformatics 22(6): 768-770.</a> </P>
+
+<p>
+(<A HREF="bayes_howto.html">Previous</A> | <A HREF="index.html">Contents</A> | <A
+HREF="parallel.html">Next</A>)
+
+<!--
+//$Id: tutorial2.html,v 1.18 2011/06/23 21:00:36 jmcgill Exp $
+-->
+</BODY>
+</HTML>
+
+
diff --git a/doc/html/upcoming.html b/doc/html/upcoming.html
new file mode 100644
index 0000000..7026876
--- /dev/null
+++ b/doc/html/upcoming.html
@@ -0,0 +1,65 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<P>(<A HREF="changes.html">Previous</A> | <A HREF="index.html">Contents</A>
+| <A HREF="troubleshooting.html">Next</A>)</P>
+
+<TITLE>LAMARC Documentation: Future</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration
+ rate, effective population size, recombination rate, maximum likelihood -->
+
+<H2>Upcoming Features for LAMARC</H2>
+
+<P>Here are some features we plan to add to LAMARC, both
+in the next release, and in the more distant future.</P>
+
+<UL> 
+
+<LI> <b>Natural selection</b>.  We will estimate selection parameters by
+treating the selected and non-selected alleles as defining "populations"
+with "migration" due to mutation and recombination allowing haplotypes or
+sections of haplotypes to move between the "populations".</LI><br>
+
+<LI> <b>Ascertainment bias</b>.  We will deal with ascertainment bias (for example,
+the preferential choice of diseased individuals in case/control studies)
+by modeling diseased and healthy as "populations".</LI><br>
+
+<LI> <b>Recombination hotspot estimation</b>.  We will estimate the presence of
+recombination hot or cold regions.  (Chul Joo Kang is currently working on
+implementing this in Recombine.)
+</LI><br>
+
+<LI> <b>Alternative models for growth</b>.  Our current model of
+continuously exponential growth is inadequate for certain populations, and
+we plan to add both linear and stair-step models of growth (and shrinkage).
+</LI><br>
+
+<LI> <b>Sequential sampling</b>.  Sometimes it is possible to collect or
+obtain data from multiple generations of your organism (HIV is a good
+candidate for this).  If the mutation rate is known, it is theoretically
+possible to include these data in a LAMARC-type analysis, and we plan to
+allow this.</LI><br>
+
+</UL>
+
+<P>(<A HREF="changes.html">Previous</A> | <A HREF="index.html">Contents</A>
+| <A HREF="troubleshooting.html">Next</A>)</P>
+
+<!--
+//$Id: upcoming.html,v 1.15 2007/01/24 19:11:30 mkkuhner Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/viral_data.html b/doc/html/viral_data.html
new file mode 100644
index 0000000..88fcb7a
--- /dev/null
+++ b/doc/html/viral_data.html
@@ -0,0 +1,231 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: Using virus data</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+
+<P>(<A HREF="genotype.html">Previous</A> | 
+<A HREF="index.html">Contents</A> | <A HREF="panels.html">Next</A>))</P>
+<H2>Using virus data in LAMARC</H2>
+
+<P>Data derived from viruses can be analyzed effectively in LAMARC, but
+experience shows that some unusual issues arise.  This article outlines
+some possible issues, as well as special opportunities offered by
+virus data.</P>
+
+<P> Two other coalescent MCMC programs which are often useful with virus
+data are:</P>
+
+<P><A HREF="http://beast.bio.ed.ac.uk/Main_Page">
+BEAST (Drummond and Rambaut)</A></P>
+
+<P><A HREF="http://genfaculty.rutgers.edu/heylab/software/">
+IM, IMa, IMa2 (Nielsen, Hey, Wakeley and Choi)</A></P>
+
+<P> We recommend BEAST particularly when data from multiple time points
+are available, and IM particularly when divergence of populations from
+a common ancestor is relatively recent.  Neither program can handle
+recombination, however, and IM cannot handle population growth.  If
+those forces are significant in your data, estimates from these
+programs may be biased.</P>
+
+<H3> High growth rates </H3>
+
+<P> Sufficiently rapid growth will make the underlying genealogy of your
+data resemble a starburst, the so-called "star phylogeny."  There is 
+much less information in a star phylogeny than in an ordinary one, because
+all of the coalescences happen at practically the same time, and that
+single time is the only available piece of information.  The practical
+result is that if you try to estimate Theta and growth on a star, you
+must fail; you are trying to infer two parameters from one piece of
+data.  The likelihood surface will resemble an infinite ridge running
+in the direction of high Theta and high growth.  LAMARC will attempt to
+find a maximum on this ridge, and either return an enormous value for
+Theta and growth, or give up and complain. </P>
+
+<P> You can spot a star phylogeny by making an estimate of your
+phylogeny with branch lengths (using PAUP*, PHYLIP, or other tools).
+A star will have many near-zero branches near the bottom.  If your
+data look like this, you will not be able to co-estimate Theta and
+growth.  (Multiple unlinked star-like regions give you some chance of success,
+but a single one is hopeless.)  You can estimate one parameter if you
+are willing to assume the other, however.  If, for example, you feel
+you know your organism's current Theta, you can hold that value
+fixed and successfully estimate growth rate.  Clearly, if you fix
+Theta at an incorrect value you will receive an incorrect growth rate
+in return.</P>
+
+<P> Multiple time point data are more powerful in separating
+Theta and growth than single time point data.  Such data can
+appropriately be analyzed by BEAST.</P>
+
+<H3> Data from multiple time points </H3>
+
+<P> Data from multiple time points far enough apart that measurable
+evolution has happened between the first and last are not handled
+correctly by LAMARC.  We hope to add this capability in the near future.
+In the meantime, BEAST makes good use of such data.</P>
+
+<P>  You can analyze data from any one time point correctly in LAMARC, but
+you cannot consolidate estimates from multiple time points.  They are
+highly non-independent and cannot be treated as multiple regions, nor is
+it a good idea to average them.  Mixing data from different time points
+in a single LAMARC analysis will bias the estimate of Theta upwards,
+possibly severely.  It is probably best to use only data from the time
+point with the largest sample size.</P>
+
+<H3> Unavailability of multiple unlinked genomic regions </H3>
+
+<P> LAMARC is much more powerful, for all parameters except recombination 
+rate, if it is provided with multiple unlinked genomic regions.  (For 
+recombination, a single long region is best.)  Many viruses simply do 
+not have multiple unlinked regions, frustrating the researcher's desire 
+for a precise estimate.  </P>
+
+<P>If the virus has recombination, using long sequences and performing
+an analysis which allows recombination will provide some of the
+advantages of multiple regions, because distant parts of the viral genome
+will have different trees.</P>
+
+<P> In some cases, different patients or geographic areas can be
+regarded as replicates of the evolutionary process, and combined as
+if they were independent genomic regions.  For example,
+if you were trying to estimate the growth rate of HIV within a patient,
+but found that a single patient did not provide enough information, you
+could try treating virus sequences from a second patient as if they
+were, not additional copies of the same gene, but copies of a new gene.
+This is done by entering them into the file conversion utility as a
+separate region, and giving them a separate name.  For example, you could
+analyze "env from patient 1" and "env from patient 2" as if they were
+completely separate genes.</P>
+
+<P>The danger of this approach is that if population parameters differ between
+your two patients, you will introduce errors by combining
+them. Some hint of this can be found by comparing the single-region
+estimates.  If patient 1's confidence intervals on any of the parameters
+reject patient 2's values, it is unwise to combine them.  You will also
+want to avoid combining patients with known differences in their likely
+population parameters, such as patients with and without drug treatment.</P>
+
+<P> If multiple time point samples are available, they may partially
+compensate for lack of additional regions.  Such data can currently best be
+analyzed by BEAST.  We hope to add this capability to LAMARC in the
+future.</P>
+
+<H3> Identifying specific recombinant strains </H3>
+
+<P>  LAMARC estimates the recombination rate; it does not identify
+individual sequences as recombinants.  If you are actually interested
+in finding recombinants, you will want a different tool, such as a
+bootscanning program.  Knowing which sequences are probably recombinants
+is not actually helpful to LAMARC, so there is no way to give
+bootscanning results to LAMARC.</P>
+
+<P>  We recommend against the strategy of using  bootscanning or eyeballing
+to identify recombinant sequences, removing them, and then doing a
+no-recombination LAMARC analysis (or BEAST or IM analysis).  Many 
+recombinants are inconspicuous because the two partner sequences are
+closely related.  Any attempt to spot recombinants will therefore miss many
+of them (such as within-subtype recombinations in HIV) and those cryptic
+recombinations will distort the estimates of other parameters.  It is better
+to leave all your data in the analysis and include recombination as a
+parameter. The only exception is if the putative recombinants are believed
+to be PCR artifacts, rather than biological recombinants.  In this case it
+is correct to discard them.</P>
+
+<H3>  Recombination in bursts </H3>
+
+<P> Some viruses seldom co-infect, so opportunities for recombination
+are rare, but when co-infection does occur multiple recombinations
+may immediately result.  In these cases, LAMARC's estimate of the
+recombination rate will be some unpredictable composite of the
+co-infection rate and the recombination rate, though it is still
+valid to ask whether the confidence interval for the recombination rate
+includes or excludes zero.</P>
+
+<H3>  Recent divergence </H3>
+
+<P> If you give LAMARC two populations which have recently diverged from
+a common ancestor, LAMARC will tend to estimate high migration between
+them even if there is none.  LAMARC is detecting the shared lineages that 
+came from the common ancestor, and interpreting them as migration.
+The IM program is a better tool if you suspect that divergence is 
+recent.  "Recent" here means divergence within approximately the
+last 2N generations for a haploid, 4N generations for a diploid. </P>
+
+<H3>  Think of "migration" broadly </H3>
+
+<P> Migration rate estimation is often used when organisms are found
+in more than one geographical location.  However, it is more
+generally applicable to any situation in which there is a long-term
+division of the population into two or more niches, with limited
+gene flow between niches.  LAMARC has been successfully used to
+estimate gene flow between different tissue compartments within an
+HIV patient.  It may also be able to estimate gene flow among
+different risk groups (e.g. sexual transmission versus needle-sharing
+transmission).</P>
+
+<P> One thing you must not do, however, is sort your viruses into
+categories <B>based on their genetic sequences</B> and then try to
+infer the population sizes or migration rates of these genetically defined
+subgroups.  For example, you cannot use LAMARC to estimate the effective
+population size of an HIV serotype.  Using genetic data to define the 
+subgroups destroys the evidence you would need to estimate population parameters.
+(For example, presence of a divergent sequence in a population
+is evidence of migration; but if population membership is defined
+by genetic sequence, divergent sequences will never be
+found.)  To the best of our knowledge there is no way to analyze
+genetically defined subgroups in LAMARC, although we hope to add such
+capabilities in the future.</P>
+
+<H3>  Mutational saturation </H3>
+
+<P> Error-prone virus replication can lead to data sets with
+immense amounts of polymorphism.  It is important to choose an
+appropriate mutational model (the ModelTest plug-in to PAUP* can
+help here) but as long as the data are alignable, LAMARC can
+still handle them.  Unlike algorithms based on the assumptions of
+the infinite-sites mutational model, it is not confused by multiple
+hits.  There is an upper limit on mutations beyond which the
+information in the data will be lost, but generally alignment
+becomes impossible long before this limit is reached.</P>
+
+<P> Areas of the sequence where reliable alignment is not possible
+should be replaced with "unknown data" characters.  This may lead to
+a slight downwards bias in Theta (as the most variable areas are
+most likely to be unalignable) but is better than the large
+upwards bias produced by including wrongly aligned sequences.</P>
+
+<H3>  Per-subtype data collection </H3>
+
+<P>  Unfortunately, if you collect your data by identifying
+serological or sequence subtypes, and then sequencing one individual
+per subtype (or any similar strategy) you will not be able to
+analyze it correctly in LAMARC, BEAST or IM.  All three programs
+assume a random population sample, and violating this assumption
+will lead to huge upwards biases in your parameter estimates.
+Similarly, you must not omit identical sequences, boring though
+they may appear.  We know of no way to rescue such data for a
+coalescent analysis.</P>
+
+<P>(<A HREF="genotype.html">Previous</A> | 
+<A HREF="index.html">Contents</A> | <A HREF="panels.html">Next</A>)</P>
+<!--
+//$Id: viral_data.html,v 1.7 2012/05/14 19:55:38 ewalkup Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/html/xmlinput.html b/doc/html/xmlinput.html
new file mode 100644
index 0000000..7460cd4
--- /dev/null
+++ b/doc/html/xmlinput.html
@@ -0,0 +1,1166 @@
+<!-- header fragment for html documentation -->
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<HTML>
+<HEAD>
+
+<META NAME="description" CONTENT="Estimation of population parameters using genetic data usi
+ng a maximum likelihood approach with Metropolis-Hastings Monte Carlo Markov chain importanc
+e sampling">
+<META NAME="keywords" CONTENT="MCMC, Markov chain, Monte Carlo, Metropolis-Hastings, populat
+ion, parameters, migration rate, population size, recombination rate, growth rate, maximum likelihood">
+
+<TITLE>LAMARC Documentation: XML data description</title>
+</HEAD>
+
+
+<BODY BGCOLOR="#FFFFFF">
+<!-- coalescent, coalescence, Markov chain Monte Carlo simulation, migration rate, effective
+ population size, recombination rate, maximum likelihood -->
+
+<P>(<A HREF="converter_cmd.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="menu.html">Next</A>)</P>
+
+<H2>XML Input Format for LAMARC</H2>
+
+<P> You will probably not need to directly look at or edit LAMARC's XML
+input files, because these are generated for you by our file conversion
+programs.  However, in rare cases, you may need or want to write or edit
+these files by hand.  Here is the necessary information for doing so.</P>
+
+<P> We are not currently supporting all the nice abilities that would
+really justify using XML, but we hope to do so in the future.</P>
+
+<P> <B>Please</B> be careful not to save the XML file in Microsoft Word or
+another word processor format.  This will fill it with invisible
+editing characters, and it will not work.  Always use the "save
+as plain text" option of your word processor.  If a previously
+successful input file stops working, check whether you
+have accidentally saved it as formatted text.  This is the #1
+cause of input-file problems in both PHYLIP and LAMARC today.</P>
+
+<P> Also, be aware that some word processors (including at least
+one version of MS Word) will automatically parse XML text, reducing your
+input data file to a disorganized heap of data.  If this happens,
+you can try giving your file an extension of .txt.  If that doesn't
+work, we have no suggestions other than using a different word processor. </P>
+
+<H3> Index</H3>
+
+<UL>
+<LI><A HREF="xmlinput.html#overview">Overview</A></LI>
+<LI><A HREF="xmlinput.html#data">Data Section</A></LI>
+<LI><A HREF="xmlinput.html#datamodel">Data Models</A></LI>
+<LI><A HREF="xmlinput.html#forces">Evolutionary Forces</A></LI>
+<LI><A HREF="xmlinput.html#chains">Chain Control</A></LI>
+<LI><A HREF="xmlinput.html#options">User Options</A></LI>
+<LI><A HREF="xmlinput.html#mapping">Trait Mapping</A></LI>
+</UL>
+
+<H3><A NAME="overview"> Overview:</A></H3>
+<P> XML is a very simple-minded language, so you have to be methodical, as it is easily confused. The best approach is to start with a functioning script, either the one generated for you by LAMARC or one of the examples included in this web site. (If you find one that doesn't work, <A HREF="MAILTO:lamarc@u.washington.edu">
+please tell us</A>. We've tested them, but they're easily broken.) Starting from an appropriate script, incrementally modify it to your needs, testing as you go.</p>
+
+<p>Looking at our examples, you'll note that each level of tag is indented 4 spaces further than the next higher level tag. This is not required by the XML parser (it ignores all those spaces) but is invaluable when trying to debug a malfunctioning script. <u>We highly recommend</u> you follow this convention. It will save you many hours of pain (don't ask how we know that :-).  One thing to particularly avoid is XML that is indented differently from its meaning. This leads to exceptional [...]
+
+<p>If you are unfortunate enough to start with an XML script that is not properly indented, it is worth your time to go through and indent it properly. Not only will you understand it better, the indenting will make modifications much easier.</p>
+
+<p>Each bit of information in the input file is surrounded by
+a beginning and ending tag.  For example, the whole input
+file is surrounded by <lamarc> and </lamarc>, and the data
+section is surrounded by <data> and </data>.  Tags must
+come in <tag> </tag> pairs and must be strictly nested, so
+these are legal:</P>
+
+<P> <font color="#0000FF"><blue> </font> <font color="#FF0000"><red> </red> </font> <font color="#0000FF"></blue></font></P>
+
+<P> <font color="#FF0000"><red> </red> </font><font color="#0000FF"> <blue> </blue></font></P>
+
+<P> but this is illegal:</P>
+
+<P><font color="#0000FF"> <blue> </font><font color="#FF0000"><red> </font><font color="#0000FF"></blue> </font><font color="#FF0000"></red></font></P>
+
+<P> The symbol "<!--" starts a comment, and the symbol "-->" ends
+it.  Anything inside a comment is ignored.  You can use comment
+symbols to temporarily remove parts of your input file that you
+don't want, as long as what remains is legal.  </P>
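+<P> For example, to temporarily exclude one individual from an analysis
+(a hypothetical fragment; the tags shown are described in the Data
+section below), you could comment out its whole block:</P>
+
+<P><PRE>
+<!--
+<individual name="Jon">
+  <sample>
+    <datablock type="DNA"> ... </datablock>
+  </sample>
+</individual>
+-->
+</PRE></P>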
+
+<P> Some tags require additional information, such as the name of
+a population.  This is done with "attributes":</P>
+
+<P> <population name="Washington DC"></P>
+
+<P> The quotes are required.</P>
+
+<H3><A NAME="data"> Data section </A></H3>
+
+<P> The data section contains your actual molecular data, and additional
+information used to interpret it.  It is required to be present,
+and is enclosed in <data> tags.</P>
+
+<P> LAMARC divides molecular data into "regions".  A region is all the
+available genetic information that is closely linked on the same 
+chromosome and has a known map.  
+Use a single region for data which is one contiguous stretch, so that
+it would be meaningful to calculate recombination rates along it.
+Use multiple regions for data composed of several
+disconnected bits or bits whose connections are not known.  Regions
+are enclosed in the <region> tag, and at least one must be present.
+The region's name can be added in an optional name attribute.</P>
+
+<P> The optional tag <effective-popsize> may be used to specify
+a different relative effective population size for each <region>.
+For example, data from nuclear chromosomes of a diploid organism reflect
+an effective population size four times larger than data from the
+mitochondrion of the same organism. Data from sex chromosomes also have
+unique effective population sizes--the relative effective population size
+ratio for a non-sex chromosome to an X chromosome to a Y chromosome is
+4:3:1. Be aware that the parameter estimates produced by LAMARC will be
+scaled proportional to an effective population size of 1.  That is, if
+you tell LAMARC that you have two regions, one with an effective population
+size of 4 and one with an effective population size of 3, your final
+overall estimate of Θ will be lowered to correspond to an
+effective population size of 1.  If you are combining mitochondrial
+and autosomal data in diploids into a joint analysis, set your
+relative effective population sizes to 0.25 and 1, respectively,
+if you want the joint Θ estimate to be reported using the
+autosomal scale; otherwise, set these to 1 and 4, respectively,
+to obtain Θ using the mitochondrial scale.</P>
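+<P> For example, a hypothetical mitochondrial region being analyzed on
+the autosomal scale might contain the following (the region name is
+invented, and the tag is assumed to sit directly inside
+<region>):</P>
+
+<P><PRE>
+<region name="D-loop">
+  <!-- illustrative sketch; only the effective-popsize tag is shown -->
+  <effective-popsize> 0.25 </effective-popsize>
+  ...
+</region>
+</PRE></P>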
+
+<P> Within a region there may be several "contiguous segments."  Segments
+are stretches of genetic information which are linked to one another
+but need separate handling.  For instance, if we have a stretch of
+DNA (modelled by a nucleotide substitution model) and an adjacent
+microsatellite (modelled by a stepwise model) they need to be in the
+same region as they are linked, but cannot be in the same segment as
+they require different mutational models.  Information about the
+relative position of segments is placed in a <spacing> tag.</P>
+
+<P>  Each segment is indicated by a <block> tag, which can
+also give information about the position of the segment itself,
+and (particularly for non-contiguous markers such as SNPs and
+microsatellites) the positions of the markers within the segment.</P>
+
+<P> Within <block>, the <length> tag indicates the total length 
+of the segment.  This is important for SNPs in particular
+because correct interpretation of the
+markers requires knowledge of how many non-markers were surveyed.
+The <map-position> tag gives the position of this segment on
+an overall map of the region.  The map position is the point at which
+sequencing or scanning began, even if that is not the zero point of
+your segment's internal numbering.  For example, if you sequenced
+a gene from upstream position -45 to downstream position 500,
+your map position should tell where position -45 is on the overall
+map of the region.
+The <locations> tag
+encloses a list of marker positions within the region; for example,
+the tag <locations> -10 7 18 22 </locations> indicates
+that the four markers are at positions -10, 7, 18 and 22 with respect
+to the segment's numbering system.  The <offset> tag
+gives the origin of the segment's numbering system with respect
+to the boundaries of the region.  For example, if you began sequencing
+at position -45 with regard to your chosen numbering system (perhaps
+you have begun your numbering at a gene's start codon but obtained 
+upstream sequence), your <offset> would be -45.  If no offset
+is given, the offset is assumed to be zero.  </P>
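+<P> Putting these tags together, a hypothetical segment sequenced from
+upstream position -45 to downstream position 500, with four markers,
+might be described as follows (the nesting of <block> inside
+<spacing> follows the description above, and all numbers are
+purely illustrative):</P>
+
+<P><PRE>
+<spacing>
+  <!-- illustrative sketch; values are invented -->
+  <block>
+    <length> 546 </length>
+    <map-position> 1 </map-position>
+    <offset> -45 </offset>
+    <locations> -10 7 18 22 </locations>
+  </block>
+</spacing>
+</PRE></P>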
+
+<P> Within each region you can list various populations.  If you list
+<population> tags under more than one region, they will be matched 
+by means of their name attributes, so the names are not optional.
+Use of good names here will also make your output much
+easier to interpret!</P>
+
+<a name="panel-xml">
+<P> If a <a href="panels.html">SNP panel</a> was used to generate your
+input data, the panel size will be specified under the <population> tags.
+Panel members are represented as additional tips in the coalescence tree that have unknown data. 
+All that is needed is the number of tips, though optionally one can name the panel. 
+Here is a fragment that shows the panel correction information for one of two populations.
+</p></a>
+<P><PRE>
+<lamarc>
+  ...
+  <data>
+    <region name="Alcohol dehydrogenase">
+      <population name="Seattle">
+        <panel name="adhpanel">
+          <panel-size> 6 </panel-size>
+        </panel>
+        <individual> ... </individual>
+        ...
+      </population>
+      <population>
+        ...
+      </population>
+    </region>
+  </data>
+</lamarc>
+</PRE>
+</P>
+
+<P> Within each population you can list various individuals.  An
+individual represents all the data for that region that comes from
+a single biological individual, which may be one, two, or more
+sets of data depending on how you obtained your data.  For
+example, an individual might consist of one mtDNA sequence, or
+two nuclear DNA sequences.  Individuals can have a name attribute,
+but it is optional; the <individual> tag itself is required.</P>
+
+<P> Within each individual, you can have one or more <sample> tags
+indicating the actual sequences.  For example, an individual
+with one mtDNA sequence would have a single <sample> tag for it,
+which would contain this sequence.  An individual with
+two nuclear DNA sequences would contain two <sample> tags.
+While the two sequences are treated as separate tips of the
+tree, their identity as a single individual is important if haplotypes
+are to be considered. </P>
+
+<A NAME="phase"><P> Optionally, an individual can have a <phase> tag, indicating
+uncertainty about the phase of certain sites.  For example, if you
+have only genotypic data, you will want to indicate uncertainty
+about the phase of all sites.  The <phase> tag has an obligatory
+attribute, "type," which can be either "known" or "unknown."  If
+the type is "known," the list of sites which follows is the list
+of all sites whose phase is known, and therefore need not be 
+reconsidered during the run.  If the type is "unknown," the list
+which follows is the list of all sites whose phase is unknown,
+and thus should be reconsidered.  A simple way to code a sequence
+of genotypic data with no phase information is <phase type="known"> 
+</phase>.
+It is not necessary to specify homozygous sites as to their
+phase-known or phase-unknown status, as they will not be reconsidered
+anyway.</P>
+
+<P>Valid values for the phase tag are site numbers between the value of the
+offset for that segment (which defaults to 1) and the length of the segment
+plus the offset.  If the segment is longer than the number of markers you
+have (as is the case for SNP data), valid values here are the same values
+used for the 'locations' tag in the 'block' section (above).  (Note that
+versions of LAMARC prior to 2.1 did not use this numbering scheme, and
+instead required you to indicate the particular marker number, starting from
+0, that was phased or unphased.  Old LAMARC infiles will give errors or fail
+to run properly using the 2.1 system.)</P>
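+<P> For example, here is a hypothetical fragment for a diploid
+individual whose phase is uncertain at sites 12 and 49 (the site
+numbers, and the placement of the <phase> tag relative to the
+<sample> tags, are purely illustrative):</P>
+
+<P><PRE>
+<individual name="Mary">
+  <!-- illustrative sketch; sequences elided -->
+  <phase type="unknown"> 12 49 </phase>
+  <sample>
+    <datablock type="DNA"> ... </datablock>
+  </sample>
+  <sample>
+    <datablock type="DNA"> ... </datablock>
+  </sample>
+</individual>
+</PRE></P>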
+
+<P> The sequences themselves are enclosed in <datablock> tags,
+one per segment per sample.
+Currently we support DNA, RNA, SNP, and microsatellite data.
+Each datablock must have an attribute indicating the type of data 
+it contains.  Use type="DNA" for full DNA or RNA sequences,
+type="SNP" for SNP sequences, and type="Microsat" for microsatellites.
+Please do not mislabel SNPs as full DNA, because the estimates of
+population size will become vastly overblown. </P>
+
+<P> Sequence data must be aligned and of the same length for all
+samples within a region.  "Unknown nucleotide" codes (X, N or -) can
+be used to fill in missing or unknown sequence.  There is no point
+in including individuals for whom the entire sequence is unknown,
+as they add nothing to the analysis (and will slow it down).
+The full IUPAC nucleotide ambiguity code is available, and DNA and
+RNA are both accepted (and treated identically).  Upper- and lowercase
+nucleotide symbols are treated equivalently.  Deletions should be
+coded as unknown, and will be treated as unknown; no attempt is made to
+model the insertion/deletion process.</P>
+
+<P> Here is a minimal DNA data block describing a single region, 
+a single segment, a
+single population, and two individuals with a single haplotype each.
+Note that while the two blocks of data are differently formatted,
+they contain the same number of bases; this is required since all
+blocks corresponding to a single segment must contain the same
+number of markers.  If your
+sequences for a given segment are of different lengths, they must be padded out with
+unknown-nucleotide codes.</P>
+
+<P>
+<PRE>
+<data>
+  <region name="Alcohol dehydrogenase">
+    <population name="Seattle">
+      <individual name="Mary">
+        <sample>
+          <datablock type="DNA">
+            CTTGTAACCTAATGGCTTCCGAGATGGACTAGTGAGCCGCTTTCTC
+            TACACCAACGCAGCACATGACGGTCTTACATGCGGAGCCCGCTCAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="Jon">
+        <sample>
+          <datablock type="DNA">
+            CTTGTAACCTAATGGCTTCCGA
+            GATGGACTAGTGAGCCGCTTTCTC
+            TACACCAACGCAGCACATGACG
+            GTCTTACATGCGGAGCCCGCTCAA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+  </region>
+</data>
+</PRE> </P>
+
+<P>Microsatellite data are coded as the number of repeats, with "?"
+standing for unknown data.  Successive microsatellites within the
+same region are separated by blank spaces.  Here is a microsatellite
+data block which also illustrates the use of multiple samples per
+individual.  In this example, "Mary" is a heterozygote for the
+second microsatellite and a homozygote for the other five.</P>
+
+<P><PRE>
+<data>
+  <region name="Alcohol dehydrogenase">
+    <population name="Seattle">
+      <individual name="Mary">
+        <sample>
+          <datablock type="Microsat">
+              7 8 14 7 9 21
+          </datablock>
+        </sample>
+        <sample>
+          <datablock type="Microsat">
+              7 9 14 7 9 21
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="Jon">
+        <sample>
+          <datablock type="Microsat">
+              7 9 14 7 10 23
+          </datablock>
+        </sample>
+        <sample>
+          <datablock type="Microsat">
+              8 9 13 7 ? 23
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+  </region>
+</data>
+</PRE> </P> 
+
+
+<H3><A NAME="datamodel"> Data Model</A> </H3>
+
+<P> A <region> can contain <model> tags used to interpret its
+data.  One model should be provided per segment within the region,
+in the same order that the segments are listed.
+You can also provide a global <model> (inside the <lamarc>
+tag) which will be used for any region that does not provide
+its own models.  Only models in these two locations will be used by the program;
+if both are present, the regional models will
+be used in preference to the global one.  The model chosen must
+be appropriate for the data type chosen.  If no model
+is provided, then a default model of the appropriate datatype will be used by
+the program--nucleotides use the Felsenstein '84 model, and microsatellites
+use the Brownian motion model.
+</P>
+
+<P> Several tags are common to all models.  </P>
+
+<P>  The <relative-murate> tag is used to specify segment-specific
+mutation rates.  This is essential if very different data types are
+combined in one analysis.  For example, including DNA and microsat
+data without taking account of the fact that microsats mutate
+thousands of times more rapidly than the single-base substitution
+rate will lead to a nonsense overall estimate.  If all of your
+segments have the same expected mutation rate, this tag is not
+needed.  When it is needed, you should choose some data type as
+the standard and set its relative rate to 1, and give all other
+data types in relation to that standard.  The estimated parameters
+will then also be in terms of your standard.</P>
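+<P> For example, if you treat DNA as your standard and believe a linked
+microsatellite segment mutates roughly a thousand times faster, the two
+segments' models might include the following (the rate value is purely
+illustrative, and the other model tags are elided):</P>
+
+<P><PRE>
+<model name="F84">
+  <!-- illustrative sketch: the DNA segment is the standard -->
+  <relative-murate> 1.0 </relative-murate>
+  ...
+</model>
+<model name="Brownian">
+  <!-- illustrative sketch: invented rate for the microsatellite segment -->
+  <relative-murate> 1000.0 </relative-murate>
+  ...
+</model>
+</PRE></P>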
+
+<P> Be aware that the parameter
+estimates produced by LAMARC will be scaled proportional to a
+segment of relative mutation rate 1, even if no such segment is
+included in the data.  That is, if you tell LAMARC that you have 
+two segments, one with a relative mutation rate of 5 and the other 
+with a relative mutation rate of 50, your final estimate of Θ
+will describe a fictional segment with a relative mutation rate
+of 1, and you will need to multiply by 5 or 50 to find the
+Θ of your actual segments.</P>
+
+<P> If you believe that your segments vary in mutation rate
+according to a gamma distribution, you may wish to use the
+gamma-estimation facilities of LAMARC instead of the 
+<relative-murate> tag.  Relative mutation rate is designed
+for the case where different segments or regions fall into
+clearly distinct groups, known in advance, such as DNA versus
+microsats or introns versus exons. </P>
+ 
+<P> The <categories> tag encloses information about variable rate
+categories per site (or microsat).  Within it, <num-categories> gives 
+the number of rate categories, <rates> gives the relative mutation
+rate of each category, and <probabilities> gives the probability
+that a site is in each category.  The <probabilities> must add
+up to 1.0, and there must be a rate and a probability for each
+category.  The <autocorrelation> tag encloses information about the
+average length of a run of sites (or microsats) with the same rate.  For
+example, if you believe your data consists of runs averaging
+100 sites with the same rate, you would set it to 100.0.
+If you wish to assume that there is no autocorrelation of rates,
+set this value to 1.0.  (We expect 1.0 to be an appropriate
+value for microsatellites in most cases.) </P>
+
+<A NAME="normalize"><P> The <normalize> tag </a> controls internal
+normalization of data likelihood values.  Data likelihoods can be extremely
+small, and may be subject to underflow (that is, rounded down to zero,
+resulting in a loss of information).  Normalization attempts to buffer
+them against underflow at the cost of making the program run more slowly.
+Versions 1.2 and above of LAMARC turn on normalization automatically if they encounter 
+underflow, so this tag should almost always be set to "false," and we may
+remove it in a future version.  If you are sure your data set will need
+normalization (extremely polymorphic data, especially microsatellites, or
+extremely large data sets), then there is no harm in turning it on 
+to free the program from the task of diagnosing the need for normalization.</P>
+
+
+<P> The remaining tags are specific to the particular data model.  The
+current selection of models is described below. </P>
+
+<H4> Nucleotide models </H4>
+
+<P> Lamarc offers a choice of two mutational models for DNA, RNA, and
+SNP data.  The simpler (and slightly quicker) model is the model
+of Felsenstein 1984 (F84), which allows differing nucleotide frequencies
+and differing rates of transition versus transversion.  The more
+general model is the General Time-Reversible model (GTR), which allows
+every pair of nucleotides to have a characteristic mutation rate, as
+well as allowing differing nucleotide frequencies.</P>
+
+<P>Simpler models such as Jukes-Cantor or Kimura Two-Parameter 
+can be obtained by correct choice of parameters to the two models
+given.  If a model can be expressed as a simplification of F84, it
+will run faster than the same model expressed as a simplification
+of GTR.</P>
+
+<P> The difference between F84 and GTR is that F84 expresses its
+mutation rate information as a single parameter, the ttratio (ratio
+of transitions to transversions), whereas GTR uses six parameters
+(relative mutation rates between each pair of bases).</P>
+
+<P> Here is a sample F84 data model, which could appear either globally
+or within a region.</P>
+
+<P>
+<PRE>
+<model name="F84">
+  <base-freqs> 0.25 0.25 0.25 0.25 </base-freqs>
+  <per-base-error-rate> 0.001 </per-base-error-rate>
+  <ttratio> 2.0 </ttratio>
+  <categories>
+    <num-categories> 2 </num-categories>
+    <rates> 1.0 10.0 </rates>
+    <probabilities> 0.8 0.2 </probabilities>
+    <autocorrelation> 5.0 </autocorrelation>
+  </categories>
+  <normalize> false </normalize>
+</model>
+</PRE> </P>
+
+<P> To change this to a GTR model, remove the ttratio, change the model 
+name, and add a line for the mutational rates.  We recommend using
+an external tool such as PAUP* with Modeltest to estimate appropriate
+GTR rates.</P>
+
+<P>
+<PRE>
+<model name="GTR">
+  <base-freqs> 0.25 0.25 0.25 0.25 </base-freqs>
+  <per-base-error-rate> 0.001 </per-base-error-rate>
+  <gtr-rates> 5.4050 147.7765 4.2745 3.5801 96.3678 1.0 </gtr-rates>
+  <categories>
+    <num-categories> 2 </num-categories>
+    <rates> 1.0 10.0 </rates>
+    <probabilities> 0.8 0.2 </probabilities>
+    <autocorrelation> 5.0 </autocorrelation>
+  </categories>
+  <normalize> false </normalize>
+</model>
+</PRE></P>
+
+<P> The <base-freqs> tag sets the frequencies of the nucleotides
+A, C, G, and T (or U) in that order.  They must be strictly greater than zero,
+and must add to 1.  Alternatively, instead of four frequencies, you can 
+enter the keyword "calculated," which will cause the program to calculate 
+nucleotide frequencies based on your input data.  This will not work if 
+one or more nucleotides are missing from your input data; you must explicitly 
+set four non-zero frequencies in such cases.  We also recommend that
+you always set the base frequencies explicitly when using the GTR model. </P> 
+
+<P> The <per-base-error-rate> tag gives the rate at which each individual
+nucleotide should be assumed to have been miscalled. A value of 0 indicates
+that all were sequenced correctly. A value of 0.001 indicates one in one
+thousand is incorrect. 
+If not present, the value is assumed to be 0.
+This functionality is in beta test as of December, 2009.
+</P>
+
+<P> The <ttratio> tag (F84 model only) gives the ratio of 
+transitions to transversions.
+The Jukes-Cantor model (no transition bias) would correspond
+to a <ttratio> of 0.5, but due to a limitation in the algorithm
+this (and lower) values are illegal.  If you wish to
+use a Jukes-Cantor model, set <ttratio> to a very slightly
+larger number such as 0.500001.</P>
+
+<P> The <gtr-rates> tag (GTR model only) gives the 
+relative mutation rate of each
+pair of nucleotides, in the order AC, AG, AT, CG, CT, GT.  Only relative
+values are important.  These are rates as output by PAUP*, before
+consideration of nucleotide frequencies. </P>
+
+<H4> Microsatellite models </H4>
+
+<P>For microsatellite data we offer four models: the stepwise model of
+Beerli and Felsenstein (1999), the Brownian model of Beerli and Felsenstein
+(in preparation), a simple K-Allele model similar in concept to the
+Jukes-Cantor DNA model but for arbitrary K>1, and a mixed K-Allele/Stepwise
+model where both stepwise and K-Allele-type mutations are allowed at a
+relative ratio.</P>
+
+<P>The stepwise model assumes that microsatellites evolve
+via stepwise changes and are constrained not to go below one repeat.
+This model currently has no unique user-settable parameters; it deduces
+its required number of bins from the data, and always considers a
+window of 10 steps on either side of the most extreme data elements
+unless this is found to overlap zero.</P>
+
+<P>Here is a sample data model which could appear either globally
+or within a region:</P>
+
+<P><PRE>
+<model name="Stepwise">
+  <categories>
+    <num-categories> 2 </num-categories>
+    <rates> 1.0 10.0 </rates>
+    <probabilities> 0.8 0.2 </probabilities>
+    <autocorrelation> 1.0 </autocorrelation>
+  </categories>
+  <normalize> false </normalize>
+</model>
+</PRE></P>
+
+<P>The Brownian model assumes that the changes in microsatellite
+length can be approximated by a continuous distribution (we use a Normal).
+This model currently has no unique user-settable parameters.  It
+is much faster than the stepwise model, and appears to work well,
+except for genealogies with very short branches (such as those
+associated with very small population sizes) on which it shows a significant
+upward bias.  When using this model, be on the lookout for data
+log-likelihoods of zero in the runtime reports (these are labelled
+"Data lnL").  If many of these
+appear, they are an indication that your population sizes are too
+small for safe use of the Brownian approximation.</P>
+
+<P><PRE>
+<model name="Brownian">
+  <categories>
+    <num-categories> 2 </num-categories>
+    <rates> 1.0 10.0 </rates>
+    <probabilities> 0.8 0.2 </probabilities>
+    <autocorrelation> 1.0 </autocorrelation>
+  </categories>
+  <normalize> false </normalize>
+</model>
+</PRE></P>
+
+<P> The K-Allele model assumes that the observed alleles represent
+all possible alleles and that mutation is equally likely among any
+pair of alleles.  It is probably not appropriate for most microsatellite
+data, and is provided mainly for its ability to handle data types
+otherwise not analyzable with Lamarc, such as electrophoretic or
+indel data.
+The K-Allele model can also
+be used to assess how severe an effect violating the Stepwise
+model's assumptions might have on the results, since it is essentially
+the opposite of the Stepwise model.  If both have similar results,
+the results are probably quite insensitive to the details of the
+microsatellite mutational process.</P>
+
+<P> A K-Allele model block looks just like the Brownian one with
+a different name:</P>
+
+<P><PRE>
+<model name="KAllele">
+  <categories>
+    <num-categories> 2 </num-categories>
+    <rates> 1.0 10.0 </rates>
+    <probabilities> 0.8 0.2 </probabilities>
+    <autocorrelation> 1.0 </autocorrelation>
+  </categories>
+  <normalize> false </normalize>
+</model>
+</PRE></P>
+
+
+<P> The Mixed K-Allele/Stepwise model considers both Stepwise mutational
+possibilities and K-Allele mutational possibilities.  The
+relative weight of the two types of mutation is given by the
+parameter 'percent_stepwise'.
+The allele range considered for the K-Allele changes is the
+same as that for the Stepwise model (that is, all alleles within
+a certain range of any observed allele) and will therefore
+include some alleles never observed in the data.  
+The
+initial percent_stepwise is set by the user, and if the 'optimize' option
+is set, it is reset at the end of every chain to its optimum
+value based on the final genealogy of that chain,
+as calculated using the bisection approach.</P>
+
+<P>Because this model incorporates both a K-Allele and Stepwise approach,
+it should only be used for data for which both of those models are
+legal, namely microsatellite data.</P>
+
+<P> A Mixed K-Allele/Stepwise model block includes two new tags, 'alpha' 
+(meaning the percent_stepwise parameter),
+and 'optimize', which should be on the same level as 'categories' and
+'normalize'.</P>
+
+<P><PRE>
+<model name="MixedKS">
+  <categories>
+    <num-categories> 2 </num-categories>
+    <rates> 1.0 10.0 </rates>
+    <probabilities> 0.8 0.2 </probabilities>
+    <autocorrelation> 1.0 </autocorrelation>
+  </categories>
+  <normalize> false </normalize>
+  <alpha> 0.3 </alpha>
+  <optimize> false </optimize>
+</model>
+</PRE></P>
+
+
+
+<H3><A Name="forces"> Evolutionary Forces</A></H3>
+
+<P> The <forces> tag encloses information about each evolutionary force
+to be considered in the analysis.  The <forces> section should not
+specify forces that make no sense--for example, migration is not
+allowed if there is only one population, and recombination is not
+allowed if there is only one site.
+</P>
+
+<P> The force tags are <coalescence>, <migration>, <recombination>,
+<growth>, <gamma-over-regions>, and the pair <divergence-migration> and <divergence>.
+In this version of Lamarc, <coalescence> is required and the others are optional
+(though if multiple populations are specified, either <migration> or the
+<divergence-migration>/<divergence> pair is required).
+</P>
+
+<P>The gamma-over-regions "force" may only be applied
+to data spread over multiple, unlinked genomic regions.  This "force" assumes the
+relative mutation rates over unlinked genomic regions are gamma-distributed, and
+causes Lamarc to simultaneously infer the shape of the gamma distribution which best fits
+the data.  More information about this can be found <A HREF="gamma.html">here</A>.
+Please note that Lamarc is unable to co-estimate the shape of the gamma distribution
+and population growth rates.  Lamarc will accept input files containing either the
+tag <growth> or the tag <gamma-over-regions>, but not both.
+</P>
+
+<P> Divergence can only be defined if there are two or more populations in the data. If the user wishes to estimate Divergence, they must define the relationships between the populations in the input data by defining parents. This is most easily done in the <a href="converter.html">data file converter</a>, but can be done directly in the XML as shown <A HREF="xmlinput.html#divergence">below</A>.
+</P>
+
+<P>For each force tag, the following information can be included:
+</P>
+
+<P> <start-values> contains a space-delimited list of the starting
+values of the parameters for that force.  For example, in a
+3-population case, you would provide 3 starting values for population
+Thetas, and 9 starting values for immigration rates.  In the migration
+case, diagonal entries (meaningless values for migration from
+a population to itself) can be indicated with dashes instead of
+zeros, if desired.
+</P>
+
+<P> <method> indicates the algorithms for computing starting values for
+each parameter.  This can be "User," meaning that the user-specified
+values should be used.  Other options are "FST" to set migration
+parameters using the <i>F<sub>ST</sub></i> algorithm, and "Watterson" to set Thetas using
+the method of Watterson.  We do not currently provide algorithms to
+estimate starting recombination or growth rates, or a starting value
+for the single parameter of the gamma-over-regions "force."  (None of the available
+algorithms seemed to perform well enough to be helpful.)  If you are
+going to specify any methods, you need to specify one for each parameter.
+</P>
+
+<P> It is sometimes helpful to set certain parameters to "User" even if
+you mainly intend to use FST and Watterson.  FST, in particular,
+will fail in certain cases.  We use an arbitrary default value when
+FST fails, but you may be able to provide a better value than this
+default.
+</P>
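+
+<P> As a hedged illustration of the three-population case described above
+(all numeric values here are invented), the start-value and method lines
+might read:</P>
+
+<P><PRE>
+<!-- illustrative sketch only; values are invented -->
+<coalescence>
+  <start-values> 0.01 0.02 0.03 </start-values>
+  <method> Watterson Watterson Watterson </method>
+</coalescence>
+<migration>
+  <start-values> - 1.0 2.0 1.5 - 0.5 2.5 1.0 - </start-values>
+  <method> - FST FST FST - FST FST FST - </method>
+</migration>
+</PRE></P>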
+
+<P> <max-events> gives a maximum for the number of events in the
+tree that are generated by this force.  For example, for the
+migration force it gives the maximum total number of migrations.
+If a tree violates any of these maxima, that tree will be discarded.
+Discarded trees are noted in the runtime reports and, if verbose
+output is requested, in the output file.  This option is not useful
+(though it is harmless) for coalescence, growth, and gamma-over-regions.</P>
+
+<P> You may wish to set the maxima relatively low if the program is
+running out of space or slowing down tremendously.  However, if the
+maxima are encountered often (look at the runtime report to check),
+the estimate of parameters for that force will tend to be biased
+downward.</P>
+
+<P> <profiles> gives the type of profile likelihood to be computed for
+each parameter.  The options are "percentile," which will compute
+profile likelihoods at selected percentiles of the distribution;
+"fixed," which will compute profile likelihoods at fixed multiples
+of the maximum-likelihood parameter value; and "none," which will
+compute no profiles.  The profiles of a given run may be a mix
+of "percentile," "fixed," and "none," but you cannot
+use both "percentile" and "fixed" percentiles for the same force.</P>
+
+<P> In a likelihood analysis, percentile profiles are very time-consuming. 
+If you are not interested in a particular parameter, consider turning off
+its profiling, and if the overall run is still too slow, consider fixed
+rather than percentile profiles.  If you are only interested in the 95%
+support interval, you may also change the output file <A
+HREF="#verbosity">verbosity</a> to 'concise', which causes only those
+intervals to be calculated and output (decreasing the time spent profiling
+by approximately a factor of 5). In a Bayesian analysis, profiling is simply a matter
+of reading off values from the produced posterior probability curve, so the
+more-informative percentile profiles should be the profile type of choice
+for most users.</P>
+
+<P> The profile line, if present, must have one entry per parameter for
+that force (for example, a three-population case must have nine
+entries for migration profiling).  It does not matter what profile
+type you specify for the "diagonal" migration rates.</P>
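+
+<P> Continuing the hypothetical three-population illustration above, a
+migration <profiles> line would therefore need nine entries, with any
+value in the diagonal positions, for example:</P>
+
+<P><PRE>
+<migration>
+  <profiles> none fixed fixed fixed none fixed fixed fixed none </profiles>
+</migration>
+</PRE></P>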
+
+<P> An example <forces> block for a case with two populations and migration:</P>
+
+<P>
+<PRE>
+<forces>
+  <coalescence>
+    <start-values> 0.01 0.03 </start-values>
+    <method> Watterson Watterson </method>
+    <profiles> fixed fixed </profiles>
+  </coalescence>
+  <migration>
+    <start-values> - 1.2 1.8 - </start-values>
+    <method> - FST User - </method>
+    <max-events> 1000 </max-events>
+    <profiles> - none fixed - </profiles>
+  </migration>
+  <recombination>
+    <start-values> 0.04 </start-values>
+    <method> User </method>
+    <max-events> 1000 </max-events>
+    <profiles> none </profiles>
+  </recombination>
+</forces>
+</PRE> </P>
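+
+<P> The example above does not use the gamma-over-regions "force".  Based on
+the per-force tags described earlier, a hedged sketch of such a block might
+look like the following (the starting value of 1.0 for the single shape
+parameter is invented, and "User" is given as the method because no automatic
+starting-value algorithm is available for this force):</P>
+
+<P><PRE>
+<!-- illustrative sketch only; the starting value is invented -->
+<gamma-over-regions>
+  <start-values> 1.0 </start-values>
+  <method> User </method>
+  <profiles> percentile </profiles>
+</gamma-over-regions>
+</PRE></P>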
+
+<H4><A Name="divergence">Divergence</A></H4>
+<P> Divergence introduces population divergence into migration, so things get much more complex. The populations are combined pairwise, so if there are three measured populations, there will be two ancestors. Once the first ancestor is defined, migration cannot happen between the two populations subsumed into that ancestor and the third population, as those two populations no longer exist. This is shown graphically on the <a href="divergence.html">Divergence</a> page. As a result you nee [...]
+
+<P>Besides all the standard force tags, <divergence> also specifies how the measured populations and their ancestors are related. This is done by creating a <population-tree>, which explicitly defines, using <epoch-boundary> tags, a pair of <new-populations> and their <ancestor>. That <ancestor> name can then be used as a <new-populations> member for an earlier <ancestor>.</P>
+
+<P><divergence-migration> defines the migrations between the populations and ancestors (it has exactly the same format as <migration> but is, of course, much larger because of the additional rows and columns required to specify migration to and from the ancestor populations). Below is the XML for the 3-population example on the <a href="divergence.html">Divergence</a> page.</P>
+
+<P> Note that in the example both <divergence> and <divergence-migration> have a <prior> defined. This is because Divergence currently functions only in Bayesian analysis; likelihood analysis is not available.
+</P>
+<P>
+<PRE>
+<divergence>
+    <prior type="linear">
+        <paramindex> default </paramindex>
+        <lower> 0.0 </lower>
+        <upper> 0.01 </upper>
+    </prior>
+    <method> USER USER </method>
+    <start-values>  0.002000  0.004000 </start-values>
+    <population-tree>
+        <epoch-boundary>
+            <new-populations> North South </new-populations>
+            <ancestor> Parent_1 </ancestor>
+        </epoch-boundary>
+        <epoch-boundary>
+            <new-populations> East Parent_1 </new-populations>
+            <ancestor> Parent_2 </ancestor>
+        </epoch-boundary>
+    </population-tree>
+</divergence>
+<divergence-migration>
+    <start-values> 0 50.000000 50.000000 0 0 50.000000 0 50.000000 0 0 
+    50.000000 50.000000 0 50.000000 0 0 0 50.000000 0 0 0 0 0 0 0 </start-values>
+    <method> USER USER USER USER USER USER USER USER USER USER USER 
+    USER USER USER USER USER USER USER USER USER USER USER USER USER USER </method>
+    <max-events> 10000 </max-events>
+    <profiles> None None None None None None None None None None None 
+    None None None None None None None None None None None None None None </profiles>
+    <constraints> Invalid Unconstrained Unconstrained Invalid Invalid 
+    Unconstrained Invalid Unconstrained Invalid Invalid Unconstrained Unconstrained 
+    Invalid Unconstrained Invalid Invalid Invalid Unconstrained Invalid Invalid 
+    Invalid Invalid Invalid Invalid Invalid </constraints>
+    <prior type="linear">
+        <paramindex> default </paramindex>
+        <lower> 0.0 </lower>
+        <upper> 100.0 </upper>
+    </prior>
+</divergence-migration>
+</PRE> 
+</P>
+
+<H3><A NAME="chains"> Chain Control </A></H3>
+
+<P> The <chains> tag contains information controlling the search
+strategy, such as number and length of chains, sampling interval,
+heating, and rearrangement strategy.  For details on selecting a
+search strategy, see the file "Search Strategies." </P>
+
+<P> Chains come in two kinds, "initial" and "final;" the program will
+run the requested number of initial chains, and then the requested
+number of final chains.  It is often useful to make the initial
+chains shorter, giving a quick-and-dirty estimate which the
+final chains can refine, but the program does not dictate any
+particular relationship between initial and final.</P>
+
+<P> The <initial> and <final> tags lay out parameters for the two
+chain types.  Within them, <number> is the number of chains of
+that type, <samples> is the number of genealogies that will be
+sampled for parameter estimation, <interval> is the number of
+genealogies generated for each one that is sampled, and <discard>
+is the length of the burn-in period before any genealogies are
+sampled.  For example, if <samples> is 35, <interval> is 10, and
+<discard> is 200, the program will first produce and discard
+200 genealogies, and then produce 350 more, sampling every 10th
+one for a total of 35 sampled genealogies.</P>
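+
+<P> To make that arithmetic concrete, a sketch of the corresponding <final>
+sub-block (shown here with a single final chain for illustration; the
+enclosing <chains> tags are omitted) might read:</P>
+
+<P><PRE>
+<final>
+  <number>1</number>
+  <samples>35</samples>
+  <interval>10</interval>
+  <discard>200</discard>
+</final>
+</PRE></P>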
+
+<P> If you wish, for some reason, to run only one chain, you can set
+the samples of the other chain type to zero.  For likelihood-based
+parameter estimation we believe that one chain is always too few.
+One chain is a reasonable choice for Bayesian analysis, however.  (If only
+one chain is run, burn-in should probably be quite long.)
+</P>
+
+<P> Note that this convention for indicating how many genealogies to
+sample is the same as the one used in MIGRATE, but different from
+the one used in COALESCE, FLUCTUATE, and RECOMBINE.  The latter
+three programs take the total number of genealogies to be produced,
+not the number to be sampled.  Be careful of this point when comparing
+runs of the different programs.</P>
+
+<P> The <replicates> tag gives the number of replications for each
+chain (one or more).  If more than one replicate is requested,
+a joint parameter estimate over all replicates will be produced.</P>
+
+<P> The <heating> tag controls the heating strategy.  It must contain
+two tags.  <temperatures> gives a list of temperatures for the
+various searches (the number of entries in this tag determines the
+number of searches that will be run).  The lowest temperature should
+always be 1.0.  (We know of no use for a chain colder than that.)
+The <swap-interval> tag gives the number of chain steps that will
+pass between each attempt to swap trees among the different
+temperatures.  1 is a reasonable default; higher values may run a
+little faster, but are less effective.</P>
+
+<P> A third, optional tag <adaptive> can be used to turn on or
+off "adaptive" heating, in which the program will adjust the temperatures
+in use so as to try to optimize acceptance rates.  Adaptive heating is
+off by default.  If you enable it (<adaptive> true </adaptive>), 
+keep a close eye on your searches
+to make sure that they are performing well.  In theory adaptive
+heating should be superior to fixed-temperature heating, but we have
+little experience with it so far.</P>
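+
+<P> As a rough sketch only, patterned after the <heating> sub-block of the
+sample <chains> block shown later in this section, a heated run with four
+searches might use a block like the following (the temperature values are
+invented; the lowest is kept at 1.0 as recommended, and the optional
+<adaptive> tag is shown at its default of false):</P>
+
+<P><PRE>
+<!-- illustrative sketch only; temperatures are invented -->
+<heating>
+  <temperatures>1.0 1.1 1.2 1.3</temperatures>
+  <swap-intervals>1 1 1 1</swap-intervals>
+  <adaptive> false </adaptive>
+</heating>
+</PRE></P>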
+
+<P>The <strategy> tag indicates the rearrangement strategy.  Each strategy
+tag within it contains a relative frequency (a number between 0 and 1) giving
+how often that particular strategy is used; the frequencies of all strategies
+should add up to 1.</P>
+
+<P>In version 2.0, four strategies are possible.  </P>
+
+<P>The <resimulating> strategy
+rearranges the genealogy.  Every run must have a <resimulating>
+strategy, since only this strategy allows free movement through the
+genealogy search space.  Its frequency should always be fairly
+high; we recommend at least 80% in a likelihood run, and at least
+40% in a Bayesian run.</P>
+
+<P>The <haplotyping> strategy reconsiders 
+haplotype assignments.  This is only useful if you have some phase-unknown
+sites in your data, and will be silently disabled, even
+if you specify it, on data with no phase-unknown sites.  A
+reasonable frequency for it, if it is needed, might be around 20%.</P>
+
+<P>The <bayesian> strategy allows a Bayesian analysis to
+search the space of population parameters.  It is only useful in
+a Bayesian run (including a Bayesian arranger will turn the
+run into a Bayesian run).  We have found setting its frequency
+equal to the resimulating arranger's frequency to be satisfactory.
+Please note that a Bayesian run with a 50/50 resimulating/Bayesian
+arranger strategy will have to take twice as many steps to 
+consider as many trees as a pure likelihood run, and probably
+should do so.  Bayesian rearrangements are relatively quick, so this will
+not slow the program inordinately.</P>
+
+<P> The <trait-arranger> strategy is useful only when trying
+to map a trait to a location on the chromosome using the "jumping"
+algorithm.  It allows the search to reconsider the location of the
+trait, and should probably have a high frequency such as 30-50%.  
+It will be silently turned off if not needed.</P>
+
+<P> Here is a sample search-strategy block.</P>
+
+<P>
+<PRE>
+<chains>
+  <replicates>1</replicates>
+  <heating>
+    <temperatures>1.0</temperatures>
+    <swap-intervals>1 1</swap-intervals>
+  </heating>
+  <strategy>
+    <resimulating> 1.0 </resimulating>
+  </strategy>
+  <initial>
+    <number>5</number>
+    <samples>50</samples>
+    <discard>100</discard>
+    <interval>20</interval>
+  </initial>
+  <final>
+    <number>1</number>
+    <samples>100</samples>
+    <discard>100</discard>
+    <interval>20</interval>
+  </final>
+</chains>
+</PRE> </P>
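+
+<P> As noted above, including a <bayesian> arranger turns a run into a
+Bayesian run.  A hedged sketch of a <strategy> block for such a run, using
+the equal-frequency suggestion above, might read:</P>
+
+<P><PRE>
+<strategy>
+  <resimulating> 0.5 </resimulating>
+  <bayesian> 0.5 </bayesian>
+</strategy>
+</PRE></P>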
+
+<H3><A NAME="options"> User options</A> </H3>
+
+<P> The <format> tag encloses various options for formatting and
+detail level of results, as well as the random number seed.</P>
+
+<P>The <convert-output-to-eliminate-zero> tag indicates whether you
+think of your data as having a 'site 0' or not.  The traditional
+biologist assumption is that 'site 1' is right next to 'site -1', with
+nothing in between.  As you might imagine, such a scheme would cause no end
+of trouble computationally, so a LAMARC input file scoots all such negative
+numbers up by one, so that 'site -1' becomes 0, 'site -2' becomes -1, and so
+on.  (The converter lam_conv does not make this assumption, but produces
+LAMARC input files that do.)  If you are using the mapping option, you will
+see numbers for sites in both the menu and in the output file; this option
+can be toggled ('true' or 'false') by hand here so that the data output will
+match your expectations.  See <A HREF="troubleshooting.html#Q14">"Does
+LAMARC use 'site 0'?"</a> in the FAQ.</P>
+
+
+<A NAME="verbosity"></a><P> The <verbosity> tag indicates how lengthy the output report should
+be; options are concise, normal, and verbose.  When in doubt, try
+verbose--you can always discard unneeded parts later.</P>
+
+<P> Similarly, the <progress-reports> tag indicates what kind of feedback
+should be given while the program is running.  Options are none,
+concise, normal and verbose.  For exploratory runs, we strongly
+recommend verbose.</P>
+
+<P> The <seed> tag initializes the random number generator.  The
+number provided should be an integer of the form 4N+1, such
+as 101 or 105.  If the program is run twice with the same
+parameters, data, and seed, it will produce the same results.</P>
+
+<P>If no <seed> tag is given and no seed is set from the menu,
+the program will try to use the system clock to seed the random
+number generator.  If your system is peculiar enough to have
+no system clock, you'll want to prevent this by always specifying
+a seed.</P>
+
+<P> <results-file> gives the name of the file which will receive the
+output report when the program finishes (destroying any previous
+contents).</P>
+
+<P> <use-in-summary-file> can be set to either "true" (if data reading
+from a summary file is desired) or "false" (if not).  
+<in-summary-file> gives the name of the file from which to read in
+data from a previous run of LAMARC.</P>
+
+<P> <use-out-summary-file> can be set to either "true" (if data
+writing to a summary file is desired) or "false" (if not).  
+<out-summary-file> gives the name of the file which will contain
+the output summary file.</P>
+
+<P> <use-curvefiles> can be set to "true" in a Bayesian run for
+which curvefiles should be saved, or "false" otherwise.  
+<curvefile-prefix> gives a prefix which will form the first part
+of the name of all such curvefiles (the remainder of the name is
+formed from the name of the parameter whose curve is being recorded).</P>
+
+<P> <use-reclocfile> can be set to "true" if you want to dump
+out the locations of all recombination events in sampled trees in
+the last final chain. This is "false" by default since
+the files can be quite large.
+<reclocfile-prefix> gives a prefix which will form the first
+part of the name of all recombination location files.</P>
+
+<P> <use-tracefile> can be set to "true" if Tracer-readable
+summaries of the run are desired, or "false" otherwise.  
+<tracefile-prefix> gives a prefix which will form the first
+part of the name of all Tracer files.</P>
+
+<P> <use-newicktreefile> can be set to "true" if trees from
+this run should be written out in Newick format, or "false"
+otherwise.  <newicktreefile-prefix> gives a prefix which
+will form the first part of the name of all Newick-format tree
+files.</P>
+
+<P> <out-xml-file> gives the name of the file which will receive a
+copy of the input file, as modified by options selected in the menu.</P>
+
+<P> There is no entry for the name of the input data file, since
+that's the file this XML is in.</P>
+
+<P> Here is a sample <format> block: </P>
+
+<P>
+<PRE>
+<format>
+  <verbosity>verbose</verbosity>
+  <progress-reports>normal</progress-reports>
+  <seed>1005</seed>
+  <use-in-summary-file>false</use-in-summary-file>
+  <in-summary-file>insumfile.xml</in-summary-file>
+  <use-out-summary-file>false</use-out-summary-file>
+  <out-summary-file>outsumfile.xml</out-summary-file>
+  <out-xml-file>menusettings_infile.xml</out-xml-file>
+</format>
+</PRE> </P>
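+
+<P> The sample above omits the optional output-file tags described earlier.
+As a hedged sketch only (all file names and prefixes here are invented), a
+<format> block requesting Tracer files and Bayesian curvefiles, while
+declining the other optional outputs, might additionally contain:</P>
+
+<P><PRE>
+<!-- illustrative sketch only; file names and prefixes are invented -->
+  <results-file>outfile.txt</results-file>
+  <use-curvefiles>true</use-curvefiles>
+  <curvefile-prefix>curvefile</curvefile-prefix>
+  <use-tracefile>true</use-tracefile>
+  <tracefile-prefix>tracefile</tracefile-prefix>
+  <use-reclocfile>false</use-reclocfile>
+  <reclocfile-prefix>recloc</reclocfile-prefix>
+  <use-newicktreefile>false</use-newicktreefile>
+  <newicktreefile-prefix>newick</newicktreefile-prefix>
+</PRE></P>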
+
+<H3><A NAME="mapping"> Trait Mapping </A></H3>
+
+<P> Trait mapping is enabled by the presence of a <traits>
+block nested within a <region>.  Within it there
+should be a <trait> block for each trait being mapped.
+Although you are unlikely to need it, if you have multiple traits that map
+to the same genomic region, you may map them simultaneously by giving each a
+'trait' tag.  The trait is assumed
+to be located somewhere in this region; LAMARC will not
+consider the possibility that it is elsewhere.</P>
+
+<P> The trait is given a name with a <name> block,
+and the type of analysis (floating or jumping) is given
+with an <analysis> block whose legal values are
+"float" or "jump".</P>
+
+<P> If information is available about possible locations for
+the trait, they can be indicated in a <possible-locations>
+block containing one or more <range> blocks.  Each
+<range> should contain a <start> and an <end>
+block giving the starting and ending sites of the range,
+numbering the first site of the region as 1.  At least one
+site must be a legal location for the trait (and really, it is
+silly to attempt mapping unless more than one site is legal).</P>
+
+<P> The trait block may also contain an appropriate data model for trait
+allele mutations, which must (at present) be a K-Allele model.  It is
+inappropriate to have multiple categories for a trait allele (since the
+categories involve differences in mutation rate among markers, and there
+will always be exactly one marker for trait alleles), and normalization can
+usually be ignored, but you may want to input a relative mu rate.  For
+example, if you believe your alleles are the result of a dysfunctional gene,
+and you are working with SNP data, you might estimate that there are
+approximately 500 sites in an average gene that lead to that gene's
+disruption, and that SNPs are observed at approximately one in every 100
+sites, so you would tell LAMARC that the relative mutation rate of your trait
+allele is 5 times that of your SNP data.  (If you had DNA data instead, you
+would put in '500' as the relative mutation rate.)
+</P>
+
+<P>As an example, here is XML input for the trait 'funny-nose', which we
+know might be mappable to site 1 or somewhere within sites 35-68, and which
+has a relative mutation rate of 5:
+<PRE>
+<data>
+  <region name="Region1">
+    <traits>
+      <trait>
+        <name> funny-nose </name>
+        <analysis> float </analysis>
+        <possible-locations>
+          <range>
+            <start> 1 </start>
+            <end> 1 </end>
+          </range>
+          <range>
+            <start> 35 </start>
+            <end> 68 </end>
+          </range>
+        </possible-locations>
+        <model name="KAllele">
+          <normalize>false</normalize>
+          <categories>
+            <num-categories>1</num-categories>
+            <rates> 1</rates>
+            <probabilities> 1</probabilities>
+            <autocorrelation>1</autocorrelation>
+          </categories>
+          <relative-murate>5</relative-murate>
+        </model>
+      </trait>
+    </traits>
+</PRE></P>
+<P> Additionally, within the <individual> tag of each
+individual from whom data was sampled, you will need to include
+a <genotype-resolutions> block specifying the possible
+genotypes which could correspond to that individual's phenotype.
+The first block within the <genotype-resolutions> block
+should be a <trait-name> matching the name given earlier
+for the trait.  This is followed by a collection of
+<haplotypes> blocks, each of which gives one possible haplotype
+for that individual together with its penetrance.  Be careful with
+these penetrances!  The penetrance of a haplotype is the
+chance that, if the individual has this haplotype, they will
+show the phenotype they did in fact show; it is <b>not</b>
+the chance that the individual has this haplotype.  Only
+haplotypes which the individual could have should be listed.
+The penetrance, which should be a number greater than 0 and
+less than or equal to 1, goes in a <penetrance> tag,
+and the two alleles, in order, which make up the haplotype
+go in an <alleles> tag.</P>
+
+<P> Here is an example from an individual whose nose is bent.  Based on our
+previous analysis of this trait, we know that 100% of individuals who are
+homozygous for the 'broken' allele have a bent nose, that 50% of individuals
+who are heterozygous and have one 'broken' allele and one 'normal' allele
+have a bent nose, and that no individual who is homozygous for the 'normal'
+allele has a bent nose:
+</P>
+
+<P>
+<PRE>
+  <individual name="Mary">
+    <genotype-resolutions>
+      <trait-name> funny-nose </trait-name>
+         <haplotypes>
+            <penetrance> 1 </penetrance>
+            <alleles> broken broken </alleles>
+         </haplotypes>
+         <haplotypes>
+            <penetrance> 0.5 </penetrance>
+            <alleles> normal broken </alleles>
+         </haplotypes>
+         <haplotypes>
+            <penetrance> 0.5 </penetrance>
+            <alleles> broken normal </alleles>
+         </haplotypes>
+       </genotype-resolutions>
+</PRE></P>
+
+<P>Note that we must include both 'normal broken' as well as 'broken normal'
+for the heterozygote.</P>
+
+<P> Every individual will require such a block giving all
+possible haplotype resolutions of the trait data.</P>
+
+<P>(<A HREF="converter_cmd.html">Previous</A> | <A
+HREF="index.html">Contents</A> | <A HREF="menu.html">Next</A>)</P>
+
+<!--
+$Id: xmlinput.html,v 1.55 2012/05/29 18:59:45 ewalkup Exp $
+-->
+</BODY>
+</HTML>
diff --git a/doc/licenses/boost.txt b/doc/licenses/boost.txt
new file mode 100644
index 0000000..36b7cd9
--- /dev/null
+++ b/doc/licenses/boost.txt
@@ -0,0 +1,23 @@
+Boost Software License - Version 1.0 - August 17th, 2003
+
+Permission is hereby granted, free of charge, to any person or organization
+obtaining a copy of the software and accompanying documentation covered by
+this license (the "Software") to use, reproduce, display, distribute,
+execute, and transmit the Software, and to prepare derivative works of the
+Software, and to permit third-parties to whom the Software is furnished to
+do so, all subject to the following:
+
+The copyright notices in the Software and this entire statement, including
+the above license grant, this restriction and the following disclaimer,
+must be included in all copies of the Software, in whole or in part, and
+all derivative works of the Software, unless such copies or derivative
+works are solely in the form of machine-executable object code generated by
+a source language processor.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
+SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
+FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/doc/licenses/gpl.txt b/doc/licenses/gpl.txt
new file mode 100644
index 0000000..b8cf3a1
--- /dev/null
+++ b/doc/licenses/gpl.txt
@@ -0,0 +1,339 @@
+		    GNU GENERAL PUBLIC LICENSE
+		       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+                          675 Mass Ave, Cambridge, MA 02139, USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+			    Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Library General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must show them these terms so they know their
+rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary.  To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+		    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License.  The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language.  (Hereinafter, translation is included without limitation in
+the term "modification".)  Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+  1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+  2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    b) You must cause any work that you distribute or publish, that in
+    whole or in part contains or is derived from the Program or any
+    part thereof, to be licensed as a whole at no charge to all third
+    parties under the terms of this License.
+
+    c) If the modified program normally reads commands interactively
+    when run, you must cause it, when started running for such
+    interactive use in the most ordinary way, to print or display an
+    announcement including an appropriate copyright notice and a
+    notice that there is no warranty (or else, saying that you provide
+    a warranty) and that users may redistribute the program under
+    these conditions, and telling the user how to view a copy of this
+    License.  (Exception: if the Program itself is interactive but
+    does not normally print such an announcement, your work based on
+    the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of Sections
+    1 and 2 above on a medium customarily used for software interchange; or,
+
+    b) Accompany it with a written offer, valid for at least three
+    years, to give any third party, for a charge no more than your
+    cost of physically performing source distribution, a complete
+    machine-readable copy of the corresponding source code, to be
+    distributed under the terms of Sections 1 and 2 above on a medium
+    customarily used for software interchange; or,
+
+    c) Accompany it with the information you received as to the offer
+    to distribute corresponding source code.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form with such
+    an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it.  For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable.  However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+  5. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Program or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+  7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+			    NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+		     END OF TERMS AND CONDITIONS
+
+	Appendix: How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) 19yy  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) 19yy name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Library General
+Public License instead of this License.
diff --git a/doc/licenses/lamarc.txt b/doc/licenses/lamarc.txt
new file mode 100644
index 0000000..08d3f13
--- /dev/null
+++ b/doc/licenses/lamarc.txt
@@ -0,0 +1,14 @@
+Copyright 2002-2005 Mary K. Kuhner, Peter Beerli, and Joseph Felsenstein
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
diff --git a/doc/licenses/lgpl.txt b/doc/licenses/lgpl.txt
new file mode 100644
index 0000000..d43cdf0
--- /dev/null
+++ b/doc/licenses/lgpl.txt
@@ -0,0 +1,517 @@
+
+	  GNU LIBRARY GENERAL PUBLIC LICENSE
+	  ==================================
+                Version 2, June 1991
+
+ Copyright (C) 1991 Free Software Foundation, Inc.
+                    675 Mass Ave, Cambridge, MA 02139, USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+[This is the first released version of the library GPL.  It is
+ numbered 2 because it goes with version 2 of the ordinary GPL.]
+
+                        Preamble
+
+The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General
+Public Licenses are intended to guarantee your freedom to share
+and change free software--to make sure the software is free for
+all its users.
+
+This license, the Library General Public License, applies to
+some specially designated Free Software Foundation software, and
+to any other libraries whose authors decide to use it.  You can
+use it for your libraries, too.
+
+When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure
+that you have the freedom to distribute copies of free software
+(and charge for this service if you wish), that you receive
+source code or can get it if you want it, that you can change
+the software or use pieces of it in new free programs; and that
+you know you can do these things.
+
+To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the
+rights. These restrictions translate to certain responsibilities
+for you if you distribute copies of the library, or if you
+modify it.
+
+For example, if you distribute copies of the library, whether
+gratis or for a fee, you must give the recipients all the rights
+that we gave you.  You must make sure that they, too, receive or
+can get the source code.  If you link a program with the
+library, you must provide complete object files to the
+recipients so that they can relink them with the library, after
+making changes to the library and recompiling it.  And you must
+show them these terms so they know their rights.
+
+Our method of protecting your rights has two steps: (1)
+copyright the library, and (2) offer you this license which
+gives you legal permission to copy, distribute and/or modify the
+library.
+
+Also, for each distributor's protection, we want to make certain
+that everyone understands that there is no warranty for this
+free library.  If the library is modified by someone else and
+passed on, we want its recipients to know that what they have is
+not the original version, so that any problems introduced by
+others will not reflect on the original authors' reputations.
+ 
+Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that companies
+distributing free software will individually obtain patent
+licenses, thus in effect transforming the program into
+proprietary software.  To prevent this, we have made it clear
+that any patent must be licensed for everyone's free use or not
+licensed at all.
+
+Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License, which was designed for
+utility programs.  This license, the GNU Library General Public
+License, applies to certain designated libraries.  This license
+is quite different from the ordinary one; be sure to read it in
+full, and don't assume that anything in it is the same as in the
+ordinary license.
+
+The reason we have a separate public license for some libraries
+is that they blur the distinction we usually make between
+modifying or adding to a program and simply using it.  Linking a
+program with a library, without changing the library, is in some
+sense simply using the library, and is analogous to running a
+utility program or application program.  However, in a textual
+and legal sense, the linked executable is a combined work, a
+derivative of the original library, and the ordinary General
+Public License treats it as such.
+
+Because of this blurred distinction, using the ordinary General
+Public License for libraries did not effectively promote
+software sharing, because most developers did not use the
+libraries.  We concluded that weaker conditions might promote
+sharing better.
+
+However, unrestricted linking of non-free programs would deprive
+the users of those programs of all benefit from the free status
+of the libraries themselves.  This Library General Public
+License is intended to permit developers of non-free programs to
+use free libraries, while preserving your freedom as a user of
+such programs to change the free libraries that are incorporated
+in them.  (We have not seen how to achieve this as regards
+changes in header files, but we have achieved it as regards
+changes in the actual functions of the Library.)  The hope is
+that this will lead to faster development of free libraries.
+
+The precise terms and conditions for copying, distribution and
+modification follow.  Pay close attention to the difference
+between a "work based on the library" and a "work that uses the
+library".  The former contains code derived from the library,
+while the latter only works together with the library.
+
+Note that it is possible for a library to be covered by the
+ordinary General Public License rather than by this special one.
+
+                GNU LIBRARY GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+0. This License Agreement applies to any software library which
+contains a notice placed by the copyright holder or other
+authorized party saying it may be distributed under the terms of
+this Library General Public License (also called "this
+License").  Each licensee is addressed as "you".
+
+A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application
+programs (which use some of those functions and data) to form
+executables.
+
+The "Library", below, refers to any such software library or
+work which has been distributed under these terms.  A "work
+based on the Library" means either the Library or any derivative
+work under copyright law: that is to say, a work containing the
+Library or a portion of it, either verbatim or with
+modifications and/or translated straightforwardly into another
+language.  (Hereinafter, translation is included without
+limitation in the term "modification".)
+
+"Source code" for a work means the preferred form of the work
+for making modifications to it.  For a library, complete source
+code means all the source code for all modules it contains, plus
+any associated interface definition files, plus the scripts used
+to control compilation and installation of the library.
+
+Activities other than copying, distribution and modification are
+not covered by this License; they are outside its scope.  The
+act of running a program using the Library is not restricted,
+and output from such a program is covered only if its contents
+constitute a work based on the Library (independent of the use
+of the Library in a tool for writing it).  Whether that is true
+depends on what the Library does and what the program that uses
+the Library does.
+  
+1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided
+that you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep
+intact all the notices that refer to this License and to the
+absence of any warranty; and distribute a copy of this License
+along with the Library.
+
+You may charge a fee for the physical act of transferring a
+copy, and you may at your option offer warranty protection in
+exchange for a fee.
+ 
+2. You may modify your copy or copies of the Library or any
+portion of it, thus forming a work based on the Library, and
+copy and distribute such modifications or work under the terms
+of Section 1 above, provided that you also meet all of these
+conditions:
+
+    a) The modified work must itself be a software library.
+
+    b) You must cause the files modified to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    c) You must cause the whole of the work to be licensed at no
+    charge to all third parties under the terms of this License.
+
+    d) If a facility in the modified Library refers to a function or a
+    table of data to be supplied by an application program that uses
+    the facility, other than as an argument passed when the facility
+    is invoked, then you must make a good faith effort to ensure that,
+    in the event an application does not supply such function or
+    table, the facility still operates, and performs whatever part of
+    its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has
+    a purpose that is entirely well-defined independent of the
+    application.  Therefore, Subsection 2d requires that any
+    application-supplied function or table used by this function must
+    be optional: if the application does not supply it, the square
+    root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the
+Library, and can be reasonably considered independent and
+separate works in themselves, then this License, and its terms,
+do not apply to those sections when you distribute them as
+separate works.  But when you distribute the same sections as
+part of a whole which is a work based on the Library, the
+distribution of the whole must be on the terms of this License,
+whose permissions for other licensees extend to the entire
+whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or
+contest your rights to work written entirely by you; rather, the
+intent is to exercise the right to control the distribution of
+derivative or collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the
+Library with the Library (or with a work based on the Library)
+on a volume of a storage or distribution medium does not bring
+the other work under the scope of this License.
+
+3. You may opt to apply the terms of the ordinary GNU General
+Public License instead of this License to a given copy of the
+Library.  To do this, you must alter all the notices that refer
+to this License, so that they refer to the ordinary GNU General
+Public License, version 2, instead of to this License.  (If a
+newer version than version 2 of the ordinary GNU General Public
+License has appeared, then you can specify that version instead
+if you wish.)  Do not make any other change in these notices.
+ 
+Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to
+all subsequent copies and derivative works made from that copy.
+
+This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable
+form under the terms of Sections 1 and 2 above provided that you
+accompany it with the complete corresponding machine-readable
+source code, which must be distributed under the terms of
+Sections 1 and 2 above on a medium customarily used for software
+interchange.
+
+If distribution of object code is made by offering access to
+copy from a designated place, then offering equivalent access to
+copy the source code from the same place satisfies the
+requirement to distribute the source code, even though third
+parties are not compelled to copy the source along with the
+object code.
+
+5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being
+compiled or linked with it, is called a "work that uses the
+Library".  Such a work, in isolation, is not a derivative work
+of the Library, and therefore falls outside the scope of this
+License.
+
+However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library
+(because it contains portions of the Library), rather than a
+"work that uses the library".  The executable is therefore
+covered by this License. Section 6 states terms for distribution
+of such executables.
+
+When a "work that uses the Library" uses material from a header
+file that is part of the Library, the object code for the work
+may be a derivative work of the Library even though the source
+code is not. Whether this is true is especially significant if
+the work can be linked without the Library, or if the work is
+itself a library.  The threshold for this to be true is not
+precisely defined by law.
+
+If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small
+inline functions (ten lines or less in length), then the use of
+the object file is unrestricted, regardless of whether it is
+legally a derivative work.  (Executables containing this object
+code plus portions of the Library will still fall under Section
+6.)
+
+Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of
+Section 6. Any executables containing that work also fall under
+Section 6, whether or not they are linked directly with the
+Library itself.
+ 
+6. As an exception to the Sections above, you may also compile
+or link a "work that uses the Library" with the Library to
+produce a work containing portions of the Library, and
+distribute that work under terms of your choice, provided that
+the terms permit modification of the work for the customer's own
+use and reverse engineering for debugging such modifications.
+
+You must give prominent notice with each copy of the work that
+the Library is used in it and that the Library and its use are
+covered by this License.  You must supply a copy of this
+License.  If the work during execution displays copyright
+notices, you must include the copyright notice for the Library
+among them, as well as a reference directing the user to the
+copy of this License.  Also, you must do one of these things:
+
+    a) Accompany the work with the complete corresponding
+    machine-readable source code for the Library including whatever
+    changes were used in the work (which must be distributed under
+    Sections 1 and 2 above); and, if the work is an executable linked
+    with the Library, with the complete machine-readable "work that
+    uses the Library", as object code and/or source code, so that the
+    user can modify the Library and then relink to produce a modified
+    executable containing the modified Library.  (It is understood
+    that the user who changes the contents of definitions files in the
+    Library will not necessarily be able to recompile the application
+    to use the modified definitions.)
+
+    b) Accompany the work with a written offer, valid for at
+    least three years, to give the same user the materials
+    specified in Subsection 6a, above, for a charge no more
+    than the cost of performing this distribution.
+
+    c) If distribution of the work is made by offering access to copy
+    from a designated place, offer equivalent access to copy the above
+    specified materials from the same place.
+
+    d) Verify that the user has already received a copy of these
+    materials or that you have already sent this user a copy.
+
+For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it.  However, as a special
+exception, the source code distributed need not include anything
+that is normally distributed (in either source or binary form)
+with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that
+component itself accompanies the executable.
+
+It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system.  Such a contradiction means you
+cannot use both them and the Library together in an executable
+that you distribute.
+ 
+7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other
+library facilities not covered by this License, and distribute
+such a combined library, provided that the separate distribution
+of the work based on the Library and of the other library
+facilities is otherwise permitted, and provided that you do
+these two things:
+
+    a) Accompany the combined library with a copy of the same work
+    based on the Library, uncombined with any other library
+    facilities.  This must be distributed under the terms of the
+    Sections above.
+
+    b) Give prominent notice with the combined library of the fact
+    that part of it is a work based on the Library, and explaining
+    where to find the accompanying uncombined form of the same work.
+
+8. You may not copy, modify, sublicense, link with, or
+distribute the Library except as expressly provided under this
+License.  Any attempt otherwise to copy, modify, sublicense,
+link with, or distribute the Library is void, and will
+automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you
+under this License will not have their licenses terminated so
+long as such parties remain in full compliance.
+
+9. You are not required to accept this License, since you have
+not signed it.  However, nothing else grants you permission to
+modify or distribute the Library or its derivative works.  These
+actions are prohibited by law if you do not accept this
+License.  Therefore, by modifying or distributing the Library
+(or any work based on the Library), you indicate your acceptance
+of this License to do so, and all its terms and conditions for
+copying, distributing or modifying the Library or works based on
+it.
+
+10. Each time you redistribute the Library (or any work based on
+the Library), the recipient automatically receives a license
+from the original licensor to copy, distribute, link with or
+modify the Library subject to these terms and conditions.  You
+may not impose any further restrictions on the recipients'
+exercise of the rights granted herein. You are not responsible
+for enforcing compliance by third parties to this License.
+ 
+11. If, as a consequence of a court judgment or allegation of
+patent infringement or for any other reason (not limited to
+patent issues), conditions are imposed on you (whether by court
+order, agreement or otherwise) that contradict the conditions of
+this License, they do not excuse you from the conditions of this
+License.  If you cannot distribute so as to satisfy
+simultaneously your obligations under this License and any other
+pertinent obligations, then as a consequence you may not
+distribute the Library at all.  For example, if a patent license
+would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you,
+then the only way you could satisfy both it and this License
+would be to refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable
+under any particular circumstance, the balance of the section is
+intended to apply, and the section as a whole is intended to
+apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe
+any patents or other property right claims or to contest
+validity of any such claims; this section has the sole purpose
+of protecting the integrity of the free software distribution
+system which is implemented by public license practices.  Many
+people have made generous contributions to the wide range of
+software distributed through that system in reliance on
+consistent application of that system; it is up to the
+author/donor to decide if he or she is willing to distribute
+software through any other system and a licensee cannot impose
+that choice.
+
+This section is intended to make thoroughly clear what is
+believed to be a consequence of the rest of this License.
+
+12. If the distribution and/or use of the Library is restricted
+in certain countries either by patents or by copyrighted
+interfaces, the original copyright holder who places the Library
+under this License may add an explicit geographical distribution
+limitation excluding those countries, so that distribution is
+permitted only in or among countries not thus excluded.  In such
+case, this License incorporates the limitation as if written in
+the body of this License.
+
+13. The Free Software Foundation may publish revised and/or new
+versions of the Library General Public License from time to
+time. Such new versions will be similar in spirit to the present
+version, but may differ in detail to address new problems or
+concerns.
+
+Each version is given a distinguishing version number.  If the
+Library specifies a version number of this License which applies
+to it and "any later version", you have the option of following
+the terms and conditions either of that version or of any later
+version published by the Free Software Foundation.  If the
+Library does not specify a license version number, you may
+choose any version ever published by the Free Software
+Foundation.
+
+14. If you wish to incorporate parts of the Library into other
+free programs whose distribution conditions are incompatible
+with these, write to the author to ask for permission.  For
+software which is copyrighted by the Free Software Foundation,
+write to the Free Software Foundation; we sometimes make
+exceptions for this.  Our decision will be guided by the two
+goals of preserving the free status of all derivatives of our
+free software and of promoting the sharing and reuse of software
+generally.
+
+                           NO WARRANTY
+
+  15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND,
+EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU.  SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL
+DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+                    END OF TERMS AND CONDITIONS
+
+ Appendix: How to Apply These Terms to Your New Libraries
+
+If you develop a new library, and you want it to be of the
+greatest possible use to the public, we recommend making it free
+software that everyone can redistribute and change.  You can do
+so by permitting redistribution under these terms (or,
+alternatively, under the terms of the ordinary General Public
+License).
+
+To apply these terms, attach the following notices to the
+library.  It is safest to attach them to the start of each
+source file to most effectively convey the exclusion of
+warranty; and each file should have at least the "copyright"
+line and a pointer to where the full notice is found.
+
+    <one line to give the library's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Library General Public
+    License as published by the Free Software Foundation; either
+    version 2 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Library General Public License for more details.
+
+    You should have received a copy of the GNU Library General Public
+    License along with this library; if not, write to the Free
+    Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the
+  library `Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+  <signature of Ty Coon>, 1 April 1990
+  Ty Coon, President of Vice
+
+That's all there is to it!
+
diff --git a/doc/licenses/mingw.txt b/doc/licenses/mingw.txt
new file mode 100644
index 0000000..96b8c80
--- /dev/null
+++ b/doc/licenses/mingw.txt
@@ -0,0 +1,23 @@
+MinGW - Licensing Terms
+
+Various pieces distributed with MinGW come with their own copyright 
+and license:
+
+Basic MinGW runtime
+    MinGW base runtime package is uncopyrighted and placed in the 
+    public domain. This basically means that you can do what you 
+    want with the code.
+
+w32api
+    You are free to use, modify and copy this package. No restrictions 
+    are imposed on programs or object files compiled with this library. 
+    You may not restrict the usage of this library. You may distribute
+    this library as part of another package or as a modified package if 
+    and only if you do not restrict the usage of the portions consisting 
+    of this (optionally modified) library. If distributed as a modified 
+    package then this file must be included.
+
+    This library is distributed in the hope that it will be useful, but 
+    WITHOUT ANY WARRANTY; without even the implied warranty of 
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
diff --git a/doc/licenses/wx.txt b/doc/licenses/wx.txt
new file mode 100644
index 0000000..c91deed
--- /dev/null
+++ b/doc/licenses/wx.txt
@@ -0,0 +1,53 @@
+                wxWindows Library Licence, Version 3
+                ====================================
+
+  Copyright (c) 1998 Julian Smart, Robert Roebling et al
+
+  Everyone is permitted to copy and distribute verbatim copies
+  of this licence document, but changing it is not allowed.
+
+                       WXWINDOWS LIBRARY LICENCE
+     TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+  
+  This library is free software; you can redistribute it and/or modify it
+  under the terms of the GNU Library General Public Licence as published by
+  the Free Software Foundation; either version 2 of the Licence, or (at
+  your option) any later version.
+  
+  This library is distributed in the hope that it will be useful, but
+  WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Library
+  General Public Licence for more details.
+
+  You should have received a copy of the GNU Library General Public Licence
+  along with this software, usually in a file named COPYING.LIB.  If not,
+  write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,
+  Boston, MA 02111-1307 USA.
+
+  EXCEPTION NOTICE
+
+  1. As a special exception, the copyright holders of this library give
+  permission for additional uses of the text contained in this release of
+  the library as licenced under the wxWindows Library Licence, applying
+  either version 3 of the Licence, or (at your option) any later version of
+  the Licence as published by the copyright holders of version 3 of the
+  Licence document.
+
+  2. The exception is that you may use, copy, link, modify and distribute
+  under the user's own terms, binary object code versions of works based
+  on the Library.
+
+  3. If you copy code from files distributed under the terms of the GNU
+  General Public Licence or the GNU Library General Public Licence into a
+  copy of this library, as this licence permits, the exception does not
+  apply to the code that you add in this way.  To avoid misleading anyone as
+  to the status of such modified files, you must delete this exception
+  notice from such code and/or adjust the licensing conditions notice
+  accordingly.
+
+  4. If you write modifications of your own for this library, it is your
+  choice whether to permit this exception to apply to your modifications. 
+  If you do not wish that, you must delete the exception notice from such
+  code and/or adjust the licensing conditions notice accordingly.
+
+
diff --git a/doc/licenses/wxdoc.txt b/doc/licenses/wxdoc.txt
new file mode 100644
index 0000000..5bfa143
--- /dev/null
+++ b/doc/licenses/wxdoc.txt
@@ -0,0 +1,60 @@
+                wxWindows Free Documentation Licence, Version 3
+                ===============================================
+
+  Copyright (c) 1998 Julian Smart, Robert Roebling et al
+
+  Everyone is permitted to copy and distribute verbatim copies
+  of this licence document, but changing it is not allowed.
+   
+                   WXWINDOWS FREE DOCUMENTATION LICENCE
+     TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  1. Permission is granted to make and distribute verbatim copies of this
+  manual or piece of documentation provided any copyright notice and this
+  permission notice are preserved on all copies.
+
+  2. Permission is granted to process this file or document through a
+  document processing system and, at your option and the option of any third
+  party, print the results, provided a printed document carries a copying
+  permission notice identical to this one.
+
+  3. Permission is granted to copy and distribute modified versions of this
+  manual or piece of documentation under the conditions for verbatim
+  copying, provided also that any sections describing licensing conditions
+  for this manual, such as, in particular, the GNU General Public Licence,
+  the GNU Library General Public Licence, and any wxWindows Licence are
+  included exactly as in the original, and provided that the entire
+  resulting derived work is distributed under the terms of a permission
+  notice identical to this one.
+
+  4. Permission is granted to copy and distribute translations of this
+  manual or piece of documentation into another language, under the above
+  conditions for modified versions, except that sections related to
+  licensing, including this paragraph, may also be included in translations
+  approved by the copyright holders of the respective licence documents in
+  addition to the original English.
+
+                            WARRANTY DISCLAIMER
+
+  5. BECAUSE THIS MANUAL OR PIECE OF DOCUMENTATION IS LICENSED FREE OF CHARGE,
+  THERE IS NO WARRANTY FOR IT, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
+  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER
+  PARTIES PROVIDE THIS MANUAL OR PIECE OF DOCUMENTATION "AS IS" WITHOUT
+  WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+  PARTICULAR PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF
+  THE MANUAL OR PIECE OF DOCUMENTATION IS WITH YOU.  SHOULD THE MANUAL OR
+  PIECE OF DOCUMENTATION PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL
+  NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  6. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL
+  ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+  REDISTRIBUTE THE MANUAL OR PIECE OF DOCUMENTATION AS PERMITTED ABOVE, BE
+  LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+  CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+  MANUAL OR PIECE OF DOCUMENTATION (INCLUDING BUT NOT LIMITED TO LOSS OF
+  DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+  PARTIES OR A FAILURE OF A PROGRAM BASED ON THE MANUAL OR PIECE OF
+  DOCUMENTATION TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR
+  OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
diff --git a/doc/licenses/zlib.txt b/doc/licenses/zlib.txt
new file mode 100644
index 0000000..90563be
--- /dev/null
+++ b/doc/licenses/zlib.txt
@@ -0,0 +1,21 @@
+The zlib/libpng License
+
+This software is provided 'as-is', without any express or implied 
+warranty. In no event will the authors be held liable for any 
+damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any 
+purpose, including commercial applications, and to alter it and 
+redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must 
+not claim that you wrote the original software. If you use this 
+software in a product, an acknowledgment in the product documentation 
+would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and 
+must not be misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source 
+distribution.
+
diff --git a/doc/testfiles/infile.2pop b/doc/testfiles/infile.2pop
new file mode 100644
index 0000000..7db014a
--- /dev/null
+++ b/doc/testfiles/infile.2pop
@@ -0,0 +1,786 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>100</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>1000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.2pop</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.2pop</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.2pop</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.0016436 0.0011111</start-values>
+      <method> WATTERSON WATTERSON</method>
+      <max-events>32000</max-events>
+      <profiles> percentile percentile </profiles>
+      <constraints> unconstrained unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+    <migration>
+      <start-values> 0 100 100 0</start-values>
+      <method> PROGRAMDEFAULT USER USER PROGRAMDEFAULT</method>
+      <max-events>10000</max-events>
+      <profiles> none none none none </profiles>
+      <constraints> invalid unconstrained unconstrained invalid </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 0.01 </lower>
+        <upper> 1000 </upper>
+      </prior>
+    </migration>
+    <recombination>
+      <start-values> 0.01</start-values>
+      <method> USER</method>
+      <max-events>1000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </recombination>
+  </forces>
+  <data>
+    <region name="lpl">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>9734</length>
+          <locations> 105 109 144 324 342 478 550 735 1215 1219 1285 1546 1570 1755 1827 1938 1960 2130 2499 2588 2618 2637 2848 2986 2995 3021 3247 3289 3296 3552 3608 3722 3842 4015 4201 4342 4345 4417 4425 4508 4575 4822 4871 4906 4934 5084 5167 5370 5394 5440 5553 5559 5686 6175 6195 6202 6249 6282 6594 6677 6717 6759 6771 6792 6839 6862 6938 7311 7340 7356 7409 7750 8084 8085 8281 8288 8389 8498 8529 8533 8534 8640 8751 8848 9036 9398 9708 9717</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="Jackson">
+      <individual name="J01-1     ">
+        <sample name="J01-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J01-3     ">
+        <sample name="J01-3     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J02-3     ">
+        <sample name="J02-3     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J02-57    ">
+        <sample name="J02-57    ">
+          <datablock type="SNP">
+            CCGTNTNTGCCACACAATACACAGCGGNNCTTGGGACCCTANGCCACGNTCGCGCACCGGGGGTTCNCGATANGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J03-8     ">
+        <sample name="J03-8     ">
+          <datablock type="SNP">
+            CAGTNCNCGCCACACAACGCGCAGCGCNNCTTGCGACCCTANGCCACANTCGCGTACCGGGGGTTCNCGATCNGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J03-17    ">
+        <sample name="J03-17    ">
+          <datablock type="SNP">
+            CCGTNTNTGCCACACAATACACAGCGCNNCCTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J04-18    ">
+        <sample name="J04-18    ">
+          <datablock type="SNP">
+            CAATNTNTCTCACACAACGCACATCGCNNCTTGCGAGCTTANGCCGCGNNCGCGCACCCGGGATTTNCAATANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J04-25    ">
+        <sample name="J04-25    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACATCGCNNCTTGCGACCTTANGCCGCGNNCGCGCACCCGGGATTTNCAATANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J05-43    ">
+        <sample name="J05-43    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J05-45    ">
+        <sample name="J05-45    ">
+          <datablock type="SNP">
+            CAGCNTNTCTCACACAACGCACAGCGCNNCTTGCGACCTTTNGCCGCGNTCGCGCACCGGGGATTCNGAATCNTCAGGCANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J06-2     ">
+        <sample name="J06-2     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCATATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J06-8     ">
+        <sample name="J06-8     ">
+          <datablock type="SNP">
+            CAGTNCNCGCCACACAACGGGCAGCGCNNCTTGCGACCCTANGCCACANTCGCGCACCGGGGGTTCNCGATCNGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J07-64    ">
+        <sample name="J07-64    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACATCGCACATCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGGAGATTCNGAATCNGCATGAANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J07-12    ">
+        <sample name="J07-12    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACATCGCNNCTTGCGAGCTTANACTGCGNCCGCGCACCCTGGATTTNCAATANGGAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J08-13    ">
+        <sample name="J08-13    ">
+          <datablock type="SNP">
+            AAGTNTNTGCTACACAATACACAGCACNNCTTGCGACCTTANGCNNNGNTCGCGCACCGGGGATTCNGAATCNTCAGGNNNCGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J08-14    ">
+        <sample name="J08-14    ">
+          <datablock type="SNP">
+            CAGTNCNTGCCACACAACGCGCAGCGCNNCTTGCGACCCTANGCNNNGNTCGCGCATCGGGGGTTCNCGATCNGCAGCNNNTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J09-3     ">
+        <sample name="J09-3     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J09-15    ">
+        <sample name="J09-15    ">
+          <datablock type="SNP">
+            CAGTNCNTGCCACAGGATGCGCAGCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J10-16    ">
+        <sample name="J10-16    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCNNACAACGCACAGCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGCANTGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J10-11    ">
+        <sample name="J10-11    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCNNACAATGCACATCGCNNCCTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J11-1     ">
+        <sample name="J11-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCNNNNGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J11-4     ">
+        <sample name="J11-4     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCNNNNGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J12-19    ">
+        <sample name="J12-19    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACAGCGCNNCTTGCGTCTTTANACNGTGNTAGTGCGTCGTAGATTCNGAATCNGCATGNNNTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J12-20    ">
+        <sample name="J12-20    ">
+          <datablock type="SNP">
+            CAATNCNTCTCACAGGATGCGCATCGCNNCCTGCGAGCTTANGCNGCGNTCGCGCACCCGGGATTCNCAATANGCAGGNNNTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J13-21    ">
+        <sample name="J13-21    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCCTTANACNGCGNCCGCGCACCCGGGATTTNCAATANGCAGGCCNTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J13-22    ">
+        <sample name="J13-22    ">
+          <datablock type="SNP">
+            CCGTNTNTGCCACACAATACACAGCGGNNCTTGGGACCCTANACNACGNTCGCGCACCGGGGGTTCNCGATANGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J14-23    ">
+        <sample name="J14-23    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACATCGCNNCTTGCGACTTTANACNGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J14-24    ">
+        <sample name="J14-24    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAATGCACAGCGCNNCTTGCGTCCTTANACNGTGNTAGTGCATCGTAGATTCNGAGTCNGGATGAANTGTCGGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J15-10    ">
+        <sample name="J15-10    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACAGCGCNNCTTGCGACTTTANACTGNGNTAGTGCATCGTAGATTCNGAATCNGCATGCANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J15-26    ">
+        <sample name="J15-26    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAATGCACATCGCNNCTTGCGTCCTTANACTGNGNTAGCGCACCCGGGATTTNCAATANGGAGGAANTGGCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J16-27    ">
+        <sample name="J16-27    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATCGCNNCTCGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J16-28    ">
+        <sample name="J16-28    ">
+          <datablock type="SNP">
+            CAGCNTNTCTCACACAACGCACATCGCNNCCTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J17-8     ">
+        <sample name="J17-8     ">
+          <datablock type="SNP">
+            CAGTNCNCGCCACACAACGCGCAGCGCNNCTTGCGACCCTANGCCACGNTCGCGCACCGGGGGTTCNCGATCNGCAGCCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J17-29    ">
+        <sample name="J17-29    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATCGCNNCTTGCGACCCTANGCCACGNTCGCGCATCGGGGGTTCNCGATANGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J18-3     ">
+        <sample name="J18-3     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACATCTTTANACTGTGNTAGTGCATCGTAAATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J18-30    ">
+        <sample name="J18-30    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACAGCGCNNCTTGCGACTTTANACTGTGNTCGTGCATCGTAGATTCNGAATCNGCATGCANTGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J19-31    ">
+        <sample name="J19-31    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACAGCGCNNCTTGCGTCTTTANNCNGNGNTAGTGCATCGNAGATTCNGAATCNGCATGCANTGNCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J19-32    ">
+        <sample name="J19-32    ">
+          <datablock type="SNP">
+            CAGCNCNTCTCACTCAATGCACAGCGCNNCTTGGGACCTTANNCNGNGNCCGCGCACCCNGGATTTNCAATANGCAGGAANTGNCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J20-33    ">
+        <sample name="J20-33    ">
+          <datablock type="SNP">
+            CAGTNCNTGCCACAGGATGCGCAGCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J20-34    ">
+        <sample name="J20-34    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATCGCNNCTCGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGGATGAANTGTCGGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J21-35    ">
+        <sample name="J21-35    ">
+          <datablock type="SNP">
+            CCGTNTNTGCCACACAATACACAGCGGNNCTTGGAACCCTANANNACGNTAGCGCACCGGGAGTTCNCGATCNGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J21-36    ">
+        <sample name="J21-36    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCCCACAATGCACAGCGCNNCTTGCGACTTTANANNGTGNTAGTGCATCGTAGATTCNGAATCNGCATGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J22-11    ">
+        <sample name="J22-11    ">
+          <datablock type="SNP">
+            CAGTNTNTNTCCCACAATGCACATCGCNNTCTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J22-37    ">
+        <sample name="J22-37    ">
+          <datablock type="SNP">
+            CAGTNTNTNTCACACAACGCACATCGCNNCTTGCGTCTTTANACTGTGNCAGTGCATCGTAGATTCNGAATCNGGATGAANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J23-10    ">
+        <sample name="J23-10    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACAGCGCNNTTTGCGACTTTANACTGTGNTAGTGCATCGNAGATTCNGAATCNGCATGCANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J23-38    ">
+        <sample name="J23-38    ">
+          <datablock type="SNP">
+            AAGTNTNTGCTACACAATACACATCACNNCTTGCGTCCTTTNACTGCGNTCGCGCACCGNGGATTCNGAATANGGAGGAANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J24-39    ">
+        <sample name="J24-39    ">
+          <datablock type="SNP">
+            CAATNTNTCTCACACATCGCACATCGCNNCTTGCGAGCTTANGCCGCGNNCGCGCACCCNGGATTCNCAATCNGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J24-40    ">
+        <sample name="J24-40    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACATCGCNNCCTGCGTCTTTANACTGTGNNAGTGCATCGNAGATTCNGAATCNGGATGAANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="NorthKarelia">
+      <individual name="N01-1     ">
+        <sample name="N01-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N01-41    ">
+        <sample name="N01-41    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTGTTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N02-1     ">
+        <sample name="N02-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N02-42    ">
+        <sample name="N02-42    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTGCTTANACTGCGNTAGTGCATCGTAGATTCNGAGCCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N03-5     ">
+        <sample name="N03-5     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N03-44    ">
+        <sample name="N03-44    ">
+          <datablock type="SNP">
+            CAATNTNTCTCACACAACACACATCGCNNCTTGCGACCTCANACTGCGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N04-9     ">
+        <sample name="N04-9     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACNGCGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N04-46    ">
+        <sample name="N04-46    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGAGCTCANGCNGCGNCCGCGCATCCGGGATTTNCAGTANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N05-4     ">
+        <sample name="N05-4     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N05-47    ">
+        <sample name="N05-47    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGACCTTTNGCCGCGNTCGCGCACCGGGGATTCNGAGTCNTCAGGCANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N06-5     ">
+        <sample name="N06-5     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N06-48    ">
+        <sample name="N06-48    ">
+          <datablock type="SNP">
+            CAATNTNTCTCACACAACGCACATCGCNNCTTACGAGCTTANGCCGCGNTCGCGCATCCGGGATTTNCAGTANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N07-2     ">
+        <sample name="N07-2     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNNAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N07-49    ">
+        <sample name="N07-49    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACACACATCGCNNCTCGCGACCTCANGCCGCGNNCACGCATCCGGGATTTNCAGTANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N08-2     ">
+        <sample name="N08-2     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N08-50    ">
+        <sample name="N08-50    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCCTTANGCCGCGNCCGCGCATCCGGGATTTNCAGTANGCAGGCCNTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N09-5     ">
+        <sample name="N09-5     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N09-51    ">
+        <sample name="N09-51    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCCTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N10-1     ">
+        <sample name="N10-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACNTCANACTGTGNTAGTGCATCNTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N10-4     ">
+        <sample name="N10-4     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCNTTANACTGTGNTAGTGCATCNTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N11-52    ">
+        <sample name="N11-52    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N11-2     ">
+        <sample name="N11-2     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N12-1     ">
+        <sample name="N12-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N12-53    ">
+        <sample name="N12-53    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTCGCGAGCTCANACTGTGNTAATGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N13-54    ">
+        <sample name="N13-54    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCTTTANACTGTGNTAGTGCACCGTAGAGCCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N13-55    ">
+        <sample name="N13-55    ">
+          <datablock type="SNP">
+            CAGCNTNTGCTACACAATACACAGCACNNCTTGCGACCTTANGCCGCGNTCGCGCACCGGGGATTCNGAGTCNTCAGGCANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N14-1     ">
+        <sample name="N14-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N14-56    ">
+        <sample name="N14-56    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGAGCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANCATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N15-7     ">
+        <sample name="N15-7     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATCGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N15-58    ">
+        <sample name="N15-58    ">
+          <datablock type="SNP">
+            AAGTNTNTGCTACACAATACACAGCACNNCTTGCGACCTTTNGCCGCGNTCGCGCACCGGGGATTCNGAGTCNTCCGGCANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N16-1     ">
+        <sample name="N16-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N16-59    ">
+        <sample name="N16-59    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTCGCGACCTCANACTGTGNTAATGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N17-4     ">
+        <sample name="N17-4     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N17-60    ">
+        <sample name="N17-60    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCCTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N18-1     ">
+        <sample name="N18-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N18-5     ">
+        <sample name="N18-5     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N19-3     ">
+        <sample name="N19-3     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N19-61    ">
+        <sample name="N19-61    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCCTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N20-1     ">
+        <sample name="N20-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N20-7     ">
+        <sample name="N20-7     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATCGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N21-62    ">
+        <sample name="N21-62    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANGCCGCGNCCGCGCACCCGGGATTTNCAATANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N21-63    ">
+        <sample name="N21-63    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTTTNGCCGCGNTCGCGCACCGGGGATTCNGAATCNTCAGGCANCGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N22-6     ">
+        <sample name="N22-6     ">
+          <datablock type="SNP">
+            AAGTNTNTNNTACACAATACACAGCACNNCTTGCGTCCTTANGCCGCGNCCGCGCACCNGGGATTTNCAATANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N22-65    ">
+        <sample name="N22-65    ">
+          <datablock type="SNP">
+            CAGTNTNCNNCAGACAACGCACATCGCNNCTTGCGTCCTCANGCCGCGNCCGCGCACCNGGGATTTNCAATANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N23-4     ">
+        <sample name="N23-4     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N23-66    ">
+        <sample name="N23-66    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGACCTTTNGCCGCGNTCGCGCACCGGGGATTCNGAGTCNTCCGGCANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N24-9     ">
+        <sample name="N24-9     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGCGNTAGNGCATCGTAGATTCNGAGTCNGCATGNNNTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N24-67    ">
+        <sample name="N24-67    ">
+          <datablock type="SNP">
+            CAATNTNTCTCACACAACGCACATCGCNNCTTGCGAGCTTANGCCGCGNTAGNGCATCCGGGATTTNCAGTANGCAGGNNNTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile.baselocus b/doc/testfiles/infile.baselocus
new file mode 100644
index 0000000..e1b66bb
--- /dev/null
+++ b/doc/testfiles/infile.baselocus
@@ -0,0 +1,158 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>10</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>10000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>3001</seed>
+    <verbosity>normal</verbosity>
+    <progress-reports>normal</progress-reports>
+    <results-file>outfile</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile</in-summary-file>
+    <use-out-summary>false</use-out-summary>
+    <out-summary-file>outsumfile</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01</start-values>
+      <method> USER</method>
+      <max-events>100000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+  </forces>
+  <data>
+    <region name="jointloci">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> 0.25 0.25 0.25 0.25</base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>100</length>
+          <locations> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="seattle">
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="DNA">
+            CAAGCCGTTTGGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGAGACATTCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="DNA">
+            CAAGCAATTTCGCCCAGGGCTAGGGGGTGCCGGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGAGACATTCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="DNA">
+            CAAACAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGAGACATTCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGAGACATGCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGAGACATGCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGCTTAGAAGGGTAACCGAGACATGCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGGGACATGCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGGGGCATGCCACAACTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTTGCCCATGTGTGATTAGAAGGGTAACCGAGACATGCCACAACTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGGTGGATTTTATTCGCTAGCCCACGTGTGATTAGAAGGGTAACCGAGACATGCCACAACTA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile.bayes b/doc/testfiles/infile.bayes
new file mode 100644
index 0000000..0c76e2a
--- /dev/null
+++ b/doc/testfiles/infile.bayes
@@ -0,0 +1,130 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>Yes</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.454545</resimulating>
+      <tree-size>0.0909091</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+      <bayesian>0.454545</bayesian>
+    </strategy>
+    <initial>
+      <number>3</number>
+      <samples>200</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>1</number>
+      <samples>500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.bayes</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.bayes</in-summary-file>
+    <use-out-summary>false</use-out-summary>
+    <out-summary-file>outsumfile.bayes</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01</start-values>
+      <method> USER</method>
+      <max-events>1000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+  </forces>
+  <data>
+    <region name="region 1">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>68</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="Population JYRM">
+      <individual name="germ_10   ">
+        <sample name="germ_10   ">
+          <datablock type="DNA">
+            TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_11   ">
+        <sample name="germ_11   ">
+          <datablock type="DNA">
+            AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_20   ">
+        <sample name="germ_20   ">
+          <datablock type="DNA">
+            ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_21   ">
+        <sample name="germ_21   ">
+          <datablock type="DNA">
+            ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_30   ">
+        <sample name="germ_30   ">
+          <datablock type="DNA">
+            ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_31   ">
+        <sample name="germ_31   ">
+          <datablock type="DNA">
+            ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile.coalgrowmig b/doc/testfiles/infile.coalgrowmig
new file mode 100644
index 0000000..979a0ba
--- /dev/null
+++ b/doc/testfiles/infile.coalgrowmig
@@ -0,0 +1,394 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>5</number>
+      <samples>1000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>2500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.coalgrowmig</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.coalgrowmig</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.coalgrowmig</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01 0.01</start-values>
+      <method> USER USER</method>
+      <max-events>32000</max-events>
+      <profiles> percentile percentile </profiles>
+      <constraints> unconstrained unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+    <migration>
+      <start-values> 0 100 100 0</start-values>
+      <method> PROGRAMDEFAULT FST FST PROGRAMDEFAULT</method>
+      <max-events>10000</max-events>
+      <profiles> none percentile percentile none </profiles>
+      <constraints> invalid unconstrained unconstrained invalid </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 0.01 </lower>
+        <upper> 1000 </upper>
+      </prior>
+    </migration>
+    <growth type="CURVE">
+      <start-values> 1 1</start-values>
+      <method> USER USER</method>
+      <max-events>10000</max-events>
+      <profiles> percentile percentile </profiles>
+      <constraints> unconstrained unconstrained </constraints>
+      <prior type="linear">
+        <paramindex> all </paramindex>
+        <lower> -500 </lower>
+        <upper> 1000 </upper>
+      </prior>
+    </growth>
+  </forces>
+  <data>
+    <region name="coalgrowmig">
+      <model name="GTR">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>3</num-categories>
+          <rates> 0.059438 0.482404 2.41527</rates>
+          <probabilities> 0.33 0.33 0.34</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> 0.23467 0.25993 0.24313 0.26227</base-freqs>
+        <gtr-rates> 0.7403 4.3727 1.00429 1.67422 3.89754 1</gtr-rates>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1000</length>
+          <locations> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144  [...]
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="  Popmig0">
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCTGACACAAGAGGCGGATACTTTAGCTATAAGAACCTCTCATGGCCCGTCACGAACAACGATACCGCGGGTCTAGAATCTGACGCTCGAAAGCCGTTTGCGGCCTAAGAACGATGGTCTTACTTGGTAGCTCTTAAAACCAACTAGGTGAACCATAGGACTCTAAACCCGGTATCCACAACGCTGCACATTCTCTGCACCCAATCGCCAGATAACATGTCCAATTTTCAGGTGCACGTGCGTAGCTCACGGATCGCCCCGACTTGGATGACGCTTAACTGGTTAATTTCGCGCGGTCGCCACTTCAGCTTTTACATGCAATTTGGCTTGATTCATCGGTTAGCAACAGGTGCCGAATGATTGGCTGAGGGCGTTGCTAGCTTTTCCACTCCTTCCTATTTCGGGTCAGGGTGCCCAAGGTTTATGTACCGTACCCTGCAAGCCAATCCGGGTAATGTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0014   ">
+        <sample name="00_0014   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTGAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGTT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0013   ">
+        <sample name="00_0013   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0017   ">
+        <sample name="00_0017   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0018   ">
+        <sample name="00_0018   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATTTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATTTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATTTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGTT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0011   ">
+        <sample name="00_0011   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGTT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0015   ">
+        <sample name="00_0015   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0019   ">
+        <sample name="00_0019   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAGCCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0016   ">
+        <sample name="00_0016   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGTGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0012   ">
+        <sample name="00_0012   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAGGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0010   ">
+        <sample name="00_0010   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAGGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAGGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="  Popmig1">
+      <individual name="01_0027   ">
+        <sample name="01_0027   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCTGACACAAGAGGCGGATACTTTAGCTATAAGAACCTCTCATGGCCCGTCACGAACAACGATACCGCGGGTCTAGAATCTGACGCTCGAAAGCCGTTTGCGGCCTAAGAACGATGGTCTTACTTGGTAGCTCTTAAAACCAACTAGGTGAACCATAGGACTCTAAACCCGGTATCCACAACGCTGCACATTCTCTGCACCCAATCGCCAGATAACATGTCCAATTTTCAGGTGCACGTGCGTAGCTCACGGATCGCCCCGACTTGGATGACGCTTAACTGGTTAATTTCGCGCGGTCGCCACTTCAGCTTTTACATGCAATTTGGCTTGATTCATCGGTTAGCAACAGGTGCCGAATGATTGGCTGAGGGCGTTGCTAGCTTTTCCACTCCTTCCTATTTCGGGTCAGGGTGCCCAAGGTTTATGTACCGTAGCCTGCAAGCCAATCCGGGTAATGTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0021   ">
+        <sample name="01_0021   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATTGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0039   ">
+        <sample name="01_0039   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCTGACACAAGAGGCGGATACTTTAGCTATAAGAACCTCTCATGGCCCGTCACGAACAACGATACCGCGGGTCTAGAATCTGACGCTCGAAAGCCGTTTGCGGCCTAAGAACGATGGTCTTACTTGGTAGCTCTTAAAACCAACTAGGTGAACCATAGGACTCTAAACCCGGTATCCACAACGCTGCACATTCTCTGCACCCAATCGCCAGATAACATGTCCAATTTTCAGGTGCACGTGCGTAGCTCACGGATCGCCCCGACTTGGATGACGCTTAACTGGTTAATTTCGCGCGGTCGCCACTTCAGCTTTTACATGCAATTTGGCTTGATTCATCGGTTAGCAACAGGTGCCGAATGATTGGCTGAGGGCGTTGCTAGCTTTTCCACTCCTTCCTATTTCGGGTCAGGGTGCCCAAGGTTTATGTACCGTAGCCTGCAAGCCAATCCGGGTAATGTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0028   ">
+        <sample name="01_0028   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCTGACACAAGAGGCGGATACTTTAGCTATAAGAACCTCTCATGGCCCGTCACGAACAACGATACCGCGGGTCTAGAATCTGACGCTCGAAAGCCGTTTGCGGCCTAAGAACGATGGTCTTACTTGGTAGCTCTTAAAACCAACTAGGTGAACCATAGGACTCTAAACCCGGTATCCACAACGCTGCACATTCTCTGCACCCAATCGCCAGATAACATGTCCAATTTTCAGGTGCACGTGCGTAGCTCACGGATCGCCCCGACTTGGATGACGCTTAACTGGTTAATTTCGCGCGGTCGCCACTTCAGCTTTTACATGCAATTTGGCTTGATTCATCGGTTAGCAACAGGTGCCGAATGATTGGCTGAGGGCGTTGCTAGCTTTTCCACTCCTTCCTATTTCGGGTCAGGGTGCCCAAGGTTTATGTACCGTAGCCTGCAAGCCAATCCGGGTAATGTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0036   ">
+        <sample name="01_0036   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCTGACACAAGAGGCGGATACTTTAGCTATAAGAACCTCTCATGGCCCGTCACGAACAACGATACCGCGGGTCTAGAATCTGACGCTCGAAAGCCGTTTGCGGCCTAAGAACGATGGTCTTACTTGGTAGCTCTTAAAACCAACTAGGTGAACCATAGGACTCTAAACCCGGTATCCACAACGCTGCACATTCTCTGCACCCAATCGCCAGATAACATGTCCAATTTTCAGGTGCACGTGCGTAGCTCACGGATCGCCCCGACTTGGATGACGCTTAACTGGTTAATTTCGCGCGGTCGCCACTTCAGCTTTTACATGCAATTTGGCTTGATTCATCGGTTAGCAACAGGTGCCGAATGATTGGCTGAGGGCGTTGCTAGCTTTTCCACTCCTTCCTATTTCGGGTCAGGGTGCCCAAGGTTTATGTACCGTAGCCTGCAAGCCAATCCGGGTAATGTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0038   ">
+        <sample name="01_0038   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0023   ">
+        <sample name="01_0023   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTGTTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0031   ">
+        <sample name="01_0031   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0020   ">
+        <sample name="01_0020   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0022   ">
+        <sample name="01_0022   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0026   ">
+        <sample name="01_0026   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0037   ">
+        <sample name="01_0037   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAACAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0030   ">
+        <sample name="01_0030   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATTGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0025   ">
+        <sample name="01_0025   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0032   ">
+        <sample name="01_0032   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0035   ">
+        <sample name="01_0035   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0034   ">
+        <sample name="01_0034   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGTGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAGGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGAATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0033   ">
+        <sample name="01_0033   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0024   ">
+        <sample name="01_0024   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0029   ">
+        <sample name="01_0029   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile.coalmigmsat b/doc/testfiles/infile.coalmigmsat
new file mode 100644
index 0000000..3656a1b
--- /dev/null
+++ b/doc/testfiles/infile.coalmigmsat
@@ -0,0 +1,5125 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>5</number>
+      <samples>1000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>2500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.coalmigmsat</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.coalmigmsat</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.coalmigmsat</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.2351 0.2351</start-values>
+      <method> WATTERSON WATTERSON</method>
+      <max-events>32000</max-events>
+      <profiles> none none </profiles>
+      <constraints> unconstrained unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+    <migration>
+      <start-values> 0 10 10 0</start-values>
+      <method> PROGRAMDEFAULT USER USER PROGRAMDEFAULT</method>
+      <max-events>10000</max-events>
+      <profiles> none none none none </profiles>
+      <constraints> invalid unconstrained unconstrained invalid </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 0.01 </lower>
+        <upper> 1000 </upper>
+      </prior>
+    </migration>
+  </forces>
+  <data>
+    <region name="Ocharinka0">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka1">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka2">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka3">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka4">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            9 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            9 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka5">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            26 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka6">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka7">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            26 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka8">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            26 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            26 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            26 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka9">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile.coalmigrep b/doc/testfiles/infile.coalmigrep
new file mode 100644
index 0000000..e6a2be5
--- /dev/null
+++ b/doc/testfiles/infile.coalmigrep
@@ -0,0 +1,1702 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>2</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>5</number>
+      <samples>2000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>1</number>
+      <samples>5000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.coalmigrep</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.coalmigrep</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.coalmigrep</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.23837 0.23917</start-values>
+      <method> WATTERSON WATTERSON</method>
+      <max-events>1000</max-events>
+      <profiles> percentile percentile </profiles>
+      <constraints> unconstrained unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+    <migration>
+      <start-values> 0 100 100 0</start-values>
+      <method> PROGRAMDEFAULT FST FST PROGRAMDEFAULT</method>
+      <max-events>10000</max-events>
+      <profiles> none fixed fixed none </profiles>
+      <constraints> invalid unconstrained unconstrained invalid </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 0.01 </lower>
+        <upper> 1000 </upper>
+      </prior>
+    </migration>
+  </forces>
+  <data>
+    <region name="region1">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1000</length>
+          <locations> 4 17 24 51 80 82 88 96 107 113 125 141 151 163 194 198 208 209 212 217 247 255 259 262 265 280 291 293 299 304 306 315 324 325 333 344 367 369 375 381 386 399 400 407 421 422 453 459 471 484 512 517 529 534 547 552 557 562 576 580 588 602 630 632 636 648 649 651 653 668 689 703 710 715 718 722 731 732 737 741 743 758 762 767 771 772 791 805 812 813 818 822 832 845 849 852 863 864 872 873 883 886 892 906 912 917 922 936 963 964 983 985 990 991</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="pop1">
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="DNA">
+            GCCTCGAGCCCGCACATTGGCTACGGGGTGTGCCGCGCGGGCCACAACGCGCAGGGATGCAATGCCTTGGTGGTCTAACCTATCCCAGGACCAGAATTGGGTCAGTTTAGAGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0011   ">
+        <sample name="00_0011   ">
+          <datablock type="DNA">
+            GCCTCGAGCCCGCACATTGGCTACGGGGTGTGCCGCGCGGGCCACAACGCGCAGGGATGCAATGCCTTGGTGGTCTAACCTATCCCAGGACCAGAATTGGGTCAGTTTAGAGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="DNA">
+            GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGCGCGTGAACACCTTGACTTAATCGTCTAAGCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0017   ">
+        <sample name="00_0017   ">
+          <datablock type="DNA">
+            GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGCGCGTGAACACCTTGACTTAATCGTCTAAGCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="DNA">
+            GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGCGGGTGAACACCTTGACTTAATCGTCTAAGCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0012   ">
+        <sample name="00_0012   ">
+          <datablock type="DNA">
+            GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGCGGGTGAACACCTTGACTTAATCGTCTAAGCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="DNA">
+            GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGCGCGTGAACACCTTGACTTAATCGTCTAACCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="DNA">
+            GCTACAATACTTGAGACTGGCCATAGCACTTGGCCCACAGGACATGGTGCGCGTGAACACCTTGACTTAATCGTCTAACCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0014   ">
+        <sample name="00_0014   ">
+          <datablock type="DNA">
+            GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGTGCGTGAACACCTTGACTTAATCGTCTAACCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0019   ">
+        <sample name="00_0019   ">
+          <datablock type="DNA">
+            GCTATAATACTTGACACTGGCCATGGCACTTGGTCCGCAGGACATGATGCGCGTGAACATCTTAACTTAACCAACTAACCTTTCATAAAGCCGGAACCAGAGCAATTAGTTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="DNA">
+            GCTATAATACTTGACACTAGCCATGGCACTTGGTCCGCAGGACATGATGCGCGTGAACATCTTAACTTAACCAACTAACCTTTCATAAAGCCGGAACCAGAGCAATTAGTTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0013   ">
+        <sample name="00_0013   ">
+          <datablock type="DNA">
+            GCTATAATACTTGACACTGGCCATGGCACTTGGTCCGCAGGACATGATGCGCGTGAACATCTTAACTTAACCAACTAACCTTTCATAAAGCCGGAACCAGAGCAATTAGTTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0010   ">
+        <sample name="00_0010   ">
+          <datablock type="DNA">
+            GCTATAATACTTGGCACTGGCCATGGCACTTGGCCCACACGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACCGTTCATAAAGCCGGAATCACAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0016   ">
+        <sample name="00_0016   ">
+          <datablock type="DNA">
+            GCTATAATACTTGGCACTGGCCATGGCACTTGGCCCACACGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACCTTTCATAAAGCCGGTATCACAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="DNA">
+            GCTATAATACTTGGCACTGGCCATGGCACTTGGCCCACACGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACCTTTCATAAAGCCGGAATCACAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="DNA">
+            GCTATAATACTTGGCACTGGCCATGGCACTTGGCCCACACGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACCTTTCATAAAGCCGGAATCACAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0018   ">
+        <sample name="00_0018   ">
+          <datablock type="DNA">
+            GCTATAATACTTGGCACTGGCCATGGCACTTGGCCCACAGGACATGATACGCGTGAACATCTTGACTTAACCAACTAACCTTTCATAAAGCCGGAATCACGGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="DNA">
+            GCTATAATACTTGGCACTGGCCATGGCACTTGACCCACAGGACATGATGCGCGTGAACATCTTGACTTAACCAATTAACCTTTCATAAAGCCGGAATCACAGCGATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="DNA">
+            GCTATAATACTTGGCACTGGCCATGGCACTTGACCCACAGGACATGATGCGCGTGAACGTCTTGACTTAACCAATTAACCTTTCATAAAGCCGGAATCACAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0015   ">
+        <sample name="00_0015   ">
+          <datablock type="DNA">
+            GCTATAATACTTGGCACTGGCCATGGCACTTGACCCACAGGACATGATGCGCGTGAACATCTTGACTTAACCAATTAACCTTTCATAAAGCCGGAATCACAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="pop2">
+      <individual name="01_0022   ">
+        <sample name="01_0022   ">
+          <datablock type="DNA">
+            GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGCGGGCTTCAACGCGCGGGGATGCAACGCTTGGGTGGTCTAACCTATACCAGGACCAGAATTGGGTGAGCTTAGTGGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0020   ">
+        <sample name="01_0020   ">
+          <datablock type="DNA">
+            GCCTTACGCCCGCACGTTGCTTACGAGGCGTGGCGCGCGGGCTTCAACGCGCGGGGATGCAACGCTTGGGTGGTCTAACCTATCCCAGGACCAGAATTGGGTCAGCTTAGTGGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0030   ">
+        <sample name="01_0030   ">
+          <datablock type="DNA">
+            GCCTTACGCCCGCACGTTGCTTACGAGGCGTGGCGCGCGGGCTTCAACGCGCGGGGATGCAACGCTTGGGTGGTCTAACCTATCCCAGGACCAGAATTGGGTCAGCTTAGTGGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0027   ">
+        <sample name="01_0027   ">
+          <datablock type="DNA">
+            GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGGCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTAGCCTATCCCAGGACCAGAGTTGGGTCAGCTTAGTGGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0029   ">
+        <sample name="01_0029   ">
+          <datablock type="DNA">
+            GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGGCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTAGCCTATCCCAGGACCAGAGTTGGGTCAGCTTAGTGGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0025   ">
+        <sample name="01_0025   ">
+          <datablock type="DNA">
+            GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGGCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTCGCCTATCCCAGGACCAGAATTGGGTCAGCTTAGTGGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0038   ">
+        <sample name="01_0038   ">
+          <datablock type="DNA">
+            GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGGCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTAGCCTATCCCAGGACCAGAATTGGGTCAGCTTAGTGGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0024   ">
+        <sample name="01_0024   ">
+          <datablock type="DNA">
+            GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGCCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTAGCCTATCCCGGGACCAGAATTGGGTCAGCTTAGTGGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0026   ">
+        <sample name="01_0026   ">
+          <datablock type="DNA">
+            GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGGCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTAGCCTATCCCAGGACCAGAATTGGGTCAGCTTAGTGGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0033   ">
+        <sample name="01_0033   ">
+          <datablock type="DNA">
+            GCCTCAAGCCCGCACATTGGCTACGGGGTGCAGCGCGCGGGCCACAACGCGCAGGGCTGCAATGCCTTGGTGGTCTAACCTATCCCAGGACTAGAATTGGGTCAGTTTAGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0035   ">
+        <sample name="01_0035   ">
+          <datablock type="DNA">
+            ACCTCAAGCCCGCACATTGGCTACGGGGTGTGGCGCGCGGGCCACAACGCGCAGGGATGCAATGCCTTGGTGGTCTAACCTATCCCAGGACCAGAATTGGGTCAGTTTAGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0031   ">
+        <sample name="01_0031   ">
+          <datablock type="DNA">
+            GTTACAATACTTGACACCGGCTCTGGCACTTGGCCAACAGGACATGATGCACGTGAACACGTTGACTTAATCGTCTAACCTTTCACAAGGCCAAAATCAGAGCAATTAAGTTAC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0028   ">
+        <sample name="01_0028   ">
+          <datablock type="DNA">
+            GTTACAATACTTGACACCGGCTCTGGCACTTGGCCAACAGGACATGATGCACGTGAACACGTTGACTTAATCGTCTAACCTTTCACAAGGCCAAAATCAGAGCAATTAAGTTAC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0032   ">
+        <sample name="01_0032   ">
+          <datablock type="DNA">
+            GTTACAATACTTGACACCGGCTCTGGCACTTGGCCAACAGGACATGATGCACGTGAACACGTTGACTTAATCGTCTAACCTTTCACAAGGCCAAAATCAGAGCAATTAAGTTAC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0023   ">
+        <sample name="01_0023   ">
+          <datablock type="DNA">
+            GCTATAATACTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTAAACATCTTGACATAACCGACTAACCTTGCATAAAGTCGGAATCAGAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0036   ">
+        <sample name="01_0036   ">
+          <datablock type="DNA">
+            GCTATAATACTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTAAACATCTTGACATAACCGACTAACCTTGCATAAAGTCGGAATCAGAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0039   ">
+        <sample name="01_0039   ">
+          <datablock type="DNA">
+            GCTATAATACTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTAAACATCTTGACATAACCGACTAACCTTGCATAAAGTCGGAATCAGAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0021   ">
+        <sample name="01_0021   ">
+          <datablock type="DNA">
+            GCTATAATATTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTGAACATCTTGACTTAACCAACAAACTTTTCATAAAGCCGGAATCAGAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0034   ">
+        <sample name="01_0034   ">
+          <datablock type="DNA">
+            GCTATAATATTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACTTTTCATAAAGCCGGAATCAGAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0037   ">
+        <sample name="01_0037   ">
+          <datablock type="DNA">
+            GCTATAATATTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACTTTTCATAAAGCCGGAATCAGAGCAATTAGGTGGC
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="region2">
+      <model name="Brownian">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="pop1">
+      <individual name="0BAM 0BAG ">
+        <sample name="0BAM 0BAG -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAM 0BAG -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BAK ">
+        <sample name="0BAT 0BAK -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BAK -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BAS ">
+        <sample name="0BAB 0BAS -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BAS -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAR ">
+        <sample name="0BAJ 0BAR -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAR -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BAI ">
+        <sample name="0BAC 0BAI -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BAI -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BAQ ">
+        <sample name="0BAN 0BAQ -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BAQ -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAD 0BAA ">
+        <sample name="0BAD 0BAA -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAD 0BAA -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAE 0BAL ">
+        <sample name="0BAE 0BAL -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAE 0BAL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAF 0BAH ">
+        <sample name="0BAF 0BAH -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAF 0BAH -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAP 0BAO ">
+        <sample name="0BAP 0BAO -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAP 0BAO -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="pop2">
+      <individual name="1BAA 1BAB ">
+        <sample name="1BAA 1BAB -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAA 1BAB -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BAJ ">
+        <sample name="1BAC 1BAJ -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BAJ -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BAP ">
+        <sample name="1BAN 1BAP -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BAP -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAR 1BAS ">
+        <sample name="1BAR 1BAS -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAR 1BAS -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAH 1BAI ">
+        <sample name="1BAH 1BAI -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAH 1BAI -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAD 1BAQ ">
+        <sample name="1BAD 1BAQ -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAD 1BAQ -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAL 1BAG ">
+        <sample name="1BAL 1BAG -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAL 1BAG -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAT 1BAM ">
+        <sample name="1BAT 1BAM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAT 1BAM -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAE 1BAK ">
+        <sample name="1BAE 1BAK -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAE 1BAK -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BAF ">
+        <sample name="1BAO 1BAF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BAF -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="region3">
+      <model name="Brownian">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="pop1">
+      <individual name="0BAM 0BAG ">
+        <sample name="0BAM 0BAG -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAM 0BAG -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BAK ">
+        <sample name="0BAT 0BAK -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BAK -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BAS ">
+        <sample name="0BAB 0BAS -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BAS -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAR ">
+        <sample name="0BAJ 0BAR -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAR -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BAI ">
+        <sample name="0BAC 0BAI -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BAI -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BAQ ">
+        <sample name="0BAN 0BAQ -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BAQ -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAD 0BAA ">
+        <sample name="0BAD 0BAA -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAD 0BAA -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAE 0BAL ">
+        <sample name="0BAE 0BAL -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAE 0BAL -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAF 0BAH ">
+        <sample name="0BAF 0BAH -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAF 0BAH -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAP 0BAO ">
+        <sample name="0BAP 0BAO -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAP 0BAO -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="pop2">
+      <individual name="1BAA 1BAB ">
+        <sample name="1BAA 1BAB -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAA 1BAB -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BAJ ">
+        <sample name="1BAC 1BAJ -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BAJ -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BAP ">
+        <sample name="1BAN 1BAP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BAP -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAR 1BAS ">
+        <sample name="1BAR 1BAS -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAR 1BAS -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAH 1BAI ">
+        <sample name="1BAH 1BAI -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAH 1BAI -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAD 1BAQ ">
+        <sample name="1BAD 1BAQ -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAD 1BAQ -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAL 1BAG ">
+        <sample name="1BAL 1BAG -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAL 1BAG -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAT 1BAM ">
+        <sample name="1BAT 1BAM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAT 1BAM -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAE 1BAK ">
+        <sample name="1BAE 1BAK -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAE 1BAK -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BAF ">
+        <sample name="1BAO 1BAF -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BAF -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="region4">
+      <model name="Brownian">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="pop1">
+      <individual name="0BAM 0BAG ">
+        <sample name="0BAM 0BAG -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAM 0BAG -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BAK ">
+        <sample name="0BAT 0BAK -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BAK -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BAS ">
+        <sample name="0BAB 0BAS -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BAS -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAR ">
+        <sample name="0BAJ 0BAR -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAR -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BAI ">
+        <sample name="0BAC 0BAI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BAI -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BAQ ">
+        <sample name="0BAN 0BAQ -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BAQ -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAD 0BAA ">
+        <sample name="0BAD 0BAA -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="0BAD 0BAA -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAE 0BAL ">
+        <sample name="0BAE 0BAL -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="0BAE 0BAL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAF 0BAH ">
+        <sample name="0BAF 0BAH -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="0BAF 0BAH -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAP 0BAO ">
+        <sample name="0BAP 0BAO -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="0BAP 0BAO -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="pop2">
+      <individual name="1BAA 1BAB ">
+        <sample name="1BAA 1BAB -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="1BAA 1BAB -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BAJ ">
+        <sample name="1BAC 1BAJ -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BAJ -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BAP ">
+        <sample name="1BAN 1BAP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BAP -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAR 1BAS ">
+        <sample name="1BAR 1BAS -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAR 1BAS -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAH 1BAI ">
+        <sample name="1BAH 1BAI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAH 1BAI -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAD 1BAQ ">
+        <sample name="1BAD 1BAQ -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAD 1BAQ -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAL 1BAG ">
+        <sample name="1BAL 1BAG -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAL 1BAG -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAT 1BAM ">
+        <sample name="1BAT 1BAM -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="1BAT 1BAM -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAE 1BAK ">
+        <sample name="1BAE 1BAK -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="1BAE 1BAK -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BAF ">
+        <sample name="1BAO 1BAF -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BAF -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="region5">
+      <model name="Brownian">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="pop1">
+      <individual name="0BAM 0BAG ">
+        <sample name="0BAM 0BAG -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="0BAM 0BAG -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BAK ">
+        <sample name="0BAT 0BAK -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BAK -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BAS ">
+        <sample name="0BAB 0BAS -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BAS -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAR ">
+        <sample name="0BAJ 0BAR -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAR -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BAI ">
+        <sample name="0BAC 0BAI -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BAI -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BAQ ">
+        <sample name="0BAN 0BAQ -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BAQ -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAD 0BAA ">
+        <sample name="0BAD 0BAA -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAD 0BAA -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAE 0BAL ">
+        <sample name="0BAE 0BAL -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAE 0BAL -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAF 0BAH ">
+        <sample name="0BAF 0BAH -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAF 0BAH -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAP 0BAO ">
+        <sample name="0BAP 0BAO -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAP 0BAO -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="pop2">
+      <individual name="1BAA 1BAB ">
+        <sample name="1BAA 1BAB -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="1BAA 1BAB -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BAJ ">
+        <sample name="1BAC 1BAJ -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BAJ -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BAP ">
+        <sample name="1BAN 1BAP -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BAP -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAR 1BAS ">
+        <sample name="1BAR 1BAS -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAR 1BAS -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAH 1BAI ">
+        <sample name="1BAH 1BAI -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAH 1BAI -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAD 1BAQ ">
+        <sample name="1BAD 1BAQ -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAD 1BAQ -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAL 1BAG ">
+        <sample name="1BAL 1BAG -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAL 1BAG -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAT 1BAM ">
+        <sample name="1BAT 1BAM -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAT 1BAM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAE 1BAK ">
+        <sample name="1BAE 1BAK -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAE 1BAK -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BAF ">
+        <sample name="1BAO 1BAF -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BAF -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="region6">
+      <model name="Brownian">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="pop1">
+      <individual name="0BAM 0BAG ">
+        <sample name="0BAM 0BAG -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAM 0BAG -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BAK ">
+        <sample name="0BAT 0BAK -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BAK -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BAS ">
+        <sample name="0BAB 0BAS -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BAS -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAR ">
+        <sample name="0BAJ 0BAR -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAR -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BAI ">
+        <sample name="0BAC 0BAI -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BAI -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BAQ ">
+        <sample name="0BAN 0BAQ -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BAQ -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAD 0BAA ">
+        <sample name="0BAD 0BAA -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAD 0BAA -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAE 0BAL ">
+        <sample name="0BAE 0BAL -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAE 0BAL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAF 0BAH ">
+        <sample name="0BAF 0BAH -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="0BAF 0BAH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAP 0BAO ">
+        <sample name="0BAP 0BAO -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAP 0BAO -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="pop2">
+      <individual name="1BAA 1BAB ">
+        <sample name="1BAA 1BAB -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAA 1BAB -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BAJ ">
+        <sample name="1BAC 1BAJ -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BAJ -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BAP ">
+        <sample name="1BAN 1BAP -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BAP -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAR 1BAS ">
+        <sample name="1BAR 1BAS -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAR 1BAS -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAH 1BAI ">
+        <sample name="1BAH 1BAI -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAH 1BAI -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAD 1BAQ ">
+        <sample name="1BAD 1BAQ -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAD 1BAQ -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAL 1BAG ">
+        <sample name="1BAL 1BAG -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="1BAL 1BAG -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAT 1BAM ">
+        <sample name="1BAT 1BAM -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAT 1BAM -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAE 1BAK ">
+        <sample name="1BAE 1BAK -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAE 1BAK -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BAF ">
+        <sample name="1BAO 1BAF -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BAF -2">
+          <datablock type="MICROSAT">
+            29 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile.coalrec b/doc/testfiles/infile.coalrec
new file mode 100644
index 0000000..3ee3692
--- /dev/null
+++ b/doc/testfiles/infile.coalrec
@@ -0,0 +1,380 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>true</adaptive>
+      <temperatures> 1 1.1 1.2 1.3</temperatures>
+      <swap-interval>10</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.454545</resimulating>
+      <tree-size>0.0909091</tree-size>
+      <haplotyping>0.454545</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>2000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>5000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.coalrec</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.coalrec</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.coalrec</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01</start-values>
+      <method> USER</method>
+      <max-events>1000</max-events>
+      <profiles> fixed </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+    <recombination>
+      <start-values> 0.1</start-values>
+      <method> USER</method>
+      <max-events>1000</max-events>
+      <profiles> fixed </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </recombination>
+  </forces>
+  <data>
+    <region name="coalrec">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>2</num-categories>
+          <rates> 0.666667 1.33333</rates>
+          <probabilities> 0.5 0.5</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1000</length>
+          <locations> 2 9 18 46 51 78 82 89 90 98 104 106 114 172 197 199 222 293 326 343 391 397 417 424 426 435 438 443 464 488 499 526 546 556 575 576 595 602 614 617 621 629 642 650 653 674 690 694 699 702 711 718 719 729 770 848 851 854 856 857 915 923 940 958 974</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="ZMNE">
+      <individual name="GG-322166855">
+        <phase type="known">
+        </phase>
+        <sample name="00_0025   ">
+          <datablock type="SNP">
+            TGAAGCCCCAATTATGGTTACCACGACTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0032   ">
+          <datablock type="SNP">
+            TGAAGCCCCAATTATGGTTACCACGACTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="CA-1016758720">
+        <phase type="known">
+        </phase>
+        <sample name="00_0022   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0003   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GT-438712109">
+        <phase type="known">
+        </phase>
+        <sample name="00_0024   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0000   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="AT1063120482">
+        <phase type="known">
+        </phase>
+        <sample name="00_0027   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTGGCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTAA
+          </datablock>
+        </sample>
+        <sample name="00_0013   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTAAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="TG-108633955">
+        <phase type="known">
+        </phase>
+        <sample name="00_0037   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0009   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTGCCGCTTCTTCTCAGTACCACTTAGCAACGTAGACTCTAACACTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GC-816174348">
+        <phase type="known">
+        </phase>
+        <sample name="00_0034   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCCTCTCAGTACCACTTAGCAACGTAGACTCTATCACTCA
+          </datablock>
+        </sample>
+        <sample name="00_0038   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCCTCTCAGTACCACTTAGCAACGTAGACTCTATCACTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GG-1252698729">
+        <phase type="known">
+        </phase>
+        <sample name="00_0023   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCCTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0030   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GG-173374346">
+        <phase type="known">
+        </phase>
+        <sample name="00_0031   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0002   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="CA-1985314239">
+        <phase type="known">
+        </phase>
+        <sample name="00_0018   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0014   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GC-780785816">
+        <phase type="known">
+        </phase>
+        <sample name="00_0005   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0036   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GG1351601435">
+        <phase type="known">
+        </phase>
+        <sample name="00_0010   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCCCA
+          </datablock>
+        </sample>
+        <sample name="00_0001   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="CA-402572214">
+        <phase type="known">
+        </phase>
+        <sample name="00_0011   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0004   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="AC1355139237">
+        <phase type="known">
+        </phase>
+        <sample name="00_0033   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0029   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GA865732508">
+        <phase type="known">
+        </phase>
+        <sample name="00_0006   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0015   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GA1359434591">
+        <phase type="known">
+        </phase>
+        <sample name="00_0028   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTCATTATTACCACATAGTGTCTCAGGTTCATTCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0035   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTCATTATTACCACATAGTGTCTCAGGTTCATTCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="AT-1640691234">
+        <phase type="known">
+        </phase>
+        <sample name="00_0007   ">
+          <datablock type="SNP">
+            CCGCATCGTTGACATTCGCACTGGTTTTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0026   ">
+          <datablock type="SNP">
+            CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="AG774387657">
+        <phase type="known">
+        </phase>
+        <sample name="00_0019   ">
+          <datablock type="SNP">
+            CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0020   ">
+          <datablock type="SNP">
+            CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="TA864034704">
+        <phase type="known">
+        </phase>
+        <sample name="00_0021   ">
+          <datablock type="SNP">
+            CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0016   ">
+          <datablock type="SNP">
+            CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATGGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GT-545862557">
+        <phase type="known">
+        </phase>
+        <sample name="00_0017   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGGCATATGGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0039   ">
+          <datablock type="SNP">
+            CCACATTGTTGACATTCGCACTGGTTCTCATTAGCGCCATATAATGTATCAAGTTTATTCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GT502571314">
+        <phase type="known">
+        </phase>
+        <sample name="00_0008   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTTTCG
+          </datablock>
+        </sample>
+        <sample name="00_0012   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGCTATATAGTGTATCAAGTTCATTCTCTCG
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile.coalregrep b/doc/testfiles/infile.coalregrep
new file mode 100644
index 0000000..ae8f501
--- /dev/null
+++ b/doc/testfiles/infile.coalregrep
@@ -0,0 +1,886 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>3</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>5</number>
+      <samples>1000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>2500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.014037</start-values>
+      <method> WATTERSON</method>
+      <max-events>1000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+  </forces>
+  <data>
+    <region name="DR-1">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1000</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="Berkeley">
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATATGACGTCCCTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAAAGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAGACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGATGGTGTCTGGGGCCCTAAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATATGACGTCCCTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAAAGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAGACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGATGGTGTCTGGGGCCCTAAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTGTCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTGAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCAGTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0011   ">
+        <sample name="00_0011   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCAGTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0017   ">
+        <sample name="00_0017   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCAGTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0013   ">
+        <sample name="00_0013   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0016   ">
+        <sample name="00_0016   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0019   ">
+        <sample name="00_0019   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACGTGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTGA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACGTGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTGA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0015   ">
+        <sample name="00_0015   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACGTGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTGA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0010   ">
+        <sample name="00_0010   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0018   ">
+        <sample name="00_0018   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAGTTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGACCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0012   ">
+        <sample name="00_0012   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAGTTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0014   ">
+        <sample name="00_0014   ">
+          <datablock type="DNA">
+            TCGGGCATCCATATTTCCCCACTGGAGCTAGAGTTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAA [...]
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="DR-2">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1000</length>
+          <locations> 32 58 267 297 333 350 355 373 412 466 478 512 526 548 625 629 630 647 692 739 749 763 826 855 880 918 981 986</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="Berkeley">
+      <individual name="00_0011   ">
+        <sample name="00_0011   ">
+          <datablock type="SNP">
+            AATTTCTAACGACCTGCGCCTACATTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0013   ">
+        <sample name="00_0013   ">
+          <datablock type="SNP">
+            ATTTTTTAACGACCTGTGCCTGCATCCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="SNP">
+            ATTTTCTAACGACCTGTGCCTACATTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="SNP">
+            ATTTTCTAACGACCTGTGCCTACATTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0010   ">
+        <sample name="00_0010   ">
+          <datablock type="SNP">
+            ATTTTCTAACGACCTGTGCCTACATTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0014   ">
+        <sample name="00_0014   ">
+          <datablock type="SNP">
+            ATTTTCTAACGACCTGTGCCTACATTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0015   ">
+        <sample name="00_0015   ">
+          <datablock type="SNP">
+            AATTTCTATCGACCCGTGTCTACATTTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0012   ">
+        <sample name="00_0012   ">
+          <datablock type="SNP">
+            AATTTCTAACGACCCGTGTCTACATTTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0018   ">
+        <sample name="00_0018   ">
+          <datablock type="SNP">
+            AATTTCTAACGATCCGTGTCTACATTTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0019   ">
+        <sample name="00_0019   ">
+          <datablock type="SNP">
+            AATTTCTAACGACCCGTGTCTACATTTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="SNP">
+            AATTTCTAACGACCCGTGTCCATATTTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="SNP">
+            AATTTCTAACGACCCGTGTCCACATTTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0016   ">
+        <sample name="00_0016   ">
+          <datablock type="SNP">
+            AACTTCAAACGACCCCTCTCTACATTTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="SNP">
+            AACTTCAGACAACCCCTCTCTACATTTT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="SNP">
+            AACTTCAAACGACCCCTCTCTACATTTT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="SNP">
+            AATTCCAAACGACCCATGTTTACATTTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0017   ">
+        <sample name="00_0017   ">
+          <datablock type="SNP">
+            AATTCCAAACGACCCATGTTTACATTTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="SNP">
+            GATTTCAAATGGCTCCTGTCTACGATTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="SNP">
+            GATCTCAAATGGCCCCTGTCTACGATTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="SNP">
+            GATCTCAAATGGCCCCTGTCTACGATTA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="DR-3">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1000</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="Berkeley">
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="DNA">
+            TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTTTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0013   ">
+        <sample name="00_0013   ">
+          <datablock type="DNA">
+            TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTTTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="DNA">
+            TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTTTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0016   ">
+        <sample name="00_0016   ">
+          <datablock type="DNA">
+            TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTTTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0017   ">
+        <sample name="00_0017   ">
+          <datablock type="DNA">
+            TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTTTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="DNA">
+            TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTCTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="DNA">
+            TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTCTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="DNA">
+            TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTCTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGAG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0010   ">
+        <sample name="00_0010   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCGGATCACTCAAAGTTTGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACTTAATATTGTTTCGCAGAGCATAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCGGTTCGACAGAGTGACTGGTGCCCTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTGCTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAAAACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAAAACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0011   ">
+        <sample name="00_0011   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0018   ">
+        <sample name="00_0018   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0012   ">
+        <sample name="00_0012   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTGCTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0015   ">
+        <sample name="00_0015   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAACGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCACATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGTAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0019   ">
+        <sample name="00_0019   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCACATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGTAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCACATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0014   ">
+        <sample name="00_0014   ">
+          <datablock type="DNA">
+            TCGGGGCTACGCTGTAGCATCGTGTTACCTCACATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGG [...]
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="DR-4">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1000</length>
+          <locations> 58 63 80 139 160 195 215 248 254 265 294 297 302 304 313 320 330 333 370 427 452 457 475 533 549 555 574 583 596 647 663 674 703 722 743 753 761 769 788 830 834 848 852 878 932 980</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="Berkeley">
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="SNP">
+            AGTAATGCAGCCAGCTTGCGGCGTACCAAGATTAGGACCGTTCAAG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0011   ">
+        <sample name="00_0011   ">
+          <datablock type="SNP">
+            AGTAATGCAGCCAGCTTGCGGCGTACCAAGATTAGGACCGTTCATG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0017   ">
+        <sample name="00_0017   ">
+          <datablock type="SNP">
+            GACGTGAATATTGAAATCCAATGTGCTGGGACCCGGTTAGCTCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="SNP">
+            GACGTGAATATTGAAATCCAATGTGCTGGGACCCGGTTAGCTTGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0016   ">
+        <sample name="00_0016   ">
+          <datablock type="SNP">
+            GACGTGAATATTGAAATCCAATGTGCTGGGACCCGGTTAGCTCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0012   ">
+        <sample name="00_0012   ">
+          <datablock type="SNP">
+            GACGTGAATATTGAAATCCAATGCGCTGGGACCCGGTTAGCTCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0018   ">
+        <sample name="00_0018   ">
+          <datablock type="SNP">
+            GACGTGAATATTGAAATCCAATGTGCTGGGACCCGGTTAGCTCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="SNP">
+            GACGTGAATATTGAAATCCAATGTGCTGGGACCCGGTTAACTCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="SNP">
+            GACGTGAATATTGAAATCCAATGTGCTGGGACCCGATTAGCTCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="SNP">
+            GACGTGAATATTGAAATCCAATGTGCTGGGACCCGATTAGCTCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0010   ">
+        <sample name="00_0010   ">
+          <datablock type="SNP">
+            GACGTGAATATTGAAATCTAATGTGCTGGGACCCGATTAGCTCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="SNP">
+            GACGTGAATATTGAAATCCAATATGCTGGGACCCGATTAGCTCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0013   ">
+        <sample name="00_0013   ">
+          <datablock type="SNP">
+            GACGTGAATATTGAAATCCAATATGCTGGGACCCGATTAGCTCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="SNP">
+            GACGTAAATATTAAAAACCAATGTGCTGGGACCAAGTTCGCCCGAG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0015   ">
+        <sample name="00_0015   ">
+          <datablock type="SNP">
+            GACGTAAATATTAAAAACCAATGTGCTGGGACCAAGTTCGCCCGAG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="SNP">
+            GACGTAAATATTAAAAACCAATGTGTTGGCGCCAGGTTCGCTCGAG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="SNP">
+            GACGTAAATATTAAAAACCAATGTGTTGGCGCCAGGTTCGCTCGAG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0014   ">
+        <sample name="00_0014   ">
+          <datablock type="SNP">
+            GACGTAAATATTAAAAACCAATGTGTTGGCGCCAGGTTCGCTCGAG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="SNP">
+            GACGTAAATATTAAAAACCAATGTGTTGGCGCCAGGTTCGCTCGAG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0019   ">
+        <sample name="00_0019   ">
+          <datablock type="SNP">
+            GACGTAAATATTAAAAACCAATGTGTTGGCGCCAGGTTCGCTCGAG
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="DR-5">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1000</length>
+          <locations> 58 66 119 226 267 305 307 315 347 350 363 373 386 397 455 478 536 552 558 577 599 669 749 773 803 804 836 854 859 865 921 938 986</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="Berkeley">
+      <individual name="00_0016   ">
+        <sample name="00_0016   ">
+          <datablock type="SNP">
+            AGAATTGCTTAACACGTACCCAAAACTACTGGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0012   ">
+        <sample name="00_0012   ">
+          <datablock type="SNP">
+            GACGTTTTGCGAGTAGTGCTGAAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGCTGAAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGCTGAAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGCTGAAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="SNP">
+            AACGTGTTGCGAGAAGTGCTGAAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="SNP">
+            AACGTGTTGCGAGAAGCGCTGAAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGCTGAAGGATAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0010   ">
+        <sample name="00_0010   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGCTGAAGGACAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGCTGAAGGACAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGCTGAAGGACAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0014   ">
+        <sample name="00_0014   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAATGCTGAAGGACAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0015   ">
+        <sample name="00_0015   ">
+          <datablock type="SNP">
+            AACGGTTTGCGGGAAGTGCTGAAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0018   ">
+        <sample name="00_0018   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGCTGACGGCTTACAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0019   ">
+        <sample name="00_0019   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGCTGAAGGCTAACAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0017   ">
+        <sample name="00_0017   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGTTGGAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGTTGGAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGTTGGAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0011   ">
+        <sample name="00_0011   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGTTGGAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0013   ">
+        <sample name="00_0013   ">
+          <datablock type="SNP">
+            AACGTTTTGCGAGAAGTGTTGGAGGCTAATAAA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
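(Editor's note, illustrative only and not part of the upstream sources: the file above and the test inputs added below all follow the same LAMARC input layout, in which each <region> carries a substitution model, an optional <spacing>/<block>/<locations> map, and one or more <population> blocks whose <individual>/<sample>/<datablock> elements hold the aligned DNA or SNP data. A minimal Python sketch that summarises such a file, assuming nothing beyond the tag names visible in these diffs; the path is one of the test files added by this commit:

    import xml.etree.ElementTree as ET

    def summarise(path):
        # Count samples and mapped sites per <region> in a LAMARC-style input.
        root = ET.parse(path).getroot()
        for region in root.iter("region"):
            n_samples = sum(1 for _ in region.iter("datablock"))
            locs = region.find("./spacing/block/locations")
            n_sites = len(locs.text.split()) if locs is not None and locs.text else None
            print(region.get("name"), "samples:", n_samples, "mapped sites:", n_sites)

    # Example: one of the test files added below in this commit.
    summarise("doc/testfiles/infile.divergence")
)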
diff --git a/doc/testfiles/infile.divergence b/doc/testfiles/infile.divergence
new file mode 100644
index 0000000..47ce833
--- /dev/null
+++ b/doc/testfiles/infile.divergence
@@ -0,0 +1,430 @@
+<lamarc version="pre-2.1.7">
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>Yes</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.5</resimulating>
+      <haplotyping>0</haplotyping>
+      <bayesian>0.3</bayesian>
+      <epoch-size>0.2</epoch-size>
+      <tree-size>0.2</tree-size>
+    </strategy>
+    <initial>
+      <number>0</number>
+      <samples>1000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>1</number>
+      <samples>3000000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>divout.txt</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.divcoalmsat</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>divsumfile</out-summary-file>
+    <out-xml-file>menuinfile</out-xml-file>
+    <plotting>
+    </plotting>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01 0.01 0.01 </start-values>
+      <method> USER USER USER</method>
+      <max-events>32000</max-events>
+      <profiles>none none none </profiles>
+      <prior type="linear">
+        <paramindex> default </paramindex>
+        <lower> 1e-4</lower>
+        <upper> 0.02 </upper>
+      </prior>
+    </coalescence>
+    <divergence-migration>
+      <start-values> 0 50.0 0 50.0 0 0 0 0 0</start-values>
+      <method> USER USER USER USER USER USER USER USER USER</method>
+      <max-events>10000</max-events>
+      <profiles>none none none none none none none none none</profiles>
+      <constraints> invalid unconstrained invalid
+                    unconstrained invalid invalid
+                    invalid invalid invalid </constraints>
+      <prior type="linear">
+        <paramindex> default </paramindex>
+        <lower> 0.0 </lower>
+        <upper> 100.0 </upper>
+      </prior>
+    </divergence-migration>
+    <divergence>
+      <start-values> 0.002 </start-values>
+      <method> USER </method>
+      <profiles> none </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="linear">
+        <paramindex> default </paramindex>
+        <lower> 0.0 </lower>
+        <upper> 0.004 </upper>
+      </prior>
+      <population-tree>
+        <epoch-boundary>
+          <new-populations> Pop1 Pop2 </new-populations>
+          <ancestor> ancestorpop </ancestor>
+        </epoch-boundary>
+      </population-tree>
+    </divergence>
+    <recombination>
+      <start-values> 0.1 </start-values>
+      <method> USER </method>
+      <max-events> 1000 </max-events>
+      <profiles>none</profiles>
+      <constraints>unconstrained</constraints>
+      <prior type="linear">
+        <paramindex> default </paramindex>
+        <lower> 0.0 </lower>
+        <upper> 0.15 </upper>
+      </prior>
+    </recombination>
+  </forces>
+    <data>
+        <region name="divergence sims">
+            <spacing>
+                <block name="segment 1 of pop1" />
+            </spacing>
+<population name="Pop1">
+<individual name="2">
+<sample name="2">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGGTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="3">
+<sample name="3">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGGTCCAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGGTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGCTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="4">
+<sample name="4">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="7">
+<sample name="7">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="11">
+<sample name="11">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="12">
+<sample name="12">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="1">
+<sample name="1">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTGACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGGAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="6">
+<sample name="6">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTGACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGGAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="20">
+<sample name="20">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTGACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGGAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="13">
+<sample name="13">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTGACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGGAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="18">
+<sample name="18">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTGACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGGAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="9">
+<sample name="9">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTGAGTTTCACCTCCGACTGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCACTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGC [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="10">
+<sample name="10">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACTGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACGAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="19">
+<sample name="19">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACTGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="8">
+<sample name="8">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACAAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="15">
+<sample name="15">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGCTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACAAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCGATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="14">
+<sample name="14">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACAAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCGATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="5">
+<sample name="5">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACAAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCGATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="16">
+<sample name="16">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACAAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCGATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="17">
+<sample name="17">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACAAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTACAACAGCGAATTCTACAATCCAAATACGCGACTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCGATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+</population>
+<population name="Pop2">
+<individual name="34">
+<sample name="34">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="21">
+<sample name="21">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="25">
+<sample name="25">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="23">
+<sample name="23">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAAGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCAAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="31">
+<sample name="31">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTCACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="37">
+<sample name="37">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTCACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="40">
+<sample name="40">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACCTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="32">
+<sample name="32">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTAATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCCGGGGGGCCGTGCGAGGCAACCTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="22">
+<sample name="22">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTAATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCCGGGGGGCCGTGCGAGGCAACCTTATGGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="33">
+<sample name="33">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTAATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCCGGGGGGCCGTGCGAGGCAACCTTATGGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="35">
+<sample name="35">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCCGGGGGGCCGTGCGAGGCAACCTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="29">
+<sample name="29">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCCGGGGGGCCGTGCGAGGCAACCTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="38">
+<sample name="38">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTACTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGATTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="26">
+<sample name="26">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCAACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="36">
+<sample name="36">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCAACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="30">
+<sample name="30">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAATCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATACGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTGATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="24">
+<sample name="24">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAATCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATACGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTGATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="27">
+<sample name="27">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGTTCAATCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGTTTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATACGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTGATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="28">
+<sample name="28">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGGTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGATTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+<individual name="39">
+<sample name="39">
+<datablock type="DNA">
+CCTAGAGTAATTATCCAGCTAAGAGCAAGACGTTCTGGATGAATGAACTATCAATTGGTCAAGCGAGCAGACTAATTTGTGGGAAACTTCTCTGACGGCAACTCAATAGAGGTGTGCTGTCTCATCACTACACACCTCTGGGCGGCCGGATCGTCAGATTCACCTCCGACAGGCTTCATAGTAACAAGGGGATTTCACCACCAGCCGACCTAGCACCAGTAAGCATATGGCGCTGCCTTGTGCTGACCATACGACCAACCTTCAGCCCCGAGGCTCAAGAGCGCAACGTACAAAGAGCTTTCACCGTATGACGATTGAGCTCACGCAAGTCATAAACCTTTAATTCATTGAATTATAACAGCGAATTCTACAATCCAAATACGCGGCTTTCAGCATTCCACTACCCTGGGGGGCCGTGCGAGGCAACGTTATAGACGTTTCGCGTAAAGCGGTGACGTACGTAAATTAGGGCCATAATGTGTTGAATGACGA [...]
+
+</datablock>
+</sample>
+</individual>
+</population>
+    </region>
+  </data>
+</lamarc>
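(Editor's note, illustrative only: the <forces> section of these test inputs pairs each force with start values, an estimation method, optional constraints, and a prior with lower/upper bounds. A small sketch, again not part of LAMARC itself and assuming only the element names that appear in the XML above, that lists those settings side by side:

    import xml.etree.ElementTree as ET

    def show_forces(path):
        # Print each force's start values and prior bounds from a LAMARC input.
        root = ET.parse(path).getroot()
        for force in root.find("forces"):
            starts = (force.findtext("start-values") or "").split()
            prior = force.find("prior")
            bounds = "none"
            if prior is not None:
                bounds = "%s [%s, %s]" % (prior.get("type"),
                                          (prior.findtext("lower") or "").strip(),
                                          (prior.findtext("upper") or "").strip())
            print(force.tag, "start =", starts, "prior =", bounds)

    # Example: the file added in the next hunk of this commit.
    show_forces("doc/testfiles/infile.growmigheat")
)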
diff --git a/doc/testfiles/infile.growmigheat b/doc/testfiles/infile.growmigheat
new file mode 100644
index 0000000..d935e48
--- /dev/null
+++ b/doc/testfiles/infile.growmigheat
@@ -0,0 +1,393 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1 1.1 2 3 8</temperatures>
+      <swap-interval>10</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>50</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>750</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.growmigheat</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.growmigheat</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.growmigheat</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.035516 0.004228</start-values>
+      <method> WATTERSON WATTERSON</method>
+      <max-events>10000</max-events>
+      <profiles> fixed fixed </profiles>
+      <constraints> unconstrained unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+    <migration>
+      <start-values> 0 100 100 0</start-values>
+      <method> PROGRAMDEFAULT FST FST PROGRAMDEFAULT</method>
+      <max-events>10000</max-events>
+      <profiles> none fixed fixed none </profiles>
+      <constraints> invalid unconstrained unconstrained invalid </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 0.01 </lower>
+        <upper> 1000 </upper>
+      </prior>
+    </migration>
+    <growth type="CURVE">
+      <start-values> 1 1</start-values>
+      <method> USER USER</method>
+      <max-events>10000</max-events>
+      <profiles> fixed fixed </profiles>
+      <constraints> unconstrained unconstrained </constraints>
+      <prior type="linear">
+        <paramindex> all </paramindex>
+        <lower> -500 </lower>
+        <upper> 1000 </upper>
+      </prior>
+    </growth>
+  </forces>
+  <data>
+    <region name="tempdna---0">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> 0.25 0.25 0.25 0.25</base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1000</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="  Popmig0">
+      <individual name="00_0019   ">
+        <sample name="00_0019   ">
+          <datablock type="DNA">
+            AAAGTGGAATGGCACTAGGTGGGCGGTCTGCTGAAAGTGTTTACTTGTTGTGAAGCTACCGTACAGTAGGTTCGCAATTCGGCTGCCGTAGTCGGCAACCGTTAAACGATGCTATACCGGCAATTTGGCATCGTGAAGAATTCAAGCGAGGAGCACATATGGGGGCTTTTGCAGCTTGAACTGTTTTACCCGGTCCGAACGAGTAGGTTTAAGGTGGACACGGCAATTTCCGTGAACCAATTGTCGTGGTACTCGTATATGTCTAAATGGGTCTTCTGCCGCCCGCAATAAGCTGGAGAGAAGAGCTAGAGAGTCTAGGTTCCATGTCGTTGGGAACGGGCGTAGGCTCACTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCCGCTTGATGCTTGTGCAAACCTCGATGGGATTCGACAGACGGCTTTTTTATATTATCTAAGGCGGGAGTACGGACTACCCTTTGTGCCTGAGCTT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="DNA">
+            AAAGTGGAATGGCACTAGGTGGGCGGTCTGCTGAAAGTGTTTACTTGTTGTGAAGCTACCGTACAGTAGGTTCGCAATTCGGCTGCCGTAGTCGGCAACCGTTAAACGATGCTATACCGGCAATTTGGCATCGTGAAGAATTCAAGCGAGGAGCACATATGGGGGCTTTTGCAGCTTGAACTGTTTTACCCGGTCCGAACGAGTAGGTTTAAGGTGGACACGGCAATTTCCGTGAACCAATTGTCGTGGTACTCGTGTATGTCTAAATGGGTCTTCTGCCGCCCACAATAAGCTGGAGAGAAGAGCTAGAGAGTCTAGGTTCCATGTCGTTGGGAACGGGCGTAGGCTCACTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCCGCTTGATGCTTGTGCAAACCTCGATGGGATTCGACAGACGGCTTTTTTATATTATCTAAGGCGGGAGTACGGACTACCCTTTGTGCCTGAGCTT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="DNA">
+            AAAGTGGAATGGCACTAGGTGGGCGGTCTGCTGAAAGTGTTTACTTGTTGTGAAGCTACCGTACAGTAGGTTCGCAATTCGGCTGCCGTAGTCGGCAATCGTTAAACGATGCTATACCGGCAATTTGGCATCGTGAAGAATTCAAGCGAGGAGCACATATGGGGGCTTTTGCAGCTTGAACTGTTTTACCCGGTCCGAACGAGTAGGTTTAAGGTGGACACGGCAATTTCCGTGAACCAATTGTCGTGGTACTCGTGTATGTCTAAATGGGTCTTCTGCCGCCCGCAATAAGCTGGAGAGAAGAGCTAGAGAGTCTAGGTTCCATGTCGTTGGGAACGGGCGTAGGCTCACTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCCGCTTGATGCTTGTGCAAACCTCGATGGGATTCGACAGACGGCTTTTTTATATTATCTAAGGCGGGAGTACGGACTACCCTCTGTGCCTGAGCTT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="DNA">
+            AAAGTGGAATGGCACTAGGTGGGCGGTCTGCTGAAAGTGTTTACTTGTTGTGAAGCTACCGTACAGTAGGTTCGCAATTCGGCTGCCGTAGTCGGCAATCGTTAAACGATGCTATACCGGCAATTTGGCATCGTGAAGAATTCAAGCGAGGAGCACATATGGGGGCTTTTGCAGCTTGAACTGTTTTACCCGGTCCGAACGAGTAGGTTTAAGGTGGACACGGCAATTTCCGTGAACCAATTGTCGTGGTACTCGTGTATGTCTAAATGGGTCTTCTGCCGCCCGCAATAAGCTGGAGAGAAGAGCTAGAGAGTCTAGGTTCCATGTCGTTGGGAACGGGCGTAGGCTCACTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCCGCTTGATGCTTGTGCAAACCTCGATGGGATTCGACAGACGGCTTTTTTATATTATCTAAGGCGGGAGTACGGACTACCCTCTGTGCCTGAGCTT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTACGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCTCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTACGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCTCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATATCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0010   ">
+        <sample name="00_0010   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGAGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0016   ">
+        <sample name="00_0016   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGAGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0017   ">
+        <sample name="00_0017   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGAGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0011   ">
+        <sample name="00_0011   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTGTGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAGACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGACGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGATAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGACGCCTTTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0014   ">
+        <sample name="00_0014   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGACGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGATAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGACGCCTTTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0012   ">
+        <sample name="00_0012   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0013   ">
+        <sample name="00_0013   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0015   ">
+        <sample name="00_0015   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0018   ">
+        <sample name="00_0018   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="  Popmig1">
+      <individual name="01_0022   ">
+        <sample name="01_0022   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCTCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0035   ">
+        <sample name="01_0035   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0024   ">
+        <sample name="01_0024   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGCTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0033   ">
+        <sample name="01_0033   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTGAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGCTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0030   ">
+        <sample name="01_0030   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGCTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0034   ">
+        <sample name="01_0034   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGCTTTACTCGGTCCGCACGGGTAGGTGTAGGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0039   ">
+        <sample name="01_0039   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGCTTTACTCGGTCCGCACGGGTAGGTGTAGGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0028   ">
+        <sample name="01_0028   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATGTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTCGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0025   ">
+        <sample name="01_0025   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATGTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0031   ">
+        <sample name="01_0031   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATGTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACAGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0029   ">
+        <sample name="01_0029   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0037   ">
+        <sample name="01_0037   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0027   ">
+        <sample name="01_0027   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTACGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0032   ">
+        <sample name="01_0032   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTACGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0038   ">
+        <sample name="01_0038   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTACGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0020   ">
+        <sample name="01_0020   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTACGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0026   ">
+        <sample name="01_0026   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTACGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0036   ">
+        <sample name="01_0036   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0021   ">
+        <sample name="01_0021   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCTTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0023   ">
+        <sample name="01_0023   ">
+          <datablock type="DNA">
+            AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTA [...]
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile.multicat b/doc/testfiles/infile.multicat
new file mode 100644
index 0000000..9c5229b
--- /dev/null
+++ b/doc/testfiles/infile.multicat
@@ -0,0 +1,129 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>10000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.multicat</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.multicat</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.multicat</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01</start-values>
+      <method> USER</method>
+      <max-events>1000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+  </forces>
+  <data>
+    <region name="Finland">
+      <model name="GTR">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>2</num-categories>
+          <rates> 0.142652 1.85735</rates>
+          <probabilities> 0.5 0.5</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> 0.30884 0.31279 0.13133 0.24704</base-freqs>
+        <gtr-rates> 5.40506 147.777 4.27459 3.58012 96.3679 1</gtr-rates>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>68</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="Population KFMH">
+      <individual name="germ_10   ">
+        <sample name="germ_10   ">
+          <datablock type="DNA">
+            TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_11   ">
+        <sample name="germ_11   ">
+          <datablock type="DNA">
+            AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_20   ">
+        <sample name="germ_20   ">
+          <datablock type="DNA">
+            ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_21   ">
+        <sample name="germ_21   ">
+          <datablock type="DNA">
+            ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_30   ">
+        <sample name="germ_30   ">
+          <datablock type="DNA">
+            ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_31   ">
+        <sample name="germ_31   ">
+          <datablock type="DNA">
+            ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile.multilocus b/doc/testfiles/infile.multilocus
new file mode 100644
index 0000000..1f8525f
--- /dev/null
+++ b/doc/testfiles/infile.multilocus
@@ -0,0 +1,345 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>10</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>10000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>3001</seed>
+    <verbosity>normal</verbosity>
+    <progress-reports>normal</progress-reports>
+    <results-file>outfile</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile</in-summary-file>
+    <use-out-summary>false</use-out-summary>
+    <out-summary-file>outsumfile</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01</start-values>
+      <method> PROGRAMDEFAULT</method>
+      <max-events>100000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+  </forces>
+  <data>
+    <region name="loci">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> 0.25 0.25 0.25 0.25</base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>85</map-position>
+          <length>3</length>
+          <offset>0</offset>
+        </block>
+        <block>
+          <map-position>39</map-position>
+          <length>46</length>
+          <offset>0</offset>
+        </block>
+        <block>
+          <map-position>88</map-position>
+          <length>12</length>
+          <offset>0</offset>
+        </block>
+        <block>
+          <map-position>1</map-position>
+          <length>38</length>
+          <offset>0</offset>
+        </block>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="seattle">
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATTCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCCGTTTGGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATTCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAATTTCGCCCAGGGCTAGGGGGTGCCGGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATTCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAACAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGCTTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGG
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="DNA">
+            GGC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGG
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAACTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTTGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAACTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GGTGGATTTTATTCGCTAGCCCACGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAACTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile.quick b/doc/testfiles/infile.quick
new file mode 100644
index 0000000..8794331
--- /dev/null
+++ b/doc/testfiles/infile.quick
@@ -0,0 +1,129 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>3</number>
+      <samples>200</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>1</number>
+      <samples>500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.quick</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.quick</in-summary-file>
+    <use-out-summary>false</use-out-summary>
+    <out-summary-file>outsumfile.quick</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01</start-values>
+      <method> USER</method>
+      <max-events>1000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+  </forces>
+  <data>
+    <region name="region 1">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>68</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="Population JYRM">
+      <individual name="germ_10   ">
+        <sample name="germ_10   ">
+          <datablock type="DNA">
+            TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_11   ">
+        <sample name="germ_11   ">
+          <datablock type="DNA">
+            AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_20   ">
+        <sample name="germ_20   ">
+          <datablock type="DNA">
+            ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_21   ">
+        <sample name="germ_21   ">
+          <datablock type="DNA">
+            ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_30   ">
+        <sample name="germ_30   ">
+          <datablock type="DNA">
+            ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_31   ">
+        <sample name="germ_31   ">
+          <datablock type="DNA">
+            ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile.regheat b/doc/testfiles/infile.regheat
new file mode 100644
index 0000000..66e74f1
--- /dev/null
+++ b/doc/testfiles/infile.regheat
@@ -0,0 +1,195 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>true</adaptive>
+      <temperatures> 1 2 3 4</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>10000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.regheat</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.regheat</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.regheat</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01</start-values>
+      <method> USER</method>
+      <max-events>1000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+  </forces>
+  <data>
+    <region name="multi">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>68</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="finland">
+      <individual name="germ_10   ">
+        <sample name="germ_10   ">
+          <datablock type="DNA">
+            TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_11   ">
+        <sample name="germ_11   ">
+          <datablock type="DNA">
+            AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_20   ">
+        <sample name="germ_20   ">
+          <datablock type="DNA">
+            ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_21   ">
+        <sample name="germ_21   ">
+          <datablock type="DNA">
+            ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_30   ">
+        <sample name="germ_30   ">
+          <datablock type="DNA">
+            ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_31   ">
+        <sample name="germ_31   ">
+          <datablock type="DNA">
+            ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="multi2">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>68</length>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="finland">
+      <individual name="germ_10   ">
+        <sample name="germ_10   ">
+          <datablock type="DNA">
+            TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_11   ">
+        <sample name="germ_11   ">
+          <datablock type="DNA">
+            AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_20   ">
+        <sample name="germ_20   ">
+          <datablock type="DNA">
+            ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_21   ">
+        <sample name="germ_21   ">
+          <datablock type="DNA">
+            ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_30   ">
+        <sample name="germ_30   ">
+          <datablock type="DNA">
+            ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="germ_31   ">
+        <sample name="germ_31   ">
+          <datablock type="DNA">
+            ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/infile_gamma1.xml b/doc/testfiles/infile_gamma1.xml
new file mode 100644
index 0000000..9d77c9b
--- /dev/null
+++ b/doc/testfiles/infile_gamma1.xml
@@ -0,0 +1,2417 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <forces>
+    <coalescence>
+      <start-values> 0.01 </start-values>
+      <method> WATTERSON</method>
+      <max-events>1000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+    </coalescence>
+    <gamma-over-regions>
+      <start-values> 1 </start-values>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+    </gamma-over-regions>
+  </forces>
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>1000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>5000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.1Pop15RegThetaIsOneHundredthAlphaIs1</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>outsumfile.1Pop15RegThetaIsOneHundredthAlphaIs1</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.1Pop15RegThetaIsOneHundredthAlphaIs1</out-summary-file>
+    <use-curvefiles>false</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefileAlpha1</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile_1Pop15RegThetaIsOneHundredthAlphaIs1.xml</out-xml-file>
+  </format>
+<data>
+    <region name="LinkageGroup01">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCTACCAGGCCCCAACCGGTACCCCACTTGGTCTCTCCTCTCTCCGACTCTAAGACATTTTTGCTCACTTCTTACAAAGAGTATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCGTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCGGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTAAACCGGTACCCCACTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCGTTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTAAACCGGTACCCCACTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCGTTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCACTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCTGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGATGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCACTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCTGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGATGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCACTTGGTCTCTCCGCTCTTCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCCATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						GTGGCAAGCCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCTCTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCTCTTGGTCTCTCCGCTCTCCGGCTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCTCTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCTCTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCTCTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCTCTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCTCTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCTCTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCTCTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCTCTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCACTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGGTTGCGATGTAGTTTATTAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTCACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACACAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACAATACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCACTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTTACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACGCAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCACTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTTACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACGCAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						GTGGCAAACCACCTCCCCGGAAAGCTCTGAAATTCTCCTTTCTTGTGCGGCTCATCATCATTGAGTCCACCAGGCCTCAACCGGTACCCCACTTGGTCTCTCCGCTCTCCGACTCGAAGACATTTTTGCTCACTTCTTACAAAGAATATCTGTACGATAGATTGCGATGTAGTTTATCAGACGGTGGTATTCATGTCCCGTAATCTGCAGCTTGGTGGAAAGCCATTTGCCACGGCCTATCTAGAAAAACTTTGTTCCGGGCGAACACTTTGCATCGCCGGTTACCAATCTTTGGACTGAACCTAAACGTGCGGTGCATTATATTCATGCTCAGTACATTGGTAGACATACAATGCGCGCACCTTAGTGGACCGAAAGCGCACGCTACTTAGACGCAGTTGGGTCAGAGCTTGTTCGAGCTGGAGCAATGGCTCCAAGCTACACTACCGAATTGCGCCGGGTGCGTTCTGATCCATTGAATAGATG [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup02">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTAGGCTTCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGCCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGCGTATGGGATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGAGTGGCGCCCATGGGCTGATAGATGATCAACTTTTCGTCTACCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGGTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTAGGCTTCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGCCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGCGTATGGGATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGAGTGGCGCCCATGGGCTGATAGATGATCAACTTTTCGTCTACCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGGTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTAGGCTTCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGCCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGCGTATGGGATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGAGTGGCGCCCATGGGCTGATAGATGATCAACTTTTCGTCTACCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGGTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTAGGCTTCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGCCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGCGTATGGGATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGAGTGGCGCCCATGGGCTGATAGATGATCAACTTTTCGTCTACCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGGTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTAGGCTTCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGCCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGCGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACTTTTCGTCTACCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGATTTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGGTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTAGGCTTCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGCCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGCGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACTTTTCGTCTACCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGGTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTAGGCTTCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGCCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGCGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACTTTTCGTCTACCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGGTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTAGGCTTCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGCCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGCGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACTTTTCGTCTACCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGGTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						TGGAAGCAGGACCGTAGTTTGGAACTCGGGCAAGTAACAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCTTACAAGTCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGTGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAACTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						TGGAAGCAGGACCGTAGTTTGGAACTCGGGCAAGTAACAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCTTACAAGTCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGTGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAACTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						TGGAAGCAGGACCGTAGTTTGGAACTCGGGCAAGTAACAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCTTACAAGTCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGTGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAACTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						TGGAAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCTTACAAGTCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGTGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAACTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						TGGAAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCTTACAAGTCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGTGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCCTCTTCTAACTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						TGGAAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCTTACAAGTCGTAGTAGAAAGTATCTGGTGTCCAAAGCGGTAAGTGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAACTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAACAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGTCGTAGTAGGAAGTATCTGGTGTCCAAAGCGGTAAGCGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAGCAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGTCGTAGTAGAAAGCATCTGGTGTCCAAAGCGGTAAGCGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAGCAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGTCGTAGTAGAAAGCATCTGGTGTCCAAAGCGGTAAGCGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAGCAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACGGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGTCGTAGTAGAAAGCATCTGGTGTCCAAAGCGGTAAGCGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAATGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACCCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						TGGGAGCAGGACCGTAGTTTGGAACTCAGGCAAGTAGCAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACAGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGTCGTAGTAGAAAGCATCTGGTGTCCAAAGCGGTAAGCGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAGTGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACGCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						TGGGAGGAGGACCGTAGTTTGGAACTCAGGCAAGTAGCAGAATGCGTCAACCGGCTGGGCTCCAACTATAACCTGATTCTGACAGGCGGAAGCTTATTATGTCGCACCCTCTGATAGGGCCGCATCGTCATACAAGTCGTAGTAGAAAGCATCTGGTGTCCAAAGCGGTAAGCGTATGGAATAGCGTGAGGTTTCCCGGCTCCGTTGTTTCAGCCCGACTTCTTAGTCTGCATATGGGATTCAGTGTGGCGCCCATGGGCTGATAGATGATCAACCTTTCGTCTGCCCAAGTAACACTGGACATTTATATTAATATCTTCTTCTAATTTGATCTAGCCATTCCTAAGTGATTTCGTTAAACGGAAAGCTCCTCTGCCCCTAAAAAATACACTTTGTGATCCGAGGCCTCAAGACTAGAATTCGCACAATGGATTCATTCCCCGCCGCTCTGTCCCGGTACGCGGATACCTGAGGCCATGTTGGAGC [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup03">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						CTTAGATTAGTCTCACCCTCAACATCGCAGACTGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAAGCTCCGAGGTAAGCTACATCTATAGTACGGCGGGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGCCGACGACAATGCCTGGAGTAACATTCTGAGTTTTTAATACCTTCGGCTGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGTGGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTCTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGCTCAACAACCTGGCGTATATAAACAGGGAAAACACAGATATCATGGTAGAGGACGGACTATTAAGAATTCACGATACTCCGTAGTCCCCTCGACTGGCCCCTGAAGATTAGAGGACTTATTTACCAGGGTTAGACCCGCCGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						CTTAGATTAGTCTCACCCTCAACATCGCAGACTGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAAGCTCCGAGGTAAGCTACATCTATAGTACGGCGGGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGCCGACGACAATGCCTGGAGTAACATTCTGAGTTTTTAATACCTTCGGCTGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGTGGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTCTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGCTCAACAACCTGGCGTATATAAACAGGGAAAACACAGATATCATGGTAGAGGACGGACTATTAAGAATTCACGATACTCCGTAGTCCCCTCGACTGGCCCCTGAAGATTAGAGGACTTATTTACCAGGGTTAGACCCGCCGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						CTTAGATTAGTCTCACCCTCAACATCGCAGACTGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAAGCTCCGAGGTAAGCTACATCTATAGTACGGCGGGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGCCGACGACAATGCCTGGAGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGTGGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTCTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGCTCAACAACCTGGCGTATATAAACAGGGAAAACACAGATATCATGGTAGAGGACGGACTATTAAGAATTCACGATACTCCGTAGTCCCCTCGACTGGCCCCTGAAGATTAAAGGACTTATTTACCAGGGTTAGACCCGCCGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						CTTAGATTAGTCTCACCCTCAACATCGCAGACTGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAAGCTCCGAGGTAAGCTACATCTATAGTACGGCGGGGCTTCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGCCGACGACAATGCCTGGAGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGTGGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTCTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGCTCAACAACCTGGCGTATATAAACAGGGAAAACACAGATATCATGGTAGAGGACGGACTATTAAGAATTCACGATACTCCGTAGTCCCCTCGACTGGCCCCTGAAGATTAAAGGACTTATTTACCAGGGTTAGACCCGCCGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						CTTAGATTAGTCTCACCCTCAACATCGCAGACTGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAAGCTCCGAGGTAAGCTACATCTATAGTACGGCGGGGCTTCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGCCGACGACAATGCCTGGAGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGTGGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTCTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGCTCAACAACCTGGCGTATATAAACAGGGAAAACACAGATATCATGGTAGAGGACGGACTATTAAGAATTCACGATACTCCGTAGTCCCCTCGACTGGCCCCTGAAGATTAAAGGACTTATTTACCAGGGTTAGACCCGCCGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						CTTAGATTAGTCTCACCCTCAACATCGCAGACTGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAAGCTCCGAGGTAAGCTACATCTATAGTACGGCGGGGCTTCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGCCGACGACAATGCCTGGAGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGTGGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTCTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGCTCAACAACCTGGCGTATATAAACAGGGAAAACACAGATATCATGGTAGAGGACGGACTATTAAGAATTCACGATACTCCGTAGTCCCCTCGACTGGCCCCTGAAGATTAAAGGACTTATTTGCCAGGGTTAGACCCGCCGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						CATAGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGAGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGCAGTATGCGGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGCC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						CATAGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGAGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGCAGTATGCGGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGCC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						CATAGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGAGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGCAGTATGCGGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGCC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						CATAGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGAGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGCAGTATGCGGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGCC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						CATAGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGAGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAGCATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGCAGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						CATAGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGAGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAGCATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGCAGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						CATAGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGAGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGCAGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						CATAGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGAGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGCAGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						CATAGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGAGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGCAGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						CATAGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGAGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATAATTCCCGGCAGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						CATAGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGGGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTTATGCCCGAGTGGAATAATTCCCGGCAGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAAAGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						CATGGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGGGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATATTTCCCGGCAGTATGCAGTTAGATCACAAAAGCCCCGAGGTCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						CATGGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGGGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATATTTCCCGGCAGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						CATGGATTAGTCTCATCCTCAGCATCGCAAACGGCACATGAAGAGTCTCTGCGGATTCTTTGGGAGGAAAGGCTCCGAGGTGAGCTACATCTATAGTACGGCGGGGCTCCTCTACCAGCCTTCTCATACGCAAGTTGGAAATGCTGTCCTTAATAGGCGACGAGAATGCCTGGTGTAACATTCTGAGTTTTTAATACCTTCGGCAGTCCAGAAAGGGGTCATGCCCGAGTGGAATATTTCCCGGCAGTATGCAGTTAGATCACAAAAGCCCCGAGGCCACTGAAAATAGTTTTAACAATGACTGGTTGACGGGAGAATGCGGCAGAATTTGGATCGGACCAACAACCTGGCGTATATAAACAGGGAAAACGCAGATATCATGGTAGAGGACGGACTGTTAAGAATTCACGATACTCCGTAGTCCCCTCGACTAGCCCCTGAAGATTAAAGGAGCTATTTGCCAGGGTTAGCCCCGACGACGCGGAC [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup04">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						AGCCTTGCGTCTGAGCGGATATTTTACATGACATTATGGTCGGCTTTCTGTTGGAGAGAAGTCAGACGAGCACCATACACCCTCTCAGAAGTGCGACTAGGATTAGCCGCCTTAGGGGAGGGCTCCCTCGAGTGCCCGGTTAGATTGATGTATATAGTACTTTCTTGGCGAAACTTAATTACATCGAGTCCGTCAGCGGAGCTTAGTTTAGACTTGTTGTCACAGACTTACTTTCCGCGGCTAAAACATTGCAGCCTGAGAAAGACAGTACCCTCTATCATCCCCTCTCGCAAAGCTAGCTGTGTCGTTTATCTTGTCCCACAAGATAGACCTTGGCCAGAATACACGAAAATCCAAATGCTAATATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTAAGTTGATGGGAATCCCCCAACCAGAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGTTCTCTGTCGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGAGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAGTACTTCCCTGGCAAAACTTAACCACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTATCACCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCTTGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAACTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGATGGGGATTCCTCAACCAAAAGCGAGGTTGGGCTGACATATAGGGTCCTATGATGATCCCAGACCGCGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGTTCTCTGTCGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGAGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAGTACTTCCCTGGCAAAACTTAACCACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTATCACCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCTTGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAACTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGATGGGGATTCCTCAACCAAAAGCGAGGTTGGGCTGACATATAGGGTCCTATGATGATCCCAGACCGCGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGTTCTCTGTCGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGAGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAGTACTTCCCTGGCAAAACTTAACCACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGACAAAGACAGTACCCTCTATCACCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCTTGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAACTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGATGGGGATTCCTCAACCAAAAGCGAGGTTGGGCTGACATATAGGGTCCTATGATGATCCCAGACCGCGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTGACATTAGGGTCGGTTCTCTGTCGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAACCGCTTTAGGGGAGAGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAGTACTTCCCTGGCAAAACTTAACCACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTATTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTATCATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCTTGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAACTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGATGGGGATTCCTCAACCAAAAGCGAGGTTGGGCTGACATATAGGGTTCTATGATGATCCCAGACCGCGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTGACATTAGGGTCGGTTCTCTGTCGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAACCGCTTTAGGGGAGAGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAGTACTTCCCTGGCAAAACTTAACCACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTATTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTATCATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCTTGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAACTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGATGGGGATTCCTCAACCAAAAGCGAGGTTGGGCTGACATATAGGGTTCTATGATGATCCCAGACCGCGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGTTCTCTGTCGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAAAGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAGTACTTCCCTGGCAAAACTTAACCACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTAAGAAAGACAGTACCCTCTATCATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCTTGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAACTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGATGGGGATTCCTCAACCAAAAGCGAGGTTGGGCTGACATATAGGGTTCTATGATGATCCCAGACCGCGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGTTCTCTGTCGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGAGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAGTACTTCCCTGGCAAAACTTAACCACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTAAGAAAGACAGTACCCTCTATCATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCTTGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAACTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGATGGGGATTCCTCAACCAAAAGCGAGGTTGGGCTGACATATAGGGTTCTATGATGATCCCAGACCGCGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGTTCTCTGTCGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGAGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAGTACTTCCCTGGCAAAACTTAACCACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTAAGAAAGACAGTACCCTCTATCATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCTTGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAACTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGATGGGGATTCCTCAACCAAAAGCGAGGTTGGGCTGACATATAGGGTTCTATGATGATCCCAGACCGCGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTGTTTGACATTATGGTCGGCTCTCTGTTGGAGAGAAATCAGATGAGCACCATACACCTTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGGGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAATACTTCCCTGGCAAAACTTAATTACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGTTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTCTCATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCTTGTCCCGCAAGGTAGACCTTGACCGAAATACACGAAAGTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGATGGGGATTCCCCAACCAAAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGCTCTCTGTTGGAGAGAAATCAGATGAGCACCATACGCCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGGGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAATACTTTCCTGGCAAAACTTAATTACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTATTATCCCCTCTCGCAAAGCTAGCCGTGTCATTTATCTTGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAAGTCCAAATGCTAAAATGACAACAATCGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGACGGGGATTCCCCAACCAAAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGCTCTCTGTTGGAGAGAAATCAGATGAGCACCATACGCCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGGGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAATACTTTCCTGGCAAAACTTAATTACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTATTATCCCCTCTCGCAAAGCTAGCCGTGTCATTTATCTTGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAAGTCCAAATGCTAAAATGACAACAATCGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGACGGGGATTCCCCAACCAAAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGCTCTCTGTTGGAGAGAAATCAGATGAGCACCATACGCCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGGGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAATACTTTCCTGGCAAAACTTAATTACATCGAGTCCGTCAGCGGAGGCTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCCCTATTATCCCCTCTCGCAAAGCTAGCCGTGTCATTTATCTTGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAAGTCCAAATGCTAAAATGACAACAATCGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGACGGGGATTCCCCAACCAAAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTCACATTATGGTCGGCTCTCTGTTGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGAGGAGGGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAATACTTTCCTGGCAAAACTTAATTACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAGTATTGCAGCCTGAGAAAGACAGTACCCTCTATTATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCATGTCCCGCAAAGTAGACCTTGGCCGAAATACACGAAAGTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGACGGGGATTCCCCAACCAAAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						AGCCTTGCGCCTGAGCGGATATTTTATTTCACATTATGGTCGGCTCTCTGTTGGAGAGAAATCAGATGAGCACCATACACTCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGAGGAGGGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAATACTTTCCTGGCAAAACTTAATTACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAGTATTGCAGCCTGAGAAAGACAGTACCCTCTATTATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCATGTCCCGCAAAGTAGACCTTGGCCGAAATACACGAAAGTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGACGGGGATTCCCCAACCAAAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						GGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGCTCTCTGTTGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGGGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAATACTTTCCTGGCAAAACTTAATTACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTATTATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCATGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAAGTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGACGGGGATTCCCCAACCAAAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						GGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGCTCTCTGTTGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGGGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAATACTTTCCTGGCAAAACTTAATTACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTATTATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCATGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAAGTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGACGGGGATTCCCCAACCAAAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						GGCCTTGCGCCTGAGCGGATATTTTATTTGACATTATGGTCGGCTCTCTGTTGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGGGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAATACTTTCCTGGCAAAACTTAATTACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTATTATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCATGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAAGTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGACGGGGATTCCCCAACCAAAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						GGCTTTGCGCCCGAGCGGATATTTTATTTGATATTATGGTCGGCTCTCTGTTGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGGGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAATACTTTCCTGGCAAAACTTAATTACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTATTATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCATGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAAGTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGACGGGGATTCCCCAACCAAAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						GGCTTTGCGCCCGAGCGGATATTTTATTTGACATTATGGTCGGCTCTCTGTTGGAGAGAAATCAGATGAGCACCATACACCCTCTCAGAAGTGCGACTAGGACTAGCCGCTTTAGGGGAGGGCTCCCTCGAGGGGCCGGTTAGATTGATGTATATAATACTTTCCTGGCAAAACTTAATTACATCGAGTCCGTCAGCGGAGGTTTGTTTAGACTTGTTGTTACAGACTTACTTTCCGCGGCTAAAATATTGCAGCCTGAGAAAGACAGTACCCTCTATTATCCCCTCTCGCAAAGCTAGCCGTGTCGTTTATCATGTCCCGCAAGGTAGACCTTGGCCGAAATACACGAAAGTCCAAATGCTAAAATGACAACAATAGGACGCGGCCCACGTCCAGATACCACAGACGTACGTTGACGGGGATTCCCCAACCAAAAGCGAGGGTGGGCTGACATATAGGGTTCTATGATGATCCTAGACCGTGATT [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup05">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCACCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTTGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCACCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCACCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCACCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCACCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCACCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCACCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCACCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCACCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCACCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCACCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCGCCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCGCCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCGCCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCGCCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCGCCAGTGCGTTGATGCACGGACGGAGACCTGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCGCCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCGCCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCGCCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						CTCCGGAAAAGCAACGCTAGCAGGATTGCCCCGTACAGCCCTAATTAACATGGAAACACCATACAATAGCAACTCGTGGCCAATTATTCAGCAACAAAAGCGTTATATGTGATCTAGAAGGTGTCCTTATTAGGCAGGGCGGTAGCAGAAACTCGCTATAGGTCAGAAGGCGCGGGCCGTGGAAACGAAGCATGAGGTGTACTATTTCAAATAAGACCAGTCATGTGTTCTGACGAGGCGACGGCAATACCGCACCACTGCTCGGGGGCCGGCAGGTGAGTGGGCGCCAGTGCGTTGATGCACGGACGGAGACCGGTTCCCCCCACCACGCGGCTACCTGGTGTATTGCTCCGGGACTCTTGAGCCAGAGCATGTTGTGTGTGGTGAGCGGAAGCGCCGGGCGTCAATGATATAATATTGCGACGTAATATCTGAGGGCCCTCCCAGCTTTCATAATAAAGCCTTCGGGGGTGCCATTTCACGGAG [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup06">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCACTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						AGCGTCCCTGAGAACATCTTGGAAGCTTAAAGTTGATTCGGACGTTAACGGTGTGGTCAGAGTCGTTCAGATGAGTTATCCGACCGTTACGTTAGACCTAACGATTTGGGATACGTAACTTCCATTAATGTAACGTGAAACGAGACAGGATTTTCAGTACCAACACCAAGGAAGAAAATTGTTATTAACCTGTGAATTGAATTACGAGGTCAGCTTCCCCCTGATGCCATCACCACTTTCATTTTCGATCGGCGTGTAGCCCTGAGACTAGTGGTTATGGGTGAGTTCGAGAAGGGCAAATACGCCCTTAACTCGTCAGCGCCCACGTCTCAAAGCATGGTATGAGAACTTCACTATGTCGGCAACGCTCTTGGACTGACATGGTCAACTGGTGCACCATCCCAATCTAGCATAGCACCCCGTTGGGCGTAAACCCTGAGTATCACTATGCAAGAAGTCTATAGTCCCTCCATTACGAGCGCGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup07">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCAGAGGAGACTTATGTCACGCTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCTCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCAGAGGAGACTTATGTCACGCTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCTCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCAGAGGAGACTTATGTCACGCTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCTCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCAGAGGAGACTTATGTCACGCTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCTCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCTCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCTCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCTCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCTCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCCCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCTCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAACAAATAAGTATCGGAGTTACCTGATGAACTCCCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCCCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCCCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCCCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCCCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCCCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCCCAACGTATGTCCTGTCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCGGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCCCAACGTATGTCCTGCCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGAATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCCCAACGTATGTCCTGCCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGTATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCCCAACGTATGTCCTGCCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						CCCGCTAGCCTATATAGACGTTCTCGTCCTTTGATGGGTAAGAGCAGTTCACGTCGTGCATCCCCTCTCAGCACAGCCCGCTGGACCCCAAGCTGTAGCTGTTCTCGTGTTTATTTTATCGGAGGAGACTTATGTCACACTACACTTCCCCGGTTGATACTAGACGCGCACTCCTGTGCCTGCACCAATCCCTCAAAGTATTGAGAGGTCCACGGAAGACCCTAAGGCGACATATGCTTTGTCAGGAACTAACCGTATTAGCCTGTTTAGATCACCAATGGTCAGCCCACCGGTTTACGTGATCTACCGCATTTTTTCCAATCAAACCCCTCTGAGGCTCTTAATATGGGACCAAAGGATTCAGGAGGGTAAGCAAATAAGTATCGGAGTTACCTGATGAACTCCCAACGTATGTCCTGCCTTCCGGATTGGTCAGGGGCCTGTGAGACATCGTGCGAGGCAGTTGGACACTCCTGTCAAAATACA [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup08">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGTTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAAATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTTACCGTCCCTGGAAATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAACAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAAATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGGATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAACAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAAATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGGATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAACAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAAATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAGATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAGATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAGATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAGATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAGATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCAATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAGATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAGATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAGATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAGATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAAATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAAATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAAATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAAATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAAATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						GCGACTAAAGCAGTAAGTTGATACGGCTGCGGTAGGTCAGGATAATCGGTTTGTGTCACCCGCACCGAAATCTAGCCCTCAAATCTCACGTTTGTTGCTCCGGTGCGTCCAGCTTTGTTCGGCAGAAGTGAAGAATCTAATCCGACCTCAGTGAGCCCAGGAACAGCGGGATAGTGAGGACAGTTTGGTCGGCTCCCACTGTATGTCGGGGTCTGACGCTACTGGAGGGTCTGTCCTGCCATTTTGGGGACTAAGGTAAGCTCTTCTACGAAGTCCCCAGGCTGCGCTCAGAGACTAACCTGACGCACTCTGTTCCCAGATGATGCTCCCGCGCCCTGTTGGCCGCCAGCTTTGCCTCTGCCTCGAAGGGTTAGCCTCCGGAGCAGTTCGCGCGACAACGCTGTGTTGACCGTCCCTGGAAATTCCCCTCGTTGTTGATATAGATACGGTACATATGCATTTAGGTTCTCCCTAACATGGGGGATC [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup09">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						TAGAAATCTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGCAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCCGATGGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGGATAGACATGACTGTGTTACACTTTCCACATCAGTAAAATCAGGAGTCCACTAGTTCGGTCAGAGGGTTATTGATCTAACCGTCACTGCTTGCGGAGTGTCGCTACTAGGGTACTAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						TAGAAATCTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGCAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCCGATGGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGGATAGACATGACTGTGTTACACTTTCCACATCAGTAAAATCAGGAGTCCACTAGTTCGGTCAGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCGCTACTAGGGTACTAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						TAGAAATCTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGCAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCCGATGGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGGATAGACATGACTGTGTTACACTTTCCACATCAGTAAAATCAGGAGTCCACTAGTTCGGTCAGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCGCTACTAGGGTACTAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						TAGAAATCTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGCAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCCGATGGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGGATAGACATGACTGTGTTACACTTTCCACATCAGTAAAATCAGGAGTCCACTAGTTCGGTCAGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCGCTACTAGGGTACTAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						TAGAAATCTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGCAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCCGATGGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGGATAGACATGACTGTGTTACACTTTCCACATCAGTAAAATCAGGAGTCCACTAGTTCGGTCAGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCGCTACTAGGGTACTAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						TAGAAATCTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGCAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCCGATGGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGGATAGACATGACTGTGTTACACTTTCCACATCAGTAAAATCAGGAGTCCACTAGTTCGGTCAGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCGCTACTAGGGTACTAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						TAGAAATCTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGCAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCCGATGGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGGATAGACATGACTGTGTTACACTTTCCACATCAGTAAAATCAGGAGTCCACTAGTTCGGTCAGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCGCTACTAGGGTACTAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGCAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATGGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGGATAGACATGACTGTGTTACACTTTCCACATCAGTAAAATCAGGAGTCCACTAGTTCGGTCAGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCGCTACTAGGGTACTAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGCAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATGGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGGATAGACATGACTGTGTTACACTTTCCACATCAGTAAAATCAGGAGTCCACTAGTTCGGTCAGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCGCTACTAGGGTACTAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGCAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATGGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGGATAGACATGACTGTGTTACACTTTCCACATCAGTAAAATCAGGAGTCCACTAGTTCGGTCAGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCGCTACTAGGGTACTAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGCAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATGGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGGATAGACATGACTGTGTTACACTTTCCACATCAGTAAAATCAGGAGTCCACTAGTTCGGTCAGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCGCTACTAGGGTACTAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGTAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATGGCAGTACCGGTCACCTCGATCCTACGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTAATATTTCTTCCTGTTGCGGATAGACATGACTGTGCTACACTTTCCACATCAGTAGAATCAGGAGTCCACTAGTTCGGTCGGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCACTACTAGGGTACCAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGTAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATGGCAGTACCGGTCACCTCGATCCTACGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTAATATTTCTTCCTGTTGCGGATAGACATGACTGTGCTACACTTTCCACATCAGTAGAATCAGGAGTCCACTAGTTCGGTCGGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCACTACTAGGGTACCAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGTAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATGGCAGTACCGGTCACCTCGATCCTACGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTAATATTTCTTCCTGTTGCGGATAGACATGACTGTGCTACACTTTCCACATCAGTAGAATCAGGAGTCCACTAGTTCGGTCGGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCACTACTAGGGTACCAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGTAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATGGCAGTACCGGTCACCTCGATCCTACGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTAATATTTCTTCCTGTTGCGGATAGACATGACTGTGCTACACTTTCCACATCAGTAGAATCAGGAGTCCACTAGTTCGGTCGGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCACTACTAGGGTACCAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGTAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATGGCAGTACCGGTCACCTCGATCCTACGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTAATATTTCTTCCTGTTGCGGATAGACATGACTGTGCTACACTTTCCACATCAGTAGAATCAGGAGTCCACTAGTTCGGTCGGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCACTACTAGGGTACCAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGTAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATGGCAGTACCGGTCACCTCGATCCTACGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTAATATTTCTTCCTGTTGCGGATAGACATGACTGTGCTACACTTTCCACATCAGTAGAATCAGGAGTCCACTAGTTCGGTCGGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCACTACTAGGGTACCAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGTAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATAGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTCATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGAATAGACATGACTGTGTTACACTTTCCACATCAGTAGAATCAGGAGTCCACTAGTTCGGTCGGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCACTACTAGGGTACCAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGTAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATAGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGAATAGACATGACTGTGTTACACTTTCCACATCAGTAGAATCAGGAGTCCACTAGTTCGGTCGGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCACTACTAGGGTACCAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						TAGAAATTTCGTGTGTCCCTCATGCACGTGTTGGCTCAGAAGGGTACGCAGGGTCTTTTGGTGTTCACCTTGCGAAGGGCACGCGTGTATCGCACTTGCCTCTGAGACCATATACCATAGTAAGTCGGCTTAGTCCGCGTAGGGCTGACCACGAGAGGTGAGCGCTAAATAATTTAAGACCTCTGATAGCAGTACCGGTCACCTCGATCCTATGTCTATTACGGTGCGGGATAAGACATATTAGTTATGCGGTCATTAGCTCTGTTAAGTCCCCACTATTTCAGTAGTCACGTGCATCCGAGCACTCCACTATCTAGAGACAAGCACTGAAATAAGCACACTTGATATTTCTTCCTGTTGCGAATAGACATGACTGTGTTACACTTTCCACATCAGTAGAATCAGGAGTCCACTAGTTCGGTCGGAGGGTTATTGATCTAACTGTCACTGCTTGCGGAGTGTCACTACTAGGGTACCAACGACTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup10">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACATTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTGACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACATTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTGACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACATTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTGACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACATTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTGACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACATTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTGACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACATTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACATTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACATTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						GCGATCGGCTACATCTAACGGGGATTTAGTCGTGTAACGATGTCCACACGAAACAAACGAGATTACATACAAGTACGTTCATCGTAGCTACCGACCGAGGATGCTGTAGGCATAACCTGTGAGCATCCGCTAGCTAGAGGGATAGAAATATGAGGCTTCACTTATATTTGAGGAATGGTGCCCTAAGCCGCCAGTCGTGATGACCCGGCTCAATGATTACTCTACGTGCCCTTCTGGAAGATGTCTTAGTGCCTCACAATAATCCTGTTCGTATCACACAAGCAGAAGATTTTACGCTAAGGCTTTCCTCTGTGGTATCTTTGCGGTCACACACATCGGTAACACGGTGGGAGGAAACTGGCTCGTTATGGACTCCTCAGGTTTCCATCCAGGCGGGAGTGGTCAGCAGATCAGTAGCCTTGCATGTTGCCGTGTTGTAAGCTATCAGAATGTTTCCCTCCTATGCACCGGGTCGTAGGCTAGTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup11">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						GTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATATGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						GTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATATGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						GTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATATGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						GTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATATGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGCAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGCAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACTTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGAGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						TTGGGCAGCGATGTTTAGTAAAGGGCGCGAAGGGTCCTGTCTTTGTCGCGGCGTTTTAACAGTTAGTAGCTTAGTGCTAAAACGAAAGTAAGGTATTGACTTGAAGGTAAGCCATGAAGACCCTTCCGACGTCGCTAACGGTTATACGTGAACGCCCCTTCGGTATCGACCAATTCCTGCCCAAGATGTCCACGTTAGGTCGCTTGCCAAATATAAGGACAATACAACGTTACGCCTACCACTGACGCCAAGGGTCTCAATAGGAGCTCATATCGGGTGACCCAACCGCGTATTATGCAGGTGGCGAGCAAGGGATAAGGACAGTTGACTATGGTGTAAGCCTTCGAAATTTGGTGTTAGGTAACTGTTAACAGATTTGTACCAATCGGTTCGATTCTTGGCCCTTCCAAACGAGCACAGGATAATGTGGATATCGCAAGTCACATGGTCAAGTAGAAATCCAAAACATCTGAAGTTACGGGGGGG [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup12">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGGCGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGGTTTTCGTTAAGTTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCATTATTTAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTCGCATAGGACCTGCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCTGTCCCGTGACTGAAGGAAACACTCGATACCCGGTGCTTATCGGGAATACAACTTGCCGACATACTAATGGGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGACCTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTGTTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATGTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCATTATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAGCTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCATTATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAGCTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCATTATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACTGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCACGAATTGAGCTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCATTATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACTGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCACGAATTGAGCTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCTTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTGACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAACCACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTAGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCTTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTGACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAACCACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACCAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTAGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCTTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTGACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACTGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAACCACTCGATACCCAGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTAGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCTTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTGACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAACCACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTAGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCTTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTGACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAACCACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTAGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCTCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGTTTATCGGGAATACAACATCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGTTTATCGGGAATACAACATCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGTTTATCGGGAATACAACATCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						GGCTTACCTGCATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGACCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGTTTATCGGGAATACAACATCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						GGCTTACCTGTATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGGCAAGATTATGATCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGCTTATCGGGAATCCAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						GGCTTACCTGTATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGGCAAGATTATGATCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGCTTATCGGGAATCCAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						GGCTTACCTGTGTTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGATCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGAGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						GGCTTACCTGTATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTTGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGATCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						GGCTTACCTGTATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGATCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						GGCTTACCTGTATTAGCGAGGACGGGTACGTGCTGTGCCCATTTAGTTCTGAACCGGTATATATGGCGGAGCTCCCTAAGTCTCAGTATTGGATGGCCCATGCGGTAGGGATAAGCATAGGCAGACGTCCCAGATCTACGTCACGCCCAGATTTTCGTTAAATTAGCACCATCACAGCAACTTAACGGTGCTGCACTCGCAATTGGCTTACTATCAATAATTCTTCTCAGTAAACCCAATATATAGGAGCGGTACAACGTGACATCGCCAAGATTATGATCTTGAGCTGTTTAATGTTGCATAGGACCTCCCCTAAGACCGAGGTCCCGCTATCGAGATACTAACTACCACCCTCCTCTCCGTCCCGTGACTAAAGAAAACACTCGATACCCGGTGCTTATCGGGAATACAACTTCCCGACATACTAATGAGGGCCGCCGAGGGTACGACTAAATGCGCGAATTGAACTGCTTATGAGTGGCTTAC [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup13">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTAGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGCTACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCCAACTATCGCACATAATCTAGTGAGGTCTATCCTATA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTAGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGCTACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCTGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACGCAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCCAACTATCGCACATAATCTAGTGAGGTCTATCCTATA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTAGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGCTACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCTGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACGCAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCCAACTATCGCACATAATCTAGTGAGGTCTATCCTATA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTAGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGCTACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCCAACTATCGCACATAATCTAGTGAGGTCTATCCTATA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTAGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGCTACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCCAACTATCGCACATAATCTAGTGAGGTCTATCCTATA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTAGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGCTACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCCAACTATCGCACATAATCTAGTGAGGTCTATCCTATA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCCGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCCGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCCGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAACAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAACAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						TTCTCCAACCGTTTGGATTTTGGACATCAAGAATCAATTGTAGATGTTCATCTGTGCCCCTATTGGCCTTGGTTGATCCATAAGTGTACGATTCAAAGGTAGATATTTGTTCGGTGTTGTTCCCGTCAGACAATAGATACCTTAACATCGGATGCCCTATCTCGGTTGGCACCCTCTCACATTATCCGGTTGCCACCCGTAGCAGAGTCAGATTAGTCTCTGGCAACCATCGCTCGAAGCCCGGGAACCGCGTGGGGAACACAGATTGGAGGAGAATATACCCGTAAAAGCTTCTACTACAAGTTTACCACTCATGACCTCTGTCCGCCTGACAATCAGGAGCAGGTATGGTCTCGCGTGGGAAACCTACCCCGAACTAAACGCTTCTAGGGTGGTAATAACAGTGCAGGGCGGTACCCTGTGAGGGTGTCCTGGATAAGTGATCAGTCTAACTATCGCACATAATCTAGTGAGGTCTATCCTAAA [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup14">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCATGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCTAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCATGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCTAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTGATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCTAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCTAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCTAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCTAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCTAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCTAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCAATCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAATCTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCACAATCCCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGACCCGCCAAAAGAGGAATATTCGAGGGTTTATAAACTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCATAATCGCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGACCCGCCAAAAGAGGAATATTCGAGGGTTTATAAACTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCATAATCGCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAAACTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCATAATCGCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAAACTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCATAATCGCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAAACTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCATAATCGCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						GACTATCTGTGCCACAGCTGGTTTTTTACCACCAAGCGTGCAAGTCTCCGGATGACTGAGCCCTCGGAGCCACGGCCAAATGGAGCCGCCAAAAGAGGAATATTCGAGGGTTTATAAACTCCGCTAGACGCTGTGCACGAGGCTCAAGTATCCCCCCACGTGAGCATAATCGCTATATGGCCGGGCCGATACGAGACACATAACCAGTGCATTGCCGCCCTATGTTAGGTAAAATCAAAAATACGTATTGCATTGTCTCCACGTGGAGAGCCGGTTAATCCTATAGGCGGGCAATGCTCTGTTGGTCTTCCAAGGATCTCCTCACTCCGCCTTGTCCCGCACCCCCCATGGCTCCTAACATTCTTGTTGGCCTCAGGAACATATCGAACGAGCTAGCTCCTGTTTTCTCGGTGTAGGATCTTTCATGTTGCCGTCCCGAGGCTCTGCTGCCGTTGCGCACCCCGCTCCACTTGGTAACCCCCACTA [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup15">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs1">
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCCTATGAGCTCAGCTGTGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCCTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGTTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCCTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCCTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGCAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACAAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACAAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACAAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						ACCCCGGGAAAGCATTCACTAACGGCTTAGCAAGGTGTAAGGCCCCTCCCATCGGCTTGACTACTGGGTGCTGCATGCCTTTGTATGCTATGCCTCGTTCTTACTGAAGCAGCGAGGTAGTAGAGCAGTAGAACTTGTCTCTTATGAGCTCAGCTATGTTTCTCGCATTAATCTCGGATGGAGATCATTTGCCCCTGCTTTCTGATTAATGCTCGCGGTCATGCATTTTGTACGTGTGAAATAGACTAAATCAACGCATTTCTCATGAAAAGAAGCCCGCGTTAAAGTCGAGCACTTATTCACATGGCGCTCGCCAGTACAACAGCTAACGAACCCTGGATACTCTTGTTACCGTTGTACACGATAGAGCGTAAAAAAGTGGCGAGCCTCGGTATTCGACTGTGCAGGACTCCCCTTCCGACGTTGGCAAATACACCATATTAAACGCGATAGGAGAAATGGCTCAAAACCGCCTCTTGCTACGTG [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+</data>
+</lamarc>
+
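[Editor's note, not part of the patch: the test inputs added in this commit all share the same nested layout — a <data> element holding <region> blocks, each with an F84 <model>, one or more <population> elements, and per-individual <sample>/<datablock type="DNA"> entries whose text is the sequence. As an illustrative sketch only (assuming a saved copy named infile.xml and using Python's standard xml.etree.ElementTree; this is not LAMARC's own reader), such a file could be walked like this:

    # Sketch: walk region -> population -> individual -> DNA datablock
    # in a LAMARC-style input file. "infile.xml" is a placeholder path.
    import xml.etree.ElementTree as ET

    root = ET.parse("infile.xml").getroot()          # <lamarc version="2.1">
    for region in root.find("data").findall("region"):
        model = region.find("model")
        print(region.get("name"),
              "model:", model.get("name") if model is not None else "default")
        for pop in region.findall("population"):
            for indiv in pop.findall("individual"):
                block = indiv.find("./sample/datablock")
                seq = "".join(block.text.split()) if block is not None and block.text else ""
                print("  ", pop.get("name"), indiv.get("name").strip(), len(seq), "bp")

The sequence lines in the emailed diff are truncated by the mailing list (" [...]"), so lengths printed from this email text would not match the full files in the repository.]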
diff --git a/doc/testfiles/infile_gamma25.xml b/doc/testfiles/infile_gamma25.xml
new file mode 100644
index 0000000..f9abc5f
--- /dev/null
+++ b/doc/testfiles/infile_gamma25.xml
@@ -0,0 +1,2416 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <forces>
+    <coalescence>
+      <method> WATTERSON</method>
+      <max-events>1000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+    </coalescence>
+    <gamma-over-regions>
+     <start-values> 25 </start-values>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+    </gamma-over-regions>
+  </forces>
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>1000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>5000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.1Pop15RegThetaIsOneHundredthAlphaIs25</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.1Pop15RegThetaIsOneHundredthAlphaIs25</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.1Pop15RegThetaIsOneHundredthAlphaIs25</out-summary-file>
+    <use-curvefiles>false</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>tracefile_AlphaIs25</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>newick</newicktreefile-prefix>
+    <out-xml-file>menuinfile_1Pop15RegThetaIsOneHundredthAlphaIs25.xml</out-xml-file>
+  </format>
+<data>
+    <region name="LinkageGroup01">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTATATCCGTATGATAGGTGTCTTTAATGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGCACCACCGTTGGGATGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGTCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGCTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGATACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCGAAACCTGCTAGTCCGCTGTTGTAGAGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTATATCCGTATGATAGGTGTCTTTAATGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGCACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGTCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGATACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCGAAGCCTGCTAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACACTGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACTCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACTCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTCTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACTCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTCTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACTCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTCTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATTAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGACACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTATCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACTAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGATACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTTTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGAAGCCTGTGAAAATGAGATACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGAAGCCTGTGAAAATGAGATACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCAGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGAAGCCTGTGAAAATGAGATACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCAGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGATACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCGGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						ATTCCTTTTCCCTACATCCGTATGATAGGTGTCTTTAACGTCCAAGGAGTGCTCCCTATCAGGCAGTGGCAGGAGAGTACCACCGTTGGGTTGCCTACTCCGAAGAGAACAATCGTTTTAGAACAATGGCCCGAGTAGGCCCCACTTCAAACTCCCGTGTACCCCTGCGGCCTGAGTTTCGACGCTTCATCGAACCAATTTGGTTAGATAGAGAGAGCAGCGTTGGATAATGAACCCTGCCTAGGCTCGTTGTTTTACGCCCCGGGTAGACGCTATTTACTATTCACGCTGGGAGCCTGTGAAAATGAGATACGATGTGAATAGTGGCGGGCCCTCTTCTGGTTCAGTAATTAGGATCCAAAACCTGCCAGTCCGCTGTTGTAGGGACTGAGAGTTTGACTCTCACCTAGACTGCGAGTCGCATGTGCGTCTGTCTTATAGGCATCTTACAAGCCCGGTCTGGACTACTTCAAATCTCATAACTAA [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup02">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCCTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCAAAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACCGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCCTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCAAAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTAATCATACCATTCAGGACGTTATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACCGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCCTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCAAAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTAATCATACCATTCAGGACGTTATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACCGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCCTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCAAAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACCGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCCTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCAAAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACCGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCCTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCAAAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACGGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCCTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCAAAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTAATCATACTATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACCGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCCTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCAAAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTACTCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACCGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCCTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGCTCCATCGATTGCTCAAAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTACTCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACCGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCCTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCAAAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTACTCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACCGCGACAATACTCGACGTAAATCAGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCCTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCAAAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTACTCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACCGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATATTTATAATACAGTGTGGAATATAACCGTTATTGCGCTTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCATAGATTTTGTTTCTACGCCTAGTGTTTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACCGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						CGTCGATGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCACGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATGTTTATAATACAGTGTGGAATATAACCGTTATTGCGCTTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCATAGATTTTGTTTCTACGCCTAGTGTCTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACGGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATGTTTATAATACAGTGTGGAATATAACCGTTATTGCGCTTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCATAGATTTTGTTTCTACGCCTAGTGTCTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACGGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATGTTTATAATACAGTGTGGAATATAACCGTTATTGCGCTTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCATAGATTTTGTTTCTACGCCTAGTGTCTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACGGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATGTTTATAATACAGTGTGGAATATAACCGTTATTGCGCTTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCATAGATTTTGTTTCTACGCCTAGTGTCTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACGGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATGTTTATAATACAGTGTGGAATATAACCGTTATTGCGCTTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCATAGATTTTGTTTCTACGCCTAGTGTCTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACGGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATGTTTATAATACAGTGTGGAATATAACCGTTATTGCGCTTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCATAGATTTTGTTTCTACGCCTAGTGTCTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACGGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATGTTTATAATACAGTGTGGAATATAACCGTTATTGCGCTTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCATAGATTTTGTTTCTACGCCTAGTGTCTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACGGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						CGTCGACGCTGGCATTAACTGGCTGCATTCAGAAAGCGCACTAGTCGTGATATGTTATACATTCGACCTGAAGGGCAGCGCCCGTACGTCCTGCGGCATGATAAACATCAGTATAAGGACTGTTTGCGATTGTAATGTTTATAATACAGTGTGGAATATAACCGTTATTGCGCTTCACAGTTACGCGCCGACAAATTTGGCGCATGATCTAAGAATCGGTGAACTATATCGCGAGTACTTATTCTGAATAAAGTAGATGAGGACGTAGTTCCATCGATTGCTCATAGATTTTGTTTCTACGCCTAGTGTCTTAGAATGTAATCATACCATTCAGGACGTAATATTACGAACATGGGTGCCGGAGCCTGAGTTGGAACGGCGACAATACTCGACGTAAATCGGTCCACTCTGGCAATTAACCGTGACTCTCTAATAGCGCCTCAGCTGTGTATTCACACGAACGGGTCTACTACGAATCTGACACGT [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup03">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGGCTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTCTGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATTCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTCTGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGGGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAGGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTCTGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGGGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACAGACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAGGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTCTGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGGGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACAGACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAGGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTCTGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGGGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACAGACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAGGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTCTGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACACCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTCTGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACACCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTCTGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACACCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTATGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTATGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTATGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTATGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTATGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTATGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGATTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTATGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTATGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTATGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCCTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTCTGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCGTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGAGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTCTGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCGTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						AAATGGCCTGGAGGTGACTAGGAATGGTGTCGACTAGTTGACTAAGATCAGATCATTATCTTTACCGGCAGTATCTCCCCAAGCCGAGAAGATCATCCGATGACAAAAGATCAGGGTCACAAAGCGTCTCTGGTTCGGCGGGGTACTTTCGTGCGGCCACGTGGCGGAGGATACGCCTAGGGCGGGCGCATGACCCAGGTGCATGGTTGAAGAGCGCGCATAGTTCCGACGAACAATCGGCAATGATACACGTTATCCTTCCCAGAGGACAACACACTGACAGCCATGCTTGTCGAGGCTGGATGAGAGCCGTGTATTACATCACGCTCTACCATCGCACCTGTTAAGGCACCGTTCCAGCTCGTGTTGGTGCAAGCGGCGAGACTCATTACCCACACACACACCGCTGTGTACCAATACACAGCGGTCTTGAAAGAAAATGGGATCCCTTCGTATGAAGGAGTATCATGCGTGTCCTGCTCTTTA [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup04">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAAGAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGGTCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAATCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCAATTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCTCGAGACCTTCCCCAAGCGTGTCCCACCATACAAAGAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGGTCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCAATTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCTCGAGACCTTCCCCAAGCGTGTCCCACCATACAAAGAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGGTCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCAATTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAAGAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGGTCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAAGAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGGTCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAAGAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGGTCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTTCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAAGAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGGTCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTTCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAAGAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGGTCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTTCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAAGAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGGTCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTTCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAATAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGATCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAATAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGATCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAATAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGATCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAATAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGATCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAATAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGATCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGAAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAATAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGATCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGAAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAATAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGATCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGAAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAATAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGATCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGAAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAATAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGATCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGCCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAATAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGATCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						CGATTCGGCCATGGAAGACCACGAGACCTTCCCCAAGCGTGTCCCACCATACAAATAGCCCGCAACTTGCGCAGGCTAAGACAACTAGTCAGATCTGATCTTGGAATAGAGGGGTAATATGAACGACGGAGTCCGCTAGAAGAGGGGCATCGCTTGTTTACATCACTGACCCCTACCTCCACGAGTCTGTCTTTGAGGAAGCTATGTGATGCTCTGCAAACCATCGCTCATTTAGAAATGCACATTCTAGCGGTTAGTTGACCAATGGCCGCTGGCCCAGTCGCAAAGTGGGATCCATGTCATCACATGTTCCAATCCGTCCCGGCATACCCGAGAACCACACGGGCTAGTTGGTGTCGTGTTAAGGTTGACGGAGCGTCCATACGGTGTTGTGCTATAGCCGGTATCGTAGCCCGGGCTCCCAGATGGACACCAAGGAGTCCCGGCTGACTAATGCCACTTGTGTTTGCATCTGGCCCCAACATT [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup05">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAGGTTAAGAAGGCACTAGAGGAAGATGACATGGGCGTCTCTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATGATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTTGACAGCTACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCTAGGCGCAATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACCGATCTTGATCGCTACTAGAGAGGGAGGGAACGACACAACTGTGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAGGTTAAGAAGGCACTAGAGGAAGATGACATGGGCGTCTCTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATGATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTTGACAGCTACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCTAGGCGCAATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACCGATCTTGATCGCTACTAGAGAGGGAGGGAACGACACAACTGTGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGCACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACCGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGCACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACCGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGCACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAGGAGTTGGTGGGTCACACCGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGCACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAGGAGTTGGTGGGTCACACCGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGCACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAGGAGTTGGTGGGTCACACCGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACTGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACTGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACTGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACTGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACTGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGATGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACTGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGATGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACTGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGATGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACTGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGATGATGATATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCAACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGCAGTATTCCATAACTGTATGATGTGGTAGCCAGGCGCGATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGGTCACACTGATCTTGACCGCTACTAGAGAGGGAGGGAACGACACAACTGCGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGACATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGTGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCTACATAATTGCCATTAATCTTGGTATATCTGAATGCAAGGGAGTATTCCATAACTGTATAATGTGGTAGCCAGGCGCAATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGATCACACCGATCTTGATCGCTACTAGAGAGGGAGGGAACGACACAACTGTGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGACATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACATCTACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGGAGTATTCCATAACTGTATAATGTGGTAGCCAGGCGCAATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGATCACACCGATCTTGATCGCTACTAGAGAGGGAGGGAACGACACAACTGTGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGACATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCTACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGGAGTATTCCATAACTGTATAATGTGGTAGCCAGGCGCAATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGATCACACCGATCTTGATCGCTACTAGAGAGGGAGGGAACGACACAACTGTGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						CGGCAGGGAACGAGCTAGACAGTATCGCTTCTCACAAGGTGAGCGCAGCTATCGAGGAAAGTTAAGAAGGCACCAGAGGAAGATGACATGGACGTCTTTACGGTTCGGCGTCCTTTGTATCCCTACTAGGCCCCTCGTCGTTTTAGGCAGCGTCCTTGCAGGTGCTACAAGAAGAGGACGTGGTCGAATACATAGTCGAGTGTGGACTCAGACCAGATCAGGTAAGCTCTTAGTCGTGACATTATCAACATTATAAACACGTGAGTTCTTTCTCTGATCTGGCACATGATACTCAGTTAGACAGCTACATAATTGCCATTAATCTTGGTATATCCGAATGCAAGGGAGTATTCCATAACTGTATAATGTGGTAGCCAGGCGCAATGATTGGCACAGCGGCGACCCAAGTCCCAGTTAAGAGTTGGTGGATCACACCGATCTTGATCGCTACTAGAGAGGGAGGGAACGACACAACTGTGGAGGGAA [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup06">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGCGTATTGGTTTTTAATAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCACTGGATTCCTCTTTAGGCGACTTGTCTCGTCAGTATGTTCGCTCACCGTGATTGCACGGCAGGGGGAAGATAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAGGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTATAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCTTGCGCAGACACTTAGCCTGCCCTTATCCTCAGTCATCGTTATGGCCCGTCTATTATCCTCGTATTACTTAGCCAGCTTCGTTCATCGACTTAATGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGCGTATTGGTTTTTAATAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCACTGGATTCCTCTTTAGGCGACTTGTCTCGTCAGTATGTTCGCTCACCGTGATTGCACGGCAGGGGGAAGATAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAGGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTATAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCTTGCGCAGACACTTAGCCTGCCCTTATCCTCAGTCATCGTTATGGCCCGTCTATTATCCTCGTATTACTTAGCCAGCTTCGTGCATCGACTTAATGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGCGTATTGGTTTTTAATAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCACTGGATTCCTCTTTAGGCGACTTGTCTCGTCAGTATGTTCGCTCACCGTGATTGCACGGCAGGGGGAAGATAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAGGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTATAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCTTGCGCAGACACTTAGCCTGCCCTTATCCTCAGTCATCGTTATGGCCCGTCTATTATCCTCGTATTACTTAGCCAGCTTCGTTCATCGACTTAATGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGCGTATTGGTTTTTAATAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCACTGGATTCCTCTTTAGGCGACTTGTCTCGTCAGTATGTTCGCTCACCGTGATTGCACGGCAGGGGGAAGATAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAGGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTATAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCTTGCGCAGACACTTAGCCTGCCCTTATCCTCAGTCATCGTTATGGCCCGTCTATTATCCTCGTATTACTTAGCCAGCTTCGTTCATCGACTTAATGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGCGTATTGGTTTTTAATAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCACTGGATTCCTCTTTAGGCGACTTGTCTCGTCAGTATGTTCGCTCACCGTGATTGCACGGCAGGGGGAAGATAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAGGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTATAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCTTGCGCAGACACTTAGGCTGCCCTTATCCTCAGTCATCGTTATGGCCCGTCTATTATCCTCGTATTACTTAGCCAGCTTCGTTCATCGACTTAATGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGCGTATTGGTTTTTAATAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCACTGGATTCCTCTTTAGGCGACTTGTCTCGTCAGTATGTTCGCTCACCGTGATTGCACGGCAGGGGGAAGATAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAGGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTATAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCTTGCGCAGACACTTAGCCTGCCCTTATCCTCAGTCATCGTTATGGCCCGTCTATTATCCTCGTATTACTTAGCCAGCTTCGTTCATCGACTTAATGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGCGTATTGGTTTTTAATAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCACTGGATTCCTCTTTAGGCGACTTGTCTCGTCAGTATGTTCGCTCACCGTGATTGCACGGCAGGGGGAAGATAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAGGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTATAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCTTGCGCAGACACTTAGCCTGCCCTTATCCTCAGTCATCGTTATGGCCCGTCTATTATCCTCGTATTACTTAGCCAGCTTCGTTCATCGACTTAATGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGCGTATTGGTTTTTAATAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCACTGGATTCCTCTTTAGGCGACTTGTCTCGTCAGTATGTTCGCTCACCGTGATTGCACGGCAGGGGGAAGATAAACTCCTCGCGACTATGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAGGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTATAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCTTGCGCAGGCACTTAGCCTGCCCTTATCCTCAGTCATCGTTATGGCCCGTCTATTATCCTCGTATTACTTAGCCAGCTTCGTTCATCGACTTAATGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGCGTATTGGTTTTTAATAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCACTGGATTCCTCTTTAGGCGACTTGTCTCGTCAGTATGTTCGCTCACCGTGATTGCACGGCAGGGGGAAGATAAACTCCTCGCGACTATGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAGGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTATAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCTTGCGCAGGCACTTAGCCTGCCCTTATCCTCAGTCATCGTTATGGCCCGTCTATTATCCTCGTATTACTTAGCCAGCTTCGTTCATCGACTTAATGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGTGTATTGGTTTTTAACAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCAGTGGATTCCTCTTTAGGCAACTTGTCTCGTCAGTATGCTCGCTCACCGTGATTGCACGCCAGGGGGAAGACAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAAGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTCTAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCATGCGCAGACACTTAGTCTGCTCTTATCCGCAGTTATCGTTATGGCCCGTCTATTATCTTCGTATTACTTAGCCAGCTTCGTTTTTCGACTTGATGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGTGTATTGGTTTTTAACAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCAGTGGATTCCTCTTTAGGCAACTTGTCTCGTCAGTATGCTCGCTCACCGTGATTGCACGCCAGGGGGAAGACAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAAGCGGCGGGCTGATACAGGGAAACCTCAGCGACTACCGATTCTAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCATGCGCAGACACTTAGTCTGCTCTTATCCGCAGTTATCGTTATGGCCCGTCTATTATCTTCGTATTACTTAGCCAGCTTCGTTTTTCGACTTAATGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGTGTATTGGTTTTTAACAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCAGTGGATTCCTCTTTAGGCAACTTGTCTCGTCAGTATGCTCGCTCACCGTGATTGCACGCCAGGGGGAAGACAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAAGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTCTAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCATGCGCAGACACTTAGTCTGCTCTTATCCGCAGTTATCGTTATGGCCCGTCTATTATCTTCGTATTACTTAGCCAGCTTCGTTTTTCGACTTAATGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGTGTATTGGTTTTTAACAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTTAAGCGACTGCTGGAAAGCAGTGGATTCCTCTTTAGGCAACTTGTCTCGTCAGTATGCTCGCTCACCGTGATTGCACGCCAGGGGGAAGACAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAAACGGCGGGCTGATACAGGGAAACCTCAGCGACTACCGATTCTAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCATGCGCAGACACTTAGTCTGCTCTTATCCGCAGTTATCGTTATGGCCCGTCTATTATCTTCGTATTACTTAGCCAGCTTCGTTTTTCGACTTAATGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGTGTATTGGTTTTTAACAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTTAAGCGACTGCTGGAAAGCAGTGGATTCCTCTTTAGGCAACTTGTCTCGTCAGTATGCTCGCTCACCGTGATTGCACGCCAGGGGGAAGACAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAAGCGGCGGGCTGATACAGGGAAACCTCAGCGACTACCGATTCTAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCATGCGCAGACACTTAGTCTGCTCTTATCCGCAGTTATCGTTATGGCCCGTCTATTATCTTCGTATTACTTAGCCAGCTTCGTTTTTCGACTTAATGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGTGTATTGGTTTTTAACAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTTAAGCGACTGCTGGAAAGCAGTGGATTCCTCTTTAGGCAACTTGTCTCGTCAGTATGCTCGCTCACCGTGATTGCACGCCAGGGGGAAGACAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAAGCGGCGGGCTGATACAGGGAAACCTCAGCGACTACCGATTCTAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCATGCGCAGACACTTAGTCTGCTCTTATCCGCAGTTATCGTTATGGCCCGTCTATTATCTTCGTATTACTTAGCCAGCTTCGTTTTTCGACTTAATGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGTGTATTGGTTTTTAACAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTTAAGCGACTGCTGGAAAGCAGTGGATTCCTCTTTAGGCAACTTGTCTCGTCAGTATGCTCGCTCACCGTGATTGCACGCCAGGGGGAAGACAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAAGCGGCGGGCTGATACAGGGAAACCTCAGCGACTACCGGTTCTAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCATGCGCAGACACTTAGTCTGCTCTTATCCGCAGTTATCGTTATGGCCCGTCTATTATCTTCGTATTACTTAGCCAGCTTCGTTTTTCGACTTAATGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGTGTATTGGTTTTTAACAGATCCCCTGAACAACGCGGAAGGTGATTACCTACTCAAGCGACTGCTGGAAAGCAGTGGATTCCTCTTTAGGCAACTTGTCTCGTCAGTATGCTCGCTCACCGTGATTGCACGCCAGGGGGAAGACAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAAGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTCTAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCATGCGCAGACACTTAGTCTGCTCTTATCCGCAGTTATCGTTATGGCCCGTCTATTATCTTCGTATTACTTAGCCAGCTTCGTTTTTCGACTTAATGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGTGTATTGGTTTTTAACAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCAGTGGATTCCTCTTTAGGCAACTTGTCTCGTCAGTATGCTCGCTCACCGTGATTGCACGCCAGGGGGAAGACAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAAGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTCTAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCATGCGCAGACACTTAGTCTGCTCTTATCCGCAGTTATCGTTATGGCCCGTCTATTATCTTCGTATTACTTAGCCAGCTTCGTTTTTCGACTTAATGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGTGTATTGGTTTTTAACAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCAGTGGATTCCTCTTTAGGCAACTTGTCTCGTCAGTATGCTCGCTCACCGTGATTGCACGCCAGGGGGAAGACAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAAGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTCTAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCATGCGCAGACACTTAGTCTGCTCTTATCCGCAGTTATCGTTATGGCCCGTCTATTATCTTCGTATTACTTAGCCAGCTTCGTTTTTCGACTTAATGT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						ACGCCCAATGGATGTCGTAACCCCAGGAGCAGTATCCTGCAGTTGTAGCACGCGGCCTCTGGCAAGTGTATTGGTTTTTAACAGATCCCCTGAACAACGCGGAAGGTGGTTACCTACTCAAGCGACTGCTGGAAAGCAGTGGATTCCTCTTTAGGCAACTTGTCTCGTCAGTATGCTCGCTCACCGTGATTGCACGCCAGGGGGAAGACAAACTCCTCGCGACTGTGTGGCGTCCGGGCAGGCTCGACCCCGGAGTGCACTCCGTGTTTTATCCAGCAGGGCAGGTGAAGCGGCGGGCTGATACAGGGAAACCTCAGTGACTACCGATTCTAGGGGTCCTACCAGTACCCCTACCTGCGCGCCGGGCGCCCTGTGCAGTCTCCATAGGCATGCGCAGACACTTAGTCTGCTCTTATCCGCAGTTATCGTTATGGCCCGTCTATTATCTTCGTATTACTTAGCCAGCTTCGTTTTTCGACTTAATGT [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup07">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCCAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGCATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAACCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGAACAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATTGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGGATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAAAGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAAAGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAAAGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAAAGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTAACGTCGTATGACGGTCGCCTGTCACATGGTTGTTTACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTTACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTTACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCAGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						CAGCGTCCGCATTGAGCCTCAAAGGGGCTGTTGGGTGACGTCGTATGACGGTCGCCTGTCACATGGTTGTTCACTGTGGAAAGATAGGAACTTACACGACCGTGAGGAGGCTTTGAGTTTCGTCCTAGGGGGATGGCTTGATCTGTTGGGGGGTAGAATCAACTGACTGAGCGGCGGCGTCCTCGGGGAATGTGGGACAGAGTCTTGAGACGCTGCCAGAACGCCAGTGAGCGGGAAGTCCTGTAATACGGTCAGGCTCCAGGGGCCATTGCCCCAGTAGGCGATCACCTTGTAATAAGTCCCGCCTACCGGAGGGACTTTAATGGGTCGTATTCGTCATTATGCCGAGGGACCAATGGGTACCCACGGACAACCACTGTAGGGAAAGTGCACCATTCACTTTATCTAGCAAGAGAAGTCTCCATATCTTAGCGCATGACTCGTCGACACGTGTCGTATTTTTATCGACCCGAATATGGACTTACG [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup08">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATCTTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTAGGTGGGATCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATCTTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTAGGTGGGATCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATCTTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTAGGTGGGATCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATCTTAGAAATTGTTTGGATATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTAGGTGGGATCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						GTTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATCTTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTAGGTGGGATCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATCTTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTAGGTGGGATCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATCTTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTAGGTGGGATCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAATCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACCCTGGGATGCTATTTCCGTGCTCTTAGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAATCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACCCTGGGATGCTATTTCCGTGCTCTTAGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAATCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACCCTGGGATGCTATTTCCGTGCTCTTAGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAATCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACCCTGGGATGCTATTTCCGTGCTCTTAGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAATCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACCCTGGGATGCTATTTCCGTGCTCTTAGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAATGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAATCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACCCTGGGATGCTATTTCCGTGCTCTTAGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAACGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTAAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTCGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAACGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTAAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTCGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAACGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTAAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGTTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTCGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTGGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAACGTGAACACCGAGGGCTCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGGTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTAGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTAGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAACGTGAACACCGAGGGCCCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGGTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTAGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTAGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAACGTGAACACCGAGGGCCCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGGTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTAGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTAGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						ATTGTCAAGAAACAACGTGAACACCGAGGGCCCAGCGTTTGACGTCCTGGGTCGATGCCGTACCGACTGAGTATAAAAAGCTTTCAAAATGCAAATATGATGGCAGGCGTATATTAGAAATTGGTTGGAGATACGCAACCGTCTCTATAATACAAATAGTCTAGGAATGTCCACTAGTATGGCATATGGCACTTGAACTCTGGGATGCTATTTCCGTGCTCTTAGGTGGGACCGCGATAAGGTTAGCCGCAACGGTGGCCTACGATAGTATTACCGTGGCATGCACCCCTTGACGTTTAGGGCCAGGACGACAGGATTGACCTAGAGGGGACACTATCTGCCGCATGGTAGCGCACAAATCTATGGTCCTTAAGCCTGCGGAGCAGCAGCCCAGCCGCTTCGATGTCGAAGGCAAAGCCGCAAAAGTTATTCTGTAGATCCGTCTTGGAGCATCAGCTTGTAGGAATCTTCAAGGGCAGCCTCCGC [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup09">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGAGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGAGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGAGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGAGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGAGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTGGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGAGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCATGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTATCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCTCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGAGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCTCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGAGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGTGTTTCATTGGGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGTGTTTCATTGGGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGGGCTTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGGGCTTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGGGCTTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGGGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAAACTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGGGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAAACTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGGGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTGTTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGGGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTGTTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTTCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGGGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTTTCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTAATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGGGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						CATGGTCCAGTTGAACTGACCATGTAAATAGGTGTTTGTCAGACCCCCGCAGCGCTAATAAGATGCTATCTAAGTACTCTAGTCCAGACGGCATTACGTAGGCTTATTAGTCGGATTCTTTCAAAGGTCGAGGGACGCACCAACGGTTGGGCTGTACAGTACTAAGACCATTACTGTAGTCGAACCAGCGTTTCATTGGGCCTCGCTGTGTGGACAGAGCCATATTTTGGTGGAAGGCTCCGCGTGGTAGCTGATTACGAGTGCGTACCTCTGCTTATACGCACTGAGAGAATATCTGTAACGCGGTGCCTCTTATAGTATTAAAAGGAGTCATTTTCTCGTGTCCTATTATCAGAACCTAGGAATCTGACTTTGTCTCAGGTAGTACTTCATGCGCATATCAGAGCTCGGGTAGATAAGCTGTATAAGATGTTTCGAGCGACTGTCCCTTCCCGGTACCTGGGCGAACACACATTGGTCAGTCGA [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup10">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAACTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATTGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATGTCTCTTCGGCACTTCAAAGAAGAAAAGTGACAAAGGGATCCTGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAACTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATTGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATGTCTCTTCGGCACTTCAAAGAAGAAAAGTGACAAAGGGATCCTGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGTACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATTGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATGTCTCTTCGGCACTTAAAAGAAGAAAAGTGACAAAGGGATCCTGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATTGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATGTCTCTTCGGCACTTAAAAGAAGAAAAGTGACAAAGGGATCCTGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATTGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATGTCTCTTCGGCACTTAAAAGAAGAAAAGTGACAAAGGGATCCTGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATTGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATGTCTCTTCGGCACTTAAAAGAAGAAAAGTGACAAAGGGATCCTGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATTGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATGTCTCTTCGGCACTTAAAAGAAGAAAAGTGACAAAGGGATCCTGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATTGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATGTCTCTTCGGCACTTAAAAGAAGAAAAGTGACAAAGGGATCCTGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGACCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATTGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATGTCTCTTCGGCACTTCAAAGAAGAAAAGTGACAAAGGGATCCTGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACACCAATCGTTCACACACATGTGCCTCGGTACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATTGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATGTCTCTTCGGCACTTCAAAGAAGAAAAGTGACAAAGGGATCCTGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACACCAATCGTTCACACACATGTGCCTCGGTACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGACCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATTGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTACTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATGTCACTTCGGCACTTCAAAGAAGAAAAGTGACAAAGGGATCCAGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATGACAGCTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATCGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATATCGCTTCGGCACTTCAAAGAAGAAAAATGACAAAGGGATCCAGCCTCGGATGGGGGCTTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAGCTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATCGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATATCGCTTCGGCACTTCAAAGAAGAAAAATGACAAAGGGATCCAGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAGTCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAGCTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATCGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATATCGCTTCGGCACTTCAAAGAAGAAAAATGACAAAGGGATCCAGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAGTCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGGCAGATCTAAGTTCGATTACAGCTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATCGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATATCGCTTCGGCACTTCAAAGAAGAAAAATGACAAAGGGATCCAGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAGTCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGCCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATCGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTATAGATATCACTTCGGCACTTCAAAGAAGAAAAGTGACAAAGGGATCCAGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGCCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATCGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTACAGATATCACTTCGGCACTTCAAAGAAGAAAAGTGACAAAGGGATCCAGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGCCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATCGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTACGAATTCGGATCATAGCCTTTCATATTTCTTACAGATATCACTTCGGCACTTCAAAGAAGAAAAGTGACAAAGGGATCCAGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGCCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATCGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTACAGATATCACTTCGGCACTTCAAAGAAGAAAAGTGACAAAGGGATCCAGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						ACTGTAAATCCCGAGCCAGATCTAAGTTCGATTACAACTGCGCCGGGCGGTTTCCGCCCCTGCTAGAAGGGTGTAGCAGAGCTAAGACCTGTCTACATCAAGAAAAGCATATTCTCATCGCTGAATCCACCACTTCAGATGATACTAATGCCCGTAACTACGAATACCGTGTGGTTCCTCCCGCAAACGAGGGTCGAGCCGACCCCACGTCTCACCCTGGCAGCATTGCAACAATCAGACTTCCTACCTGTTCCGGCACATGCTTGCTGCTATGAATTCGGATCATAGCCTTTCATATTTCTTACAGATATCACTTCGGCACTTCAAAGAAGAAAAGTGACAAAGGGATCCAGCCTCGGATGGGGGCCTATGTCATTATGCCCAGGATGGTATAGACATCTGCCCTTCATGCGTGAACCGTTTTAAACTAATTCAACATCAATCGTTCACACACATGTGCCTCGGCACCGTAGGCTCCTTCACGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup11">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGTACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGTCGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTGCCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCACTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGTACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGTCGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTGCCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCACTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGTACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGTCGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTGCCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCACTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGTACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGTCGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTGCCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCACTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAAGATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGTCGCCACCGGAATCAATGAATCCGGCATGGAATAGTACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCACTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAAGATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGTCGCCACCGGAATCAATGAATCCGGCATGGAATAGTACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCACTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAAGATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGTCGCCACCGGAATCAATGAATCCGGCATGGAATAGTACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCACTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAAGATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGTCGCCACCGGAATCAATGAATCCGGCATGGAATAGTACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCACTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGTCGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTGCACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCACTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGTCGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCACTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTACTCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGTCGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTTCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCAGTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGACGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCATTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGCCCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGACGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCATTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGCCCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGACGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCATTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGACGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTCTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCATTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAATTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGACGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCATTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAACTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGACGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCATTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACTGATCCCCTCATTTCCGAAAAGGTAGGAGCTTCGTTCACCAACTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGACGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCATTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACCGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAACTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGACGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCATTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						CAAACCTTGTGGAACAGCTCCCACCCGATGCCTCGCAGAACATTAAATATAGTCATAAGAGACCATTGGACCCATATTTGTCCGATCGTACTAGTGGTAATCCAAACCGTGACCTGGATGACCGATCCCCTCATTTCCGAAAAGGTGGGAGCTTCGTTCACCAACTTTGGCGGTGGTCGCTTCAGCTGAGGTCGAAGGGTGATACGCCAGGGGGTTCAATGTTTGACGCCACCGGAATCAATGAATCCGGCATGGAATAGCACTAATTCCGGTTACCGCGAATCGGTCCGGACACTTTCATTGTCTCCTCATTCGGGGACTGCATCTCGCGAAGCGACCATGTGTCGCTGCAAAAAAGCCTTGCACCTCATAAAACGTTTACCCTTTACACGGGTTGGCTTTAGGCCGCTACGGCTGCGATAGTAGGCATTACCTATTAATATGCACGCCCAAAAGTAGTTAGGAGTGTTAGAAGCACGCGGCAGG [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup12">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						AGAAATGGAAAGTATATACAGACGTGGCAAAAGTCTTTGAGTTGTATGTGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTCTGGAGTGAATCTTCCGGGCGCCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						AGAAATGGAAAGTATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGTGTTAAAGAGGGGCGGATAACATTAGGAGGGAATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACGGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTCTGGAGTGAATCTTCCGGGCGCCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						AGAAATGGAAAGTATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGTGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACGGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTCTGGAGTGAATCTTCCGGGCGCCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						AGAAATGGAAAGTATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGTGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTCCTCGGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTCTGGAGTGAATCTTCCGGGCGCCGAGTGGGTTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						AGAAATGGAAAGTATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGTGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTCCTCGGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTCTGGAGTGAATCTTCCGGGCGCCGAGTGGGTTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						AGAAATGGAAAGTATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGTGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTCCTCGGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTCTGGAGTGAATCTTCCGGGCGCCGAGTGGGTTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						AGAAATGGAAAGTATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGTGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTCCTCGGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTCTGGAGTGAATCTTCCGGGCGCCGAGTGGGTTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						AGAAATGGAAAGTATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGTGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTCCTCGGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTCTGGAGTGAATCTTCCGGGCGCCGAGTGGGTTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						AGAAATGGAAAGTATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGTGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTCCTCGGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTCTGGAGTGAATCTTCCGGGCGCCGAGTGGGTTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						AGAAATGGAAAATATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGCGTTGAAGAGAGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTTTGGAGTGAATCTTCCGGGCGGCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						AGAAATGGAAAATATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGCGTTGAAGAGAGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTTTGGAGTGAATCTTCCGGGCGGCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						AGAAATGGAAAATATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGCGTTGAAGAGAGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTTTGGAGTGAATCTTCCGGGCGGCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						AGAAATGGAAAATATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGCGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGTGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCTCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTTTGGAGTGAATCTTCCGGGCGGCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						AGAAATGGAAAATATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGCGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGGGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCGCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTTTGGAGTGAATCTTCCGGGCGGCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						AGAAATGGAAAATATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGCGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGGGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCGCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTTTGGAGTGAATCTTCCGGGCGGCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						AGAAATGGAAAATATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGCGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGGGACGGGGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCGCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTTTGGAGTGAATCTTCCGGGCGGCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						AGAAATGGAAAATATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGCGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGGGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCGCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTTTGGAGTGAATCTTCCGGGCGGCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						AGAAATGGAAAATATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGCGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGGGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCGCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTTTGGAGTGAATCTTCCGGGCGGCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						AGAAATGGAAAATATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGCGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGGGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCGCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTTTGGAGTGAATCTTCCGGGCGGCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						AGAAATGGAAAATATATACAGACGTGGCGAAAGTCTTTGAGTTGTATGCGTTGAAGAGGGGCGGATAACATTAGGAGGGTATCTATTACTGCTAGCAGATGGAGCGATTTCCATTTCGCAGACGCCTGGTTCTCAGAGTAAGCTCTACAGTTGACAGACAAGAGGGGGTCCTAGACGTTGTAGGAGAGACTCGACTATAAGTGGGACGGCGTTGTGCAGTCCGTCAAGACCCCTCACTATTTCCGGCGGGAGTCTATATCTGGAATTAACAGCGCATCCGCATCCGGTGACAGCATTGTTGGGCCCTTTGTCGTGGGGCGAGTGTGCCGCTATCGAAGCCCGAGTAAGATGACACATAAAACCGTTACACCTTCCTGCGATATTGTATTCACGACTCGGTTAACATTTTAGCTGACCCCTCGCCCTTTGGAGTGAATCTTCCGGGCGGCGAGTGGATTCCTTCGCAGTGTGTTCTTGCAAGGGCAT [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup13">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAGAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTTAGTTTGTCGTTTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGGAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTGTTTTGTTTTAGTTTATCGAGATTGGCGAACTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTTCAACCCTGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAAACTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAGGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAGGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTCTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATACGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTCTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATACGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTCTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTCCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTGTCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTGTCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTTACAGATAATTGCTTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						CTAACATTTGATTAATATGCTGGAAAGATTTCTATGGGGTTAGGCATACCGCATGCGTCGATTGATTGATTCTCACATTTGTTGTTCAGTTTGTCGTGTTTGGCCGTCATGACCATCGAATTTTCGGCGGGGTATCATCCCTATAAATTAAGCTATGATAGAACCTCCGCTCAAGATTTCGAAGTGCGATGGAAATGCTTTGCAACCCAACTCTAATTAAGGGCAACGATTGTCGAGCTAAGCTCCCTTGGTAGAGGTAGCCACGTGACTGCTCTACCCTAGGAATTCAATGCAGTCTCACAGATAATTGCCTTTCTTGTTTTAGTTTATCGAGATTGGCGAATTTTCCATAACCCTGCGTATTACTACCGCGTAGGTCCGCAGGCTGGGTTATTCCACCCGATCTTACAACCTCGCCCGACCACCTCTGAATTACTGTAGGTCTGTCCCTTCAATCTGGCTCGTTGTATCCATAGCCTACCCTGA [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup14">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAACTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTCACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGGGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTGGCGCGCCTGGCTGGTTTGTGTCGTCAGCTCGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATTGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTATTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATTGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTATTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATTGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTATTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATTGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGCGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCGCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATAGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGACGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATTGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTTCCATTGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACCGGTCGCGCCGGCACGGGGCCTCCCATTGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						AAAAGCCCCTTCCAGCGGAAGCTCGTGTGCGAAAACATCTCATCCATACCAAATCCCCAAAACGGTCCCCAGCAATTGCCGGAAGAGTAGCGATGGCATCGAATCCTTGGGTTGGCGCGAAGGCCCGGGCAGGCTATTGCTTACCTACGTTAGTGCATCTCGGCAACGCGCGTGCCCCCCCTATCAAACCATACTATGTGAAGCCGGCAGGTGTACAGCTGGCTCTGCGACTCCCGGTGACCAGTGGCGTACCGATTTCACAAGGTGTATTCACCCGGAGCTACCCTGCGACGATTTGGCGACTAGTCAGGTTCCCTCAGGCTTTCTTTGGCGCAGGCATGATGCGAGGCAAGGGTCTCTAGTGCGACGGCAAGATGTGTGCTAGCGCGCCTGGCTGGTTTGTGTCGTCAGCTTGGTTACTTCACTGGTCGCGCCGGCACGGGGCCTTCCATTGCAACTAACCGCTCAAATTTTACCAGGGAGGTT [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+	<region name="LinkageGroup15">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+		<population name="PopAlphaIs25">
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATTCTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCGCTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATTCTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCGCTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAACCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATTCTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCGCTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAACCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATTCTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCGCTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATTCTTAGCTTAATACCCACCTAATGAGCGCAACACTAGTGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCGCTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATTCTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCGCTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATTCTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCGCTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATTCTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCGCTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATTCTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCGCTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATTCTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCGCTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATTCTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCGCTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGGTGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCAACCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGGTAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATACTTAGCTTAATACCCACCTGATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCACTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGATATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGATGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCATCCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGATAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATACTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTGCCATCAGATCGGGATCACTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGTTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGATGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCATCCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGATAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATACTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCACTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGATGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCATCCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGATAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATACTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCACTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGATGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCATCCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGATAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATACTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCACTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGATGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCATCCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGATAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATACTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCACTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGATGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCATCCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGATAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATACTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCACTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGATGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCATCCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGATAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATACTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCACTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						CCATTTAAGACGCACGCTACTACGCCGGTAACCGCTCGTCGGATGGTTCTCCATTTCGGTTCGGGCCTAGAAGGTGTAACGCATAAGCCTCCTCGAATCGCGCAGGGGGCACAGTCACTTCACTATCCAATGGGATCGGAACCCATCCGAAGAACGCTACATCAGTATCGTAAAAAATAATATGCGGATAGCAGATTGCATATCTAGTGAGCGTTCGCGATCCCGCATAATACTTAGCTTAATACCCACCTAATGAGCGCAACACTAATGTTCCGGTACACGTATAGGAAACTGACGTCGGGCAGCTACCATCAGATCGGGATCACTGAATTGTGGCGCCAATTCATCGCGAGGACCTAGACTTTAAATATCTTGGATGTGCTTGGTTACGTCTATAGGAAGCTATTTGGGTCCGCTCCGCTGCTTCAAGGGCGACGATCGTAAGCGCGACTTGTGGTCTGACAGAAACTGCTCTTCCTGCATTCT [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+</data>
+</lamarc>
+
diff --git a/doc/testfiles/sample_infile.xml b/doc/testfiles/sample_infile.xml
new file mode 100644
index 0000000..ce9a941
--- /dev/null
+++ b/doc/testfiles/sample_infile.xml
@@ -0,0 +1,369 @@
+<lamarc version="2.1">
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>true</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>10</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.454545</resimulating>
+      <tree-size>0.0909091</tree-size>
+      <haplotyping>0.454545</haplotyping>
+      <trait-arranger>0</trait-arranger>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>20</samples>
+      <discard>100</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>50</samples>
+      <discard>100</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <convert-output-to-eliminate-zero> Yes </convert-output-to-eliminate-zero>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>sample_outfile.txt</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>sample_insumfile.xml</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>sample_outsumfile.xml</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>sample_curvefile</curvefile-prefix>
+    <use-tracefile>true</use-tracefile>
+    <tracefile-prefix>sample_tracefile</tracefile-prefix>
+    <use-newicktreefile>false</use-newicktreefile>
+    <newicktreefile-prefix>sample_newick</newicktreefile-prefix>
+    <out-xml-file>sample_menusettings_infile.xml</out-xml-file>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01</start-values>
+      <method> USER</method>
+      <max-events>1000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> all </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+  </forces>
+  <data>
+    <region name="coal">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>2</num-categories>
+          <rates> 0.666669 1.33333</rates>
+          <probabilities> 0.5 0.5</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1000</length>
+          <locations> 2 9 18 46 51 78 82 89 90 98 104 106 114 172 197 199 222 293 326 343 391 397 417 424 426 435 438 443 464 488 499 526 546 556 575 576 595 602 614 617 621 629 642 650 653 674 690 694 699 702 711 718 719 729 770 848 851 854 856 857 915 923 940 958 974</locations>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="ZMNE">
+      <individual name="GG-322166855">
+        <phase type="known">
+        </phase>
+        <sample name="00_0025   ">
+          <datablock type="SNP">
+            TGAAGCCCCAATTATGGTTACCACGACTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0032   ">
+          <datablock type="SNP">
+            TGAAGCCCCAATTATGGTTACCACGACTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="CA-1016758720">
+        <phase type="known">
+        </phase>
+        <sample name="00_0003   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0022   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GT-438712109">
+        <phase type="known">
+        </phase>
+        <sample name="00_0000   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0024   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="AT1063120482">
+        <phase type="known">
+        </phase>
+        <sample name="00_0013   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTAAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0027   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTGGCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="TG-108633955">
+        <phase type="known">
+        </phase>
+        <sample name="00_0009   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTGCCGCTTCTTCTCAGTACCACTTAGCAACGTAGACTCTAACACTCA
+          </datablock>
+        </sample>
+        <sample name="00_0037   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GC-816174348">
+        <phase type="known">
+        </phase>
+        <sample name="00_0034   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCCTCTCAGTACCACTTAGCAACGTAGACTCTATCACTCA
+          </datablock>
+        </sample>
+        <sample name="00_0038   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCCTCTCAGTACCACTTAGCAACGTAGACTCTATCACTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GG-1252698729">
+        <phase type="known">
+        </phase>
+        <sample name="00_0023   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCCTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0030   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GG-173374346">
+        <phase type="known">
+        </phase>
+        <sample name="00_0002   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0031   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="CA-1985314239">
+        <phase type="known">
+        </phase>
+        <sample name="00_0014   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0018   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GC-780785816">
+        <phase type="known">
+        </phase>
+        <sample name="00_0005   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0036   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GG1351601435">
+        <phase type="known">
+        </phase>
+        <sample name="00_0001   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0010   ">
+          <datablock type="SNP">
+            CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCCCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="CA-402572214">
+        <phase type="known">
+        </phase>
+        <sample name="00_0004   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0011   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="AC1355139237">
+        <phase type="known">
+        </phase>
+        <sample name="00_0029   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0033   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GA865732508">
+        <phase type="known">
+        </phase>
+        <sample name="00_0006   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0015   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GA1359434591">
+        <phase type="known">
+        </phase>
+        <sample name="00_0028   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTCATTATTACCACATAGTGTCTCAGGTTCATTCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0035   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTTTCATTATTACCACATAGTGTCTCAGGTTCATTCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="AT-1640691234">
+        <phase type="known">
+        </phase>
+        <sample name="00_0007   ">
+          <datablock type="SNP">
+            CCGCATCGTTGACATTCGCACTGGTTTTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0026   ">
+          <datablock type="SNP">
+            CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="AG774387657">
+        <phase type="known">
+        </phase>
+        <sample name="00_0019   ">
+          <datablock type="SNP">
+            CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0020   ">
+          <datablock type="SNP">
+            CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="TA864034704">
+        <phase type="known">
+        </phase>
+        <sample name="00_0016   ">
+          <datablock type="SNP">
+            CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATGGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0021   ">
+          <datablock type="SNP">
+            CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GT-545862557">
+        <phase type="known">
+        </phase>
+        <sample name="00_0017   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGGCATATGGTGTATCAAGTTCATTCTCTCA
+          </datablock>
+        </sample>
+        <sample name="00_0039   ">
+          <datablock type="SNP">
+            CCACATTGTTGACATTCGCACTGGTTCTCATTAGCGCCATATAATGTATCAAGTTTATTCTCTCA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="GT502571314">
+        <phase type="known">
+        </phase>
+        <sample name="00_0008   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTTTCG
+          </datablock>
+        </sample>
+        <sample name="00_0012   ">
+          <datablock type="SNP">
+            CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGCTATATAGTGTATCAAGTTCATTCTCTCG
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/sample_outfile.txt b/doc/testfiles/sample_outfile.txt
new file mode 100644
index 0000000..8dff339
--- /dev/null
+++ b/doc/testfiles/sample_outfile.txt
@@ -0,0 +1,317 @@
+************************************************************
+     LAMARC:  Maximum Likelihood Parameter Estimation
+    using Hastings-Metropolis Markov Chain Monte Carlo
+************************************************************
+version 2.1.8
+
+
+        Program started on Tue May 29 11:48:08 2012
+                finished on Tue May 29 11:48:20 2012
+
+===========================================================================
+Maximum Likelihood Estimates (MLEs) of Parameters
+===========================================================================
+
+
+                 Theta   
+Population       Theta1  
+Best Val (MLE)  0.020756 
+    Percentile           
+   99%   0.005  0.013896 
+   95%   0.025  0.015231 
+   90%   0.050  0.015979 
+   75%   0.125  0.017250 
+   50%   0.250  0.018600 
+           MLE  0.020756 
+   50%   0.750  0.023245 
+   75%   0.875  0.025235 
+   90%   0.950  0.027541 
+   95%   0.975  0.029154 
+   99%   0.995  0.032670 
+Theta1:  Theta for ZMNE
+
+
+
+===========================================================================
+Profile Likelihoods
+===========================================================================
+
+===========================================================================
+                           Overall Profile Tables                          
+===========================================================================
+
+Overall: Theta for ZMNE (Theta1):
+---------------------------------
+
+Percentile profile: Points shown indicate approximate confidence
+  intervals.
+
+Log Likelihoods:
+
+Percentile  Theta1   |   Ln(L)   
+  0.005    0.013896  | -3.309203 
+  0.025    0.015231  | -1.912364 
+  0.050    0.015979  | -1.344435 
+  0.125    0.017250  | -0.653339 
+  0.250    0.018600  | -0.219110 
+   MLE     0.020756  |  0.008348 
+  0.750    0.023245  | -0.219127 
+  0.875    0.025235  | -0.653311 
+  0.950    0.027541  | -1.344429 
+  0.975    0.029154  | -1.912416 
+  0.995    0.032670  | -3.309210 
+
+
+===========================================================================
+User Specified Options
+===========================================================================
+
+Force specific options:
+-----------------------
+
+Starting Parameters:
+
+Theta (USR)                               0.010000
+
+
+
+Search Strategy:
+----------------
+
+Type of analysis:  Likelihood
+
+Number of replicates:  1
+
+Markov Chain Parameters:
+                           Initial           Final
+  Number of Chains              10               2
+     Trees Sampled              20              50
+Sampling Increment              20              20
+   Trees Discarded             100             100
+
+Random number seed             1005                                    
+
+
+File options:
+-------------
+
+Read data from file:           sample_infile.xml                       
+Wrote summary file:            sample_outsumfile.xml                   
+Wrote to Tracer file(s):       sample_tracefile_coal_1.txt             
+
+
+Output summary options:
+-----------------------
+
+Calculate profile likelihoods? Yes                                     
+
+===========================================================================
+Data summary
+===========================================================================
+
+Number of populations:                                      1
+Number of regions:                                          1
+Total number of samples in all regions                     40
+
+
+Region summary:
+---------------
+
+Population      Variable  Relative  Relative  Pairwise   Sample  
+   Region       markers      Ne     rec rate   theta      size   
+1 ZMNE
+     1 coal        65        1        1.0    0.0152814     40    
+-----------------------------------------------------------------
+
+
+Summary of Data Model Parameters:
+---------------------------------
+
+
+---------------------------------------------------------------------------
+
+Parameters of a Felsenstein '84 model for the #1 segment of the coal region
+2 rate categories with correlated length 1
+Relative rate 0.666669  Frequency 0.5
+Relative rate 1.33333  Frequency 0.5
+Base frequencies: 0.222692, 0.31, 0.163077, 0.304231
+Transition/transversion ratio: 2
+---------------------------------------------------------------------------
+
+Input Genetic Data
+---------------------------------------------------------------------------
+   For the coal region
+---------------------------------------------------------------------------
+                                  #1                                  
+00_0025   TGAAGCCCCAATTATGGTTACCACGACTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+00_0032   TGAAGCCCCAATTATGGTTACCACGACTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+00_0003   CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+00_0022   CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+00_0000   CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+00_0024   CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+00_0013   CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTAAGCGACGTGGGCTCTATCTCTCA
+00_0027   CCACATCGTAGACGTTCGTGGCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTAA
+00_0009   CCACATCGTAGACGTTCGTGCCGCTTCTTCTCAGTACCACTTAGCAACGTAGACTCTAACACTCA
+00_0037   CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+00_0034   CCACATCGTAGACGTTCGTACCGCTTCCTCTCAGTACCACTTAGCAACGTAGACTCTATCACTCA
+00_0038   CCACATCGTAGACGTTCGTACCGCTTCCTCTCAGTACCACTTAGCAACGTAGACTCTATCACTCA
+00_0023   CCACATCGTAGACGTTCGTACCGCTTCCTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+00_0030   CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+00_0002   CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+00_0031   CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+00_0014   CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+00_0018   CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+00_0005   CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+00_0036   CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+00_0001   CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+00_0010   CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCCCA
+00_0004   CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+00_0011   CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+00_0029   CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+00_0033   CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+00_0006   CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+00_0015   CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+00_0028   CCACATCGTTGACATTCGCACTGGTTTTCATTATTACCACATAGTGTCTCAGGTTCATTCTCTCA
+00_0035   CCACATCGTTGACATTCGCACTGGTTTTCATTATTACCACATAGTGTCTCAGGTTCATTCTCTCA
+00_0007   CCGCATCGTTGACATTCGCACTGGTTTTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+00_0026   CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+00_0019   CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+00_0020   CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+00_0016   CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATGGTGTATCAAGTTCATTCTCTCA
+00_0021   CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+00_0017   CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGGCATATGGTGTATCAAGTTCATTCTCTCA
+00_0039   CCACATTGTTGACATTCGCACTGGTTCTCATTAGCGCCATATAATGTATCAAGTTTATTCTCTCA
+00_0008   CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTTTCG
+00_0012   CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGCTATATAGTGTATCAAGTTCATTCTCTCG
+
+
+===========================================================================
+Run Reports by Region
+===========================================================================
+
+"Accepted" is the observed rate at which any change to the proposal trees
+  was accepted.
+
+11:48:08  Beginning region: coal
+Initial Chain 1 of 10:
+11:48:09  Predicted end of chains for this region:  Tue May 29 11:48:22 2012
+
+11:48:09  Accepted    60% | Posterior lnL 1.90107653 | Data lnL -1531.22692
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       185/188 proposals
+Tree-Arranger accepted             45/182 proposals
+Tree-Size-Arranger accepted        10/30 proposals
+  Theta       0.014005  
+  
+Initial Chain 2 of 10:
+11:48:10  Predicted end of chains for this region:  Tue May 29 11:48:22 2012
+
+11:48:10  Accepted    51% | Posterior lnL 2.00607272 | Data lnL -1223.59186
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       178/182 proposals
+Tree-Arranger accepted             20/173 proposals
+Tree-Size-Arranger accepted         6/45 proposals
+  Theta       0.019801  
+  
+Initial Chain 3 of 10:
+11:48:11  Predicted end of chains for this region:  Tue May 29 11:48:22 2012
+
+11:48:11  Accepted    48% | Posterior lnL 0.31579072 | Data lnL -1144.89455
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       168/170 proposals
+Tree-Arranger accepted             22/191 proposals
+Tree-Size-Arranger accepted         2/39 proposals
+  Theta       0.023710  
+  
+Initial Chain 4 of 10:
+11:48:12  Predicted end of chains for this region:  Tue May 29 11:48:22 2012
+
+11:48:12  Accepted 54.25% | Posterior lnL 0.06297012 | Data lnL -1045.63873
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       196/199 proposals
+Tree-Arranger accepted             16/165 proposals
+Tree-Size-Arranger accepted         5/36 proposals
+  Theta       0.022223  
+  
+Initial Chain 5 of 10:
+11:48:13  Predicted end of chains for this region:  Tue May 29 11:48:22 2012
+
+11:48:13  Accepted  47.5% | Posterior lnL 0.14588871 | Data lnL -988.502230
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       173/178 proposals
+Tree-Arranger accepted             14/197 proposals
+Tree-Size-Arranger accepted         3/25 proposals
+  Theta       0.024275  
+  
+Initial Chain 6 of 10:
+11:48:13  Predicted end of chains for this region:  Tue May 29 11:48:20 2012
+
+11:48:13  Accepted 48.25% | Posterior lnL 0.00676027 | Data lnL -852.329277
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       165/166 proposals
+Tree-Arranger accepted             23/196 proposals
+Tree-Size-Arranger accepted         5/38 proposals
+  Theta       0.023819  
+  
+Initial Chain 7 of 10:
+11:48:14  Predicted end of chains for this region:  Tue May 29 11:48:20 2012
+
+11:48:14  Accepted 52.25% | Posterior lnL 0.01444143 | Data lnL -810.236477
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       192/197 proposals
+Tree-Arranger accepted             11/172 proposals
+Tree-Size-Arranger accepted         6/31 proposals
+  Theta       0.023160  
+  
+Initial Chain 8 of 10:
+11:48:15  Predicted end of chains for this region:  Tue May 29 11:48:20 2012
+
+11:48:15  Accepted  50.5% | Posterior lnL 0.09752478 | Data lnL -743.664624
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       187/188 proposals
+Tree-Arranger accepted             14/191 proposals
+Tree-Size-Arranger accepted         1/21 proposals
+  Theta       0.021484  
+  
+Initial Chain 9 of 10:
+11:48:16  Predicted end of chains for this region:  Tue May 29 11:48:20 2012
+
+11:48:16  Accepted 50.25% | Posterior lnL 0.01912259 | Data lnL -716.659601
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       180/181 proposals
+Tree-Arranger accepted             19/185 proposals
+Tree-Size-Arranger accepted         2/34 proposals
+  Theta       0.022171  
+  
+Initial Chain 10 of 10:
+11:48:17  Predicted end of chains for this region:  Tue May 29 11:48:20 2012
+
+11:48:17  Accepted    47% | Posterior lnL 0.01905120 | Data lnL -696.976981
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       173/173 proposals
+Tree-Arranger accepted             14/186 proposals
+Tree-Size-Arranger accepted         1/41 proposals
+  Theta       0.021476  
+  
+Final Chain 1 of 2:
+11:48:19  Predicted end of chains for this region:  Tue May 29 11:48:20 2012
+
+11:48:19  Accepted    45% | Posterior lnL 0.05740946 | Data lnL -693.380385
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       422/423 proposals
+Tree-Arranger accepted             28/483 proposals
+Tree-Size-Arranger accepted         0/94 proposals
+  Theta       0.020319  
+  
+Final Chain 2 of 2:
+11:48:20  Accepted  47.6% | Posterior lnL 0.00834775 | Data lnL -687.920715
+No trees discarded due to limit violations.
+Haplotype-Arranger accepted       438/448 proposals
+Tree-Arranger accepted             32/473 proposals
+Tree-Size-Arranger accepted         6/79 proposals
+  Theta       0.020756  
+  
+11:48:20  Beginning profiling, please be patient
+11:48:20  Finished profile 1 of 1.
+
+
diff --git a/doc/testfiles/sample_outsumfile.xml b/doc/testfiles/sample_outsumfile.xml
new file mode 100644
index 0000000..4d3031c
--- /dev/null
+++ b/doc/testfiles/sample_outsumfile.xml
@@ -0,0 +1,576 @@
+<XML-summary-file>
+<!-- Lamarc v. 2.1.8
+     Please do not modify. -->
+<chainpack>
+	<number> 0 0 0 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.599999999999999978 </accrate>
+		<llikemle> 1.90107652637508018 </llikemle>
+		<llikedata> -1531.22692202612234 </llikedata>
+		<starttime> 1338317288 </starttime>
+		<endtime> 1338317289 </endtime>
+		<rates> <map> Haplotype-Arranger 185 188 </map> <map> Tree-Arranger 45 182 </map> <map> Tree-Size-Arranger 10 30 </map> </rates>
+		<estimates>
+			<thetas> 0.0140045543987855348 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 1 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.510000000000000009 </accrate>
+		<llikemle> 2.00607271911580964 </llikemle>
+		<llikedata> -1223.59185792918424 </llikedata>
+		<starttime> 1338317289 </starttime>
+		<endtime> 1338317290 </endtime>
+		<rates> <map> Haplotype-Arranger 178 182 </map> <map> Tree-Arranger 20 173 </map> <map> Tree-Size-Arranger 6 45 </map> </rates>
+		<estimates>
+			<thetas> 0.0198010394672644731 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 2 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.479999999999999982 </accrate>
+		<llikemle> 0.3157907228474926 </llikemle>
+		<llikedata> -1144.89455037797165 </llikedata>
+		<starttime> 1338317290 </starttime>
+		<endtime> 1338317291 </endtime>
+		<rates> <map> Haplotype-Arranger 168 170 </map> <map> Tree-Arranger 22 191 </map> <map> Tree-Size-Arranger 2 39 </map> </rates>
+		<estimates>
+			<thetas> 0.0237103739556533341 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 3 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.542499999999999982 </accrate>
+		<llikemle> 0.0629701176915088146 </llikemle>
+		<llikedata> -1045.63872658027321 </llikedata>
+		<starttime> 1338317291 </starttime>
+		<endtime> 1338317292 </endtime>
+		<rates> <map> Haplotype-Arranger 196 199 </map> <map> Tree-Arranger 16 165 </map> <map> Tree-Size-Arranger 5 36 </map> </rates>
+		<estimates>
+			<thetas> 0.0222229921859648204 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 4 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.474999999999999978 </accrate>
+		<llikemle> 0.145888709955695972 </llikemle>
+		<llikedata> -988.502230441417169 </llikedata>
+		<starttime> 1338317292 </starttime>
+		<endtime> 1338317293 </endtime>
+		<rates> <map> Haplotype-Arranger 173 178 </map> <map> Tree-Arranger 14 197 </map> <map> Tree-Size-Arranger 3 25 </map> </rates>
+		<estimates>
+			<thetas> 0.0242750236526722424 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 5 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.482499999999999984 </accrate>
+		<llikemle> 0.00676027254048549263 </llikemle>
+		<llikedata> -852.329276925133399 </llikedata>
+		<starttime> 1338317293 </starttime>
+		<endtime> 1338317293 </endtime>
+		<rates> <map> Haplotype-Arranger 165 166 </map> <map> Tree-Arranger 23 196 </map> <map> Tree-Size-Arranger 5 38 </map> </rates>
+		<estimates>
+			<thetas> 0.0238188990942800084 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 6 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.522499999999999964 </accrate>
+		<llikemle> 0.014441425822049353 </llikemle>
+		<llikedata> -810.23647721130817 </llikedata>
+		<starttime> 1338317293 </starttime>
+		<endtime> 1338317294 </endtime>
+		<rates> <map> Haplotype-Arranger 192 197 </map> <map> Tree-Arranger 11 172 </map> <map> Tree-Size-Arranger 6 31 </map> </rates>
+		<estimates>
+			<thetas> 0.023159943201709686 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 7 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.505000000000000004 </accrate>
+		<llikemle> 0.0975247797368335445 </llikemle>
+		<llikedata> -743.664623692001555 </llikedata>
+		<starttime> 1338317294 </starttime>
+		<endtime> 1338317295 </endtime>
+		<rates> <map> Haplotype-Arranger 187 188 </map> <map> Tree-Arranger 14 191 </map> <map> Tree-Size-Arranger 1 21 </map> </rates>
+		<estimates>
+			<thetas> 0.0214844741036189155 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 8 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.502499999999999947 </accrate>
+		<llikemle> 0.0191225884609427209 </llikemle>
+		<llikedata> -716.659600790623927 </llikedata>
+		<starttime> 1338317295 </starttime>
+		<endtime> 1338317296 </endtime>
+		<rates> <map> Haplotype-Arranger 180 181 </map> <map> Tree-Arranger 19 185 </map> <map> Tree-Size-Arranger 2 34 </map> </rates>
+		<estimates>
+			<thetas> 0.0221707791918490035 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 9 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.469999999999999973 </accrate>
+		<llikemle> 0.0190512003002608968 </llikemle>
+		<llikedata> -696.976980946530375 </llikedata>
+		<starttime> 1338317296 </starttime>
+		<endtime> 1338317297 </endtime>
+		<rates> <map> Haplotype-Arranger 173 173 </map> <map> Tree-Arranger 14 186 </map> <map> Tree-Size-Arranger 1 41 </map> </rates>
+		<estimates>
+			<thetas> 0.0214760006724358625 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainpack>
+	<number> 0 0 10 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.450000000000000011 </accrate>
+		<llikemle> 0.0574094588085992674 </llikemle>
+		<llikedata> -693.380384625162037 </llikedata>
+		<starttime> 1338317297 </starttime>
+		<endtime> 1338317299 </endtime>
+		<rates> <map> Haplotype-Arranger 422 423 </map> <map> Tree-Arranger 28 483 </map> <map> Tree-Size-Arranger 0 94 </map> </rates>
+		<estimates>
+			<thetas> 0.020318803741329744 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<chainsum>
+	<reg_rep> 0 0 </reg_rep>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.792605537315372755 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.792605537315372755 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.792605537315372755 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.792605537315372755 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.792605537315372755 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.810790144765409893 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.810790144765409893 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.805781948314666763 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.821100062248226181 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.801216856634282326 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.789385419923617437 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.789385419923617437 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.789385419923617437 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.789385419923617437 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.789385419923617437 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.789385419923617437 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.789385419923617437 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.8002249276197273 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.807640030274727194 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.807640030274727194 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.807175881646565396 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.807175881646565396 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.783867789383589808 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.796500430275619342 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.816307850282119718 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.803527589520701579 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.803527589520701579 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.798356412000855498 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.798256933216647036 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.808032639249511297 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.795375033951906563 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.747442685905288617 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.741662279740677666 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.747849161412921126 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.747849161412921126 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.747849161412921126 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.824990538930289286 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853050760459042046 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853050760459042046 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.854846892448356543 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853006191542226455 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853006191542226455 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853006191542226455 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853006191542226455 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853006191542226455 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853006191542226455 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853006191542226455 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853006191542226455 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853275325203173396 </shortwait>
+		</shortforce>
+	</treesum>
+	<treesum>
+		<ncopy> 1 </ncopy>
+		<shortforce> coalesce short
+			<shortpoint> 39 </shortpoint>
+			<shortwait> 0.853275325203173396 </shortwait>
+		</shortforce>
+	</treesum>
+</chainsum>
+<chainpack>
+	<number> 0 0 11 </number>
+	<chainout>
+		<badtrees> 0 </badtrees>
+		<tinytrees> 0 </tinytrees>
+		<stretchedtrees> 0 </stretchedtrees>
+		<zerodltrees> 0 </zerodltrees>
+		<accrate> 0.475999999999999979 </accrate>
+		<llikemle> 0.00834775005777513811 </llikemle>
+		<llikedata> -687.920714973015038 </llikedata>
+		<starttime> 1338317299 </starttime>
+		<endtime> 1338317300 </endtime>
+		<rates> <map> Haplotype-Arranger 438 448 </map> <map> Tree-Arranger 32 473 </map> <map> Tree-Size-Arranger 6 79 </map> </rates>
+		<estimates>
+			<thetas> 0.0207558381083275713 </thetas>
+		</estimates>
+	</chainout>
+</chainpack>
+<!-- End summary file
+	 Generated from run that started at: Tue May 29 11:48:08 2012
+	 and ended at: Tue May 29 11:48:20 2012 -->
+</XML-summary-file>
diff --git a/doc/testfiles/sample_tracefile_coal_1.txt b/doc/testfiles/sample_tracefile_coal_1.txt
new file mode 100644
index 0000000..939d64a
--- /dev/null
+++ b/doc/testfiles/sample_tracefile_coal_1.txt
@@ -0,0 +1,301 @@
+Step	Ln(Data Likelihood)
+20	-2388.79
+40	-2260.74
+60	-2095.72
+80	-2078.81
+100	-1961.78
+120	-1922.69
+140	-1892.04
+160	-1891.74
+180	-1893.42
+200	-1889.94
+220	-1882.14
+240	-1864.98
+260	-1863.6
+280	-1862.48
+300	-1852.4
+320	-1742.98
+340	-1684.91
+360	-1571.6
+380	-1569.57
+400	-1531.23
+420	-1384.36
+440	-1362.51
+460	-1341.53
+480	-1341.53
+500	-1341.53
+520	-1309.31
+540	-1308.74
+560	-1286.86
+580	-1261.51
+600	-1258.09
+620	-1256.02
+640	-1250.05
+660	-1250.05
+680	-1250.05
+700	-1231.99
+720	-1231.99
+740	-1229.01
+760	-1229.01
+780	-1225.69
+800	-1223.59
+820	-1182.74
+840	-1182.74
+860	-1168.96
+880	-1165.69
+900	-1165.33
+920	-1165.78
+940	-1165.78
+960	-1160.24
+980	-1148.07
+1000	-1147.2
+1020	-1147.91
+1040	-1147.91
+1060	-1147.91
+1080	-1145.85
+1100	-1143.11
+1120	-1143.11
+1140	-1143.8
+1160	-1143.73
+1180	-1144.5
+1200	-1144.89
+1220	-1129.77
+1240	-1130.46
+1260	-1130.46
+1280	-1107.78
+1300	-1101.79
+1320	-1101.79
+1340	-1101.79
+1360	-1101.79
+1380	-1101.79
+1400	-1102.94
+1420	-1102.8
+1440	-1087.56
+1460	-1087.32
+1480	-1056.23
+1500	-1055.15
+1520	-1055.15
+1540	-1053.58
+1560	-1050.86
+1580	-1050.86
+1600	-1045.64
+1620	-1024.66
+1640	-1024.66
+1660	-1024.66
+1680	-1019.96
+1700	-1020.09
+1720	-1017.56
+1740	-1015.61
+1760	-1015.58
+1780	-1002.67
+1800	-1002.41
+1820	-1002.41
+1840	-997.721
+1860	-993.276
+1880	-993.276
+1900	-992.513
+1920	-991.094
+1940	-991.094
+1960	-991.094
+1980	-990.924
+2000	-988.502
+2020	-939.815
+2040	-939.245
+2060	-939.245
+2080	-939.239
+2100	-937.564
+2120	-937.664
+2140	-935.327
+2160	-935.054
+2180	-881.799
+2200	-882.039
+2220	-875.181
+2240	-865.446
+2260	-863.057
+2280	-861.32
+2300	-859.673
+2320	-858.631
+2340	-854.507
+2360	-852.728
+2380	-852.728
+2400	-852.329
+2420	-835.004
+2440	-835.004
+2460	-835.004
+2480	-835
+2500	-835.354
+2520	-831.43
+2540	-821.323
+2560	-821.323
+2580	-820.502
+2600	-821.341
+2620	-821.696
+2640	-821.696
+2660	-820.773
+2680	-814.009
+2700	-814.932
+2720	-814.009
+2740	-810.101
+2760	-810.101
+2780	-810.236
+2800	-810.236
+2820	-779.786
+2840	-779.949
+2860	-779.949
+2880	-778.337
+2900	-778.337
+2920	-779.26
+2940	-779.26
+2960	-773.951
+2980	-773.951
+3000	-773.951
+3020	-773.951
+3040	-763.289
+3060	-761.171
+3080	-760.506
+3100	-761.429
+3120	-760.506
+3140	-758.668
+3160	-743.665
+3180	-743.665
+3200	-743.665
+3220	-742.259
+3240	-742.259
+3260	-742.541
+3280	-742.548
+3300	-742.548
+3320	-742.548
+3340	-740.299
+3360	-740.299
+3380	-741.31
+3400	-741.31
+3420	-741.31
+3440	-742.109
+3460	-741.42
+3480	-732.597
+3500	-731.41
+3520	-730.517
+3540	-730.486
+3560	-730.319
+3580	-726.868
+3600	-716.66
+3620	-712.289
+3640	-712.647
+3660	-710.726
+3680	-700.974
+3700	-700.972
+3720	-700.972
+3740	-700.898
+3760	-700.898
+3780	-700.898
+3800	-700.898
+3820	-699.742
+3840	-699.934
+3860	-700.008
+3880	-700.008
+3900	-697.021
+3920	-696.97
+3940	-697.376
+3960	-697.376
+3980	-696.977
+4000	-696.977
+4020	-692.496
+4040	-692.486
+4060	-692.457
+4080	-692.457
+4100	-692.601
+4120	-692.662
+4140	-692.662
+4160	-692.545
+4180	-692.545
+4200	-692.619
+4220	-692.619
+4240	-692.545
+4260	-691.255
+4280	-691.139
+4300	-691.139
+4320	-691.577
+4340	-691.577
+4360	-691.724
+4380	-691.724
+4400	-691.691
+4420	-691.691
+4440	-691.691
+4460	-691.617
+4480	-691.53
+4500	-691.56
+4520	-691.887
+4540	-692.079
+4560	-692.079
+4580	-692.079
+4600	-692.079
+4620	-692.697
+4640	-693.115
+4660	-693.042
+4680	-693.115
+4700	-693.107
+4720	-693.153
+4740	-693.153
+4760	-693.153
+4780	-693.153
+4800	-693.227
+4820	-693.153
+4840	-693.32
+4860	-693.281
+4880	-693.563
+4900	-693.602
+4920	-693.665
+4940	-693.665
+4960	-693.38
+4980	-693.38
+5000	-693.38
+5020	-693.328
+5040	-693.328
+5060	-693.613
+5080	-693.613
+5100	-693.613
+5120	-695.341
+5140	-695.341
+5160	-695.796
+5180	-701
+5200	-702.224
+5220	-699.989
+5240	-699.498
+5260	-699.498
+5280	-699.498
+5300	-699.498
+5320	-699.498
+5340	-699.498
+5360	-699.646
+5380	-701.526
+5400	-701.526
+5420	-701.432
+5440	-701.432
+5460	-700.117
+5480	-696.727
+5500	-697.186
+5520	-696.812
+5540	-696.812
+5560	-696.418
+5580	-696.416
+5600	-697.021
+5620	-696.842
+5640	-699.325
+5660	-698.266
+5680	-698.657
+5700	-698.657
+5720	-698.657
+5740	-697.774
+5760	-689.106
+5780	-689.106
+5800	-687.766
+5820	-687.958
+5840	-687.958
+5860	-687.958
+5880	-687.958
+5900	-687.958
+5920	-687.958
+5940	-687.958
+5960	-687.958
+5980	-687.921
+6000	-687.921
diff --git a/doc/testfiles/v2.0.infiles/infile.2pop b/doc/testfiles/v2.0.infiles/infile.2pop
new file mode 100644
index 0000000..8db521e
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.2pop
@@ -0,0 +1,760 @@
+<lamarc>
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>1</resimulating>
+      <haplotyping>0</haplotyping>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>100</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>1000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.2pop</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.2pop</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.2pop</out-summary-file>
+    <out-xml-file>menuinfile</out-xml-file>
+    <plotting>
+    </plotting>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.0016436 0.0011111</start-values>
+      <method> WATTERSON WATTERSON</method>
+      <max-events>32000</max-events>
+      <profiles>percentile percentile </profiles>
+    </coalescence>
+    <migration>
+      <start-values> 0 100 100 0</start-values>
+      <method> USER USER USER USER</method>
+      <max-events>10000</max-events>
+      <profiles>none none none none </profiles>
+    </migration>
+    <recombination>
+      <start-values> 0.01</start-values>
+      <method> USER</method>
+      <max-events>1000</max-events>
+      <profiles>percentile </profiles>
+    </recombination>
+  </forces>
+  <data>
+    <region name="lpl">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <base-freqs> calculated </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>9734</length>
+          <locations> 105 109 144 324 342 478 550 735 1215 1219 1285 1546 1570 1755 1827 1938 1960 2130 2499 2588 2618 2637 2848 2986 2995 3021 3247 3289 3296 3552 3608 3722 3842 4015 4201 4342 4345 4417 4425 4508 4575 4822 4871 4906 4934 5084 5167 5370 5394 5440 5553 5559 5686 6175 6195 6202 6249 6282 6594 6677 6717 6759 6771 6792 6839 6862 6938 7311 7340 7356 7409 7750 8084 8085 8281 8288 8389 8498 8529 8533 8534 8640 8751 8848 9036 9398 9708 9717</locations>
+          <offset>0</offset>
+          <marker-weights> 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="Jackson">
+      <individual name="J01-1     ">
+        <sample name="J01-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J01-3     ">
+        <sample name="J01-3     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J02-3     ">
+        <sample name="J02-3     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J02-57    ">
+        <sample name="J02-57    ">
+          <datablock type="SNP">
+            CCGTNTNTGCCACACAATACACAGCGGNNCTTGGGACCCTANGCCACGNTCGCGCACCGGGGGTTCNCGATANGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J03-8     ">
+        <sample name="J03-8     ">
+          <datablock type="SNP">
+            CAGTNCNCGCCACACAACGCGCAGCGCNNCTTGCGACCCTANGCCACANTCGCGTACCGGGGGTTCNCGATCNGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J03-17    ">
+        <sample name="J03-17    ">
+          <datablock type="SNP">
+            CCGTNTNTGCCACACAATACACAGCGCNNCCTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J04-18    ">
+        <sample name="J04-18    ">
+          <datablock type="SNP">
+            CAATNTNTCTCACACAACGCACATCGCNNCTTGCGAGCTTANGCCGCGNNCGCGCACCCGGGATTTNCAATANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J04-25    ">
+        <sample name="J04-25    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACATCGCNNCTTGCGACCTTANGCCGCGNNCGCGCACCCGGGATTTNCAATANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J05-43    ">
+        <sample name="J05-43    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J05-45    ">
+        <sample name="J05-45    ">
+          <datablock type="SNP">
+            CAGCNTNTCTCACACAACGCACAGCGCNNCTTGCGACCTTTNGCCGCGNTCGCGCACCGGGGATTCNGAATCNTCAGGCANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J06-2     ">
+        <sample name="J06-2     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCATATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J06-8     ">
+        <sample name="J06-8     ">
+          <datablock type="SNP">
+            CAGTNCNCGCCACACAACGGGCAGCGCNNCTTGCGACCCTANGCCACANTCGCGCACCGGGGGTTCNCGATCNGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J07-64    ">
+        <sample name="J07-64    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACATCGCACATCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGGAGATTCNGAATCNGCATGAANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J07-12    ">
+        <sample name="J07-12    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACATCGCNNCTTGCGAGCTTANACTGCGNCCGCGCACCCTGGATTTNCAATANGGAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J08-13    ">
+        <sample name="J08-13    ">
+          <datablock type="SNP">
+            AAGTNTNTGCTACACAATACACAGCACNNCTTGCGACCTTANGCNNNGNTCGCGCACCGGGGATTCNGAATCNTCAGGNNNCGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J08-14    ">
+        <sample name="J08-14    ">
+          <datablock type="SNP">
+            CAGTNCNTGCCACACAACGCGCAGCGCNNCTTGCGACCCTANGCNNNGNTCGCGCATCGGGGGTTCNCGATCNGCAGCNNNTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J09-3     ">
+        <sample name="J09-3     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J09-15    ">
+        <sample name="J09-15    ">
+          <datablock type="SNP">
+            CAGTNCNTGCCACAGGATGCGCAGCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J10-16    ">
+        <sample name="J10-16    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCNNACAACGCACAGCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGCANTGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J10-11    ">
+        <sample name="J10-11    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCNNACAATGCACATCGCNNCCTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J11-1     ">
+        <sample name="J11-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCNNNNGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J11-4     ">
+        <sample name="J11-4     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCNNNNGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J12-19    ">
+        <sample name="J12-19    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACAGCGCNNCTTGCGTCTTTANACNGTGNTAGTGCGTCGTAGATTCNGAATCNGCATGNNNTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J12-20    ">
+        <sample name="J12-20    ">
+          <datablock type="SNP">
+            CAATNCNTCTCACAGGATGCGCATCGCNNCCTGCGAGCTTANGCNGCGNTCGCGCACCCGGGATTCNCAATANGCAGGNNNTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J13-21    ">
+        <sample name="J13-21    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCCTTANACNGCGNCCGCGCACCCGGGATTTNCAATANGCAGGCCNTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J13-22    ">
+        <sample name="J13-22    ">
+          <datablock type="SNP">
+            CCGTNTNTGCCACACAATACACAGCGGNNCTTGGGACCCTANACNACGNTCGCGCACCGGGGGTTCNCGATANGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J14-23    ">
+        <sample name="J14-23    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACATCGCNNCTTGCGACTTTANACNGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J14-24    ">
+        <sample name="J14-24    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAATGCACAGCGCNNCTTGCGTCCTTANACNGTGNTAGTGCATCGTAGATTCNGAGTCNGGATGAANTGTCGGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J15-10    ">
+        <sample name="J15-10    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACAGCGCNNCTTGCGACTTTANACTGNGNTAGTGCATCGTAGATTCNGAATCNGCATGCANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J15-26    ">
+        <sample name="J15-26    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAATGCACATCGCNNCTTGCGTCCTTANACTGNGNTAGCGCACCCGGGATTTNCAATANGGAGGAANTGGCGAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J16-27    ">
+        <sample name="J16-27    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATCGCNNCTCGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J16-28    ">
+        <sample name="J16-28    ">
+          <datablock type="SNP">
+            CAGCNTNTCTCACACAACGCACATCGCNNCCTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J17-8     ">
+        <sample name="J17-8     ">
+          <datablock type="SNP">
+            CAGTNCNCGCCACACAACGCGCAGCGCNNCTTGCGACCCTANGCCACGNTCGCGCACCGGGGGTTCNCGATCNGCAGCCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J17-29    ">
+        <sample name="J17-29    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATCGCNNCTTGCGACCCTANGCCACGNTCGCGCATCGGGGGTTCNCGATANGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J18-3     ">
+        <sample name="J18-3     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACATCTTTANACTGTGNTAGTGCATCGTAAATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J18-30    ">
+        <sample name="J18-30    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACAGCGCNNCTTGCGACTTTANACTGTGNTCGTGCATCGTAGATTCNGAATCNGCATGCANTGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J19-31    ">
+        <sample name="J19-31    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACAGCGCNNCTTGCGTCTTTANNCNGNGNTAGTGCATCGNAGATTCNGAATCNGCATGCANTGNCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J19-32    ">
+        <sample name="J19-32    ">
+          <datablock type="SNP">
+            CAGCNCNTCTCACTCAATGCACAGCGCNNCTTGGGACCTTANNCNGNGNCCGCGCACCCNGGATTTNCAATANGCAGGAANTGNCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J20-33    ">
+        <sample name="J20-33    ">
+          <datablock type="SNP">
+            CAGTNCNTGCCACAGGATGCGCAGCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J20-34    ">
+        <sample name="J20-34    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATCGCNNCTCGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGGATGAANTGTCGGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J21-35    ">
+        <sample name="J21-35    ">
+          <datablock type="SNP">
+            CCGTNTNTGCCACACAATACACAGCGGNNCTTGGAACCCTANANNACGNTAGCGCACCGGGAGTTCNCGATCNGCAGGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J21-36    ">
+        <sample name="J21-36    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCCCACAATGCACAGCGCNNCTTGCGACTTTANANNGTGNTAGTGCATCGTAGATTCNGAATCNGCATGCANTGGCAGA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J22-11    ">
+        <sample name="J22-11    ">
+          <datablock type="SNP">
+            CAGTNTNTNTCCCACAATGCACATCGCNNTCTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAATCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J22-37    ">
+        <sample name="J22-37    ">
+          <datablock type="SNP">
+            CAGTNTNTNTCACACAACGCACATCGCNNCTTGCGTCTTTANACTGTGNCAGTGCATCGTAGATTCNGAATCNGGATGAANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J23-10    ">
+        <sample name="J23-10    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACAGCGCNNTTTGCGACTTTANACTGTGNTAGTGCATCGNAGATTCNGAATCNGCATGCANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J23-38    ">
+        <sample name="J23-38    ">
+          <datablock type="SNP">
+            AAGTNTNTGCTACACAATACACATCACNNCTTGCGTCCTTTNACTGCGNTCGCGCACCGNGGATTCNGAATANGGAGGAANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J24-39    ">
+        <sample name="J24-39    ">
+          <datablock type="SNP">
+            CAATNTNTCTCACACATCGCACATCGCNNCTTGCGAGCTTANGCCGCGNNCGCGCACCCNGGATTCNCAATCNGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="J24-40    ">
+        <sample name="J24-40    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCACACAACGCACATCGCNNCCTGCGTCTTTANACTGTGNNAGTGCATCGNAGATTCNGAATCNGGATGAANTGTCAGG
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="NorthKarelia">
+      <individual name="N01-1     ">
+        <sample name="N01-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N01-41    ">
+        <sample name="N01-41    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTGTTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N02-1     ">
+        <sample name="N02-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N02-42    ">
+        <sample name="N02-42    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTGCTTANACTGCGNTAGTGCATCGTAGATTCNGAGCCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N03-5     ">
+        <sample name="N03-5     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N03-44    ">
+        <sample name="N03-44    ">
+          <datablock type="SNP">
+            CAATNTNTCTCACACAACACACATCGCNNCTTGCGACCTCANACTGCGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N04-9     ">
+        <sample name="N04-9     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACNGCGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N04-46    ">
+        <sample name="N04-46    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGAGCTCANGCNGCGNCCGCGCATCCGGGATTTNCAGTANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N05-4     ">
+        <sample name="N05-4     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N05-47    ">
+        <sample name="N05-47    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGACCTTTNGCCGCGNTCGCGCACCGGGGATTCNGAGTCNTCAGGCANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N06-5     ">
+        <sample name="N06-5     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N06-48    ">
+        <sample name="N06-48    ">
+          <datablock type="SNP">
+            CAATNTNTCTCACACAACGCACATCGCNNCTTACGAGCTTANGCCGCGNTCGCGCATCCGGGATTTNCAGTANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N07-2     ">
+        <sample name="N07-2     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNNAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N07-49    ">
+        <sample name="N07-49    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACACACATCGCNNCTCGCGACCTCANGCCGCGNNCACGCATCCGGGATTTNCAGTANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N08-2     ">
+        <sample name="N08-2     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N08-50    ">
+        <sample name="N08-50    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCCTTANGCCGCGNCCGCGCATCCGGGATTTNCAGTANGCAGGCCNTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N09-5     ">
+        <sample name="N09-5     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N09-51    ">
+        <sample name="N09-51    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCCTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N10-1     ">
+        <sample name="N10-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACNTCANACTGTGNTAGTGCATCNTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N10-4     ">
+        <sample name="N10-4     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCNTTANACTGTGNTAGTGCATCNTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N11-52    ">
+        <sample name="N11-52    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N11-2     ">
+        <sample name="N11-2     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N12-1     ">
+        <sample name="N12-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N12-53    ">
+        <sample name="N12-53    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTCGCGAGCTCANACTGTGNTAATGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N13-54    ">
+        <sample name="N13-54    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCTTTANACTGTGNTAGTGCACCGTAGAGCCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N13-55    ">
+        <sample name="N13-55    ">
+          <datablock type="SNP">
+            CAGCNTNTGCTACACAATACACAGCACNNCTTGCGACCTTANGCCGCGNTCGCGCACCGGGGATTCNGAGTCNTCAGGCANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N14-1     ">
+        <sample name="N14-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N14-56    ">
+        <sample name="N14-56    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGAGCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANCATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N15-7     ">
+        <sample name="N15-7     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATCGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N15-58    ">
+        <sample name="N15-58    ">
+          <datablock type="SNP">
+            AAGTNTNTGCTACACAATACACAGCACNNCTTGCGACCTTTNGCCGCGNTCGCGCACCGGGGATTCNGAGTCNTCCGGCANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N16-1     ">
+        <sample name="N16-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N16-59    ">
+        <sample name="N16-59    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTCGCGACCTCANACTGTGNTAATGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N17-4     ">
+        <sample name="N17-4     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N17-60    ">
+        <sample name="N17-60    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCCTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N18-1     ">
+        <sample name="N18-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N18-5     ">
+        <sample name="N18-5     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCATGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N19-3     ">
+        <sample name="N19-3     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N19-61    ">
+        <sample name="N19-61    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCCTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N20-1     ">
+        <sample name="N20-1     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N20-7     ">
+        <sample name="N20-7     ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATCGCNNCTCACGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCNTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N21-62    ">
+        <sample name="N21-62    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANGCCGCGNCCGCGCACCCGGGATTTNCAATANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N21-63    ">
+        <sample name="N21-63    ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTTTNGCCGCGNTCGCGCACCGGGGATTCNGAATCNTCAGGCANCGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N22-6     ">
+        <sample name="N22-6     ">
+          <datablock type="SNP">
+            AAGTNTNTNNTACACAATACACAGCACNNCTTGCGTCCTTANGCCGCGNCCGCGCACCNGGGATTTNCAATANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N22-65    ">
+        <sample name="N22-65    ">
+          <datablock type="SNP">
+            CAGTNTNCNNCAGACAACGCACATCGCNNCTTGCGTCCTCANGCCGCGNCCGCGCACCNGGGATTTNCAATANGCAGGCANTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N23-4     ">
+        <sample name="N23-4     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGTCTTTANACTGTGNTAGTGCATCGTAGATTCNGAGTCNGCCTGAANTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N23-66    ">
+        <sample name="N23-66    ">
+          <datablock type="SNP">
+            CAGTNTNTCTCACACAACGCACATAGCNNCTCACGACCTTTNGCCGCGNTCGCGCACCGGGGATTCNGAGTCNTCCGGCANCGGCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N24-9     ">
+        <sample name="N24-9     ">
+          <datablock type="SNP">
+            CAGTNTNCCTCAGACAACGCACATCGCNNCTTGCGACCTCANACTGCGNTAGNGCATCGTAGATTCNGAGTCNGCATGNNNTATCAGG
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="N24-67    ">
+        <sample name="N24-67    ">
+          <datablock type="SNP">
+            CAATNTNTCTCACACAACGCACATCGCNNCTTGCGAGCTTANGCCGCGNTAGNGCATCCGGGATTTNCAGTANGCAGGNNNTGGCAAA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
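The test infiles added here all share the same data layout: a <region> contains <population> elements, each <population> contains <individual> elements, and each <individual> wraps one or more <sample> elements whose <datablock> (SNP, DNA, or MICROSAT) holds the actual markers. A minimal sketch of walking that structure with Python's standard xml.etree.ElementTree, once the files are checked out of the tree; this is illustration only, not code from the lamarc sources:

    import xml.etree.ElementTree as ET

    # Any of the test infiles added by this commit will do; infile.baselocus
    # is the next file in the diff below.
    tree = ET.parse("doc/testfiles/v2.0.infiles/infile.baselocus")
    root = tree.getroot()

    for region in root.iter("region"):
        print("region:", region.get("name"))
        for pop in region.iter("population"):
            individuals = pop.findall("individual")
            # Datablock text is whitespace-padded, so strip before measuring.
            lengths = sorted({len(db.text.strip())
                              for ind in individuals
                              for db in ind.iter("datablock")})
            print(" ", pop.get("name").strip(), "-",
                  len(individuals), "individuals, datablock lengths", lengths)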
diff --git a/doc/testfiles/v2.0.infiles/infile.baselocus b/doc/testfiles/v2.0.infiles/infile.baselocus
new file mode 100644
index 0000000..cb0faf5
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.baselocus
@@ -0,0 +1,154 @@
+<lamarc>
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>10</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>10000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <verbosity>normal</verbosity>
+    <progress-reports>normal</progress-reports>
+    <results-file>outfile</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile</in-summary-file>
+    <use-out-summary>false</use-out-summary>
+    <out-summary-file>outsumfile</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+    <seed>3001</seed>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01</start-values>
+      <method> User </method>
+      <max-events>100000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> default </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+  </forces>
+  <data>
+    <region name="jointloci">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> 0.25 0.25 0.25 0.25 </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>100</length>
+          <locations> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99</locations>
+          <offset>0</offset>
+          <marker-weights> 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="seattle">
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="DNA">
+            CAAGCCGTTTGGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGAGACATTCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="DNA">
+            CAAGCAATTTCGCCCAGGGCTAGGGGGTGCCGGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGAGACATTCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="DNA">
+            CAAACAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGAGACATTCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGAGACATGCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGAGACATGCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGCTTAGAAGGGTAACCGAGACATGCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGGGACATGCCACAGCTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGGGGCATGCCACAACTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGATGGATTTTATTCGCTTGCCCATGTGTGATTAGAAGGGTAACCGAGACATGCCACAACTA
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="DNA">
+            CAAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCATGGTGGATTTTATTCGCTAGCCCACGTGTGATTAGAAGGGTAACCGAGACATGCCACAACTA
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
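infile.baselocus above declares a coalescence prior of type "log" bounded by <lower> 1e-05 and <upper> 10 (the prior element is carried along even though this particular file sets <bayesian-analysis> to No). Read as a log-uniform density, which is an assumption about the tag rather than a statement about LAMARC internals, drawing from it is just a uniform draw on the log scale mapped back through exp; a minimal sketch:

    import math, random

    lower, upper = 1e-05, 10.0   # the <lower>/<upper> bounds quoted above

    def draw_log_uniform(lo, hi):
        """One draw from a density uniform in log(parameter) on [lo, hi]."""
        return math.exp(random.uniform(math.log(lo), math.log(hi)))

    print([round(draw_log_uniform(lower, upper), 6) for _ in range(5)])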
diff --git a/doc/testfiles/v2.0.infiles/infile.bayes b/doc/testfiles/v2.0.infiles/infile.bayes
new file mode 100644
index 0000000..6c2a7aa
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.bayes
@@ -0,0 +1,100 @@
+<lamarc>
+<!-- Created from the LamarcDS DataStore -->
+	<forces>
+		<coalescence>
+			<start-values> 0.01 </start-values>
+			<method> USER </method>
+			<max-events> 1000 </max-events>
+		</coalescence>
+	</forces>
+	<!-- -->
+	<chains>
+                <bayesian-analysis> true </bayesian-analysis>
+		<replicates> 1 </replicates>
+		<heating>
+			<temperatures> 1 </temperatures>
+			<swap-interval> 1 </swap-interval>
+		</heating>
+		<strategy>
+			<resimulating> .5 </resimulating>
+			<bayesian> .5 </bayesian>
+		</strategy>
+			<initial>
+				<number> 3 </number>
+				<samples> 200 </samples>
+				<discard> 1000 </discard>
+				<interval> 20 </interval>
+			</initial>
+		<final>
+			<number> 1 </number>
+			<samples> 500 </samples>
+			<discard> 1000 </discard>
+			<interval> 20 </interval>
+		</final>
+	</chains>
+	<!-- -->
+	<format>
+		<verbosity> verbose </verbosity>
+		<progress-reports> verbose </progress-reports>
+		<echo> true </echo>
+		<plotting>
+			<profile> false </profile>
+			<posterior> false </posterior>
+		</plotting>
+		<seed> 1005 </seed>
+		<parameter-file> parmfile </parameter-file>
+		<results-file> outfile.bayes </results-file>
+		<in-summary-file> insumfile.bayes </in-summary-file>
+		<out-summary-file> outsumfile.bayes </out-summary-file>
+	</format>
+	<!-- -->
+	<data>
+		<region name="region 1">
+			<population name="Population JYRM">
+				<individual name="germ_10   ">
+					<sample name="germ_10   ">
+						<datablock type="DNA">
+							TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_11   ">
+					<sample name="germ_11   ">
+						<datablock type="DNA">
+							AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_20   ">
+					<sample name="germ_20   ">
+						<datablock type="DNA">
+							ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_21   ">
+					<sample name="germ_21   ">
+						<datablock type="DNA">
+							ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_30   ">
+					<sample name="germ_30   ">
+						<datablock type="DNA">
+							ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_31   ">
+					<sample name="germ_31   ">
+						<datablock type="DNA">
+							ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+	</data>
+</lamarc>
+
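infile.bayes above asks for 3 initial chains of 200 recorded samples and 1 final chain of 500 recorded samples, each with <discard> 1000 and <interval> 20. On the usual reading of these tags (each chain discards <discard> proposals as burn-in and then records one sample every <interval> proposals; an assumption here, not a quotation from the LAMARC documentation), the run lengths work out as in this rough sketch:

    # Back-of-the-envelope proposal counts for infile.bayes (assumed semantics).
    def chain_proposals(number, samples, discard, interval):
        return number * (discard + samples * interval)

    initial = chain_proposals(3, 200, 1000, 20)   # 3 * 5000  = 15000
    final   = chain_proposals(1, 500, 1000, 20)   # 1 * 11000 = 11000
    print(initial, final, initial + final)        # 15000 11000 26000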
diff --git a/doc/testfiles/v2.0.infiles/infile.coalgrowmig b/doc/testfiles/v2.0.infiles/infile.coalgrowmig
new file mode 100644
index 0000000..d52e1d8
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.coalgrowmig
@@ -0,0 +1,386 @@
+<!--generated with:                                                          -->
+<!--                                                                         -->
+<!--jrectree (rand#???)/rectreedna/phybigpop, dna, coal & growth & mig       -->
+<!--theta = 0.01, migration = 100.0, growth = 0.0                            -->
+<!--2 populations with 20 individuals/population, 1000 markers/sites         -->
+<!--                                                                         -->
+<!--                                                                         -->
+<!--analyzed as:                                                             -->
+<!--                                                                         -->
+<!--dna (1000 markers, no spacing)                                           -->
+<!--full gtr model with 3 categories (PAUP* derived)                         -->
+<!--starting params (theta = watterson, mig = FST)                           -->
+<!--5 initial (1000 samples, 20 interval, 1000 discard)                      -->
+<!--2 final   (2500 samples, 20 interval, 1000 discard)                      -->
+<!--no heating                                                               -->
+<!--percentile profiles                                                      -->
+<!-- Created from the LamarcDS DataStore -->
+<!-- -->
+ <lamarc>
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>1</resimulating>
+      <haplotyping>0</haplotyping>
+    </strategy>
+    <initial>
+      <number>5</number>
+      <samples>1000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>2500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.coalgrowmig</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.coalgrowmig</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.coalgrowmig</out-summary-file>
+    <out-xml-file>menuinfile</out-xml-file>
+    <plotting>
+    </plotting>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01 0.01</start-values>
+      <method> USER USER</method>
+      <max-events>32000</max-events>
+      <profiles>percentile percentile </profiles>
+    </coalescence>
+    <migration>
+      <start-values> 0 100 100 0</start-values>
+      <method> FST FST FST FST</method>
+      <max-events>10000</max-events>
+      <profiles>percentile percentile percentile percentile </profiles>
+    </migration>
+    <growth>
+      <start-values> 1 1</start-values>
+      <method> USER USER</method>
+      <max-events>10000</max-events>
+      <profiles>percentile percentile </profiles>
+    </growth>
+  </forces>
+  <data>
+    <region name="coalgrowmig">
+      <model name="GTR">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>3</num-categories>
+          <rates> 0.059438 0.482404 2.41527</rates>
+          <probabilities> 0.33 0.33 0.34</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <base-freqs> 0.23467 0.25993 0.24313 0.26227</base-freqs>
+        <gtr-rates> 0.7403 4.3727 1.00429 1.67422 3.89754 1</gtr-rates>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1000</length>
+          <locations> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144  [...]
+          <offset>0</offset>
+          <marker-weights> 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 [...]
+        </block>
+      </spacing>
+    <population name="  Popmig0">
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCTGACACAAGAGGCGGATACTTTAGCTATAAGAACCTCTCATGGCCCGTCACGAACAACGATACCGCGGGTCTAGAATCTGACGCTCGAAAGCCGTTTGCGGCCTAAGAACGATGGTCTTACTTGGTAGCTCTTAAAACCAACTAGGTGAACCATAGGACTCTAAACCCGGTATCCACAACGCTGCACATTCTCTGCACCCAATCGCCAGATAACATGTCCAATTTTCAGGTGCACGTGCGTAGCTCACGGATCGCCCCGACTTGGATGACGCTTAACTGGTTAATTTCGCGCGGTCGCCACTTCAGCTTTTACATGCAATTTGGCTTGATTCATCGGTTAGCAACAGGTGCCGAATGATTGGCTGAGGGCGTTGCTAGCTTTTCCACTCCTTCCTATTTCGGGTCAGGGTGCCCAAGGTTTATGTACCGTACCCTGCAAGCCAATCCGGGTAATGTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0014   ">
+        <sample name="00_0014   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTGAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGTT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0013   ">
+        <sample name="00_0013   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0017   ">
+        <sample name="00_0017   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0018   ">
+        <sample name="00_0018   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATTTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATTTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATTTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGTT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0011   ">
+        <sample name="00_0011   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGTT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0015   ">
+        <sample name="00_0015   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0019   ">
+        <sample name="00_0019   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAGCCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0016   ">
+        <sample name="00_0016   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGTGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0012   ">
+        <sample name="00_0012   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAGGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0010   ">
+        <sample name="00_0010   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAGGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAGGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="  Popmig1">
+      <individual name="01_0027   ">
+        <sample name="01_0027   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCTGACACAAGAGGCGGATACTTTAGCTATAAGAACCTCTCATGGCCCGTCACGAACAACGATACCGCGGGTCTAGAATCTGACGCTCGAAAGCCGTTTGCGGCCTAAGAACGATGGTCTTACTTGGTAGCTCTTAAAACCAACTAGGTGAACCATAGGACTCTAAACCCGGTATCCACAACGCTGCACATTCTCTGCACCCAATCGCCAGATAACATGTCCAATTTTCAGGTGCACGTGCGTAGCTCACGGATCGCCCCGACTTGGATGACGCTTAACTGGTTAATTTCGCGCGGTCGCCACTTCAGCTTTTACATGCAATTTGGCTTGATTCATCGGTTAGCAACAGGTGCCGAATGATTGGCTGAGGGCGTTGCTAGCTTTTCCACTCCTTCCTATTTCGGGTCAGGGTGCCCAAGGTTTATGTACCGTAGCCTGCAAGCCAATCCGGGTAATGTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0021   ">
+        <sample name="01_0021   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATTGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0039   ">
+        <sample name="01_0039   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCTGACACAAGAGGCGGATACTTTAGCTATAAGAACCTCTCATGGCCCGTCACGAACAACGATACCGCGGGTCTAGAATCTGACGCTCGAAAGCCGTTTGCGGCCTAAGAACGATGGTCTTACTTGGTAGCTCTTAAAACCAACTAGGTGAACCATAGGACTCTAAACCCGGTATCCACAACGCTGCACATTCTCTGCACCCAATCGCCAGATAACATGTCCAATTTTCAGGTGCACGTGCGTAGCTCACGGATCGCCCCGACTTGGATGACGCTTAACTGGTTAATTTCGCGCGGTCGCCACTTCAGCTTTTACATGCAATTTGGCTTGATTCATCGGTTAGCAACAGGTGCCGAATGATTGGCTGAGGGCGTTGCTAGCTTTTCCACTCCTTCCTATTTCGGGTCAGGGTGCCCAAGGTTTATGTACCGTAGCCTGCAAGCCAATCCGGGTAATGTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0028   ">
+        <sample name="01_0028   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCTGACACAAGAGGCGGATACTTTAGCTATAAGAACCTCTCATGGCCCGTCACGAACAACGATACCGCGGGTCTAGAATCTGACGCTCGAAAGCCGTTTGCGGCCTAAGAACGATGGTCTTACTTGGTAGCTCTTAAAACCAACTAGGTGAACCATAGGACTCTAAACCCGGTATCCACAACGCTGCACATTCTCTGCACCCAATCGCCAGATAACATGTCCAATTTTCAGGTGCACGTGCGTAGCTCACGGATCGCCCCGACTTGGATGACGCTTAACTGGTTAATTTCGCGCGGTCGCCACTTCAGCTTTTACATGCAATTTGGCTTGATTCATCGGTTAGCAACAGGTGCCGAATGATTGGCTGAGGGCGTTGCTAGCTTTTCCACTCCTTCCTATTTCGGGTCAGGGTGCCCAAGGTTTATGTACCGTAGCCTGCAAGCCAATCCGGGTAATGTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0036   ">
+        <sample name="01_0036   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCTGACACAAGAGGCGGATACTTTAGCTATAAGAACCTCTCATGGCCCGTCACGAACAACGATACCGCGGGTCTAGAATCTGACGCTCGAAAGCCGTTTGCGGCCTAAGAACGATGGTCTTACTTGGTAGCTCTTAAAACCAACTAGGTGAACCATAGGACTCTAAACCCGGTATCCACAACGCTGCACATTCTCTGCACCCAATCGCCAGATAACATGTCCAATTTTCAGGTGCACGTGCGTAGCTCACGGATCGCCCCGACTTGGATGACGCTTAACTGGTTAATTTCGCGCGGTCGCCACTTCAGCTTTTACATGCAATTTGGCTTGATTCATCGGTTAGCAACAGGTGCCGAATGATTGGCTGAGGGCGTTGCTAGCTTTTCCACTCCTTCCTATTTCGGGTCAGGGTGCCCAAGGTTTATGTACCGTAGCCTGCAAGCCAATCCGGGTAATGTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0038   ">
+        <sample name="01_0038   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0023   ">
+        <sample name="01_0023   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTGTTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0031   ">
+        <sample name="01_0031   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0020   ">
+        <sample name="01_0020   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0022   ">
+        <sample name="01_0022   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0026   ">
+        <sample name="01_0026   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0037   ">
+        <sample name="01_0037   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAACAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0030   ">
+        <sample name="01_0030   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATTGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0025   ">
+        <sample name="01_0025   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0032   ">
+        <sample name="01_0032   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0035   ">
+        <sample name="01_0035   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTATAAGAACCCTTCATGGACCGTCACGAGCAACGATACTGCGGGTTTAGGGTCTGACACTCGAAAGCCGTTTGCGGCCTAAGAACGATGCTCTTACTTGGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACACTTAACTCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACAAGTCCAATTTTCAGGTGCACGTGCGTACCTCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTGATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCATCAGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCGTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGAGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCACTCCGGGTAATCTGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0034   ">
+        <sample name="01_0034   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGTGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAGGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTGTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGAATAACATGTCCAACTTTCAGGTGCACGTACGTACCCCACGGATCGCCCCGACTTGGATGACGCTTAACTGCTTAATTTCGCGCGGTTGCGACCTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCGGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0033   ">
+        <sample name="01_0033   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0024   ">
+        <sample name="01_0024   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="01_0029   ">
+        <sample name="01_0029   ">
+          <datablock type="DNA">
+            CTGGTTTCCGACCCAACGCAAGAGGTGGATACTTTAGCTGTAAGAACCCTTCATGGACCGTCGCGAGCAACGATACTGCGGGTTTAGGATCTGACACTGGAAAGCCGTTTGCGGCCTAAGAACCATGCTCTTACTTTTTAGTTCTTAAAACCAACTAGGTGAACTATAGGACTCTTAACCCGGTGACCACAAAGCTGCACATTGTCTGCACCCAATCGCCGGATAACATGTCCAATTTTCAGGTGCACGTGCGTACCCCACGGATCGCCCCGACTTGGATGACGCTTGACTGCTTAATTTCGCGCGGTTGCGACTTCAGCTTCTGGATGCAATTTGCCTTGATTCGTCGGTTATCAAGAGGCCCCGAATGATTGGCTGAGGGCTTCGCTGGCTTTTCCATTCCCCCCTATTTCGGGTTAGGGTGCCCAAGGTTTATGTACCGTACCCGGCAAGCCAACCCTGGTAATATGTGTCTGGGCT [...]
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
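The header comments above give "theta = watterson" as the starting parameter for this data set, and the next file (infile.coalmigmsat) both selects <method> WATTERSON and quotes a simulated theta of 10.0 derived from Ne = 1000 and mu = 0.0025. For reference, these are the textbook quantities involved; the sketch is illustrative arithmetic, not code from the lamarc sources, and the S and n values are hypothetical:

    def watterson_theta(segregating_sites, n_sequences):
        """Watterson's estimate: S divided by the (n-1)th harmonic number."""
        harmonic = sum(1.0 / i for i in range(1, n_sequences))
        return segregating_sites / harmonic

    # Simulated 'true' theta for diploid data: theta = 4 * Ne * mu.
    print(4 * 1000 * 0.0025)        # 10.0, matching the infile.coalmigmsat comment
    print(watterson_theta(40, 20))  # hypothetical S = 40, n = 20  ->  about 11.3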
diff --git a/doc/testfiles/v2.0.infiles/infile.coalmigmsat b/doc/testfiles/v2.0.infiles/infile.coalmigmsat
new file mode 100644
index 0000000..0181527
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.coalmigmsat
@@ -0,0 +1,5113 @@
+<lamarc>
+<!--generated with:                                                          -->
+<!--                                                                         -->
+<!--peter's migtree(nogamma, rand#1450692337)/migdata, microsat, coal & mig  -->
+<!--theta = 10.0 (from: Ne = 1000, mu = 0.0025), all immigration rates = 1.0 -->
+<!--2 populations with 20 individuals/population                             -->
+<!--                                                                         -->
+<!--                                                                         -->
+<!--analyzed as:                                                             -->
+<!--                                                                         -->
+<!--unlinked micros (10 markers)                                             -->
+<!--stepwise model (with normalization)                                      -->
+<!--starting params (theta = watterson, mig = 10.0)                          -->
+<!--5 initial (1000 samples, 20 interval, 1000 discard)                      -->
+<!--2 final   (2500 samples, 20 interval, 1000 discard)                      -->
+<!--no profiles                                                              -->
+<!-- Created from the LamarcDS DataStore -->
+<!-- -->
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>1</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>1</resimulating>
+      <haplotyping>0</haplotyping>
+    </strategy>
+    <initial>
+      <number>5</number>
+      <samples>1000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>2500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <seed>1005</seed>
+    <verbosity>verbose</verbosity>
+    <progress-reports>verbose</progress-reports>
+    <results-file>outfile.coalmigmsat</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile.coalmigmsat</in-summary-file>
+    <use-out-summary>true</use-out-summary>
+    <out-summary-file>outsumfile.coalmigmsat</out-summary-file>
+    <out-xml-file>menuinfile</out-xml-file>
+    <plotting>
+    </plotting>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.2351 0.2351</start-values>
+      <method> WATTERSON WATTERSON</method>
+      <max-events>32000</max-events>
+      <profiles>none none </profiles>
+    </coalescence>
+    <migration>
+      <start-values> 0 10 10 0</start-values>
+      <method> USER USER USER USER</method>
+      <max-events>10000</max-events>
+      <profiles>none none none none </profiles>
+    </migration>
+  </forces>
+  <data>
+    <region name="Ocharinka0">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+          <marker-weights> 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka1">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+          <marker-weights> 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka2">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+          <marker-weights> 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka3">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+          <marker-weights> 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka4">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+          <marker-weights> 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            9 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            13 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            9 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            10 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            11 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            16 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            14 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            12 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            15 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka5">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+          <marker-weights> 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            26 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka6">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+          <marker-weights> 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka7">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+          <marker-weights> 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            26 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka8">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+          <marker-weights> 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            17 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            26 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            26 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            26 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            18 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            20 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            19 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+    <region name="Ocharinka9">
+      <model name="Stepwise">
+        <normalize>true</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+      </model>
+      <spacing>
+        <block>
+          <map-position>0</map-position>
+          <length>1</length>
+          <locations> 0</locations>
+          <offset>0</offset>
+          <marker-weights> 1</marker-weights>
+        </block>
+      </spacing>
+    <population name="   pop-0">
+      <individual name="0BAA 0BBF ">
+        <sample name="0BAA 0BBF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAA 0BBF -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAQ 0BAL ">
+        <sample name="0BAQ 0BAL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAQ 0BAL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAI 0BAP ">
+        <sample name="0BAI 0BAP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAI 0BAP -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBE 0BBL ">
+        <sample name="0BBE 0BBL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBE 0BBL -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBK 0BBG ">
+        <sample name="0BBK 0BBG -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBK 0BBG -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAB 0BBJ ">
+        <sample name="0BAB 0BBJ -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BAB 0BBJ -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBB 0BBM ">
+        <sample name="0BBB 0BBM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBB 0BBM -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAC 0BBP ">
+        <sample name="0BAC 0BBP -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAC 0BBP -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBD 0BBA ">
+        <sample name="0BBD 0BBA -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBD 0BBA -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBC 0BAU ">
+        <sample name="0BBC 0BAU -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="0BBC 0BAU -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAG 0BAH ">
+        <sample name="0BAG 0BAH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAG 0BAH -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAN 0BBI ">
+        <sample name="0BAN 0BBI -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="0BAN 0BBI -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAO 0BAD ">
+        <sample name="0BAO 0BAD -1">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+        <sample name="0BAO 0BAD -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBO 0BAF ">
+        <sample name="0BBO 0BAF -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BBO 0BAF -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAR 0BAS ">
+        <sample name="0BAR 0BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAR 0BAS -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAJ 0BAW ">
+        <sample name="0BAJ 0BAW -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAJ 0BAW -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BBH 0BAK ">
+        <sample name="0BBH 0BAK -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BBH 0BAK -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAV 0BAM ">
+        <sample name="0BAV 0BAM -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAV 0BAM -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAT 0BBN ">
+        <sample name="0BAT 0BBN -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAT 0BBN -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="0BAX 0BAE ">
+        <sample name="0BAX 0BAE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="0BAX 0BAE -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    <population name="   pop-1">
+      <individual name="1BAU 1BAF ">
+        <sample name="1BAU 1BAF -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAU 1BAF -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAX 1BAP ">
+        <sample name="1BAX 1BAP -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAX 1BAP -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAC 1BBH ">
+        <sample name="1BAC 1BBH -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAC 1BBH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAO 1BBN ">
+        <sample name="1BAO 1BBN -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAO 1BBN -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAQ 1BAL ">
+        <sample name="1BAQ 1BAL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAQ 1BAL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAB 1BAT ">
+        <sample name="1BAB 1BAT -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAB 1BAT -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBB 1BAR ">
+        <sample name="1BBB 1BAR -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBB 1BAR -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBA 1BBL ">
+        <sample name="1BBA 1BBL -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBA 1BBL -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAI 1BBM ">
+        <sample name="1BAI 1BBM -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BAI 1BBM -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBC 1BBG ">
+        <sample name="1BBC 1BBG -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBC 1BBG -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAG 1BAM ">
+        <sample name="1BAG 1BAM -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAG 1BAM -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBK 1BAA ">
+        <sample name="1BBK 1BAA -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBK 1BAA -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBO 1BAH ">
+        <sample name="1BBO 1BAH -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BBO 1BAH -2">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAK 1BBE ">
+        <sample name="1BAK 1BBE -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAK 1BBE -2">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAW 1BBI ">
+        <sample name="1BAW 1BBI -1">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+        <sample name="1BAW 1BBI -2">
+          <datablock type="MICROSAT">
+            25 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAV 1BAE ">
+        <sample name="1BAV 1BAE -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAV 1BAE -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BAN 1BBJ ">
+        <sample name="1BAN 1BBJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BAN 1BBJ -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBP 1BAJ ">
+        <sample name="1BBP 1BAJ -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBP 1BAJ -2">
+          <datablock type="MICROSAT">
+            21 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBF 1BAD ">
+        <sample name="1BBF 1BAD -1">
+          <datablock type="MICROSAT">
+            23 
+          </datablock>
+        </sample>
+        <sample name="1BBF 1BAD -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="1BBD 1BAS ">
+        <sample name="1BBD 1BAS -1">
+          <datablock type="MICROSAT">
+            24 
+          </datablock>
+        </sample>
+        <sample name="1BBD 1BAS -2">
+          <datablock type="MICROSAT">
+            22 
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/v2.0.infiles/infile.coalmigrep b/doc/testfiles/v2.0.infiles/infile.coalmigrep
new file mode 100644
index 0000000..7636682
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.coalmigrep
@@ -0,0 +1,1631 @@
+<lamarc>
+<!--For regions: region1                                                     -->
+<!--generated with:                                                          -->
+<!--                                                                         -->
+<!--jrectree(rand#???)/snp, snp, coal & mig                                  -->
+<!--theta = 0.01, migrate = 100                                              -->
+<!--2 populations, 20 individuals/population, 1000 sites                     -->
+<!--                                                                         -->
+<!--analyzed as:                                                             -->
+<!--                                                                         -->
+<!--snp (with spacing)                                                       -->
+<!--F84 model (freqsfromdata) with 1 category                                -->
+<!--2 replicates                                                             -->
+<!--starting params (theta = watterson, mig = FST)                           -->
+<!--5 initial (2000 samples, 20 interval, 1000 discard)                      -->
+<!--1 final   (5000 samples, 20 interval, 1000 discard)                      -->
+<!--no heating                                                               -->
+<!--percentile profiles (theta), fixed profiles(migration)                   -->
+<!--                                                                         -->
+<!--                                                                         -->
+<!--                                                                         -->
+<!--For regions: region2, region3, region4, region5, region6                 -->
+<!--generated with:                                                          -->
+<!--                                                                         -->
+<!--peter's migtree(nogamma,rand#145069)/migdata, microsat, coal & mig       -->
+<!--theta = 10.0 (from: Ne = 1000, mu = 0.0025), all immigration rates = 1.0 -->
+<!--2 populations with 10 individuals/population                             -->
+<!--                                                                         -->
+<!--analyzed as:                                                             -->
+<!--                                                                         -->
+<!--unlinked micros (5 markers)                                              -->
+<!--brownian model                                                           -->
+<!--2 replicates                                                             -->
+<!--starting params (theta = watterson, mig = FST)                           -->
+<!--5 initial (2000 samples, 20 interval, 1000 discard)                      -->
+<!--1 final   (5000 samples, 20 interval, 1000 discard)                      -->
+<!--no heating                                                               -->
+<!--percentile profiles (theta), fixed profiles (migration)                  -->
+<!--                                                                         -->
+<!--                                                                         -->
+<!-- Created from the LamarcDS DataStore -->
+	<forces>
+		<coalescence>
+			<method> Watterson Watterson </method>
+			<max-events> 1000 </max-events>
+                        <profiles> percentile percentile </profiles>
+		</coalescence>
+		<migration>
+			<start-values> 0.0 100 100 0.0  </start-values>
+			<method> FST FST FST FST </method>
+			<max-events> 10000 </max-events>
+                        <profiles> fixed fixed fixed fixed </profiles>
+		</migration>
+	</forces>
+	<!-- -->
+	<chains>
+		<replicates> 2 </replicates>
+		<heating>
+			<temperatures> 1 </temperatures>
+			<swap-interval> 1 </swap-interval>
+		</heating>
+		<strategy>
+			<resimulating> 1 </resimulating>
+		</strategy>
+		<initial>
+			<number> 5 </number>
+			<samples> 2000 </samples>
+			<discard> 1000 </discard>
+			<interval> 20 </interval>
+		</initial>
+		<final>
+			<number> 1 </number>
+			<samples> 5000 </samples>
+			<discard> 1000 </discard>
+			<interval> 20 </interval>
+		</final>
+	</chains>
+	<!-- -->
+	<format>
+		<verbosity> verbose </verbosity>
+		<progress-reports> verbose </progress-reports>
+		<echo> true </echo>
+		<plotting>
+			<profile> false </profile>
+			<posterior> false </posterior>
+		</plotting>
+		<seed> 1005 </seed>
+		<parameter-file> parmfile </parameter-file>
+		<results-file> outfile.coalmigrep </results-file>
+		<in-summary-file> insumfile.coalmigrep </in-summary-file>
+		<out-summary-file> outsumfile.coalmigrep </out-summary-file>
+                <use-out-summary> true </use-out-summary>
+                <use-in-summary> false </use-in-summary>
+	</format>
+        <model name="Brownian">
+                <normalize> false </normalize>
+        </model>
+	<!-- -->
+	<data>
+		<region name="region1">
+                          <model name="F84">
+                                <base-freqs> calculated </base-freqs>
+                                <ttratio> 2.0 </ttratio>
+                          </model>
+<spacing>
+				<block>
+					<map-position> 0 </map-position>
+					<length> 1000 </length>
+					<locations> 4 17 24 51 80 82 88 96 107 113 125 141 151 163 194 198 208 209 212 217 247 255 259 262 265 280 291 293 299 304 306 315 324 325 333 344 367 369 375 381 386 399 400 407 421 422 453 459 471 484 512 517 529 534 547 552 557 562 576 580 588 602 630 632 636 648 649 651 653 668 689 703 710 715 718 722 731 732 737 741 743 758 762 767 771 772 791 805 812 813 818 822 832 845 849 852 863 864 872 873 883 886 892 906 912 917 922 936 963 964 983 985 990 991 
+					</locations>
+					<offset> 0 </offset>
+				</block>
+			</spacing>
+			<population name="pop1">
+				<individual name="00_0004   ">
+					<sample name="00_0004   ">
+						<datablock type="DNA">
+							GCCTCGAGCCCGCACATTGGCTACGGGGTGTGCCGCGCGGGCCACAACGCGCAGGGATGCAATGCCTTGGTGGTCTAACCTATCCCAGGACCAGAATTGGGTCAGTTTAGAGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0011   ">
+					<sample name="00_0011   ">
+						<datablock type="DNA">
+							GCCTCGAGCCCGCACATTGGCTACGGGGTGTGCCGCGCGGGCCACAACGCGCAGGGATGCAATGCCTTGGTGGTCTAACCTATCCCAGGACCAGAATTGGGTCAGTTTAGAGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0009   ">
+					<sample name="00_0009   ">
+						<datablock type="DNA">
+							GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGCGCGTGAACACCTTGACTTAATCGTCTAAGCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0017   ">
+					<sample name="00_0017   ">
+						<datablock type="DNA">
+							GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGCGCGTGAACACCTTGACTTAATCGTCTAAGCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0002   ">
+					<sample name="00_0002   ">
+						<datablock type="DNA">
+							GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGCGGGTGAACACCTTGACTTAATCGTCTAAGCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0012   ">
+					<sample name="00_0012   ">
+						<datablock type="DNA">
+							GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGCGGGTGAACACCTTGACTTAATCGTCTAAGCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0007   ">
+					<sample name="00_0007   ">
+						<datablock type="DNA">
+							GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGCGCGTGAACACCTTGACTTAATCGTCTAACCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0008   ">
+					<sample name="00_0008   ">
+						<datablock type="DNA">
+							GCTACAATACTTGAGACTGGCCATAGCACTTGGCCCACAGGACATGGTGCGCGTGAACACCTTGACTTAATCGTCTAACCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0014   ">
+					<sample name="00_0014   ">
+						<datablock type="DNA">
+							GCTACAATACTTGAGACTGGCCATGGCACTTGGCCCACAGGACATGGTGTGCGTGAACACCTTGACTTAATCGTCTAACCTTTCATAAAGCCGGAATCAGAGCAATCAAGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0019   ">
+					<sample name="00_0019   ">
+						<datablock type="DNA">
+							GCTATAATACTTGACACTGGCCATGGCACTTGGTCCGCAGGACATGATGCGCGTGAACATCTTAACTTAACCAACTAACCTTTCATAAAGCCGGAACCAGAGCAATTAGTTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0000   ">
+					<sample name="00_0000   ">
+						<datablock type="DNA">
+							GCTATAATACTTGACACTAGCCATGGCACTTGGTCCGCAGGACATGATGCGCGTGAACATCTTAACTTAACCAACTAACCTTTCATAAAGCCGGAACCAGAGCAATTAGTTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0013   ">
+					<sample name="00_0013   ">
+						<datablock type="DNA">
+							GCTATAATACTTGACACTGGCCATGGCACTTGGTCCGCAGGACATGATGCGCGTGAACATCTTAACTTAACCAACTAACCTTTCATAAAGCCGGAACCAGAGCAATTAGTTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0010   ">
+					<sample name="00_0010   ">
+						<datablock type="DNA">
+							GCTATAATACTTGGCACTGGCCATGGCACTTGGCCCACACGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACCGTTCATAAAGCCGGAATCACAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0016   ">
+					<sample name="00_0016   ">
+						<datablock type="DNA">
+							GCTATAATACTTGGCACTGGCCATGGCACTTGGCCCACACGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACCTTTCATAAAGCCGGTATCACAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0003   ">
+					<sample name="00_0003   ">
+						<datablock type="DNA">
+							GCTATAATACTTGGCACTGGCCATGGCACTTGGCCCACACGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACCTTTCATAAAGCCGGAATCACAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0005   ">
+					<sample name="00_0005   ">
+						<datablock type="DNA">
+							GCTATAATACTTGGCACTGGCCATGGCACTTGGCCCACACGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACCTTTCATAAAGCCGGAATCACAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0018   ">
+					<sample name="00_0018   ">
+						<datablock type="DNA">
+							GCTATAATACTTGGCACTGGCCATGGCACTTGGCCCACAGGACATGATACGCGTGAACATCTTGACTTAACCAACTAACCTTTCATAAAGCCGGAATCACGGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0006   ">
+					<sample name="00_0006   ">
+						<datablock type="DNA">
+							GCTATAATACTTGGCACTGGCCATGGCACTTGACCCACAGGACATGATGCGCGTGAACATCTTGACTTAACCAATTAACCTTTCATAAAGCCGGAATCACAGCGATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0001   ">
+					<sample name="00_0001   ">
+						<datablock type="DNA">
+							GCTATAATACTTGGCACTGGCCATGGCACTTGACCCACAGGACATGATGCGCGTGAACGTCTTGACTTAACCAATTAACCTTTCATAAAGCCGGAATCACAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0015   ">
+					<sample name="00_0015   ">
+						<datablock type="DNA">
+							GCTATAATACTTGGCACTGGCCATGGCACTTGACCCACAGGACATGATGCGCGTGAACATCTTGACTTAACCAATTAACCTTTCATAAAGCCGGAATCACAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+			<population name="pop2">
+				<individual name="01_0022   ">
+					<sample name="01_0022   ">
+						<datablock type="DNA">
+							GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGCGGGCTTCAACGCGCGGGGATGCAACGCTTGGGTGGTCTAACCTATACCAGGACCAGAATTGGGTGAGCTTAGTGGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0020   ">
+					<sample name="01_0020   ">
+						<datablock type="DNA">
+							GCCTTACGCCCGCACGTTGCTTACGAGGCGTGGCGCGCGGGCTTCAACGCGCGGGGATGCAACGCTTGGGTGGTCTAACCTATCCCAGGACCAGAATTGGGTCAGCTTAGTGGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0030   ">
+					<sample name="01_0030   ">
+						<datablock type="DNA">
+							GCCTTACGCCCGCACGTTGCTTACGAGGCGTGGCGCGCGGGCTTCAACGCGCGGGGATGCAACGCTTGGGTGGTCTAACCTATCCCAGGACCAGAATTGGGTCAGCTTAGTGGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0027   ">
+					<sample name="01_0027   ">
+						<datablock type="DNA">
+							GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGGCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTAGCCTATCCCAGGACCAGAGTTGGGTCAGCTTAGTGGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0029   ">
+					<sample name="01_0029   ">
+						<datablock type="DNA">
+							GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGGCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTAGCCTATCCCAGGACCAGAGTTGGGTCAGCTTAGTGGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0025   ">
+					<sample name="01_0025   ">
+						<datablock type="DNA">
+							GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGGCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTCGCCTATCCCAGGACCAGAATTGGGTCAGCTTAGTGGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0038   ">
+					<sample name="01_0038   ">
+						<datablock type="DNA">
+							GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGGCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTAGCCTATCCCAGGACCAGAATTGGGTCAGCTTAGTGGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0024   ">
+					<sample name="01_0024   ">
+						<datablock type="DNA">
+							GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGCCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTAGCCTATCCCGGGACCAGAATTGGGTCAGCTTAGTGGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0026   ">
+					<sample name="01_0026   ">
+						<datablock type="DNA">
+							GCCTTAAGCCCGCACGTTGCTTACGAGGCGTGGCGCGGGGGCTTCAACGCGCGGGGATGCAACGCTTTGGTGGTCTAGCCTATCCCAGGACCAGAATTGGGTCAGCTTAGTGGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0033   ">
+					<sample name="01_0033   ">
+						<datablock type="DNA">
+							GCCTCAAGCCCGCACATTGGCTACGGGGTGCAGCGCGCGGGCCACAACGCGCAGGGCTGCAATGCCTTGGTGGTCTAACCTATCCCAGGACTAGAATTGGGTCAGTTTAGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0035   ">
+					<sample name="01_0035   ">
+						<datablock type="DNA">
+							ACCTCAAGCCCGCACATTGGCTACGGGGTGTGGCGCGCGGGCCACAACGCGCAGGGATGCAATGCCTTGGTGGTCTAACCTATCCCAGGACCAGAATTGGGTCAGTTTAGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0031   ">
+					<sample name="01_0031   ">
+						<datablock type="DNA">
+							GTTACAATACTTGACACCGGCTCTGGCACTTGGCCAACAGGACATGATGCACGTGAACACGTTGACTTAATCGTCTAACCTTTCACAAGGCCAAAATCAGAGCAATTAAGTTAC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0028   ">
+					<sample name="01_0028   ">
+						<datablock type="DNA">
+							GTTACAATACTTGACACCGGCTCTGGCACTTGGCCAACAGGACATGATGCACGTGAACACGTTGACTTAATCGTCTAACCTTTCACAAGGCCAAAATCAGAGCAATTAAGTTAC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0032   ">
+					<sample name="01_0032   ">
+						<datablock type="DNA">
+							GTTACAATACTTGACACCGGCTCTGGCACTTGGCCAACAGGACATGATGCACGTGAACACGTTGACTTAATCGTCTAACCTTTCACAAGGCCAAAATCAGAGCAATTAAGTTAC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0023   ">
+					<sample name="01_0023   ">
+						<datablock type="DNA">
+							GCTATAATACTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTAAACATCTTGACATAACCGACTAACCTTGCATAAAGTCGGAATCAGAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0036   ">
+					<sample name="01_0036   ">
+						<datablock type="DNA">
+							GCTATAATACTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTAAACATCTTGACATAACCGACTAACCTTGCATAAAGTCGGAATCAGAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0039   ">
+					<sample name="01_0039   ">
+						<datablock type="DNA">
+							GCTATAATACTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTAAACATCTTGACATAACCGACTAACCTTGCATAAAGTCGGAATCAGAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0021   ">
+					<sample name="01_0021   ">
+						<datablock type="DNA">
+							GCTATAATATTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTGAACATCTTGACTTAACCAACAAACTTTTCATAAAGCCGGAATCAGAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0034   ">
+					<sample name="01_0034   ">
+						<datablock type="DNA">
+							GCTATAATATTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACTTTTCATAAAGCCGGAATCAGAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="01_0037   ">
+					<sample name="01_0037   ">
+						<datablock type="DNA">
+							GCTATAATATTTGACACTGGCCATGGCACTTGGCCCACAGGACATGATGCGCGTGAACATCTTGACTTAACCAACTAACTTTTCATAAAGCCGGAATCAGAGCAATTAGGTGGC
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+		<region name="region2">
+			<population name="pop1">
+				<individual name="0BAM 0BAG ">
+					<sample name="0BAM 0BAG -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="0BAM 0BAG -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAT 0BAK ">
+					<sample name="0BAT 0BAK -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="0BAT 0BAK -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAB 0BAS ">
+					<sample name="0BAB 0BAS -1">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+					<sample name="0BAB 0BAS -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAJ 0BAR ">
+					<sample name="0BAJ 0BAR -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="0BAJ 0BAR -2">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAC 0BAI ">
+					<sample name="0BAC 0BAI -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="0BAC 0BAI -2">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAN 0BAQ ">
+					<sample name="0BAN 0BAQ -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="0BAN 0BAQ -2">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAD 0BAA ">
+					<sample name="0BAD 0BAA -1">
+						<datablock type="MICROSAT">
+							24 
+						</datablock>
+					</sample>
+					<sample name="0BAD 0BAA -2">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAE 0BAL ">
+					<sample name="0BAE 0BAL -1">
+						<datablock type="MICROSAT">
+							24 
+						</datablock>
+					</sample>
+					<sample name="0BAE 0BAL -2">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAF 0BAH ">
+					<sample name="0BAF 0BAH -1">
+						<datablock type="MICROSAT">
+							24 
+						</datablock>
+					</sample>
+					<sample name="0BAF 0BAH -2">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAP 0BAO ">
+					<sample name="0BAP 0BAO -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="0BAP 0BAO -2">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+			<population name="pop2">
+				<individual name="1BAA 1BAB ">
+					<sample name="1BAA 1BAB -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="1BAA 1BAB -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAC 1BAJ ">
+					<sample name="1BAC 1BAJ -1">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+					<sample name="1BAC 1BAJ -2">
+						<datablock type="MICROSAT">
+							24 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAN 1BAP ">
+					<sample name="1BAN 1BAP -1">
+						<datablock type="MICROSAT">
+							19 
+						</datablock>
+					</sample>
+					<sample name="1BAN 1BAP -2">
+						<datablock type="MICROSAT">
+							24 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAR 1BAS ">
+					<sample name="1BAR 1BAS -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="1BAR 1BAS -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAH 1BAI ">
+					<sample name="1BAH 1BAI -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="1BAH 1BAI -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAD 1BAQ ">
+					<sample name="1BAD 1BAQ -1">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+					<sample name="1BAD 1BAQ -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAL 1BAG ">
+					<sample name="1BAL 1BAG -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="1BAL 1BAG -2">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAT 1BAM ">
+					<sample name="1BAT 1BAM -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="1BAT 1BAM -2">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAE 1BAK ">
+					<sample name="1BAE 1BAK -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="1BAE 1BAK -2">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAO 1BAF ">
+					<sample name="1BAO 1BAF -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="1BAO 1BAF -2">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+		<region name="region3">
+			<population name="pop1">
+				<individual name="0BAM 0BAG ">
+					<sample name="0BAM 0BAG -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="0BAM 0BAG -2">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAT 0BAK ">
+					<sample name="0BAT 0BAK -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="0BAT 0BAK -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAB 0BAS ">
+					<sample name="0BAB 0BAS -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="0BAB 0BAS -2">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAJ 0BAR ">
+					<sample name="0BAJ 0BAR -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="0BAJ 0BAR -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAC 0BAI ">
+					<sample name="0BAC 0BAI -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="0BAC 0BAI -2">
+						<datablock type="MICROSAT">
+							12 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAN 0BAQ ">
+					<sample name="0BAN 0BAQ -1">
+						<datablock type="MICROSAT">
+							25 
+						</datablock>
+					</sample>
+					<sample name="0BAN 0BAQ -2">
+						<datablock type="MICROSAT">
+							12 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAD 0BAA ">
+					<sample name="0BAD 0BAA -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="0BAD 0BAA -2">
+						<datablock type="MICROSAT">
+							12 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAE 0BAL ">
+					<sample name="0BAE 0BAL -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="0BAE 0BAL -2">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAF 0BAH ">
+					<sample name="0BAF 0BAH -1">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+					<sample name="0BAF 0BAH -2">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAP 0BAO ">
+					<sample name="0BAP 0BAO -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="0BAP 0BAO -2">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+			<population name="pop2">
+				<individual name="1BAA 1BAB ">
+					<sample name="1BAA 1BAB -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="1BAA 1BAB -2">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAC 1BAJ ">
+					<sample name="1BAC 1BAJ -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="1BAC 1BAJ -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAN 1BAP ">
+					<sample name="1BAN 1BAP -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="1BAN 1BAP -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAR 1BAS ">
+					<sample name="1BAR 1BAS -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="1BAR 1BAS -2">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAH 1BAI ">
+					<sample name="1BAH 1BAI -1">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+					<sample name="1BAH 1BAI -2">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAD 1BAQ ">
+					<sample name="1BAD 1BAQ -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="1BAD 1BAQ -2">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAL 1BAG ">
+					<sample name="1BAL 1BAG -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="1BAL 1BAG -2">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAT 1BAM ">
+					<sample name="1BAT 1BAM -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="1BAT 1BAM -2">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAE 1BAK ">
+					<sample name="1BAE 1BAK -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="1BAE 1BAK -2">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAO 1BAF ">
+					<sample name="1BAO 1BAF -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="1BAO 1BAF -2">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+		<region name="region4">
+			<population name="pop1">
+				<individual name="0BAM 0BAG ">
+					<sample name="0BAM 0BAG -1">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+					<sample name="0BAM 0BAG -2">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAT 0BAK ">
+					<sample name="0BAT 0BAK -1">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+					<sample name="0BAT 0BAK -2">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAB 0BAS ">
+					<sample name="0BAB 0BAS -1">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+					<sample name="0BAB 0BAS -2">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAJ 0BAR ">
+					<sample name="0BAJ 0BAR -1">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+					<sample name="0BAJ 0BAR -2">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAC 0BAI ">
+					<sample name="0BAC 0BAI -1">
+						<datablock type="MICROSAT">
+							19 
+						</datablock>
+					</sample>
+					<sample name="0BAC 0BAI -2">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAN 0BAQ ">
+					<sample name="0BAN 0BAQ -1">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+					<sample name="0BAN 0BAQ -2">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAD 0BAA ">
+					<sample name="0BAD 0BAA -1">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+					<sample name="0BAD 0BAA -2">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAE 0BAL ">
+					<sample name="0BAE 0BAL -1">
+						<datablock type="MICROSAT">
+							13 
+						</datablock>
+					</sample>
+					<sample name="0BAE 0BAL -2">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAF 0BAH ">
+					<sample name="0BAF 0BAH -1">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+					<sample name="0BAF 0BAH -2">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAP 0BAO ">
+					<sample name="0BAP 0BAO -1">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+					<sample name="0BAP 0BAO -2">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+			<population name="pop2">
+				<individual name="1BAA 1BAB ">
+					<sample name="1BAA 1BAB -1">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+					<sample name="1BAA 1BAB -2">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAC 1BAJ ">
+					<sample name="1BAC 1BAJ -1">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+					<sample name="1BAC 1BAJ -2">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAN 1BAP ">
+					<sample name="1BAN 1BAP -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="1BAN 1BAP -2">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAR 1BAS ">
+					<sample name="1BAR 1BAS -1">
+						<datablock type="MICROSAT">
+							19 
+						</datablock>
+					</sample>
+					<sample name="1BAR 1BAS -2">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAH 1BAI ">
+					<sample name="1BAH 1BAI -1">
+						<datablock type="MICROSAT">
+							19 
+						</datablock>
+					</sample>
+					<sample name="1BAH 1BAI -2">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAD 1BAQ ">
+					<sample name="1BAD 1BAQ -1">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+					<sample name="1BAD 1BAQ -2">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAL 1BAG ">
+					<sample name="1BAL 1BAG -1">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+					<sample name="1BAL 1BAG -2">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAT 1BAM ">
+					<sample name="1BAT 1BAM -1">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+					<sample name="1BAT 1BAM -2">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAE 1BAK ">
+					<sample name="1BAE 1BAK -1">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+					<sample name="1BAE 1BAK -2">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAO 1BAF ">
+					<sample name="1BAO 1BAF -1">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+					<sample name="1BAO 1BAF -2">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+		<region name="region5">
+			<population name="pop1">
+				<individual name="0BAM 0BAG ">
+					<sample name="0BAM 0BAG -1">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+					<sample name="0BAM 0BAG -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAT 0BAK ">
+					<sample name="0BAT 0BAK -1">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+					<sample name="0BAT 0BAK -2">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAB 0BAS ">
+					<sample name="0BAB 0BAS -1">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+					<sample name="0BAB 0BAS -2">
+						<datablock type="MICROSAT">
+							19 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAJ 0BAR ">
+					<sample name="0BAJ 0BAR -1">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+					<sample name="0BAJ 0BAR -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAC 0BAI ">
+					<sample name="0BAC 0BAI -1">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+					<sample name="0BAC 0BAI -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAN 0BAQ ">
+					<sample name="0BAN 0BAQ -1">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+					<sample name="0BAN 0BAQ -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAD 0BAA ">
+					<sample name="0BAD 0BAA -1">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+					<sample name="0BAD 0BAA -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAE 0BAL ">
+					<sample name="0BAE 0BAL -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="0BAE 0BAL -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAF 0BAH ">
+					<sample name="0BAF 0BAH -1">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+					<sample name="0BAF 0BAH -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAP 0BAO ">
+					<sample name="0BAP 0BAO -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="0BAP 0BAO -2">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+			<population name="pop2">
+				<individual name="1BAA 1BAB ">
+					<sample name="1BAA 1BAB -1">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+					<sample name="1BAA 1BAB -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAC 1BAJ ">
+					<sample name="1BAC 1BAJ -1">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+					<sample name="1BAC 1BAJ -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAN 1BAP ">
+					<sample name="1BAN 1BAP -1">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+					<sample name="1BAN 1BAP -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAR 1BAS ">
+					<sample name="1BAR 1BAS -1">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+					<sample name="1BAR 1BAS -2">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAH 1BAI ">
+					<sample name="1BAH 1BAI -1">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+					<sample name="1BAH 1BAI -2">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAD 1BAQ ">
+					<sample name="1BAD 1BAQ -1">
+						<datablock type="MICROSAT">
+							17 
+						</datablock>
+					</sample>
+					<sample name="1BAD 1BAQ -2">
+						<datablock type="MICROSAT">
+							24 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAL 1BAG ">
+					<sample name="1BAL 1BAG -1">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+					<sample name="1BAL 1BAG -2">
+						<datablock type="MICROSAT">
+							24 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAT 1BAM ">
+					<sample name="1BAT 1BAM -1">
+						<datablock type="MICROSAT">
+							18 
+						</datablock>
+					</sample>
+					<sample name="1BAT 1BAM -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAE 1BAK ">
+					<sample name="1BAE 1BAK -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="1BAE 1BAK -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAO 1BAF ">
+					<sample name="1BAO 1BAF -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="1BAO 1BAF -2">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+		<region name="region6">
+			<population name="pop1">
+				<individual name="0BAM 0BAG ">
+					<sample name="0BAM 0BAG -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="0BAM 0BAG -2">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAT 0BAK ">
+					<sample name="0BAT 0BAK -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="0BAT 0BAK -2">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAB 0BAS ">
+					<sample name="0BAB 0BAS -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="0BAB 0BAS -2">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAJ 0BAR ">
+					<sample name="0BAJ 0BAR -1">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+					<sample name="0BAJ 0BAR -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAC 0BAI ">
+					<sample name="0BAC 0BAI -1">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+					<sample name="0BAC 0BAI -2">
+						<datablock type="MICROSAT">
+							24 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAN 0BAQ ">
+					<sample name="0BAN 0BAQ -1">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+					<sample name="0BAN 0BAQ -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAD 0BAA ">
+					<sample name="0BAD 0BAA -1">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+					<sample name="0BAD 0BAA -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAE 0BAL ">
+					<sample name="0BAE 0BAL -1">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+					<sample name="0BAE 0BAL -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAF 0BAH ">
+					<sample name="0BAF 0BAH -1">
+						<datablock type="MICROSAT">
+							14 
+						</datablock>
+					</sample>
+					<sample name="0BAF 0BAH -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="0BAP 0BAO ">
+					<sample name="0BAP 0BAO -1">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+					<sample name="0BAP 0BAO -2">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+			<population name="pop2">
+				<individual name="1BAA 1BAB ">
+					<sample name="1BAA 1BAB -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="1BAA 1BAB -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAC 1BAJ ">
+					<sample name="1BAC 1BAJ -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="1BAC 1BAJ -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAN 1BAP ">
+					<sample name="1BAN 1BAP -1">
+						<datablock type="MICROSAT">
+							20 
+						</datablock>
+					</sample>
+					<sample name="1BAN 1BAP -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAR 1BAS ">
+					<sample name="1BAR 1BAS -1">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+					<sample name="1BAR 1BAS -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAH 1BAI ">
+					<sample name="1BAH 1BAI -1">
+						<datablock type="MICROSAT">
+							21 
+						</datablock>
+					</sample>
+					<sample name="1BAH 1BAI -2">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAD 1BAQ ">
+					<sample name="1BAD 1BAQ -1">
+						<datablock type="MICROSAT">
+							13 
+						</datablock>
+					</sample>
+					<sample name="1BAD 1BAQ -2">
+						<datablock type="MICROSAT">
+							23 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAL 1BAG ">
+					<sample name="1BAL 1BAG -1">
+						<datablock type="MICROSAT">
+							15 
+						</datablock>
+					</sample>
+					<sample name="1BAL 1BAG -2">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAT 1BAM ">
+					<sample name="1BAT 1BAM -1">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+					<sample name="1BAT 1BAM -2">
+						<datablock type="MICROSAT">
+							22 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAE 1BAK ">
+					<sample name="1BAE 1BAK -1">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+					<sample name="1BAE 1BAK -2">
+						<datablock type="MICROSAT">
+							25 
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="1BAO 1BAF ">
+					<sample name="1BAO 1BAF -1">
+						<datablock type="MICROSAT">
+							16 
+						</datablock>
+					</sample>
+					<sample name="1BAO 1BAF -2">
+						<datablock type="MICROSAT">
+							29 
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+	</data>
+</lamarc>
+
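The MICROSAT regions in the file above all share one nesting: each diploid individual carries two <sample> elements, and each sample wraps a single allele recorded as a repeat count in a <datablock type="MICROSAT">. As a minimal sketch (not part of the LAMARC sources; the inline snippet and variable names are invented for illustration), that structure can be read back into per-individual genotypes with the Python standard library:

    import xml.etree.ElementTree as ET

    # Invented example snippet mirroring the nesting used in the data file above.
    snippet = """
    <population name="pop1">
      <individual name="0BAM 0BAG ">
        <sample name="0BAM 0BAG -1">
          <datablock type="MICROSAT"> 16 </datablock>
        </sample>
        <sample name="0BAM 0BAG -2">
          <datablock type="MICROSAT"> 14 </datablock>
        </sample>
      </individual>
    </population>
    """

    population = ET.fromstring(snippet)
    for individual in population.findall("individual"):
        # Two samples per diploid individual, one microsatellite allele each,
        # stored as a repeat count in the datablock text.
        alleles = [int(block.text) for block in individual.findall("sample/datablock")
                   if block.get("type") == "MICROSAT"]
        print(individual.get("name").strip(), alleles)   # prints: 0BAM 0BAG [16, 14]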
diff --git a/doc/testfiles/v2.0.infiles/infile.coalrec b/doc/testfiles/v2.0.infiles/infile.coalrec
new file mode 100644
index 0000000..b735781
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.coalrec
@@ -0,0 +1,365 @@
+<lamarc>
+<!--generated with:                                                          -->
+<!--                                                                         -->
+<!--jrectree (rand#???)/snp, snp, coal & rec                                 -->
+<!--theta = 0.01, recrate = 0.1                                              -->
+<!--20 individuals 2 haplotypes/individual, 1000 sites                       -->
+<!--                                                                         -->
+<!--                                                                         -->
+<!--analyzed as:                                                             -->
+<!--                                                                         -->
+<!--snp (with spacing)                                                       -->
+<!--haplotyping 50%                                                          -->
+<!--F84 model (freqsfromdata) with 2 categories                              -->
+<!--starting params (theta = 0.01, recrate = 0.1)                            -->
+<!--5 initial (2000 samples, 20 interval, 1000 discard)                      -->
+<!--2 final   (5000 samples, 20 interval, 1000 discard)                      -->
+<!--adaptive heating (10 interval, 4 temps)                                  -->
+<!--fixed profiles                                                           -->
+<!-- Created from the LamarcDS DataStore -->
+<!-- -->
+	<forces>
+		<coalescence>
+			<start-values> 0.01 </start-values>
+			<method> USER </method>
+			<max-events> 1000 </max-events>
+                        <profiles> fixed </profiles>
+		</coalescence>
+                <recombination>
+                        <start-values> 0.1 </start-values>
+                        <method> USER </method>
+			<max-events> 1000 </max-events>
+                        <profiles> fixed </profiles>
+                </recombination>
+	</forces>
+	<!-- -->
+	<chains>
+		<replicates> 1 </replicates>
+		<heating>
+			<temperatures> 1.0 1.1 1.2 1.3 </temperatures>
+			<swap-interval> 10 </swap-interval>
+                        <adaptive> true </adaptive>
+		</heating>
+		<strategy>
+			<resimulating> 0.5 </resimulating>
+			<haplotyping> 0.5 </haplotyping>
+		</strategy>
+		<initial>
+			<number> 10 </number>
+			<samples> 2000 </samples>
+			<discard> 1000 </discard>
+			<interval> 20 </interval>
+		</initial>
+		<final>
+			<number> 2 </number>
+			<samples> 5000 </samples>
+			<discard> 1000 </discard>
+			<interval> 20 </interval>
+		</final>
+	</chains>
+	<!-- -->
+	<format>
+		<verbosity> verbose </verbosity>
+		<progress-reports> verbose </progress-reports>
+		<echo> true </echo>
+		<plotting>
+			<profile> false </profile>
+			<posterior> false </posterior>
+		</plotting>
+		<seed> 1005 </seed>
+		<parameter-file> parmfile </parameter-file>
+		<results-file> outfile.coalrec </results-file>
+		<in-summary-file> insumfile.coalrec </in-summary-file>
+		<out-summary-file> outsumfile.coalrec </out-summary-file>
+                <use-out-summary> true </use-out-summary>
+                <use-in-summary> false </use-in-summary>
+	</format>
+        <model name="F84">
+          <base-freqs> calculated </base-freqs>
+          <ttratio> 2.0 </ttratio>
+          <categories>
+            <num-categories> 2 </num-categories>
+            <rates> 1.0 2.0 </rates>
+            <probabilities> 0.5 0.5 </probabilities>
+            <!-- autocorrelation of 1 means no auto-correlation -->
+            <autocorrelation> 1.0 </autocorrelation>
+          </categories>
+        </model>
+	<!-- -->
+	<data>
+		<region name="coalrec">
+<spacing>
+				<block>
+					<map-position> 0 </map-position>
+					<length> 1000 </length>
+					<locations> 2 9 18 46 51 78 82 89 90 98 104 106 114 172 197 199 222 293 326 343 391 397 417 424 426 435 438 443 464 488 499 526 546 556 575 576 595 602 614 617 621 629 642 650 653 674 690 694 699 702 711 718 719 729 770 848 851 854 856 857 915 923 940 958 974 
+					</locations>
+					<offset> 0 </offset>
+				</block>
+			</spacing>
+			<population name="ZMNE">
+				<individual name="GG-322166855">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0025   ">
+						<datablock type="SNP">
+							TGAAGCCCCAATTATGGTTACCACGACTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0032   ">
+						<datablock type="SNP">
+							TGAAGCCCCAATTATGGTTACCACGACTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="CA-1016758720">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0022   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0003   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GT-438712109">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0024   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCTTACCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0000   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="AT1063120482">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0027   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTGGCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTAA
+						</datablock>
+					</sample>
+					<sample name="00_0013   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTAAGCGACGTGGGCTCTATCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="TG-108633955">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0037   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTGCCGCTTCTTATCAGTACCTCTTAGCGACGTGGGCTCTATCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0009   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTGCCGCTTCTTCTCAGTACCACTTAGCAACGTAGACTCTAACACTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GC-816174348">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0034   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCCTCTCAGTACCACTTAGCAACGTAGACTCTATCACTCA
+						</datablock>
+					</sample>
+					<sample name="00_0038   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCCTCTCAGTACCACTTAGCAACGTAGACTCTATCACTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GG-1252698729">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0023   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCCTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0030   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GG-173374346">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0031   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0002   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="CA-1985314239">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0018   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATATCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0014   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GC-780785816">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0005   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0036   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GG1351601435">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0010   ">
+						<datablock type="SNP">
+							CCACATCGTAGACGTTCGTACCGCTTCTTATCGGTACCACTTAGCAACGTAGACCCTATCTCCCA
+						</datablock>
+					</sample>
+					<sample name="00_0001   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="CA-402572214">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0011   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0004   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="AC1355139237">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0033   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0029   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GA865732508">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0006   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0015   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTTTTATCGGTACCACTTAGCAACGTAGACCCTATCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GA1359434591">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0028   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTTTCATTATTACCACATAGTGTCTCAGGTTCATTCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0035   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTTTCATTATTACCACATAGTGTCTCAGGTTCATTCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="AT-1640691234">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0007   ">
+						<datablock type="SNP">
+							CCGCATCGTTGACATTCGCACTGGTTTTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0026   ">
+						<datablock type="SNP">
+							CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="AG774387657">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0019   ">
+						<datablock type="SNP">
+							CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0020   ">
+						<datablock type="SNP">
+							CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="TA864034704">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0021   ">
+						<datablock type="SNP">
+							CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0016   ">
+						<datablock type="SNP">
+							CCACATCGTTGACACTCGCACTGGTTCTCATTAGCGCCATATGGTGTATCAAGTTCATTCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GT-545862557">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0017   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGGCATATGGTGTATCAAGTTCATTCTCTCA
+						</datablock>
+					</sample>
+					<sample name="00_0039   ">
+						<datablock type="SNP">
+							CCACATTGTTGACATTCGCACTGGTTCTCATTAGCGCCATATAATGTATCAAGTTTATTCTCTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GT502571314">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 </phase>
+					<sample name="00_0008   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGCCATATAGTGTATCAAGTTCATTCTTTCG
+						</datablock>
+					</sample>
+					<sample name="00_0012   ">
+						<datablock type="SNP">
+							CCACATCGTTGACATTCGCACTGGTTCTCATTAGCGCTATATAGTGTATCAAGTTCATTCTCTCG
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+	</data>
+</lamarc>
+
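For orientation on the search settings in infile.coalrec above: the <chains> block requests 10 initial chains of 2000 samples and 2 final chains of 5000 samples, each sampling every 20 steps after discarding 1000. Under the usual reading of these elements (an assumption here, not something the file itself spells out), a chain performs roughly discard + samples * interval rearrangements, e.g. 1000 + 2000 * 20 = 41000 for an initial chain. A small, hypothetical Python sketch that pulls those numbers out of the XML and does that arithmetic:

    import xml.etree.ElementTree as ET

    # Chain settings copied from the <chains> block of infile.coalrec above.
    chains_xml = """
    <chains>
      <initial>
        <number> 10 </number> <samples> 2000 </samples>
        <discard> 1000 </discard> <interval> 20 </interval>
      </initial>
      <final>
        <number> 2 </number> <samples> 5000 </samples>
        <discard> 1000 </discard> <interval> 20 </interval>
      </final>
    </chains>
    """

    chains = ET.fromstring(chains_xml)
    for phase in ("initial", "final"):
        node = chains.find(phase)
        number, samples, discard, interval = (
            int(node.find(tag).text)
            for tag in ("number", "samples", "discard", "interval"))
        # Assumed interpretation: discard burn-in steps, then keep `samples`
        # genealogies spaced `interval` steps apart.
        steps = discard + samples * interval
        print(f"{phase}: {number} chain(s), roughly {steps} steps each")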
diff --git a/doc/testfiles/v2.0.infiles/infile.coalregrep b/doc/testfiles/v2.0.infiles/infile.coalregrep
new file mode 100644
index 0000000..2827a4c
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.coalregrep
@@ -0,0 +1,819 @@
+<lamarc>
+<!--generated with:                                                           -->
+<!--                                                                          -->
+<!--jrectree (rand#???)/rectreedna/snp, dna&snp, coal only                    -->
+<!--theta = 0.01                                                              -->
+<!--1 population with 20 individuals/population, 1000 markers/sites           -->
+<!--                                                                          -->
+<!--                                                                          -->
+<!--analyzed as:                                                              -->
+<!--                                                                          -->
+<!--dna/snp (1000 markers, spacing as appropriate)                            -->
+<!--default data model for datatype                                           -->
+<!--starting params (theta = watterson)                                       -->
+<!--5 initial (1000 samples, 20 interval, 1000 discard)                       -->
+<!--2 final   (2500 samples, 20 interval, 1000 discard)                       -->
+<!--no heating                                                                -->
+<!--percentile profiles                                                       -->
+<!--                                                                          -->
+<!-- Created from the LamarcDS DataStore -->
+	<forces>
+		<coalescence>
+			<start-values> 0.01 </start-values>
+			<method> WAT </method>
+			<max-events> 1000 </max-events>
+                        <profiles> percentile </profiles>
+		</coalescence>
+	</forces>
+	<!-- -->
+	<chains>
+		<replicates> 3 </replicates>
+		<heating>
+			<temperatures> 1 </temperatures>
+			<swap-interval> 1 </swap-interval>
+		</heating>
+		<strategy>
+			<resimulating> 1 </resimulating>
+		</strategy>
+		<initial>
+			<number> 5 </number>
+			<samples> 1000 </samples>
+			<discard> 1000 </discard>
+			<interval> 20 </interval>
+		</initial>
+		<final>
+			<number> 2 </number>
+			<samples> 2500 </samples>
+			<discard> 1000 </discard>
+			<interval> 20 </interval>
+		</final>
+	</chains>
+	<!-- -->
+	<format>
+		<verbosity> verbose </verbosity>
+		<progress-reports> verbose </progress-reports>
+		<echo> true </echo>
+		<plotting>
+			<profile> false </profile>
+			<posterior> false </posterior>
+		</plotting>
+		<seed> 1005 </seed>
+		<parameter-file> paramfile </parameter-file>
+		<results-file> outfile </results-file>
+		<use-in-summary> false </use-in-summary>
+		<in-summary-file> insumfile </in-summary-file>
+		<use-out-summary> true  </use-out-summary>
+		<out-summary-file> outsumfile </out-summary-file>
+	</format>
+	<!-- -->
+	<data>
+		<region name="DR-1">
+			<population name="Berkeley">
+				<individual name="00_0003   ">
+					<sample name="00_0003   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATATGACGTCCCTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAAAGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAGACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGATGGTGTCTGGGGCCCTAAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0009   ">
+					<sample name="00_0009   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATATGACGTCCCTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAAAGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAGACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGATGGTGTCTGGGGCCCTAAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0002   ">
+					<sample name="00_0002   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTGTCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTGAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCAGTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0011   ">
+					<sample name="00_0011   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCAGTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0017   ">
+					<sample name="00_0017   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCAGTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0006   ">
+					<sample name="00_0006   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0013   ">
+					<sample name="00_0013   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0001   ">
+					<sample name="00_0001   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0000   ">
+					<sample name="00_0000   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0016   ">
+					<sample name="00_0016   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0007   ">
+					<sample name="00_0007   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0019   ">
+					<sample name="00_0019   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0004   ">
+					<sample name="00_0004   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACGTGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTGAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0005   ">
+					<sample name="00_0005   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACGTGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTGAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0015   ">
+					<sample name="00_0015   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACGTGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTGAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0010   ">
+					<sample name="00_0010   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0018   ">
+					<sample name="00_0018   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAATTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0008   ">
+					<sample name="00_0008   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAGTTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGACCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0012   ">
+					<sample name="00_0012   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAGTTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0014   ">
+					<sample name="00_0014   ">
+						<datablock type="DNA">
+							TCGGGCATCCATATTTCCCCACTGGAGCTAGAGTTGACCCCGAAGTCAAACGATCTATGTGACGTCACTAATGACGTTGGCGCCATTGGGCATTCGTTCTGGACTATCGCGGACATAAACGCAGCTCGGGTGGCATGATCATTGGTGCGAGCCCGCGTGAGCAAGCTGGTAAGAACTGAAGCGAGGCGAGTAGTTCAAGAGTTCGATCTCTTTCTCTTAACATCCATAGCACTGGGCGTCCCCCCTTGCCACTTACGACTTAAAACTTATCAGCTTATGTTTCGATTCCCCGCATTGTCCACATTCAAACGAAACCAATCCATTGCGAAATGTACTCTCAACTATGAGGGCGTCTGGGGCCCTGAGCCGCGACATGAAGTAGATTGGGCCATCCCCATTTTACAAGGGTAGAGTAAAATGTTGAAGACGCTGTGGGCCAAGGGGGGCCTTAGAATTCATCAGGATAGATCCGTGTGGTAAATGGT [...]
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+		<region name="DR-2">
+<spacing>
+				<block>
+					<map-position> 0 </map-position>
+					<length> 1000 </length>
+					<locations> 32 58 267 297 333 350 355 373 412 466 478 512 526 548 625 629 630 647 692 739 749 763 826 855 880 918 981 986 
+					</locations>
+					<offset> 0 </offset>
+				</block>
+			</spacing>
+			<population name="Berkeley">
+				<individual name="00_0011   ">
+					<sample name="00_0011   ">
+						<datablock type="SNP">
+							AATTTCTAACGACCTGCGCCTACATTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0013   ">
+					<sample name="00_0013   ">
+						<datablock type="SNP">
+							ATTTTTTAACGACCTGTGCCTGCATCCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0009   ">
+					<sample name="00_0009   ">
+						<datablock type="SNP">
+							ATTTTCTAACGACCTGTGCCTACATTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0001   ">
+					<sample name="00_0001   ">
+						<datablock type="SNP">
+							ATTTTCTAACGACCTGTGCCTACATTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0010   ">
+					<sample name="00_0010   ">
+						<datablock type="SNP">
+							ATTTTCTAACGACCTGTGCCTACATTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0014   ">
+					<sample name="00_0014   ">
+						<datablock type="SNP">
+							ATTTTCTAACGACCTGTGCCTACATTCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0015   ">
+					<sample name="00_0015   ">
+						<datablock type="SNP">
+							AATTTCTATCGACCCGTGTCTACATTTA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0012   ">
+					<sample name="00_0012   ">
+						<datablock type="SNP">
+							AATTTCTAACGACCCGTGTCTACATTTA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0018   ">
+					<sample name="00_0018   ">
+						<datablock type="SNP">
+							AATTTCTAACGATCCGTGTCTACATTTA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0019   ">
+					<sample name="00_0019   ">
+						<datablock type="SNP">
+							AATTTCTAACGACCCGTGTCTACATTTA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0007   ">
+					<sample name="00_0007   ">
+						<datablock type="SNP">
+							AATTTCTAACGACCCGTGTCCATATTTA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0008   ">
+					<sample name="00_0008   ">
+						<datablock type="SNP">
+							AATTTCTAACGACCCGTGTCCACATTTA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0016   ">
+					<sample name="00_0016   ">
+						<datablock type="SNP">
+							AACTTCAAACGACCCCTCTCTACATTTA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0003   ">
+					<sample name="00_0003   ">
+						<datablock type="SNP">
+							AACTTCAGACAACCCCTCTCTACATTTT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0004   ">
+					<sample name="00_0004   ">
+						<datablock type="SNP">
+							AACTTCAAACGACCCCTCTCTACATTTT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0000   ">
+					<sample name="00_0000   ">
+						<datablock type="SNP">
+							AATTCCAAACGACCCATGTTTACATTTA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0017   ">
+					<sample name="00_0017   ">
+						<datablock type="SNP">
+							AATTCCAAACGACCCATGTTTACATTTA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0006   ">
+					<sample name="00_0006   ">
+						<datablock type="SNP">
+							GATTTCAAATGGCTCCTGTCTACGATTA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0002   ">
+					<sample name="00_0002   ">
+						<datablock type="SNP">
+							GATCTCAAATGGCCCCTGTCTACGATTA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0005   ">
+					<sample name="00_0005   ">
+						<datablock type="SNP">
+							GATCTCAAATGGCCCCTGTCTACGATTA
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+		<region name="DR-3">
+			<population name="Berkeley">
+				<individual name="00_0006   ">
+					<sample name="00_0006   ">
+						<datablock type="DNA">
+							TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTTTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0013   ">
+					<sample name="00_0013   ">
+						<datablock type="DNA">
+							TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTTTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0004   ">
+					<sample name="00_0004   ">
+						<datablock type="DNA">
+							TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTTTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0016   ">
+					<sample name="00_0016   ">
+						<datablock type="DNA">
+							TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTTTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0017   ">
+					<sample name="00_0017   ">
+						<datablock type="DNA">
+							TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTTTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0007   ">
+					<sample name="00_0007   ">
+						<datablock type="DNA">
+							TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTCTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0002   ">
+					<sample name="00_0002   ">
+						<datablock type="DNA">
+							TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTCTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0008   ">
+					<sample name="00_0008   ">
+						<datablock type="DNA">
+							TCGGGCCTACGCTGTAGCATAGTGTTACCTCAAATGCCCTAGGGCCTGCTCAGTTAGGTGAGCGTAGTCTGATGTCATCCGTCAGATCCCTCACAGTCTGCTAGGTCAGCTGCTGCCCGATTACCCACTTACATAAGCAGATGAAGACTTAATATTCTTTCGCAGAGCACAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATGATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATTTAACTGGTCGTCTGCTAGCGGAGTCGTTCGCTGCTTGCATAATGAATTAGCGTTCTTGAATGGTTCCCTGTCCATCTTGCGGTTCGACAGGGTGACTGGTGCCTTTCCCCAAGGAAGACGTACGTGGTAAGAGGGTTCGGCGACGAGGATCGCTCTCAAGCATCCTATGGCGCCCTAGTGAAGCTGAGTTTGATGCCAGCTAACACAGAGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0010   ">
+					<sample name="00_0010   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCGGATCACTCAAAGTTTGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACTTAATATTGTTTCGCAGAGCATAAAAGATCACGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCGGTTCGACAGAGTGACTGGTGCCCTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTGCTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0000   ">
+					<sample name="00_0000   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAAAACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0001   ">
+					<sample name="00_0001   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAAAACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0011   ">
+					<sample name="00_0011   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0018   ">
+					<sample name="00_0018   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0012   ">
+					<sample name="00_0012   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTGCTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0015   ">
+					<sample name="00_0015   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAACGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0003   ">
+					<sample name="00_0003   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCAAATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0009   ">
+					<sample name="00_0009   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCACATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGTAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0019   ">
+					<sample name="00_0019   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCACATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGTAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0005   ">
+					<sample name="00_0005   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCACATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0014   ">
+					<sample name="00_0014   ">
+						<datablock type="DNA">
+							TCGGGGCTACGCTGTAGCATCGTGTTACCTCACATGCCCTAGCGCCTGCTTAGTTAGGTGAGCGTAGTTTAATGTCATCCGTCAGATCCCTCAAAGTTCGCTAGGTCAGCTACTGCCCGATTACCCACCTACATAAGCAGATGAAGACGTAATATTGTTTCGCAGAGCATAAAAGATCAAGTTTGATACAATCTCAAATACCCGCTTCCTGTATCATATCAGTACATCTCCATAAGACGTACCAACCGAGGCTTCCAACAGCTCTCCATATAACTAGTCGTTTGCTAGCGGAGTCGTTCGCTGCTCGCATAATGGATTGGCGTTCTTGAGTGGTTCCCTGTCCATCGTGCAGTTCGACAGAGTGACTGGTGCCTTTCCCGAAGAAAGACGTATTTGGTAGGAGGGTTCGGTGGCGAGGATCGCTCTCAAGCATCTTGTGGCGCCCTACTGAAGCTGAGTCTGATGCCAGCTAACACAGGGAAGCC [...]
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+		<region name="DR-4">
+			<spacing>
+				<block>
+					<map_position> 0 </map_position>
+					<length> 1000 </length>
+					<locations> 58 63 80 139 160 195 215 248 254 265 294 297 302 304 313 320 330 333 370 427 452 457 475 533 549 555 574 583 596 647 663 674 703 722 743 753 761 769 788 830 834 848 852 878 932 980 
+					</locations>
+					<offset> 0 </offset>
+				</block>
+			</spacing>
+			<population name="Berkeley">
+				<individual name="00_0001   ">
+					<sample name="00_0001   ">
+						<datablock type="SNP">
+							AGTAATGCAGCCAGCTTGCGGCGTACCAAGATTAGGACCGTTCAAG
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0011   ">
+					<sample name="00_0011   ">
+						<datablock type="SNP">
+							AGTAATGCAGCCAGCTTGCGGCGTACCAAGATTAGGACCGTTCATG
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0017   ">
+					<sample name="00_0017   ">
+						<datablock type="SNP">
+							GACGTGAATATTGAAATCCAATGTGCTGGGACCCGGTTAGCTCGAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0008   ">
+					<sample name="00_0008   ">
+						<datablock type="SNP">
+							GACGTGAATATTGAAATCCAATGTGCTGGGACCCGGTTAGCTTGAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0016   ">
+					<sample name="00_0016   ">
+						<datablock type="SNP">
+							GACGTGAATATTGAAATCCAATGTGCTGGGACCCGGTTAGCTCGAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0012   ">
+					<sample name="00_0012   ">
+						<datablock type="SNP">
+							GACGTGAATATTGAAATCCAATGCGCTGGGACCCGGTTAGCTCGAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0018   ">
+					<sample name="00_0018   ">
+						<datablock type="SNP">
+							GACGTGAATATTGAAATCCAATGTGCTGGGACCCGGTTAGCTCGAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0000   ">
+					<sample name="00_0000   ">
+						<datablock type="SNP">
+							GACGTGAATATTGAAATCCAATGTGCTGGGACCCGGTTAACTCGAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0007   ">
+					<sample name="00_0007   ">
+						<datablock type="SNP">
+							GACGTGAATATTGAAATCCAATGTGCTGGGACCCGATTAGCTCGAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0006   ">
+					<sample name="00_0006   ">
+						<datablock type="SNP">
+							GACGTGAATATTGAAATCCAATGTGCTGGGACCCGATTAGCTCGAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0010   ">
+					<sample name="00_0010   ">
+						<datablock type="SNP">
+							GACGTGAATATTGAAATCTAATGTGCTGGGACCCGATTAGCTCGAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0004   ">
+					<sample name="00_0004   ">
+						<datablock type="SNP">
+							GACGTGAATATTGAAATCCAATATGCTGGGACCCGATTAGCTCGAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0013   ">
+					<sample name="00_0013   ">
+						<datablock type="SNP">
+							GACGTGAATATTGAAATCCAATATGCTGGGACCCGATTAGCTCGAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0002   ">
+					<sample name="00_0002   ">
+						<datablock type="SNP">
+							GACGTAAATATTAAAAACCAATGTGCTGGGACCAAGTTCGCCCGAG
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0015   ">
+					<sample name="00_0015   ">
+						<datablock type="SNP">
+							GACGTAAATATTAAAAACCAATGTGCTGGGACCAAGTTCGCCCGAG
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0005   ">
+					<sample name="00_0005   ">
+						<datablock type="SNP">
+							GACGTAAATATTAAAAACCAATGTGTTGGCGCCAGGTTCGCTCGAG
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0009   ">
+					<sample name="00_0009   ">
+						<datablock type="SNP">
+							GACGTAAATATTAAAAACCAATGTGTTGGCGCCAGGTTCGCTCGAG
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0014   ">
+					<sample name="00_0014   ">
+						<datablock type="SNP">
+							GACGTAAATATTAAAAACCAATGTGTTGGCGCCAGGTTCGCTCGAG
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0003   ">
+					<sample name="00_0003   ">
+						<datablock type="SNP">
+							GACGTAAATATTAAAAACCAATGTGTTGGCGCCAGGTTCGCTCGAG
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0019   ">
+					<sample name="00_0019   ">
+						<datablock type="SNP">
+							GACGTAAATATTAAAAACCAATGTGTTGGCGCCAGGTTCGCTCGAG
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+		<region name="DR-5">
+			<spacing>
+				<block>
+					<map_position> 0 </map_position>
+					<length> 1000 </length>
+					<locations> 58 66 119 226 267 305 307 315 347 350 363 373 386 397 455 478 536 552 558 577 599 669 749 773 803 804 836 854 859 865 921 938 986 
+					</locations>
+					<offset> 0 </offset>
+				</block>
+			</spacing>
+			<population name="Berkeley">
+				<individual name="00_0016   ">
+					<sample name="00_0016   ">
+						<datablock type="SNP">
+							AGAATTGCTTAACACGTACCCAAAACTACTGGG
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0012   ">
+					<sample name="00_0012   ">
+						<datablock type="SNP">
+							GACGTTTTGCGAGTAGTGCTGAAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0009   ">
+					<sample name="00_0009   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGCTGAAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0002   ">
+					<sample name="00_0002   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGCTGAAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0005   ">
+					<sample name="00_0005   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGCTGAAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0000   ">
+					<sample name="00_0000   ">
+						<datablock type="SNP">
+							AACGTGTTGCGAGAAGTGCTGAAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0006   ">
+					<sample name="00_0006   ">
+						<datablock type="SNP">
+							AACGTGTTGCGAGAAGCGCTGAAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0004   ">
+					<sample name="00_0004   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGCTGAAGGATAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0010   ">
+					<sample name="00_0010   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGCTGAAGGACAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0003   ">
+					<sample name="00_0003   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGCTGAAGGACAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0008   ">
+					<sample name="00_0008   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGCTGAAGGACAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0014   ">
+					<sample name="00_0014   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAATGCTGAAGGACAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0015   ">
+					<sample name="00_0015   ">
+						<datablock type="SNP">
+							AACGGTTTGCGGGAAGTGCTGAAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0018   ">
+					<sample name="00_0018   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGCTGACGGCTTACAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0019   ">
+					<sample name="00_0019   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGCTGAAGGCTAACAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0017   ">
+					<sample name="00_0017   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGTTGGAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0001   ">
+					<sample name="00_0001   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGTTGGAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0007   ">
+					<sample name="00_0007   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGTTGGAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0011   ">
+					<sample name="00_0011   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGTTGGAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="00_0013   ">
+					<sample name="00_0013   ">
+						<datablock type="SNP">
+							AACGTTTTGCGAGAAGTGTTGGAGGCTAATAAA
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+	</data>
+</lamarc>
+
diff --git a/doc/testfiles/v2.0.infiles/infile.growmigheat b/doc/testfiles/v2.0.infiles/infile.growmigheat
new file mode 100644
index 0000000..e602730
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.growmigheat
@@ -0,0 +1,366 @@
+<lamarc>
+<!-- Created from the LamarcDS DataStore -->
+<!-- -->
+<data>
+	<region name="tempdna---0">
+		<population name="  Popmig0">
+			<individual name="00_0019   ">
+				<sample name="00_0019   ">
+					<datablock type="DNA">
+						AAAGTGGAATGGCACTAGGTGGGCGGTCTGCTGAAAGTGTTTACTTGTTGTGAAGCTACCGTACAGTAGGTTCGCAATTCGGCTGCCGTAGTCGGCAACCGTTAAACGATGCTATACCGGCAATTTGGCATCGTGAAGAATTCAAGCGAGGAGCACATATGGGGGCTTTTGCAGCTTGAACTGTTTTACCCGGTCCGAACGAGTAGGTTTAAGGTGGACACGGCAATTTCCGTGAACCAATTGTCGTGGTACTCGTATATGTCTAAATGGGTCTTCTGCCGCCCGCAATAAGCTGGAGAGAAGAGCTAGAGAGTCTAGGTTCCATGTCGTTGGGAACGGGCGTAGGCTCACTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCCGCTTGATGCTTGTGCAAACCTCGATGGGATTCGACAGACGGCTTTTTTATATTATCTAAGGCGGGAGTACGGACTACCCTTTGTGCCTGAGCTTTGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0001   ">
+				<sample name="00_0001   ">
+					<datablock type="DNA">
+						AAAGTGGAATGGCACTAGGTGGGCGGTCTGCTGAAAGTGTTTACTTGTTGTGAAGCTACCGTACAGTAGGTTCGCAATTCGGCTGCCGTAGTCGGCAACCGTTAAACGATGCTATACCGGCAATTTGGCATCGTGAAGAATTCAAGCGAGGAGCACATATGGGGGCTTTTGCAGCTTGAACTGTTTTACCCGGTCCGAACGAGTAGGTTTAAGGTGGACACGGCAATTTCCGTGAACCAATTGTCGTGGTACTCGTGTATGTCTAAATGGGTCTTCTGCCGCCCACAATAAGCTGGAGAGAAGAGCTAGAGAGTCTAGGTTCCATGTCGTTGGGAACGGGCGTAGGCTCACTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCCGCTTGATGCTTGTGCAAACCTCGATGGGATTCGACAGACGGCTTTTTTATATTATCTAAGGCGGGAGTACGGACTACCCTTTGTGCCTGAGCTTTGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0006   ">
+				<sample name="00_0006   ">
+					<datablock type="DNA">
+						AAAGTGGAATGGCACTAGGTGGGCGGTCTGCTGAAAGTGTTTACTTGTTGTGAAGCTACCGTACAGTAGGTTCGCAATTCGGCTGCCGTAGTCGGCAATCGTTAAACGATGCTATACCGGCAATTTGGCATCGTGAAGAATTCAAGCGAGGAGCACATATGGGGGCTTTTGCAGCTTGAACTGTTTTACCCGGTCCGAACGAGTAGGTTTAAGGTGGACACGGCAATTTCCGTGAACCAATTGTCGTGGTACTCGTGTATGTCTAAATGGGTCTTCTGCCGCCCGCAATAAGCTGGAGAGAAGAGCTAGAGAGTCTAGGTTCCATGTCGTTGGGAACGGGCGTAGGCTCACTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCCGCTTGATGCTTGTGCAAACCTCGATGGGATTCGACAGACGGCTTTTTTATATTATCTAAGGCGGGAGTACGGACTACCCTCTGTGCCTGAGCTTTGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0009   ">
+				<sample name="00_0009   ">
+					<datablock type="DNA">
+						AAAGTGGAATGGCACTAGGTGGGCGGTCTGCTGAAAGTGTTTACTTGTTGTGAAGCTACCGTACAGTAGGTTCGCAATTCGGCTGCCGTAGTCGGCAATCGTTAAACGATGCTATACCGGCAATTTGGCATCGTGAAGAATTCAAGCGAGGAGCACATATGGGGGCTTTTGCAGCTTGAACTGTTTTACCCGGTCCGAACGAGTAGGTTTAAGGTGGACACGGCAATTTCCGTGAACCAATTGTCGTGGTACTCGTGTATGTCTAAATGGGTCTTCTGCCGCCCGCAATAAGCTGGAGAGAAGAGCTAGAGAGTCTAGGTTCCATGTCGTTGGGAACGGGCGTAGGCTCACTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCCGCTTGATGCTTGTGCAAACCTCGATGGGATTCGACAGACGGCTTTTTTATATTATCTAAGGCGGGAGTACGGACTACCCTCTGTGCCTGAGCTTTGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0003   ">
+				<sample name="00_0003   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTACGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCTCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0004   ">
+				<sample name="00_0004   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTACGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCTCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0000   ">
+				<sample name="00_0000   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATATCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0010   ">
+				<sample name="00_0010   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGAGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0016   ">
+				<sample name="00_0016   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGAGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0017   ">
+				<sample name="00_0017   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGAGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0011   ">
+				<sample name="00_0011   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0002   ">
+				<sample name="00_0002   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTGTGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAGACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0005   ">
+				<sample name="00_0005   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0008   ">
+				<sample name="00_0008   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0007   ">
+				<sample name="00_0007   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGACGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGATAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGACGCCTTTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0014   ">
+				<sample name="00_0014   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGACGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGATAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGACGCCTTTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0012   ">
+				<sample name="00_0012   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0013   ">
+				<sample name="00_0013   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0015   ">
+				<sample name="00_0015   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="00_0018   ">
+				<sample name="00_0018   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+		<population name="  Popmig1">
+			<individual name="01_0022   ">
+				<sample name="01_0022   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCTCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0035   ">
+				<sample name="01_0035   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0024   ">
+				<sample name="01_0024   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGCTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0033   ">
+				<sample name="01_0033   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTGAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGCTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0030   ">
+				<sample name="01_0030   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGCTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0034   ">
+				<sample name="01_0034   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGCTTTACTCGGTCCGCACGGGTAGGTGTAGGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0039   ">
+				<sample name="01_0039   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGCTTTACTCGGTCCGCACGGGTAGGTGTAGGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0028   ">
+				<sample name="01_0028   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATGTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTCGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0025   ">
+				<sample name="01_0025   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATGTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0031   ">
+				<sample name="01_0031   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGGTGACCGTTAAGCGATGCTATACCGGCCATGTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACAGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0029   ">
+				<sample name="01_0029   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0037   ">
+				<sample name="01_0037   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0027   ">
+				<sample name="01_0027   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTACGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0032   ">
+				<sample name="01_0032   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTACGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0038   ">
+				<sample name="01_0038   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTACGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0020   ">
+				<sample name="01_0020   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTACGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0026   ">
+				<sample name="01_0026   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTACGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0036   ">
+				<sample name="01_0036   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0021   ">
+				<sample name="01_0021   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCTTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+			<individual name="01_0023   ">
+				<sample name="01_0023   ">
+					<datablock type="DNA">
+						AAAGTGGAAAGACACTAGGTCGGCGGTCTACTGAAGGTGTTTACTTGTTGTGAAGCTGCCGTACAGTAGGTTCGCAATTCGGCCACCGTAGTCGATGACCGTTAAGCGATGCTATACCGGCCATTTGGTTACGCGAAGAATTCAACCGAGGAGCACATATGGGGGCTTTTGCAGCTAGGACTGTTTTACTCGGTCCGCACGGGTAGGTGTAAGGTGGGCACGGCAATTCCCGTGAACCAATTGTCGTCGTGTTCGCATATGTCTAAATGGGTCTTCTGCCCCCCGCAATAAGCTGCAGAGAAGGGCTAGAGAGTCTATGTTCCCTGTCGTTGGGAACAGGCGTAGGCTCATTTGGCCTGAGTTTACAATGTGTCCGTGAGAGGTCAGCTTGATGCCTCTGTAAACTTAGATGGGACTCGACGGACGGCTTTCTTATATTATGTAAGGCGGGAGTACAGGCTACTCCTTCTGCCGAAGCTATGCTGC [...]
+					</datablock>
+				</sample>
+			</individual>
+		</population>
+	</region>
+</data>
+
+<!-- lamarc simulation parameter file -->
+<!-- -->
+<forces>
+  <coalescence>
+    <method> Watterson Watterson </method>
+    <max-events> 10000 </max-events>
+    <profiles> fixed fixed </profiles>
+  </coalescence>
+  <migration>
+    <method> FST FST FST FST </method>
+    <max-events> 10000 </max-events>
+    <profiles> fixed fixed fixed fixed </profiles>
+  </migration>
+  <growth>
+    <start-values> 1.0 1.0 </start-values>
+    <method> User User </method>
+    <max-events> 10000 </max-events>
+    <profiles> fixed fixed </profiles>
+  </growth>
+</forces>
+<!-- -->
+<chains>
+  <replicates>1</replicates>
+  <heating>
+    <temperatures>1.0 1.1 2.0 3.0 8.0 </temperatures>
+    <swap-interval>10</swap-interval>
+  </heating>
+  <strategy>
+    <resimulating> 1.0 </resimulating>
+  </strategy>
+  <initial>
+    <number>10</number>
+    <samples>50</samples>
+    <discard>1000</discard>
+    <interval>20</interval>
+  </initial>
+  <final>
+    <number>2</number>
+    <samples> 750 </samples>
+    <discard>1000</discard>
+    <interval>20</interval>
+  </final>
+</chains>
+<!-- -->
+<format>
+  <verbosity>verbose</verbosity>
+  <progress-reports>verbose</progress-reports>
+  <echo>false</echo>
+  <plotting>
+    <profile>false</profile>
+    <posterior>false</posterior>
+  </plotting>
+  <seed>1005</seed>
+  <parameter-file>parmfile</parameter-file>
+  <results-file> outfile.growmigheat </results-file>
+  <in-summary-file> insumfile.growmigheat </in-summary-file>
+  <out-summary-file> outsumfile.growmigheat </out-summary-file>
+  <use-out-summary> true </use-out-summary>
+  <use-in-summary> false </use-in-summary>
+</format>
+<!-- -->
+<model name="F84">
+  <base-freqs> 0.25 0.25 0.25 0.25 </base-freqs>
+  <ttratio> 2.0 </ttratio>
+  <categories>
+    <num-categories> 1 </num-categories>
+    <rates> 1.0 </rates>
+    <probabilities> 1.0 </probabilities>
+    <!-- autocorrelation of 1 means no auto-correlation -->
+    <autocorrelation> 1.0 </autocorrelation>
+  </categories>
+</model>
+<!-- -->
+</lamarc>
diff --git a/doc/testfiles/v2.0.infiles/infile.multicat b/doc/testfiles/v2.0.infiles/infile.multicat
new file mode 100644
index 0000000..6e4ba11
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.multicat
@@ -0,0 +1,112 @@
+<lamarc>
+<!-- Created from the LamarcDS DataStore -->
+	<forces>
+		<coalescence>
+			<start-values> 0.01 </start-values>
+			<method> USER </method>
+			<max-events> 1000 </max-events>
+		</coalescence>
+	</forces>
+	<!-- -->
+	<chains>
+		<replicates> 1 </replicates>
+		<heating>
+			<temperatures> 1 </temperatures>
+			<swap-interval> 1 </swap-interval>
+		</heating>
+		<strategy>
+			<resimulating> 1 </resimulating>
+		</strategy>
+		<initial>
+			<number> 10 </number>
+			<samples> 500 </samples>
+			<discard> 1000 </discard>
+			<interval> 20 </interval>
+		</initial>
+		<final>
+			<number> 2 </number>
+			<samples> 10000 </samples>
+			<discard> 1000 </discard>
+			<interval> 20 </interval>
+		</final>
+	</chains>
+	<!-- -->
+	<format>
+		<verbosity> verbose </verbosity>
+		<progress-reports> verbose </progress-reports>
+		<echo> true </echo>
+		<plotting>
+			<profile> false </profile>
+			<posterior> false </posterior>
+		</plotting>
+		<seed> 1005 </seed>
+		<parameter-file> parmfile </parameter-file>
+		<results-file> outfile.multicat </results-file>
+		<in-summary-file> insumfile.multicat </in-summary-file>
+		<out-summary-file> outsumfile.multicat </out-summary-file>
+		<use-out-summary> true </use-out-summary>
+		<use-in-summary> false </use-in-summary>
+	</format>
+<model name="GTR">
+  <base-freqs> 0.30884 0.31279 0.13133 0.24704 </base-freqs>
+  <gtr-rates> 5.40506 147.77654 4.27459 3.58012 96.36786 1.0 </gtr-rates>
+  <categories>
+    <num-categories> 2 </num-categories>
+    <rates> 0.14265184 1.85734816 </rates>
+    <probabilities> 0.5 0.5 </probabilities>
+    <!-- autocorrelation of 1 means no auto-correlation -->
+    <autocorrelation> 1.0 </autocorrelation>
+  </categories>
+  <normalize> false </normalize>
+</model>
+	<!-- -->
+	<data>
+		<region name="Finland">
+			<population name="Population KFMH">
+				<individual name="germ_10   ">
+					<sample name="germ_10   ">
+						<datablock type="DNA">
+							TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_11   ">
+					<sample name="germ_11   ">
+						<datablock type="DNA">
+							AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_20   ">
+					<sample name="germ_20   ">
+						<datablock type="DNA">
+							ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_21   ">
+					<sample name="germ_21   ">
+						<datablock type="DNA">
+							ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_30   ">
+					<sample name="germ_30   ">
+						<datablock type="DNA">
+							ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_31   ">
+					<sample name="germ_31   ">
+						<datablock type="DNA">
+							ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+	</data>
+</lamarc>
+
diff --git a/doc/testfiles/v2.0.infiles/infile.multilocus b/doc/testfiles/v2.0.infiles/infile.multilocus
new file mode 100644
index 0000000..4e39301
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.multilocus
@@ -0,0 +1,287 @@
+<lamarc>
+<!-- Created by the Lamarc program -->
+  <chains>
+    <replicates>1</replicates>
+    <bayesian-analysis>No</bayesian-analysis>
+    <heating>
+      <adaptive>false</adaptive>
+      <temperatures> 1</temperatures>
+      <swap-interval>10</swap-interval>
+    </heating>
+    <strategy>
+      <resimulating>0.833333</resimulating>
+      <tree-size>0.166667</tree-size>
+      <haplotyping>0</haplotyping>
+    </strategy>
+    <initial>
+      <number>10</number>
+      <samples>500</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </initial>
+    <final>
+      <number>2</number>
+      <samples>10000</samples>
+      <discard>1000</discard>
+      <interval>20</interval>
+    </final>
+  </chains>
+  <format>
+    <verbosity>normal</verbosity>
+    <progress-reports>normal</progress-reports>
+    <results-file>outfile</results-file>
+    <use-in-summary>false</use-in-summary>
+    <in-summary-file>insumfile</in-summary-file>
+    <use-out-summary>false</use-out-summary>
+    <out-summary-file>outsumfile</out-summary-file>
+    <use-curvefiles>true</use-curvefiles>
+    <curvefile-prefix>curvefile</curvefile-prefix>
+    <out-xml-file>menuinfile</out-xml-file>
+    <seed>3001</seed>
+  </format>
+  <forces>
+    <coalescence>
+      <start-values> 0.01</start-values>
+      <method> PROGRAMDEFAULT</method>
+      <max-events>100000</max-events>
+      <profiles> percentile </profiles>
+      <constraints> unconstrained </constraints>
+      <prior type="log">
+        <paramindex> default </paramindex>
+        <lower> 1e-05 </lower>
+        <upper> 10 </upper>
+      </prior>
+    </coalescence>
+  </forces>
+  <data>
+    <region name="loci">
+      <model name="F84">
+        <normalize>false</normalize>
+        <categories>
+          <num-categories>1</num-categories>
+          <rates> 1</rates>
+          <probabilities> 1</probabilities>
+          <autocorrelation>1</autocorrelation>
+        </categories>
+        <relative-murate>1</relative-murate>
+        <base-freqs> 0.25 0.25 0.25 0.25 </base-freqs>
+        <ttratio>2</ttratio>
+      </model>
+      <effective-popsize>1</effective-popsize>
+      <spacing>
+        <block>
+          <map-position>85</map-position>
+          <offset>0</offset>
+        </block>
+        <block>
+          <map-position>39</map-position>
+          <offset>0</offset>
+        </block>
+        <block>
+          <map-position>88</map-position>
+          <offset>0</offset>
+        </block>
+        <block>
+          <map-position>1</map-position>
+          <offset>0</offset>
+        </block>
+        <block>
+          <map-position>0</map-position>
+          <offset>0</offset>
+        </block>
+      </spacing>
+    <population name="seattle">
+      <individual name="00_0001   ">
+        <sample name="00_0001   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATTCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCCGTTTGGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0006   ">
+        <sample name="00_0006   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATTCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAATTTCGCCCAGGGCTAGGGGGTGCCGGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0007   ">
+        <sample name="00_0007   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATTCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAACAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0002   ">
+        <sample name="00_0002   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0008   ">
+        <sample name="00_0008   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0003   ">
+        <sample name="00_0003   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGCTTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0009   ">
+        <sample name="00_0009   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGG
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAGCTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0005   ">
+        <sample name="00_0005   ">
+          <datablock type="DNA">
+            GGC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTAGCCCATGTGTGATTAGAAGGGTAACCGG
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAACTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0000   ">
+        <sample name="00_0000   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GATGGATTTTATTCGCTTGCCCATGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAACTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+      <individual name="00_0004   ">
+        <sample name="00_0004   ">
+          <datablock type="DNA">
+            GAC
+          </datablock>
+          <datablock type="DNA">
+            GGTGGATTTTATTCGCTAGCCCACGTGTGATTAGAAGGGTAACCGA
+          </datablock>
+          <datablock type="DNA">
+            ATGCCACAACTA
+          </datablock>
+          <datablock type="DNA">
+            AAGCAGTTTCGCCCAGGGCTAGGGGGTGCCAGGGTCAT
+          </datablock>
+          <datablock type="DNA">
+            C
+          </datablock>
+        </sample>
+      </individual>
+    </population>
+    </region>
+  </data>
+</lamarc>
diff --git a/doc/testfiles/v2.0.infiles/infile.quick b/doc/testfiles/v2.0.infiles/infile.quick
new file mode 100644
index 0000000..1e8fc5d
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.quick
@@ -0,0 +1,98 @@
+<lamarc>
+<!-- Created from the LamarcDS DataStore -->
+	<forces>
+		<coalescence>
+			<start-values> 0.01 </start-values>
+			<method> USER </method>
+			<max-events> 1000 </max-events>
+		</coalescence>
+	</forces>
+	<!-- -->
+	<chains>
+		<replicates> 1 </replicates>
+		<heating>
+			<temperatures> 1 </temperatures>
+			<swap-interval> 1 </swap-interval>
+		</heating>
+		<strategy>
+			<resimulating> 1 </resimulating>
+		</strategy>
+			<initial>
+				<number> 3 </number>
+				<samples> 200 </samples>
+				<discard> 1000 </discard>
+				<interval> 20 </interval>
+			</initial>
+		<final>
+			<number> 1 </number>
+			<samples> 500 </samples>
+			<discard> 1000 </discard>
+			<interval> 20 </interval>
+		</final>
+	</chains>
+	<!-- -->
+	<format>
+		<verbosity> verbose </verbosity>
+		<progress-reports> verbose </progress-reports>
+		<echo> true </echo>
+		<plotting>
+			<profile> false </profile>
+			<posterior> false </posterior>
+		</plotting>
+		<seed> 1005 </seed>
+		<parameter-file> parmfile </parameter-file>
+		<results-file> outfile.quick </results-file>
+		<in-summary-file> insumfile.quick </in-summary-file>
+		<out-summary-file> outsumfile.quick </out-summary-file>
+	</format>
+	<!-- -->
+	<data>
+		<region name="region 1">
+			<population name="Population JYRM">
+				<individual name="germ_10   ">
+					<sample name="germ_10   ">
+						<datablock type="DNA">
+							TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_11   ">
+					<sample name="germ_11   ">
+						<datablock type="DNA">
+							AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_20   ">
+					<sample name="germ_20   ">
+						<datablock type="DNA">
+							ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_21   ">
+					<sample name="germ_21   ">
+						<datablock type="DNA">
+							ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_30   ">
+					<sample name="germ_30   ">
+						<datablock type="DNA">
+							ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_31   ">
+					<sample name="germ_31   ">
+						<datablock type="DNA">
+							ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+	</data>
+</lamarc>
+
diff --git a/doc/testfiles/v2.0.infiles/infile.regheat b/doc/testfiles/v2.0.infiles/infile.regheat
new file mode 100644
index 0000000..5051f43
--- /dev/null
+++ b/doc/testfiles/v2.0.infiles/infile.regheat
@@ -0,0 +1,146 @@
+<lamarc>
+<!-- Created from the LamarcDS DataStore -->
+	<forces>
+		<coalescence>
+			<start-values> 0.01 </start-values>
+			<method> USER </method>
+			<max-events> 1000 </max-events>
+		</coalescence>
+	</forces>
+	<!-- -->
+	<chains>
+		<replicates> 1 </replicates>
+		<heating>
+			<temperatures> 1 2 3 4 </temperatures>
+			<swap-interval> 1 </swap-interval>
+                        <adaptive> true </adaptive>
+		</heating>
+		<strategy>
+			<resimulating> 1 </resimulating>
+		</strategy>
+			<initial>
+				<number> 10 </number>
+				<samples> 500 </samples>
+				<discard> 1000 </discard>
+				<interval> 20 </interval>
+			</initial>
+		<final>
+			<number> 2 </number>
+			<samples> 10000 </samples>
+			<discard> 1000 </discard>
+			<interval> 20 </interval>
+		</final>
+	</chains>
+	<!-- -->
+	<format>
+		<verbosity> verbose </verbosity>
+		<progress-reports> verbose </progress-reports>
+		<echo> true </echo>
+		<plotting>
+			<profile> false </profile>
+			<posterior> false </posterior>
+		</plotting>
+		<seed> 1005 </seed>
+		<parameter-file> parmfile </parameter-file>
+		<results-file> outfile.regheat </results-file>
+		<in-summary-file> insumfile.regheat </in-summary-file>
+		<out-summary-file> outsumfile.regheat </out-summary-file>
+                <use-out-summary> true </use-out-summary>
+                <use-in-summary> false </use-in-summary>
+	</format>
+	<!-- -->
+	<data>
+		<region name="multi">
+			<population name="finland">
+				<individual name="germ_10   ">
+					<sample name="germ_10   ">
+						<datablock type="DNA">
+							TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_11   ">
+					<sample name="germ_11   ">
+						<datablock type="DNA">
+							AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_20   ">
+					<sample name="germ_20   ">
+						<datablock type="DNA">
+							ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_21   ">
+					<sample name="germ_21   ">
+						<datablock type="DNA">
+							ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_30   ">
+					<sample name="germ_30   ">
+						<datablock type="DNA">
+							ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_31   ">
+					<sample name="germ_31   ">
+						<datablock type="DNA">
+							ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+		<region name="multi2">
+			<population name="finland">
+				<individual name="germ_10   ">
+					<sample name="germ_10   ">
+						<datablock type="DNA">
+							TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_11   ">
+					<sample name="germ_11   ">
+						<datablock type="DNA">
+							AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_20   ">
+					<sample name="germ_20   ">
+						<datablock type="DNA">
+							ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_21   ">
+					<sample name="germ_21   ">
+						<datablock type="DNA">
+							ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_30   ">
+					<sample name="germ_30   ">
+						<datablock type="DNA">
+							ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="germ_31   ">
+					<sample name="germ_31   ">
+						<datablock type="DNA">
+							ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+	</data>
+</lamarc>
diff --git a/doc/testfiles/ward_short.phy b/doc/testfiles/ward_short.phy
new file mode 100644
index 0000000..668ac80
--- /dev/null
+++ b/doc/testfiles/ward_short.phy
@@ -0,0 +1,73 @@
+12 360
+3AY        ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtatt
+tcgtacattactgccagccaccatgaatattgtacggtaccataaatacttgaccacctgtagtacataa
+aaacccaatccacatcaaaaccccccccccatgcttacaagcaagtacaacaatcaaccttcaactatca
+cacatcaactgcaactccaaagccacccctcacccactaggatatcaacaaacctacccacccttaacag
+tacatagtacataaagccatttaccgtacatagcacattacagtcaaatcccttctcgtccccatggatg
+acccccctca
+5BZ        ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtatt
+tcgtacattactgccagccaccatgaatattgtacggtaccataaatacttgaccacctgtagtacataa
+aaacccaatccacatcaaaaccccctccccatgcttacaagcaagtacaacaatcaaccttcaactatca
+cacatcaactgcaactccaaagccacccctcacccactaggatatcaacaaacctacccacccttaacag
+tacatagtacataaagccatttaccgtacatagcacattacagtcaaatcccttctcgtccccatggatg
+acccccctca
+7D         ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtatt
+tcgtacattactgccagccaccatgaatattgtacggtaccataaatacttgaccacctgtagtacataa
+aaacccaatccacatcaaaaccccctccccatgcttacaagcaagtacagcaatcaaccttcaactatca
+cacatcaactgcaactccaaagccacccctcacccactaggatatcaacaaacctacccacccttaacag
+tacatagtacataaagccatttaccgtacatagcacattacagtcaaatcccttctcgcccccatggatg
+acccccctca
+13H         ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtatt
+tcgtacattactgccagtcaccatgaatattgtacagtaccataaatacttgaccacctgtagtacataa
+aaacccaatccacatcaaaaccccctccccatgcttacaagcaagtacagcaatcaaccttcaactatca
+cacatcaactgcaactccaaagccacccctcacccactaggataccaacaaacctatccacccttaacag
+tacatagtacataaaaccatttaccgtacatagcacattacagtcaaatcccttctcgcccccatggatg
+acccccctca
+18K         ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtatt
+tcgtacattactgccagtcaccatgaatattgtacggtaccataaatacttgaccacctgtagtacataa
+aaacccaatccacatcaaaaccccctccccatgcttacaagcaagtacagcaatcaaccttcaactatca
+cacatcaactgcaactccaaagccacccctcacccactaggataccaacaaacctatccacccttaacag
+tacatagtacataaaaccatttaccgtacatagcacattacagtcaaatcccttctcgcccccatggatg
+acccccctca
+20KY        ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtatt
+tcgtacattactgccagtcaccatgaatattgtacggtaccataaatacttgaccacctgtagtacataa
+aaacccaatccacatcaaaaccccctccccatgcttacaagcaagtacagcaatcaaccttcaactatca
+cacatcaactgcaactccaaagccacccctcacccactaggataccaacaaacctatccacccttaacag
+tacatagtacataaaaccatttaccgtacatagcacattacagtcaaatcccttctcgcccccatggatg
+acccccctca
+24LZ        ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtatt
+tcgtacattactgccagtcaccatgaatattgtacggtaccataaatacttgaccacctgtagtacataa
+aaacccaatccacatcaaaaccccctccccatgcttacaagcaagtacagcaatcaaccctcaactatca
+cacatcaactgcaactccaaagccacccctcacccactaggataccaacaaacctatccacccttaacag
+tacatagtacataaaaccatttaccgtacatagcacattacagtcaaatcccttctcgcccccatggatg
+acccccctca
+26LX        ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtatt
+tcgtacattactgccagtcaccatgaatattgtacggtaccataaatacttgaccacctgtagtacataa
+aaacccaatccacatcaaaaccccctccccatgcttacaagcaagtacagcaatcaaccctcaactatca
+cacatcaactgcaactccaaagccacccctcacccactaggataccaacaaacctatccacccttaacag
+tacatagtacataaaaccatttaccgtacatagcacattacagtcaaatcccttctcgcccccatggatg
+acccccctca
+29LU        ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtatt
+tcgtacattactgccagtcaccatgaatattgtacggtaccataaatacttgaccacctgtagtacataa
+aaacccaatccacatcaaaaccccctccccatgcttacaagcaagtacagcaatcaaccctcaactatca
+cacatcaactgcaactccaaagccacccctcacccactaggataccaacaaacctatccacccttaacag
+tacatagtacataaaaccatttaccgtacatagcacattacagtcaaatcccttctcgcccccatggatg
+acccccctca
+38R         ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtatt
+tcgtacattactgccagccaccatgaatattgtacggtaccataaatacttgatcacctgtagtacataa
+aaacccaatccacatcaaaaccccctccccatgcttacaagcaagtacagcaatcaaccttcaactatca
+cacatcaactgcaactccaaagccacccctcacccactaggataccaacaaacctacccacccttaacag
+tacatagtacataaagccattcaccgtacatagcacattacagtcaaatcccttctcgcccccatggatg
+acccccctca
+49VY        ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtact
+tcgtacattactgccagccaccatgaatattgtacggtaccataaatacttgaccacctgtagtacataa
+aaacccaatccacatcaaaaccccctccccatgcttacaagcaagtacagcaatcaaccttcaactatca
+cacatcaactgcaactccaaagccacccctcacccactaggataccaacaaacctacccacccttaacag
+tacatagtacataaagccattcaccgtacatagcacattacagtcaaatcccttctcgtccccatggatg
+acccccctca
+61Z         ttctttcatggggaagcagatttgggtaccacccaagtattgactcacccatcaacaaccgctatgtatt
+tcgtacattactgccagccaccatgaatattgtacggtaccataaatacttgaccacctgtagtacataa
+aaacccaatccacatcaaaaccccctccccatgcttacaagcaagtacagcaatcaaccctcaactatca
+cacatcaattgcaactccaaagccacccctcacccactaggataccaacaaacctacccaccctcaacag
+tacatagtacataaagccattcatcgtacatagcacatcacagtcaaatcccttctcgtccccatggatg
+acccccctca
diff --git a/doc/wx-notes.txt b/doc/wx-notes.txt
new file mode 100644
index 0000000..20608a4
--- /dev/null
+++ b/doc/wx-notes.txt
@@ -0,0 +1,207 @@
+########################################################################
+# HOW TO BUILD WX WIDGETS LIBRARIES
+########################################################################
+
+This file is supposed to live in
+
+    sanity@lamarc.gs.washington.edu:wx-libs/wx-notes.txt
+
+
+#########################################
+Note: if you want to build a wxWidgets library for use with the mingw
+cross compiler (for running executables on MS Windows boxen), see
+sanity@lamarc.gs.washington.edu:cross-tools/cross-notes.txt
+first to make sure you have the mingw cross compiler set up.
+
+#########################################
+# Where to put it
+
+I installed stuff in the "sanity" user account on lamarc. You should be
+able to read and use stuff there just by following the path
+
+    /net/gs/vol1/home/sanity
+
+If you want to make changes, you'll need the IT folks to give you sudo
+access. Once that is done, you can log in this way:
+
+    sudo -H -s -u sanity
+
+
+#########################################
+# Getting the packages
+
+Packages are available from the wxWidgets web site:
+
+    http://www.wxwidgets.org/
+
+Follow links to the "downloads" section and choose an appropriate
+version of the wxALL source archive. Note that a wxWidgets "Stable
+Release" is usually more stable than we want -- for this build I
+chose the "Development Release" 2.9.4 because it was better for 
+building for the Mac.
+
+
+#########################################
+# Building Linux wxWidgets libraries
+
+Here are the steps I used to build and install the release and debug
+64-bit Linux wxWidgets libraries in the sanity account. I built debug
+first because wx-config assumes the most recently installed library
+is the default installation, so we want the 'release' one done last.
+
+    # Step 0:
+    # These are some values I used, but which you might want or need
+    # to do differently
+    export WX_VERSION=2.9.4
+    export WX_INSTALL_LOC=$HOME/wx-libs/wxLinux
+
+    # Step 1:
+    # untar your new distribution -- version number will be different for you
+    # the 'j' option to tar tells it this is a .bz2 file; use 'z' if you have .gz
+    cd $HOME/wx-libs/builds
+    tar xfvj ./../tars/wxWidgets-${WX_VERSION}.tar.bz2
+
+    # Step 2:
+    # invoke modules to get a more modern default g++ compiler and
+    # library.
+    . /etc/profile.d/modules.sh
+    module load modules modules-init modules-gs
+    module load gmp mpfr/2.4.1 gcc
+
+    # Step 3:
+    # build and install the debug wxWidgets library
+    #
+    # we use '--disable-shared' on all installations because
+    # that allows us to munge the library into the executable
+    # we give to users. It makes it big but most users cannot
+    # competently do installations with shared libraries
+    #
+    # '--with-libtiff=builtin' works around some unpleasantness
+    # in most Linux distributions
+    #
+    # getting rid of '--disable-unicode' would require unpleasant
+    # but doable work in our code base. It has been easy to
+    # put off.
+    cd $HOME/wx-libs/builds/wxWidgets-${WX_VERSION}
+    mkdir debug
+    cd debug
+    ../configure --disable-shared --with-libtiff=builtin \
+        --disable-unicode \
+        --prefix=$WX_INSTALL_LOC --enable-debug
+    make
+    make install
+
+    # Step 4:
+    # build and install the release wxWidgets library
+    cd $HOME/wx-libs/builds/wxWidgets-${WX_VERSION}
+    mkdir release
+    cd release
+    ../configure --disable-shared --with-libtiff=builtin \
+        --disable-unicode \
+        --prefix=$WX_INSTALL_LOC
+    make
+    make install
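+
+    # Optional check: the freshly installed wx-config should now report
+    # the 'release' configuration as its default ('--list' prints the
+    # installed configurations and notes which one is the default)
+    $WX_INSTALL_LOC/bin/wx-config --version
+    $WX_INSTALL_LOC/bin/wx-config --list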
+
+#########################################
+# Building cross-compiling wxWidgets libraries
+
+Here are the steps I used to build and install the 32-bit and 64-bit
+releases of the wxWidgets libraries. I assume you have done the
+above already, so I didn't repeat steps to unpack archives, etc.,
+but I did repeat environment variable settings and modules commands
+since those are the trickiest things. Note that I installed these
+into a different directory than the Linux libraries. Theoretically,
+it should be possible to do it all in one place, but I have had
+trouble with that in the past.
+
+    # Step 0:
+    # These are some values I used, but which you might want or need
+    # to do differently
+    export WX_VERSION=2.9.4
+    export WX_INSTALL_LOC=$HOME/wx-libs/wxMSW
+
+    # Step 1:
+    # invoke modules to get a more modern default g++ compiler and
+    # library.
+    . /etc/profile.d/modules.sh
+    module load modules modules-init modules-gs
+    module load gmp mpfr/2.4.1 gcc
+
+    # Step 2:
+    # Unfortunately, in order to make the modules command make
+    # everything 'just work' for the typical user, several environment
+    # variables are set which hose the cross-compiling process.
+    # These steps undo that:
+    unset CC
+    unset CPP
+    unset CXX
+    unset CPPFLAGS
+    unset LDFLAGS
+
+    # Step 3a:
+    # get the desired 32-bit cross-compiler on our $PATH
+    export CROSS_HOME=/net/gs/vol1/home/sanity/cross-tools/cross_win32
+    export HOST_TYPE=i686-w64-mingw32
+    export PATH=$CROSS_HOME/bin:$CROSS_HOME/$HOST_TYPE/bin/:$PATH
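+    # (optional) quick check that the cross tools now picked up from
+    # $PATH are the ones we just added
+    which ${HOST_TYPE}-gcc
+    ${HOST_TYPE}-gcc --version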
+
+    # Step 3b:
+    # build and install the release wxWidgets library
+    cd $HOME/wx-libs/builds/wxWidgets-${WX_VERSION}
+    mkdir msw-32
+    cd msw-32
+    ../configure --disable-shared --with-libtiff=builtin \
+        --host=$HOST_TYPE \
+        --prefix=$WX_INSTALL_LOC
+    make
+    make install
+
+
+    # Step 4a:
+    # get the desired 64-bit cross-compiler on our $PATH
+    export CROSS_HOME=/net/gs/vol1/home/sanity/cross-tools/cross_win64
+    export HOST_TYPE=x86_64-w64-mingw32
+    export PATH=$CROSS_HOME/bin:$CROSS_HOME/$HOST_TYPE/bin/:$PATH
+
+    # Step 4b:
+    # build and install the release wxWidgets library
+    cd $HOME/wx-libs/builds/wxWidgets-${WX_VERSION}
+    mkdir msw-64
+    cd msw-64
+    ../configure --disable-shared --with-libtiff=builtin \
+        --host=$HOST_TYPE \
+        --prefix=$WX_INSTALL_LOC
+    make
+    make install
+
+
+#########################################
+# Building 64-bit Mac OS X libraries
+
+Here's what I did to build a wxWidgets library that works for 10.6 and 10.7:
+
+
+    cd path/to/wxWidgetsDistribution
+    mkdir osx
+    cd osx
+    ../configure --disable-unicode --disable-shared \
+      --with-osx_cocoa \
+      --with-macosx-version-min=10.6 \
+      --with-macosx-sdk=/Developer/SDKs/MacOSX10.6.sdk \
+      --with-libjpeg=builtin --with-libpng=builtin \
+      --with-regex=builtin --without-libtiff \
+      --with-zlib=builtin --with-expat=builtin \
+      --prefix=$HOME/my-wx-lib
+    make
+    make install
+
+# note -- you will need to use the following flags when configuring lamarc
+
+    ../configure --disable-unicode --disable-shared \
+      --with-osx_cocoa \
+      --with-macosx-version-min=10.6 \
+      --with-macosx-sdk=/Developer/SDKs/MacOSX10.6.sdk \
+      --with-wx-config=$HOME/my-wx-lib/bin/wx-config
+
diff --git a/doc/wx-osx-notes.txt b/doc/wx-osx-notes.txt
new file mode 100644
index 0000000..cf479c2
--- /dev/null
+++ b/doc/wx-osx-notes.txt
@@ -0,0 +1,57 @@
+This file is supposed to live here:
+
+    malecot.gs.washington.edu:/Users/Shared/wxWidgets/wx-osx-notes.txt
+
+* fetched wxWidgets source from wxwidgets.org
+* untar'd under /Users/Shared/wxWidgets/Builds
+
+
+    export WX_VERSION=2.9.4
+    export WX_INSTALL=/Users/Shared/wxWidgets/wx-install
+    cd /Users/Shared/wxWidgets/Builds/wxWidgets-${WX_VERSION}
+
+    mkdir osx-debug
+    cd osx-debug
+    ../configure --disable-unicode --disable-shared \
+        --with-osx_cocoa \
+        --with-macosx-version-min=10.6 \
+        --with-macosx-sdk=/Developer/SDKs/MacOSX10.6.sdk \
+        --with-libjpeg=builtin --with-libpng=builtin \
+        --with-regex=builtin --without-libtiff \
+        --with-zlib=builtin --with-expat=builtin \
+        --enable-debug \
+        --prefix=${WX_INSTALL}
+
+    make
+    make install
+
+    cd /Users/Shared/wxWidgets/Builds/wxWidgets-${WX_VERSION}
+    mkdir osx-release
+    cd osx-release
+    ../configure --disable-unicode --disable-shared \
+        --with-osx_cocoa \
+        --with-macosx-version-min=10.6 \
+        --with-macosx-sdk=/Developer/SDKs/MacOSX10.6.sdk \
+        --with-libjpeg=builtin --with-libpng=builtin \
+        --with-regex=builtin --without-libtiff \
+        --with-zlib=builtin --with-expat=builtin \
+        --prefix=${WX_INSTALL}
+
+    make
+    make install
+
+# note -- you will need to use the following flags when configuring lamarc
+
+    export WX_INSTALL=/Users/Shared/wxWidgets/wx-install
+
+    cvs co -P lamarc
+    cd lamarc
+    mkdir release-osx
+    cd release-osx
+    ../configure --disable-unicode --disable-shared \
+        --with-osx_cocoa \
+        --with-macosx-version-min=10.6 \
+        --with-macosx-sdk=/Developer/SDKs/MacOSX10.6.sdk \
+        --with-wx-config=${WX_INSTALL}/bin/wx-config
+
+
diff --git a/doc/xmltags b/doc/xmltags
new file mode 100644
index 0000000..11eed06
--- /dev/null
+++ b/doc/xmltags
@@ -0,0 +1,53 @@
+//$Id: xmltags,v 1.2 2002/03/19 23:58:37 beerli Exp $
+lamarc
+data
+region
+population
+individual
+sample
+datablock
+phase   -- V2
+model
+base-freqs
+ttratio
+categories
+normalize
+num-categories
+rates
+probabilities
+autocorrelation
+forces
+coalescence
+migration
+recombination
+method
+start-values
+max-events
+profiles
+chains
+replicates
+heating
+strategy
+initial
+final
+temperatures
+swap-interval
+heating-strategy
+resimulating
+haplotyping
+number
+samples
+interval
+discard
+format
+verbosity
+progress-reports
+echo
+plotting
+seed
+parameter-file  -- V2
+results-file
+summary-file  -- V2
+posterior -- V2
+
+
diff --git a/resources/Info.plist.in b/resources/Info.plist.in
new file mode 100644
index 0000000..827c763
--- /dev/null
+++ b/resources/Info.plist.in
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
+<plist version="0.9">
+<dict>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleIdentifier</key>
+	<string>edu.washington.gs.evolution.IDENTIFIER</string>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>English</string>
+	<key>CFBundleExecutable</key>
+	<string>COMMAND</string>
+	<key>CFBundleIconFile</key>
+	<string>ICONNAME.icns</string>
+	<key>CFBundleName</key>
+	<string>EXECUTABLE</string>
+	<key>CFBundlePackageType</key>
+	<string>APPL</string>
+	<key>CFBundleSignature</key>
+	<string>????</string>
+	<key>CFBundleVersion</key>
+	<string>VERSION</string>
+	<key>CFBundleShortVersionString</key>
+	<string>VERSION</string>
+	<key>CFBundleGetInfoString</key>
+	<string>EXECUTABLE version VERSION, (c) 2007 Kuhner et al.</string>
+	<key>CFBundleLongVersionString</key>
+	<string>VERSION, (c) 2007 Kuhner et al.</string>
+	<key>NSHumanReadableCopyright</key>
+	<string>Copyright 2007 Kuhner et al.</string>
+	<key>LSRequiresCarbon</key>
+	<true/>
+	<key>CSResourcesFileMapped</key>
+	<true/>
+</dict>
+</plist>
diff --git a/resources/command.in b/resources/command.in
new file mode 100755
index 0000000..e9153c4
--- /dev/null
+++ b/resources/command.in
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+# Directory we run from: blah/EXECUTABLE.app/Contents/MacOS
+mydir=`dirname "$0"`
+
+# Extra quoting required because $mydir may have spaces and double-quotes get
+# eaten below.
+
+scriptcmd="cd \\\"${mydir}/../../..\\\" ; \\\"${mydir}/EXECUTABLE\\\" -x ; exit"
+osascript \
+-e 'tell application "Terminal"' \
+-e 'activate' \
+-e "do script \"$scriptcmd\"" \
+-e 'set background color of window 1 to {52224, 65535, 65535}' \
+-e 'set normal text color of window 1 to "black"' \
+-e 'set cursor color of window 1 to "black"' \
+-e 'set custom title of window 1 to "EXECUTABLE"' \
+-e 'end tell'
+
+
diff --git a/resources/empty16.bmp b/resources/empty16.bmp
new file mode 100644
index 0000000..7c4e1fe
Binary files /dev/null and b/resources/empty16.bmp differ
diff --git a/resources/empty16.xpm b/resources/empty16.xpm
new file mode 100644
index 0000000..914a17c
--- /dev/null
+++ b/resources/empty16.xpm
@@ -0,0 +1,20 @@
+/* XPM */
+static const char *empty16_xpm[]={
+"16 16 1 1",
+". c None",
+"................",
+"................",
+"................",
+"................",
+"................",
+"................",
+"................",
+"................",
+"................",
+"................",
+"................",
+"................",
+"................",
+"................",
+"................",
+"................"};
diff --git a/resources/excl16.bmp b/resources/excl16.bmp
new file mode 100644
index 0000000..f3047ff
Binary files /dev/null and b/resources/excl16.bmp differ
diff --git a/resources/excl16.xpm b/resources/excl16.xpm
new file mode 100644
index 0000000..f4a9461
--- /dev/null
+++ b/resources/excl16.xpm
@@ -0,0 +1,21 @@
+/* XPM */
+static const char *excl16_xpm[]={
+"16 16 2 1",
+". c None",
+"# c #ff0000",
+"......####......",
+"......####......",
+"......####......",
+"......####......",
+"......####......",
+"......####......",
+"......####......",
+"......####......",
+"......####......",
+"......####......",
+"................",
+"................",
+"......####......",
+"......####......",
+"......####......",
+"......####......"};
diff --git a/resources/giraffe32.bmp b/resources/giraffe32.bmp
new file mode 100644
index 0000000..45884d8
Binary files /dev/null and b/resources/giraffe32.bmp differ
diff --git a/resources/giraffe32.xpm b/resources/giraffe32.xpm
new file mode 100644
index 0000000..3d56147
--- /dev/null
+++ b/resources/giraffe32.xpm
@@ -0,0 +1,48 @@
+/* XPM */
+static const char *giraffe32_xpm[]={
+"32 32 13 1",
+"h c #000000",
+"g c #008183",
+"j c #00ff00",
+"e c #414000",
+"d c #808000",
+"# c #838100",
+"f c #c0c000",
+"a c #c5c200",
+". c #c5ffff",
+"k c #ffc0ff",
+"i c #ffffc0",
+"c c #ffffc5",
+"b c #ffffff",
+"................................",
+".......###.........###..........",
+"......#####.......#####.........",
+"......#####.......#####.........",
+"......#####.......#####.........",
+".......###.........###..........",
+".aaab...aab.......aac.....bb....",
+".a##aa..aaaacaaaccaac...aaaa....",
+"..a##baabaaabbacaaaac.aac#a.....",
+"...a#dbbabaaabbbbbaaaccc##a.....",
+"....addabcaaaacbebabaaa##a......",
+".....adacccccacaaaaaaabaa.......",
+"......ffc#ggcccaa#ggaaaa........",
+".......fbhbgcbcaahbgaab.........",
+".......fchh#cbbaahh#cab.........",
+".......ffccccbabaaaacbf.........",
+"........fccccbaabbbccf..........",
+".........fcaabaaabbabf..........",
+"..........cffibaaaabbc..........",
+"...........abbbaaabaac..........",
+"..........fbbbaaabaaab#.........",
+"jjjjjjjjjjfbabbbbbaaac#jjjjjjjjj",
+"jjjjjjjjjjfbaaaaabaaac#jjjjjjjjj",
+"jjjjjjjjjjfaahaahbacc##jjjjjjjjj",
+"jjjjjjjjjjfcaaaaaabc####jjjjjjjj",
+"jjjjjjjjjjjbfkkkkcccccc#jjjjjjjj",
+"jjjjjjjjjjj#ikkkk#####ccjjjjjjjj",
+"jjjjjjjjjjjcckkkk#####c##jjjjjjj",
+"jjjjjjjjjjj#cdkk#c####c##jjjjjjj",
+"jjjjjjjjjjj##ckk##ccccc###jjjjjj",
+"jjjjjjjjjjj###c###c####cc#jjjjjj",
+"jjjjjjjjjjj###c###c######cjjjjjj"};
diff --git a/resources/lam_conv.icns b/resources/lam_conv.icns
new file mode 100644
index 0000000..32aa46a
Binary files /dev/null and b/resources/lam_conv.icns differ
diff --git a/resources/lam_conv.ico b/resources/lam_conv.ico
new file mode 100644
index 0000000..439f4b7
Binary files /dev/null and b/resources/lam_conv.ico differ
diff --git a/resources/lam_conv_rc.rc b/resources/lam_conv_rc.rc
new file mode 100644
index 0000000..a6b3b66
--- /dev/null
+++ b/resources/lam_conv_rc.rc
@@ -0,0 +1,3 @@
+Resource ICON "lam_conv.ico"
+excl16 BITMAP "excl16.bmp"
+empty16 BITMAP "empty16.bmp"
diff --git a/resources/lamarc.command b/resources/lamarc.command
new file mode 100755
index 0000000..e087e8b
--- /dev/null
+++ b/resources/lamarc.command
@@ -0,0 +1,24 @@
+#!/bin/sh
+
+# Directory we run from: blah/lamarc.app/Contents/MacOS
+LAMARC_BOTTOMDIR=`dirname "$0"`
+LAMARC_TOPDIR="$LAMARC_BOTTOMDIR/../../.."
+LAMARC_OUTPUT="$(/usr/bin/osascript -e "tell application \"System Events\" to activate" -e "tell application \"System Events\" to set thefile to choose folder with prompt \"Select your LAMARC output directory\"" -e "do shell script (\"echo \"&(quoted form of POSIX path of thefile as Unicode text)&\"\")")"
+LAMARC_INFILE="$(/usr/bin/osascript -e "tell application \"System Events\" to activate" -e "tell application \"System Events\" to set thefile to choose file with prompt \"Select your LAMARC infile\"" -e "do shell script (\"echo \"&(quoted form of POSIX path of thefile as Unicode text)&\"\")")"
+
+
+# Extra quoting required because $dirname may have spaces and double-quotes get
+# eaten below.
+
+scriptcmd="cd \\\"${mydir}/../../..\\\" ; \\\"${mydir}/lamarc -x\\\" ; exit"
+osascript <<EOF
+tell application "Terminal"
+	activate
+	do script "cd $LAMARC_OUTPUT; $LAMARC_BOTTOMDIR/lamarc -x $LAMARC_INFILE; exit"
+	set background color of window 1 to {52224, 65535, 65535}
+	set normal text color of window 1 to "black"
+	set cursor color of window 1 to "black"
+	set custom title of window 1 to "lamarc"
+end tell
+EOF
+
diff --git a/resources/lamarc.icns b/resources/lamarc.icns
new file mode 100644
index 0000000..1b0caf3
Binary files /dev/null and b/resources/lamarc.icns differ
diff --git a/resources/lamarc.ico b/resources/lamarc.ico
new file mode 100644
index 0000000..27d7f84
Binary files /dev/null and b/resources/lamarc.ico differ
diff --git a/resources/lamarc.xsd b/resources/lamarc.xsd
new file mode 100644
index 0000000..ab8e084
--- /dev/null
+++ b/resources/lamarc.xsd
@@ -0,0 +1,347 @@
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
+
+<xsd:element name="lamarc">
+    <xsd:complexType>
+        <xsd:all>
+            <xsd:element name="genomeStructure" type="genomeStructureType" minOccurs="1" maxOccurs="1"/>
+            <xsd:element name="sampleSpace"     type="sampleSpaceType"     minOccurs="1" maxOccurs="1"/>
+            <xsd:element name="analysis"        type="analysisType"        minOccurs="0" maxOccurs="1"/>
+            <xsd:element name="searchStrategy"  type="searchStrategyType"  minOccurs="0" maxOccurs="1"/>
+            <xsd:element name="reporting"       type="reportingType"       minOccurs="0" maxOccurs="1"/>
+            <xsd:element name="recovery"        type="recoveryType"        minOccurs="0" maxOccurs="1"/>
+        </xsd:all>
+    </xsd:complexType>
+</xsd:element>
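+<!-- A rough sketch of the overall document shape this schema is aiming at
+     (content elided; only genomeStructure and sampleSpace are required):
+
+         <lamarc>
+             <genomeStructure> ... </genomeStructure>
+             <sampleSpace> ... </sampleSpace>
+             <analysis> ... </analysis>
+             <searchStrategy> ... </searchStrategy>
+             <reporting> ... </reporting>
+             <recovery> ... </recovery>
+         </lamarc>
+-->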
+
+<xsd:complexType name="genomeStructureType">
+    <xsd:all>
+        <xsd:element name="region" minOccurs="1">
+            <xsd:complexType>
+                <xsd:all>
+                    <xsd:element name="effectivePopulationSize" type="xsd:decimal"          minOccurs="1" maxOccurs="1"/>
+                    <xsd:element name="numCopies"               type="xsd:positiveInteger"  minOccurs="1" maxOccurs="1"/>
+                    <xsd:element name="locus"                   type="locusType"            minOccurs="1"/>
+                </xsd:all>
+                <xsd:attribute name="name" type="xsd:string"/>
+            </xsd:complexType>
+        </xsd:element>
+    </xsd:all>
+</xsd:complexType>
+
+<xsd:complexType name="locusType">
+    <xsd:complexType>
+        <xsd:all>
+            <xsd:element name="regionOffset"        type="xsd:integer"          minOccurs="0" maxOccurs="1"/>
+            <xsd:element name="length"              type="xsd:positiveInteger"  minOccurs="1" maxOccurs="1"/>
+            <xsd:element name="numMarkers"          type="xsd:positiveInteger"  minOccurs="1" maxOccurs="1"/>
+            <xsd:element name="markerLocations"     type="MARKER_LIST_TYPE"     minOccurs="0" maxOccurs="1"/>
+            <xsd:element name="relativeMutation"    type="xsd:decimal"          minOccurs="0" maxOccurs="1"/>
+        </xsd:all>
+        <xsd:attribute name="name" type="xsd:string"/>
+    </xsd:complexType>
+</xsd:complexType>
+
+<!-- a list of integers in ascending order -->
+<xsd:simpleType name='MARKER_LIST_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='([0-9]+\s*)*'/>
+    </xsd:restriction>
+</xsd:simpleType>
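+<!-- e.g. a markerLocations value such as "5 17 243 998" (hypothetical positions) -->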
+
+<xsd:complexType name="sampleSpaceType">
+    <xsd:all>
+        <xsd:complexType name="population" minOccurs="1">
+            <xsd:all>
+                <xsd:complexType name="individual" minOccurs="1">
+                    <xsd:all>
+                        <!-- number of hapLikeSample occurrences no more than the value of the corresponding region's "numCopies" tag -->
+                        <xsd:complexType name="hapLikeSample" minOccurs="1">
+                            <xsd:all>
+                                <!-- each occurrence of locusSample must have a locusName attribute which -->
+                                <!-- matches the name attribute within a locusType element                -->
+                                <xsd:complexType name="locusSample" minOccurs="1">
+                                    <xsd:all>
+                                        <xsd:element name="data"          type="xsd:string"         minOccurs="1" maxOccurs="1"/>
+                                        <!-- probably need to replace phaseKnown/phaseUnknown with 2 versions -->
+                                        <!--    one with explicit locations and one with                      -->
+                                        <!--    an attribute that makes all known or all unknown              -->
+                                        <xsd:element name="phaseKnown"    type="PHASE_STRING_TYPE"  minOccurs="0" maxOccurs="1"/>
+                                        <xsd:element name="phaseUnknown"  type="PHASE_STRING_TYPE"  minOccurs="0" maxOccurs="1"/>
+                                        <xsd:element name="markerWeights" type="MARKER_WEIGHT_TYPE" minOccurs="0" maxOccurs="1"/>
+                                    </xsd:all>
+                                    <xsd:attribute name="locusName" type="xsd:string"/>
+                                </xsd:complexType>
+                            </xsd:all>
+                        </xsd:complexType>
+                    </xsd:all>
+                    <xsd:attribute name="name" type="xsd:string"/>
+                </xsd:complexType>
+            </xsd:all>
+            <xsd:attribute name="name" type="xsd:string"/>
+        </xsd:complexType>
+    </xsd:all>
+</xsd:complexType>
+
+<!-- a sequence of zeroes and ones with arbitrary spaces -->
+<xsd:simpleType name='PHASE_STRING_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='(0|1|\s)*'/>
+    </xsd:restriction>
+</xsd:simpleType>
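+<!-- e.g. a phaseKnown/phaseUnknown value such as "0 1 1 0 1" (hypothetical markers) -->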
+
+<!-- a whitespace-separated sequence of non-negative numbers -->
+<xsd:simpleType name='MARKER_WEIGHT_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='(\s*[0-9]+(\.[0-9]+)?)*\s*'/>
+    </xsd:restriction>
+</xsd:simpleType>
+
+<xsd:complexType name="analysisType">
+    <xsd:all>
+        <xsd:element name="dataModels"          type="dataModelsType"           minOccurs="1"/>
+        <xsd:element name="evolutionaryForces"  type="evolutionaryForcesType"   />
+    </xsd:all>
+    <xsd:attribute name="type" type="ANALYSIS_TYPE"/>
+</xsd:complexType>
+
+<xsd:simpleType name='ANALYSIS_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='(bayesian|likelihood)'/>
+    </xsd:restriction>
+</xsd:simpleType>
+
+<xsd:complexType name="dataModelsType">
+    <xsd:all>
+        <xsd:complexType name="dataModel" minOccurs="1">
+            <xsd:all>
+                <xsd:element name='alpha'       type='xsd:decimal'  minOccurs='0' maxOccurs='1'/>
+                <xsd:element name='base-freqs'  type='xsd:string'   minOccurs='0' maxOccurs='1'/>
+                <xsd:element name='categories'                      minOccurs='0' maxOccurs='1'>
+                    <xsd:complexType>
+                        <xsd:all>
+                            <xsd:element name='num-categories'  type='xsd:positiveInteger'  minOccurs='0' maxOccurs='1'/>
+                            <xsd:element name='rates'           type='xsd:string'           minOccurs='0' maxOccurs='1'/>
+                            <xsd:element name='probabilities'   type='xsd:string'           minOccurs='0' maxOccurs='1'/>
+                            <xsd:element name='autocorrelation' type='xsd:decimal'          minOccurs='0' maxOccurs='1'/>
+                        </xsd:all>
+                    </xsd:complexType>
+                </xsd:element>
+                <xsd:element name='gtr-rates'   type='xsd:string'   minOccurs='0' maxOccurs='1'/>
+                <xsd:element name='isOptimized' type='YES_NO_TYPE'  minOccurs='0' maxOccurs='1'/>
+                <xsd:element name='normalize'               type='xsd:string' minOccurs='0' maxOccurs='1'/>
+                <xsd:element name='ttratio'     type='xsd:decimal'  minOccurs='0' maxOccurs='1'/>
+                <xsd:element name='dataTypeAssociation'     type='xsd:string' minOccurs='0' />
+                <xsd:element name='locusModelAssociation'   type='xsd:string' minOccurs='0' />
+                <xsd:element name='regionModelAssociation'  type='xsd:string' minOccurs='0' />
+            </xsd:all>
+            <xsd:attribute name="name" type="xsd:string"/>
+            <xsd:attribute name="modelType" type="MODEL_TYPE"/>
+        </xsd:complexType>
+    </xsd:all>
+</xsd:complexType>
+
+
+<xsd:complexType name="evolutionaryForcesType">
+    <xsd:all>
+        <xsd:complexType name="force">
+            <xsd:all>
+                <xsd:element name="maxEvents"    type="xsd:positiveInteger"  minOccurs='0' maxOccurs='1'/>
+                <xsd:element name="profileType"  type="PROFILE_TYPE"         minOccurs='0' maxOccurs='1'/>
+                <xsd:complexType name="analysisDefaults">
+                    <xsd:all>
+                        <xsd:element name="defaultStartValue"   type="startValueType"       minOccurs='0' maxOccurs='1'/>
+                        <xsd:element name="defaultPrior"        type="bayesianPriorType"    minOccurs='0' maxOccurs='1'/>
+                    </xsd:all>
+                </xsd:complexType>
+                <xsd:element name='analyzableQuantities'  type='analyzableQuantitiesType'  minOccurs='0' maxOccurs='1'/>
+            </xsd:all>
+            <xsd:attribute name="type" type="FORCE_TYPE"/>
+        </xsd:complexType>
+    </xsd:all>
+</xsd:complexType>
+
+<xsd:simpleType name='FORCE_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='(coalescence|growth|migration|recombination|trait)'/>
+    </xsd:restriction>
+</xsd:simpleType>
+
+<xsd:simpleType name='PROFILE_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='(fixed|percentage)'/>
+    </xsd:restriction>
+</xsd:simpleType>
+
+<xsd:complexType name="analyzableQuantitiesType">
+    <xsd:all>
+        <xsd:complexType name="analyzableQuantity" minOccurs="1">
+            <xsd:all>
+                <xsd:element name="parameter"     type="parameterType"      minOccurs="1"/>
+                <xsd:element name="startValue"    type="startValueType"     minOccurs="0" maxOccurs="1"/>
+                <xsd:element name="bayesianPrior" type="bayesianPriorType"  minOccurs="0" maxOccurs="1"/>
+                <xsd:element name="trueValue"     type="xsd:decimal"        minOccurs="0" maxOccurs="1"/>
+            </xsd:all>
+            <xsd:attribute name="restriction" type="RESTRICTION_TYPE"/>
+        </xsd:complexType>
+    </xsd:all>
+</xsd:complexType>
+
+<xsd:complexType name="parameterType">
+    <!-- lots more restrictions occur based on the type of the parameter -->
+    <!--    recombination type                                           -->
+    <!--        no sub-elements                                          -->
+    <!--    trait (disease) type                                         -->
+    <!--        single traitTransition type                              -->
+    <!--    theta and growth types                                       -->
+    <!--        single populationMembership type                         -->
+    <!--        multiple traitMembership type                            -->
+    <!--    migration type                                               -->
+    <!--        single populationTransition type                         -->
+    <xsd:all>
+        <xsd:element name="populationMembership" type="populationMembershipType" minOccurs="0" maxOccurs="1"/>
+        <xsd:element name="populationTransition" type="populationTransitionType" minOccurs="0" maxOccurs="1"/>
+        <xsd:element name="traitMembership"      type="traitMembershipType"      minOccurs="0" />
+        <xsd:element name="traitTransition"      type="traitTransitionType"      minOccurs="0" />
+    </xsd:all>
+    <xsd:attribute name="type" type="PARAMETER_TYPE"/>
+</xsd:complexType>
+
+<xsd:complexType name="populationMembershipType">
+    <xsd:all>
+        <xsd:element name="populationName" type="xsd:string"/>
+    </xsd:all>
+</xsd:complexType>
+
+<xsd:complexType name="populationTransitionType">
+    <xsd:all>
+        <xsd:element name="fromPopulation" type="xsd:string"/>
+        <xsd:element name="toPopulation" type="xsd:string"/>
+    </xsd:all>
+</xsd:complexType>
+
+<xsd:complexType name="traitMembershipType">
+    <xsd:all>
+        <xsd:element name="traitName" type="xsd:string"/>
+        <xsd:element name="traitValue" type="xsd:string"/>
+    </xsd:all>
+</xsd:complexType>
+
+<xsd:complexType name="traitTransitionType">
+    <xsd:all>
+        <xsd:element name="traitName" type="xsd:string"/>
+        <xsd:element name="fromTraitValue" type="xsd:string"/>
+        <xsd:element name="toTraitValue" type="xsd:string"/>
+    </xsd:all>
+</xsd:complexType>
+
+<xsd:simpleType name='PARAMETER_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='(recombination|trait|theta|migration|growth)'/>
+    </xsd:restriction>
+</xsd:simpleType>
+
+<xsd:complexType name="startValueType" type="xsd:decimal">
+    <xsd:attribute name="type" type="START_VALUE_TYPE"/>
+</xsd:complexType>
+
+<xsd:simpleType name='START_VALUE_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='(USER|PROGRAM_DEFAULT|WATTERSON|FST)'/>
+    </xsd:restriction>
+</xsd:simpleType>
+
+<xsd:complexType name="bayesianPriorType">
+    <xsd:all>
+        <xsd:element name="lower" type="xsd:decimal"/>
+        <xsd:element name="upper" type="xsd:decimal"/>
+    </xsd:all>
+    <xsd:attribute name="type" type="PRIOR_TYPE"/>
+</xsd:complexType>
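+<!-- e.g. a conforming prior (hypothetical values, echoing the test infiles):
+         <defaultPrior type="logarithmic">
+             <lower> 1e-05 </lower>
+             <upper> 10 </upper>
+         </defaultPrior>
+-->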
+
+<xsd:simpleType name='PRIOR_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='(linear|logarithmic)'/>
+    </xsd:restriction>
+</xsd:simpleType>
+
+<xsd:simpleType name='RESTRICTION_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='(varies|constant)'/>
+    </xsd:restriction>
+</xsd:simpleType>
+
+<xsd:complexType name="searchStrategyType">
+    <xsd:all>
+        <xsd:element name="seed" type="xsd:nonNegativeInteger"/>
+        <xsd:element name="replicates" type="xsd:nonNegativeInteger"/>
+        <xsd:complexType name="chains">
+            <xsd:element name="initialChainSequence" type="chainSequenceType"/>
+            <xsd:element name="finalChainSequence" type="chainSequenceType"/>
+        </xsd:complexType>
+        <xsd:complexType name="markovChainSteps" type="markovChainStepsType"/>
+    </xsd:all>
+</xsd:complexType>
+
+<xsd:complexType name="chainSequenceType">
+    <xsd:all>
+        <xsd:element name="number" type="xsd:nonNegativeInteger"/>
+        <xsd:element name="samples" type="xsd:positiveInteger"/>
+        <xsd:element name="discard" type="xsd:nonNegativeInteger"/>
+        <xsd:element name="interval" type="xsd:positiveInteger"/>
+    </xsd:all>
+</xsd:complexType>
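+<!-- e.g. (hypothetical values):
+         <initialChainSequence>
+             <number> 10 </number>
+             <samples> 500 </samples>
+             <discard> 1000 </discard>
+             <interval> 20 </interval>
+         </initialChainSequence>
+-->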
+
+<xsd:complexType name="markovChainStepsType">
+    <xsd:all>
+        <xsd:complexType name="rearranger" minOccurs="1">
+            <xsd:attribute name="type" type="REARRANGER_TYPE"/>
+            <xsd:attribute name="relativeFrequency" type="xsd:decimal"/>
+        </xsd:complexType>
+    </xsd:all>
+</xsd:complexType>
+
+<xsd:simpleType name='REARRANGER_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='\s*(resimulating|tree-size|haplotyping|bayesian)\s*'/>
+    </xsd:restriction>
+</xsd:simpleType>
+
+<xsd:complexType name="reportingType">
+    <xsd:all>
+        <xsd:complexType name="results-file" type='xsd:string' minOccurs="0" maxOccurs="1">
+            <xsd:attribute name="level" type="VERBOSITY_TYPE"/>
+        </xsd:complexType>
+        <xsd:complexType name="progress-reports" minOccurs="0" maxOccurs="1">
+            <xsd:attribute name="level" type="VERBOSITY_TYPE"/>
+        </xsd:complexType>
+        <xsd:element name="out-xml-file" type='xsd:string' minOccurs="0" maxOccurs="1"/>
+        <xsd:complexType name="curvefile-prefix" type='xsd:string' minOccurs="0" maxOccurs="1">
+            <xsd:attribute name="generate" type="YES_NO_TYPE"/>
+        </xsd:complexType>
+    </xsd:all>
+</xsd:complexType>
+
+<xsd:complexType name="recoveryType">
+    <xsd:all>
+        <xsd:complexType name="inSummaryFile" type='xsd:string' minOccurs="0" maxOccurs="1">
+            <xsd:attribute name="read" type="YES_NO_TYPE"/>
+        </xsd:complexType>
+        <xsd:complexType name="outSummaryFile" type='xsd:string' minOccurs="0" maxOccurs="1">
+            <xsd:attribute name="write" type="YES_NO_TYPE"/>
+        </xsd:complexType>
+    </xsd:all>
+</xsd:complexType>
+
+<xsd:simpleType name='YES_NO_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='(yes|no)'/>
+    </xsd:restriction>
+</xsd:simpleType>
+
+<xsd:simpleType name='VERBOSITY_TYPE'>
+    <xsd:restriction base='xsd:string'>
+        <xsd:pattern value='\s*(verbose|normal|concise|none)\s*'/>
+    </xsd:restriction>
+</xsd:simpleType>
+
+</xsd:schema>
diff --git a/resources/lamarc_rc.rc b/resources/lamarc_rc.rc
new file mode 100644
index 0000000..7f96e3c
--- /dev/null
+++ b/resources/lamarc_rc.rc
@@ -0,0 +1 @@
+Resource ICON "lamarc.ico"
diff --git a/src/BUGS b/src/BUGS
new file mode 100644
index 0000000..0750b61
--- /dev/null
+++ b/src/BUGS
@@ -0,0 +1,453 @@
+//$Id: BUGS,v 1.84 2005/03/11 20:59:55 mkkuhner Exp $
+
+//*********************************************************************
+Bugs needing resolution
+//*********************************************************************
+
+Deferred--multiple loci
+to Bugzilla 2005/03/10
+2003/04/28  Multiple loci do not work in the ReportPage; they are
+            still hardcoded to locus 0.  (Among other problems--2003/07/15)
+
+Deferred
+2003/07/11  Items commented out or cryptic in release 2.0:  multiple
+            loci, disease, tree summaries, tree reading and writing.
+
+to Bugzilla 2005/03/10
+PartialFix--Jon, program now throws a Data Error when IsReal/IsInteger encounter
+            an empty string to parse.
+2003/12/01  Since stringx.cpp::FindToken() requires a digit after a decimal
+            point when reading a number, numbers of the form "2." cause the
+            program to hang as ProduceDoubleOrBarf() fails due to problems with
+            IsReal not handling an empty input string well...
+
+to Bugzilla 2005/03/10
+Deferred--multiple loci
+2004/02/12  map_position tag missing from XML documentation, and the
+            section where it goes needs to be rewritten for multiple
+            locus compliance.
+
+to Bugzilla 2005/03/10
+2004/03/09  In the converter, if you have 2+ datablocks and assign them
+            all to the same region and population, the code just loops
+            and asks for reassignment--a warning message would be good,
+            at the very least. JAY
+
+to Bugzilla 2005/03/10
+2004/06/11  The signal handling code (in lamarc.cpp and chainmanager.cpp)
+            needs to be checked on other OSes--the signal 'SIGXFSZ' is 
+            supposedly 'non-standard'.  Possibly a check in 'configure'?
+            --LS 
+
+            (This is definitely broken in Windows -- ewalkup 2004/06/14)
+
+2004/06/14  We need to do the following things to make sure our releases
+            are higher quality:
+                * document exact steps to creating a release, including
+                  how to get clean CVS checkout
+                * have a standard test case suite to run through release
+                  candidates
+                * institute a beta testing plan
+            ewalkup
+
+to Bugzilla 2005/03/10
+Deferred
+2004/04/22  For recombination, and possibly migration and disease, it
+            would be good to have correct handling of parameter=0 for
+            confidence limit testing, and we don't.  Mary
+
+to Bugzilla 2005/03/10
+Deferred
+2004/04/28  If DLCalculator::Calculate happens to be called on a tree which
+            doesn't need recalculation, it seg-faults due to failing to 
+            set the DLCell variables.  This doesn't happen in normal
+            execution but is still ugly.  Mary
+
+to Bugzilla 2005/03/10
+2004/08/06  outfile gives you the name of your infile relative to
+            the directory you ran lamarc in, not the directory the
+            outfile is written in. ewalkup
+
+to Bugzilla 2005/03/10
+Deferred
+2004/12/17  WARNING:  The way that PlForces now handles starttime will
+            fail badly if (a) we implement sequential sampling and
+            (b) one genetic region lacks samples from the topmost
+            time point.
+
+//*********************************************************************
+Bugs fixed for release 2.0
+//*********************************************************************
+
+Fixed--Lucian 2005/03/08
+2002/06/26  The XML reader reads a global data model and uses it to
+            set unspecified regional data models, but then abandons
+            it.  This both leaks a little memory and means that the menu
+            cannot know what this global model was.
+
+PartialFix--ewalkup
+2003/05/20  global datamodel menus are crocked when multiple regions
+            have differing datamodels!
+
+PartialFix--Lucian and others (tree reading and writing)
+2003/07/11  Items commented out or cryptic in release 1.2:  multiple
+            loci, 2-allele model, disease, tree summaries, tree reading
+            and writing.
+
+Accepted as-is Mary 2005/03/08
+2004/02/06  Brownian DataModel never normalizes [ShouldNormalize() is
+            hard-set to return false].  Is this really correct behaviour?
+
+--Fixed LS 3/4/2005
+2004/05/21  Neither menu nor XML protects against asking for a Bayesian
+            analysis with no BayesArranger, which will crash.  Mary
+
+
+Eliminated--Not in current code base
+2004/01/15  Windows .NET compile on Wang Yi's machine (at least) complains
+            about protected function pointers in Menu class.  We "fixed"
+            it temporarily by making them public, but does this still
+            happen in 1.2, and if so why?
+
+//*********************************************************************
+Bugs fixed for release 1.2.3
+//*********************************************************************
+
+Declared Resolved--Lucian et al; 9/13/2004 (The 2 diseases problem can
+be solved by using a clever mix of constraints and setting things to zero)
+2002/11/22  The code is riddled with the assumption that each
+            force object is unique, so, for example, there are never
+            2 disease force objects in existence.  One of the main
+            enforcers of this assumption is
+            ForceSummary::GetForceByTag().
+
+//*********************************************************************
+Bugs fixed for release 1.2
+//*********************************************************************
+
+DECLARED UNFIXABLE--Mary 2003/07/11
+2002/01/18  Converter -- phylip file conversion -- if sequences longer than
+            claimed in the header are present, the reader will SILENTLY
+            mangle sequences and names.
+
+Fixed--Jon 2003/07/11
+2002/06/20  Brownian and Stepwise output reports are butt-ugly when no
+            categories are in use.
+
+FIXED--Jon 2004/02/11
+2002/07/19  The program dies with just "abort" showing to the user if
+            the forces specify multiple populations and the data has
+            only 1 population in it.  This may only apply to forces
+            specified in the xml input file.
+            Partially fixed 09/23/02 -- now taps in to the xml-reading
+            message error handling but doesn't give a good error
+            message -- see force/force.cpp:Force::SetMethods
+
+Jon note: Partial fix--due to the xml schema we now get an "unknown" element
+   error  2003/08/03
+2002/06/21  Xml complains about mismatched tags when in fact the problem
+            is tags at the wrong scoping level (<start-value> tags
+            at the same level as a forcename tag).
+
+FIXED--Mary 2003/07/14
+2002/07/22  The program seems to run with duplicate site locations,
+            i.e. same site number assigned to more than one base.
+
+FIXED--Mary 2003/05/30, 2003/07/14
+2002/07/23  Adaptive heating is present in the program but completely
+            undocumented.  (Now it's documented, but I wonder
+            whether changing temperature with every swap is wise.
+            Have sent a query to Peter.)
+
+FIXED--Jon 2003/07/14
+2002/07/23  The xmlreader throws if it encounters an empty force.
+            Though even an invalid xml entry will suffice to avoid
+            the throw.
+
+KLUDGED--Jon 2002/10/21 -- defaults now set for COAL, MIG, DISEASE
+2002/08/12  a multi-population infile with unspecified migration force
+            (but specified theta and recombination) fails to set any
+            migration starting values into ForceSummary::startParameters.
+            This led to an infinite loop in starting tree generation!
+            addendum: 2002/10/21--it looks like if some but not all
+            forces are specified, no default force is created for the
+            forces left unspecified in the xml file.
+
+Fixed--Mary 2002/08/21
+2002/08/20  I strongly suspect that if haplotyping is turned on in the
+            XML and not the menu, and auto-pairing of haplotypes is
+            supposed to happen, it won't happen correctly due to lack
+            of a necessary call to DataPack::EvaluatePairing.
+
+FIXED--Jon 2002/08/27
+2002/08/27  Converter, when passed an interleaved phylip file, spits out
+            datablocks with only the first line of interleaved sequence
+            on them.
+
+FIXED--Jon 2002/09/05
+2002/09/03  Microsats: the loci position vector fails to get to
+            Region::CreateTree(); other parts of Region::loci may
+            also be wrong.
+
+FIXED--Jon 2002/09/04
+2002/09/03  MULTI-TEMPERATURE NOW FAILS; tip nodes' parents are dead
+            nodes in the highest-temperature tree!
+
+FIXED--Mary 2003/07/14
+2002/09/27  the html docs for the xmlinput do not address how to set
+            an F84 model to use base frequencies calculated from the data.
+
+Fixed--Chia-Chi 11/14/03
+2002/10/28  Converter--if multiple sequences are present in Phylip
+            format with the same phylip-truncated name, the converter
+            will silently drop them from the analysis!
+
+FIXED--Jon 2002/10/10
+2002/10/10  the multicategory SNP case crashes in datalikelihood
+            calculation due to uninitialized member "catcells" if
+            there are no markers in the first subtree.
+
+Fixed--Jon--2003/02/21
+2003/02/06  Something expects there will be a profile
+
+FIXED--various--2003/07/11
+2003/02/21  Need to redo how static const strings are defined!  Example
+            in ui_interface/ui_strings.h and .cpp.
+
+FIXED--Mary--or rather it was a hallucination--2003/04/21
+2003/04/21  Handling of newly active sites in rectree.Migrate and
+            rectree.DiseaseMutation suspected of being erroneous.
+
+Fixed--Jon--2003/05/02--added new make target nomenulamarc.
+2003/05/02  some users want a no-menu option for running in background.
+
+Fixed--ewalkup 10/14/03
+2003/05/20  in the menu's datamodel menus, the ncategories, probcats, and
+            catrates are all set independently.  They should check for
+            mutual consistency at some point!  Currently, they
+            lead to a failed validity check if inconsistent.
+
+FIXED--Mary and Jon 2003/07/15
+2003/07/15  Loci need to be constrained to be non-overlapping, probably in
+            Region::IsValid.  
+
+Fixed--Mary 2003/07/30
+2003/07/15  Due to use of 'new' without 'delete' the menu leaks like a
+            sieve.  Use boost::shared_ptr here.  [NB I used auto_ptr
+            instead.]
+
+FIXED--Wang Yi 2004/04/05
+2003/07/15  The model claiming to be K-Allele is really 2-Allele,
+            but changing its name is not very good because the data
+            reading part really is K-Allele.  Wang Yi is working on a
+            genuine K-Allele model.
+
+Checked and found to be correct--Jon 2004/02/12
+2003/07/15  Handling of SNP positions in multi-locus code is probably wrong;
+            we believe that they are assigned site numbers (in xml reading)
+            relative to the offset of their locus but not its map position,
+            leading to locus-specific positions that are then misused as
+            regional positions in data likelihood code. 
+
+FIXED--Jon 2003/07/24
+2003/07/22  Cannot set Adaptive/Static temperature regime via xml!
+
+Apparently not true--Mary 10/15/03
+2003/09/22  multi-cats set in menu are not initialized correctly!
+            see dlmodel.cpp::Initialize()/Finalize()
+
+FIXED--Jon 2004/02/05
+2003/10/09  For DNA data, if number of markers (in XML) is inconsistent with
+            sequence length, error checking doesn't catch this and the
+            program eventually crashes in a cryptic way.  (It doesn't work
+            to just put in a check, because the number of sites
+            is set to FLAGLONG unless there is explicit spacing data.)
+
+Fixed--Eric 11/14/03
+2003/11/3   The "threshold" of maximum distance between microsatellite
+            alleles should be set adaptively, not hardwired.  This is
+            set in the constructor of StepwiseModel.
+
+Fixed--Lucian 2003/11/25  (no manpage; 'converter' now 'lam_conv')
+2003/11/20  'make install' fails to install the nonexistent manpage (oddly
+            enough), and does not copy over the 'converter' program (which
+            probably needs a new name).
+
+Fixed--Mary 2003/12/01
+2003/07/15  The inability of most arrangers to do DeNovoTree is ugly.
+
+FIXED--Mary 2003/12/09
+2003/12/09  Both adaptive and non-adaptive heating success reports have
+            always, it turns out, reported complete nonsense.
+
+Fixed--we just deleted the file entirely.
+2003/12/22  The file "conversion/LamarcParser.cpp" is never actually
+            compiled or incorporated into the converter.  It was created
+            by Peter--did it ever work?  Should we work on including it?
+            -LS
+
+FIXED--mainly Eric 2004/5
+2003/07/15  There is a lot of PDEBUG in the postlike code that needs to
+            be addressed.  (And some plain old DEBUG.)
+
+FIXED--mainly Eric
+2003/07/24  Worrisome comment in maximizer.h suggests growth may not work....?
+
+Irrelevant with removal of xerces
+2003/09/11  cryptic segfault after xerces schema catch output.
+
+Irrelevant with removal of Xerces
+2003/10/19  build from clean on darwin for fonzi at darwin failed to correctly
+            find the xerces library at run time--kludged by putting xerces-lib
+            in /usr/lib, but we need to find out why the library was not found!
+
+FIXED--Eric 2003/5
+2003/11/4   The behavior of the maximizer when a posterior likelihood
+            overflows is bad. 
+
+FIXED--Lucian 2004/5
+2004/02/24  In the menu, if the profile types are changed (from 
+            'percentile' to 'fixed'), and then a new force is added (i.e.
+            growth), the new force will be listed as 'percentile', not
+            'fixed'.  While we might allow this in the future, Peter's 
+            code comments say we cannot do both fixed and percentile
+            profiling at once.  --LS
+
+//*********************************************************************
+Bugs fixed for release 1.1
+//*********************************************************************
+
+FIXED--Mary 2002/02/13
+2002/02/13  Specifying the output file name in the xml causes a "space"
+ to be prepended to the file name (e.g. "outfile" becomes " outfile").
+
+FIXED--ewalkup 2002/05/31
+2002/01/29  A legal xml file with an extra </lamarc> tag after all the
+ data causes the program to crash mysteriously in parsing without any
+ error messages.
+
+FIXED--ewalkup 2002/05/31
+2001/12/17  If the user types a wrong filename in the menu,
+ subsequent messages still display a default filename, but this
+ default is actually not available.
+
+FIXED--Mary 2002/06/14
+2001/12/17  The acceptance rate information for individual arrangers
+ is repeatedly overwritten (at the end of each swap interval) when
+ heating is in effect.
+
+FIXED--ewalkup 2002/05/31
+2002/01/02  xml.cpp uses tests against -1 when it would be appropriate
+ to use tests against string::npos.  This is not necessarily portable
+ though it happens to work on both our compilers at the moment.
+
+FIXED--ewalkup 2002/06/06
+2002/01/21  Menu entry to turn normalization on/off seems to be
+ missing.
+
+FIXED--ewalkup 2002/06/03
+2002/01/21  There is no way to make an xml file that turns recombination
+ off; only on.
+
+FIXED--Mary 2002/02/20
+2002/02/20  Converter loses last base of each sequence.
+
+FIXED--Mary 2002/03/04
+2002/03/01  Converter is wildly incorrect in cases with multiple
+ populations (wrong method in <migration> tag, wrong number of entries in
+ <coalescence> tag).
+
+FIXED--Mary 2002/03/04
+2002/03/01  XML error messages needed for the results of the
+ above error (wrong methods, wrong number of populations, etc.)
+
+FIXED--Mary and Jon and Peter 2002/06/21
+2002/03/19  Default values in converter and program are too low.
+
+DECLARED A FEATURE--Mary and Jon 2002/05/21
+2002/04/12  XML reader is happy with SNP data and no positional info.
+
+FIXED--Mary 2002/05/21 (actually a while ago)
+2002/05/06  XML file shouldn't require a datamodel present.
+
+FIXED--Mary 2002/05/21
+2002/05/09  One-legged coalescence in stepwise model is handled wrong.
+
+FIXED--Mary 2002/05/21
+2002/05/20  In cases with migration and no recombination, serious problems with
+            handling one-legged coalescence at the base of the tree.  This bug
+            WAS PRESENT in release 1.0.
+
+FIXED--ewalkup 2002/06/07
+2002/05/21  Lamarc seg-faults if there is no genetic data in the XML file.
+
+FIXED--Jon 2002/05/31
+2002/05/31  Attempts to print current settings of microsat data models in
+            menu seg-fault.
+
+FIXED--ewalkup 2002/06/04
+2002/05/31  Lamarc menu shows some formatting problems (possible bogus
+            rec-rate entry) when ONLY the coalescence force is active.
+
+FIXED--ewalkup 2002/06/17
+2002/06/14  Immediate program crash if the "xml input file" is not XML.
+
+FIXED--Jon 2002/06/17
+2002/06/14  Converter, reading Migrate file, finds two populations with no
+            name; the second overwrites the first
+
+FIXED--Jon 2002/06/17
+2002/06/14  There appears to be no way in the menu to call FST for Theta,
+            even though the code apparently exists; Watterson is always called
+            instead (but it's invalid on microsat data!)
+
+FIXED--Mary 2002/06/18
+2002/06/18  FST for Theta gives negative values habitually (it would be
+            expected to do so occasionally but not all the time).
+
+FIXED--Mary 2002/06/21
+2002/06/20  Output report only reports on "Watterson" for Theta, even
+            when the method used was actually FST.
+
+FIXED--Mary and Jon 2002/06/24
+2002/06/24  Rate categories cannot be set in menu for Brownian or Stepwise
+            models (though the documentation claims we offer them)
+
+FIXED--ewalkup and Mary 2002/06/24
+2002/05/30  recombination start value does not get set when recombination
+            is not specified in the xml file.
+
+FIXED -- xml schema now gives "unknown element" error
+   Jon note: 2003/08/03
+2002/09/23  bogus tags are now rejected with a "mismatched tags" error 
+            message; there is no "unknown tag" error message.
+            Note from ewalkup: this is best solved using a DTD 
+            for the XML.
+
diff --git a/src/bayeslike/bayesanalyzer_1d.cpp b/src/bayeslike/bayesanalyzer_1d.cpp
new file mode 100644
index 0000000..7546b1c
--- /dev/null
+++ b/src/bayeslike/bayesanalyzer_1d.cpp
@@ -0,0 +1,808 @@
+// $Id: bayesanalyzer_1d.cpp,v 1.39 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2004  Lucian Smith, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// Devised May 2004 by Lucian Smith
+// Uses data from a CollectionManager to do a bayesian analysis.
+
+#include <cassert>
+#include <fstream>
+#include <iostream>
+
+#include "bayesanalyzer_1d.h"
+#include "bayescurve.h"
+#include "bayesparamlike_1d.h"
+#include "collector.h"
+#include "collmanager.h"
+#include "constants.h"
+#include "force.h"
+#include "forcesummary.h"
+#include "mathx.h"
+#include "parameter.h"
+#include "registry.h"
+#include "runreport.h"
+#include "userparam.h"
+#include "vector_constants.h"
+#include "paramstat.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+BayesAnalyzer_1D::BayesAnalyzer_1D()
+    : m_currentChain(0), m_currentReplicate(0), m_currentRegion(0),
+      m_forcesummary(registry.GetForceSummary())
+{
+    const ParamVector paramvec(true);
+    for (unsigned long int i = 0; i < paramvec.size(); i++)
+    {
+        BayesParamLike_1D bp(paramvec[i]);
+        m_blankBayesParamVec.push_back(bp);
+        m_pstats.push_back(paramvec[i].GetStatus());
+    }
+    m_startparams = registry.GetForceSummary().GetStartParameters().GetGlobalParameters();
+}
+
+//------------------------------------------------------------------------------------
+
+BayesAnalyzer_1D::~BayesAnalyzer_1D()
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+void BayesAnalyzer_1D::AnalyzeAndAdd(const ParamSumm paramsumm)
+{
+    if (m_currentChain == 0)
+    {
+        m_paramlikes.clear();
+    }
+
+    //Make new BayesParamLikes by force, stick the data in 'em, then
+    // run the curve smoothing algorithms on a per-chain basis.
+    vector<BayesParamLike_1D> bayesparamls = m_blankBayesParamVec;
+    for (unsigned long int sample = 0; sample < paramsumm.size(); sample++)
+    {
+        DoubleVec1d parameters = paramsumm[sample].first.GetGlobalParameters();
+        long int freq          = paramsumm[sample].second;
+        for (unsigned long int pnum = 0; pnum < parameters.size(); pnum++)
+        {
+            if (IsVariable(pnum))
+            {
+                bayesparamls[pnum].AddPoint(parameters[pnum], freq);
+            }
+        }
+    }
+
+    //All the data has been added; now smooth the curve.
+    for (unsigned long int pnum = 0; pnum<bayesparamls.size(); pnum++)
+    {
+        if (IsVariable(pnum))
+        {
+            bayesparamls[pnum].SmoothCurve();
+        }
+    }
+
+    //And now stick it into the appropriate member variables.
+    m_paramlikes.push_back(bayesparamls);
+    m_currentChain++;
+
+}
+
+//------------------------------------------------------------------------------------
+
+//You might call the Replace routine if you do a run with more than one
+// chain and wanted to analyze the first chain, but didn't want to keep
+// the results.
+
+void BayesAnalyzer_1D::ReplaceLastChainAndAnalyze(const ParamSumm paramsumm)
+{
+    if (m_currentChain > 0)
+    {
+        m_currentChain--;
+        m_paramlikes.pop_back();
+    }
+    AnalyzeAndAdd(paramsumm);
+}
+
+//------------------------------------------------------------------------------------
+
+void BayesAnalyzer_1D::EndChainsAndAnalyze()
+{
+    assert (m_paramlikes.size() > 0);
+
+    vector < vector < BayesCurve > > curvesbyparam;
+    //Dimensions are [parameter][chain], *not* the other way around.
+    // We want it this way so we can sum each parameter over its chains.
+
+    //Set up the curvesbyparam vector.
+    for (unsigned long int param = 0; param < m_blankBayesParamVec.size(); param++)
+    {
+        vector < BayesCurve> onecurve;
+        onecurve.push_back(m_paramlikes[0][param].GetSmoothedCurve());
+        curvesbyparam.push_back(onecurve);
+    }
+
+    //And now add the other chains to the vectors.
+    //LS NOTE:  our current analysis discards previous chains, so this never
+    // gets used.
+    for (long int chain = 1; chain < m_currentChain; chain++)
+    {
+        for (unsigned long int param = 0; param < m_blankBayesParamVec.size(); param++)
+            curvesbyparam[param].push_back(m_paramlikes[chain][param].GetSmoothedCurve());
+    }
+
+    vector <BayesCurve> summedcurves;
+    for (unsigned long int param = 0; param < m_blankBayesParamVec.size(); param++)
+    {
+        BayesCurve summedcurve(curvesbyparam[param], ADD);
+        summedcurves.push_back(summedcurve);
+    }
+
+    if (m_currentReplicate == 0)
+    {
+        vector < vector <BayesCurve> > sumcurvevec;
+        sumcurvevec.push_back(summedcurves);
+        m_replicatecurves.push_back(sumcurvevec);
+    }
+    else
+        m_replicatecurves[m_currentRegion].push_back(summedcurves);
+
+    //m_paramlikes.clear();
+    m_currentReplicate++;
+    m_currentChain = 0;
+}
+
+//------------------------------------------------------------------------------------
+
+void BayesAnalyzer_1D::EndReplicatesAndAnalyze()
+{
+    vector < vector < BayesCurve > > curvesbyparam;
+    //Dimensions are [parameter][replicate], *not* the other way around.
+    // We want it this way so we can sum each parameter over its replicates.
+
+    //Set up the curvesbyparam vector.
+    for (unsigned long int param = 0; param < m_blankBayesParamVec.size(); param++)
+    {
+        vector < BayesCurve> onecurve;
+        onecurve.push_back(m_replicatecurves[m_currentRegion][0][param]);
+        curvesbyparam.push_back(onecurve);
+    }
+
+    //And now add the other replicates to the vectors.
+    for (long int replicate = 1; replicate < m_currentReplicate; replicate++)
+    {
+        for (unsigned long int param = 0; param < m_blankBayesParamVec.size(); param++)
+            curvesbyparam[param].push_back(m_replicatecurves[m_currentRegion][replicate][param]);
+    }
+
+    vector <BayesCurve> summedcurves;
+    for (unsigned long int param = 0; param < m_blankBayesParamVec.size(); param++)
+    {
+        BayesCurve summedcurve(curvesbyparam[param], ADD);
+        summedcurves.push_back(summedcurve);
+    }
+
+    m_regioncurves.push_back(summedcurves);
+
+    m_currentRegion++;
+    m_currentReplicate = 0;
+    m_currentChain = 0;
+}
+
+//------------------------------------------------------------------------------------
+
+void BayesAnalyzer_1D::EndRegionsAndAnalyze()
+{
+    //Basically the same as EndReplicates, except we multiply the curves instead
+    // of adding them.
+
+    assert (m_regioncurves.size() > 0);
+    vector < vector < BayesCurve > > curvesbyparam;
+    //Dimensions are [parameter][region], *not* the other way around.
+    // We want it this way so we can combine each parameter over its regions.
+
+    //Set up the curvesbyparam vector.
+    for (unsigned long int param = 0; param < m_blankBayesParamVec.size(); param++)
+    {
+        vector < BayesCurve> onecurve;
+        onecurve.push_back(m_regioncurves[0][param]);
+        curvesbyparam.push_back(onecurve);
+    }
+
+    //And now add the other regions to the vectors.
+    for (long int region = 1; region < m_currentRegion; region++)
+    {
+        for (unsigned long int param = 0; param < m_blankBayesParamVec.size(); param++)
+            curvesbyparam[param].push_back(m_regioncurves[region][param]);
+    }
+
+    for (unsigned long int param = 0; param < m_blankBayesParamVec.size(); param++)
+    {
+        BayesCurve summedcurve(curvesbyparam[param], MULTIPLY);
+        m_allregioncurves.push_back(summedcurve);
+    }
+}
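+
+// Note on the ADD/MULTIPLY distinction used by the End*AndAnalyze methods:
+// chains and replicates are combined by averaging their curves (ADD), while
+// genomic regions are combined by multiplying them (MULTIPLY) -- presumably
+// because the regions are treated as independent data sets whose densities
+// multiply.  That rationale is an inference, not something stated in the code.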
+
+//------------------------------------------------------------------------------------
+
+//The chain curves are stored in the vector of BayesParamLikes, while
+// the region and overall curves are stored as member variables.
+
+DoubleVec1d BayesAnalyzer_1D::GetMaxVecForLastChain()
+{
+    DoubleVec1d results = m_startparams;
+    unsigned long int chain = m_paramlikes.size()-1;
+
+    for (unsigned long int parameter = 0; parameter < m_startparams.size(); parameter++)
+    {
+        if (IsVariable(parameter))
+        {
+            bool islog(false);
+            double newval = GetMaxForChain(chain, parameter);
+            registry.GetForceSummary().SetParamWithConstraints(parameter, newval, results, islog);
+        }
+    }
+    return results;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetAvgMaxLikeForLastChain()
+{
+    double result = 0.0;
+    long int pcount = 0;
+    unsigned long int chain = m_paramlikes.size()-1;
+
+    for (unsigned long int parameter = 0; parameter<m_startparams.size(); parameter++)
+    {
+        if (IsVariable(parameter))
+        {
+            result += GetLikeAtMaxForChain(chain, parameter);
+            pcount++;
+        }
+    }
+
+    if (pcount>0)
+        return result/pcount;
+    else return 1.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetAvgMaxLikeForRegion(long int region)
+{
+    double result = 0.0;
+    long int pcount = 0;
+
+    for (unsigned long int parameter = 0; parameter < m_startparams.size(); parameter++)
+    {
+        if (IsVariable(parameter))
+        {
+            result += GetLikeAtMaxForRegion(region, parameter);
+            pcount++;
+        }
+    }
+
+    if (pcount>0)
+        return result/pcount;
+    else return 1.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetAvgMaxLikeForAllRegions()
+{
+    double result = 0.0;
+    long int pcount = 0;
+
+    for (unsigned long int parameter = 0; parameter < m_startparams.size(); parameter++)
+    {
+        if (IsVariable(parameter))
+        {
+            result += GetLikeAtMaxForAllRegions(parameter);
+            pcount++;
+        }
+    }
+
+    if (pcount>0)
+        return result/pcount;
+    else return 1.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetMaxForChain(long int chain, long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_paramlikes[chain][parameter].GetSmoothedCurve().GetMax();
+    else return m_startparams[parameter];
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetLikeAtMaxForChain(long int chain, long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_paramlikes[chain][parameter].GetSmoothedCurve().GetLikeAtMax();
+    else return 1.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetMaxForRegion(long int region, long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_regioncurves[region][parameter].GetMax();
+    else return m_startparams[parameter];
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetLikeAtMaxForRegion(long int region, long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_regioncurves[region][parameter].GetLikeAtMax();
+    else return 1.0;
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d BayesAnalyzer_1D::GetMaxVecForRegion(long int region)
+{
+    DoubleVec1d results = m_startparams;
+    bool islog = false;
+
+    for (unsigned long int parameter = 0; parameter < m_startparams.size(); parameter++)
+    {
+        if (IsVariable(parameter))
+        {
+            double newval = GetMaxForRegion(region, parameter);
+            registry.GetForceSummary().SetParamWithConstraints(parameter, newval, results, islog);
+        }
+    }
+
+    return results;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetPercentileAtValForRegion(double val, long int region, long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_regioncurves[region][parameter].GetPercentileAtVal(val);
+    else return 0.5;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetValAtPercentileForRegion(double val, long int region, long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_regioncurves[region][parameter].GetValAtPercentile(val);
+    else return m_startparams[parameter];
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetLikeAtValForRegion(double val, long int region, long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_regioncurves[region][parameter].GetLikeAtVal(val);
+    else return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetLikeAtValForReplicate(double val, long int region, long int replicate, long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_replicatecurves[region][replicate][parameter].GetLikeAtVal(val);
+    else return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetMaxForAllRegions(long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_allregioncurves[parameter].GetMax();
+    else return m_startparams[parameter];
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetLikeAtMaxForAllRegions(long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_allregioncurves[parameter].GetLikeAtMax();
+    else return 1.0;
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d BayesAnalyzer_1D::GetMaxVecForAllRegions()
+{
+    DoubleVec1d results = m_startparams;
+    bool islog(false);
+
+    for (unsigned long int parameter = 0; parameter < m_startparams.size(); parameter++)
+    {
+        if (IsVariable(parameter))
+        {
+            double newval = GetMaxForAllRegions(parameter);
+            registry.GetForceSummary().SetParamWithConstraints(parameter, newval, results, islog);
+        }
+    }
+
+    return results;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetValAtPercentileForAllRegions(double percentile, long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_allregioncurves[parameter].GetValAtPercentile(percentile);
+    else return m_startparams[parameter];
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetPercentileAtValForAllRegions(double val, long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_allregioncurves[parameter].GetPercentileAtVal(val);
+    else return 0.5;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetLikeAtValForAllRegions(double val, long int parameter)
+{
+    if (IsVariable(parameter))
+        return m_allregioncurves[parameter].GetLikeAtVal(val);
+    else return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+
+bool BayesAnalyzer_1D::IsVariable(unsigned long int parameter)
+{
+    return m_pstats[parameter].Inferred();
+}
+
+//------------------------------------------------------------------------------------
+
+void BayesAnalyzer_1D::CalcProfiles(long int region)
+{
+    if (!((region == FLAGLONG) && (m_regioncurves.size() == 1)))
+        registry.GetRunReport().RecordProfileStart();
+
+    ParamVector toDolist(false);
+    ParamVector::iterator param;
+    long int paramnum;
+
+    for (param = toDolist.begin(), paramnum = 0; param != toDolist.end(); ++param, ++paramnum)
+    {
+        ProfileStruct emptyprofile ;
+        if (!m_pstats[paramnum].Valid()) continue;
+        if (!m_pstats[paramnum].Inferred())
+        {
+            if (region==FLAGLONG)
+                param->AddProfile(emptyprofile, ltype_region);
+            else
+                param->AddProfile(emptyprofile, ltype_replicate);
+        }
+        else
+            CalcProfile (param, paramnum, region);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void BayesAnalyzer_1D::CalcProfile(ParamVector::iterator param, long int nparam, long int nregion)
+{
+    likelihoodtype ltype;
+    double MLE, MLElike, MLEperc;
+
+    if (nregion == FLAGLONG)
+    {
+        ltype = ltype_region; //'region' is an enum.
+        MLE = GetMaxForAllRegions(nparam);
+        MLElike = GetLikeAtMaxForAllRegions(nparam);
+        MLEperc = GetPercentileAtValForAllRegions(MLE, nparam);
+    }
+    else
+    {
+        ltype = ltype_replicate; //Could be anything as long as it's not 'region'.
+        MLE = GetMaxForRegion(nregion, nparam);
+        MLElike = GetLikeAtMaxForRegion(nregion, nparam);
+        MLEperc = GetPercentileAtValForRegion(MLE, nregion, nparam);
+    }
+    proftype ptype = param->GetProfileType();
+    if (ptype == profile_NONE)
+    {
+        assert(false);    // Profiling should always be on for bayesian parameters.
+        ProfileStruct emptyprofile ;
+        param->AddProfile(emptyprofile, ltype);
+        return;
+    }
+
+    DoubleVec1d modifiers = m_forcesummary.GetModifiers(nparam);
+
+    ProfileStruct theprofile;
+
+    for (unsigned long int i = 0u; i < modifiers.size(); ++i)
+    {
+        ProfileLineStruct profileline;
+        double perc, val, likelihood;
+
+        if (ptype == profile_FIX)
+        {
+            if(param->IsForce(force_GROW) &&
+               (registry.GetUserParameters().GetVerbosity() != CONCISE) &&
+               (registry.GetUserParameters().GetVerbosity() != NONE   ))
+            {
+                if (i < vecconst::growthmultipliers.size())
+                    val = modifiers[i] * MLE;
+                else
+                    val = modifiers[i];
+                // The second part of this vector is filled with values which we want
+                // to *set* growth to, not to multiply growth by. --LS
+            }
+            else if(param->IsForce(force_LOGISTICSELECTION) &&
+                    (registry.GetUserParameters().GetVerbosity() != CONCISE) &&
+                    (registry.GetUserParameters().GetVerbosity() != NONE   ))
+            {
+                if (i < vecconst::logisticselectionmultipliers.size())
+                    val = modifiers[i] * MLE;
+                else
+                    val = modifiers[i];
+                // The second part of this vector is filled with values which we want
+                // to *set* the logistic selection coeff. to, not to multiply it by. --LS
+            }
+            else
+                val = modifiers[i] * MLE;
+            if (ltype == ltype_region)
+            {
+                perc = GetPercentileAtValForAllRegions(val, nparam);
+                likelihood = GetLikeAtValForAllRegions(val, nparam);
+            }
+            else
+            {
+                perc = GetPercentileAtValForRegion(val, nregion, nparam);
+                likelihood = GetLikeAtValForRegion(val, nregion, nparam);
+            }
+        }
+        else                            //ptype == percentile;
+        {
+            perc = modifiers[i];
+            if (ltype == ltype_region)
+            {
+                val = GetValAtPercentileForAllRegions(perc, nparam);
+                likelihood = GetLikeAtValForAllRegions(val, nparam);
+            }
+            else
+            {
+                val = GetValAtPercentileForRegion(perc, nregion, nparam);
+                likelihood = GetLikeAtValForRegion(val, nregion, nparam);
+            }
+        }
+        profileline.percentile = perc;
+        profileline.profilevalue = val;
+        profileline.loglikelihood = likelihood; //Not really a *log*, but anyway.
+        theprofile.profilelines.push_back(profileline);
+        //LS TEST
+        // cout << perc << ", " << val << ", " << likelihood << endl;
+    }
+
+    ProfileLineStruct profileline;
+
+    profileline.percentile = MLEperc;
+    profileline.profilevalue = MLE;
+    profileline.loglikelihood = MLElike;
+    theprofile.profilelines.push_back(profileline);
+    //LS TEST
+    // cout << MLEperc << ", " << MLE << ", " << MLElike << endl;
+
+    param->AddProfile(theprofile, ltype);
+} // BayesAnalyzer_1D::CalcProfile
+
+//------------------------------------------------------------------------------------
+
+vector <long int> BayesAnalyzer_1D::GetNumUniquePointsVec()
+{
+    vector <long int> results;
+    unsigned long int chain = m_paramlikes.size()-1;
+    for (unsigned long int pnum = 0; pnum < m_paramlikes[chain].size(); pnum++)
+        results.push_back(m_paramlikes[chain][pnum].GetNumUniquePoints());
+    return results;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetMinParamValFromCurve(long int region, long int pnum)
+{
+    if (region == FLAGLONG)
+    {
+        const DoublePairVec & curve = m_allregioncurves[pnum].GetXYvec();
+        return curve[0].first;
+    }
+    const DoublePairVec & curve = m_regioncurves[region][pnum].GetXYvec();
+    return curve[0].first;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetMaxParamValFromCurve(long int region, long int pnum)
+{
+    if (region == FLAGLONG)
+    {
+        const DoublePairVec & curve = m_allregioncurves[pnum].GetXYvec();
+        return curve[curve.size()-1].first;
+    }
+    const DoublePairVec & curve = m_regioncurves[region][pnum].GetXYvec();
+    return curve[curve.size()-1].first;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetBinWidthFromCurve(long int region, long int pnum)
+{
+    if (region == FLAGLONG)
+    {
+        return m_allregioncurves[pnum].GetBinWidth();
+    }
+    return m_regioncurves[region][pnum].GetBinWidth();
+}
+
+//------------------------------------------------------------------------------------
+
+bool BayesAnalyzer_1D::GetIsLog(long int pnum)
+{
+    bool isLog = m_allregioncurves[pnum].IsLog();
+    long int numregions = m_regioncurves.size();
+    for(long int regNo = 0; regNo < numregions; regNo++)
+    {
+        assert(isLog == m_regioncurves[regNo][pnum].IsLog());
+    }
+    return isLog;
+}
+
+//------------------------------------------------------------------------------------
+
+long int BayesAnalyzer_1D::GetNumUniquePoints(long int pnum)
+{
+    return m_paramlikes[m_paramlikes.size()-1][pnum].GetNumUniquePoints();
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesAnalyzer_1D::GetKernelWidth(long int pnum)
+{
+    return m_paramlikes[m_paramlikes.size()-1][pnum].GetKernelWidth();
+}
+
+//------------------------------------------------------------------------------------
+
+void BayesAnalyzer_1D::WriteCurvesForRegion(long int region)
+{
+    ofstream curvefile;
+    curvefile.precision(10);
+    string regname;
+    if (region==FLAGLONG)
+        regname = "overall_";
+    else
+        regname = "reg" + indexToKey(region) + "_";
+
+    const ParamVector paramvec(true);
+    unsigned long int chain = m_paramlikes.size()-1;
+    for (long int pnum = 0; pnum<static_cast<long int>(paramvec.size()); pnum++)
+    {
+        if (!IsVariable(pnum)) continue;
+        DoublePairVec curve;
+        bool islog;
+        if (region==FLAGLONG)
+        {
+            curve = m_allregioncurves[pnum].GetXYvec();
+            islog = m_allregioncurves[pnum].IsLog();
+        }
+        else
+        {
+            curve = m_regioncurves[region][pnum].GetXYvec();
+            islog = m_regioncurves[region][pnum].IsLog();
+        }
+        string pname = paramvec[pnum].GetShortName();
+        string::size_type i = pname.find("/");
+        while (i != string::npos)
+        {
+            pname.replace(i,1,"+");
+            i = pname.find("/");
+        }
+        UserParameters& userparams = registry.GetUserParameters();
+        string prefix = userparams.GetCurveFilePrefix();
+        string fname = prefix + "_" + regname + pname + ".txt";
+        curvefile.open(fname.c_str(), ios::out );
+        userparams.AddCurveFileName(fname);
+        //General info
+        curvefile << "Bayesian likelihood curve for \""
+                  << paramvec[pnum].GetName() << "\" ";
+        if (region==FLAGLONG)
+            curvefile << "as combined over all genomic regions.";
+        else
+            curvefile << "for genomic region " << (region + 1) << ".";
+        curvefile << endl;
+
+        if ((region != m_currentRegion-1) && (region != FLAGLONG))
+        {
+            registry.GetRunReport().ReportDebug("WriteCurvesForRegion can report more information when called on only"
+                                                " the most recent region or for the overall region estimates.");
+        }
+        else if (region == FLAGLONG)
+        {
+            long int numregions = m_regioncurves.size();
+            curvefile << "Created by averaging " << numregions
+                      << " different regional curves." << endl;
+        }
+        else
+        {
+            long int numreplicates = m_replicatecurves[m_currentRegion-1].size();
+            if (numreplicates > 1)
+            {
+                curvefile << "Created by averaging " << numreplicates
+                          << " different replicate curves." << endl;
+            }
+            else
+            {
+                long int numpoints = m_paramlikes[chain][pnum].GetNumUniquePoints();
+                double kernel = m_paramlikes[chain][pnum].GetKernelWidth();
+                curvefile << "Created from " << numpoints
+                          << " unique data points and a kernel width of "
+                          << kernel << "." << endl;
+            }
+        }
+        double lowerbound = paramvec[pnum].GetPrior().GetLowerBound();
+        double upperbound = paramvec[pnum].GetPrior().GetUpperBound();
+        curvefile << "The prior for this parameter was ";
+        if (islog)
+        {
+            curvefile << "logarithmic";
+        }
+        else
+        {
+            curvefile << "flat";
+        }
+        curvefile << ", and ranged from "
+                  << ToString(lowerbound) << " to "
+                  << ToString(upperbound) << ".";
+        if (islog)
+        {
+            curvefile << "  (Or, in log space, "
+                      << ToString(SafeLog(lowerbound)) << " to "
+                      << ToString(SafeLog(upperbound)) << ".)";
+        }
+        curvefile << endl;
+
+        //Column headers
+        if (islog)
+            curvefile << "Ln(" << pname << ")";
+        else
+            curvefile << pname;
+        curvefile << "\tLikelihood" << endl;
+        //Raw data
+        for (unsigned long int line = 0; line<curve.size(); line++)
+            curvefile << curve[line].first << "\t" << curve[line].second << endl;
+        curvefile << endl << endl;
+        curvefile.close();
+    }
+} // WriteCurvesForRegion
+
+//____________________________________________________________________________________
diff --git a/src/bayeslike/bayesanalyzer_1d.h b/src/bayeslike/bayesanalyzer_1d.h
new file mode 100644
index 0000000..1d2473d
--- /dev/null
+++ b/src/bayeslike/bayesanalyzer_1d.h
@@ -0,0 +1,138 @@
+// $Id: bayesanalyzer_1d.h,v 1.19 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2004  Lucian Smith, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*
+  The Bayesian analyzer holds vectors of 'BayesParamlike's, which are front
+  ends, of a sort, for BayesCurves.  As the collection manager sends the
+  BayesAnalyzer lists of parameters, it divvies up the information, and sends
+  it to the appropriate BayesParamLike.  The ParamLikes then take the
+  accumulated data points and curve-smooth them so that we know both the
+  MLEs and the confidence intervals for all parameters.
+
+  This can be accomplished by analyzing the parameters 1-, 2-, or
+  n-dimensionally.  The simplest is 1-dimensionally, where we look at each
+  parameter without regard to how it might affect other parameters.  At the
+  next level, we might consider pairs of parameters, and smooth the
+  resulting 2-dimensional surface.  Finally, we might attempt to go for the
+  gusto and consider all parameters at once, smoothing an n-dimensional
+  surface.  Such a surface would be nigh-impossible to visualize, and trying
+  to find the maximum on such a surface would probably be as difficult as
+  normal maximization.  Perhaps the maximizer object could do such a
+  maximization, given a different PLforces object, or some such.  Anyway.
+
+  As a first pass at attempting to do this, we'll be analyzing the parameters
+  individually with Bayes_Analyzer_1D.  In the future, we might try the
+  other two options (_2D and _ND, respectively).
+
+  It might be interesting to look at this in terms of Singular Value
+  Decomposition, or SVD.  This is what folks at Rice did with protein
+  dynamics.
+*/
+
+#ifndef BAYES_ANALYZER_H
+#define BAYES_ANALYZER_H
+
+#include "vectorx.h"
+#include "collector.h"
+#include "bayesparamlike_1d.h"
+#include "bayescurve.h"
+#include "forcesummary.h"
+#include "parameter.h"
+#include "paramstat.h"
+
+//------------------------------------------------------------------------------------
+
+class BayesAnalyzer_1D
+{
+  public:
+    //   ---Generic BayesAnalyzer functions---
+    BayesAnalyzer_1D();
+    ~BayesAnalyzer_1D();
+
+    void AnalyzeAndAdd(const ParamSumm paramsumm);
+    void ReplaceLastChainAndAnalyze(const ParamSumm paramsumm);
+    //You might call the Replace routine if you do a run with more than one
+    // chain and wanted to analyze the first chain without keeping its results.
+    void EndChainsAndAnalyze();
+    void EndReplicatesAndAnalyze();
+    void EndRegionsAndAnalyze();
+
+    //   ---Functions specific to a 1D analysis---
+    DoubleVec1d GetMaxVecForLastChain();
+    double      GetAvgMaxLikeForLastChain();
+    double      GetAvgMaxLikeForRegion(long int region);
+    double      GetAvgMaxLikeForAllRegions();
+
+    double      GetMaxForChain(long int chain, long int parameter);
+    double      GetLikeAtMaxForChain(long int chain, long int parameter);
+
+    double      GetMaxForRegion(long int region, long int parameter);
+    DoubleVec1d GetMaxVecForRegion(long int region);
+
+    double      GetLikeAtMaxForRegion(long int region, long int parameter);
+    double      GetValAtPercentileForRegion(double percentile, long int region, long int parameter);
+    double      GetPercentileAtValForRegion(double val, long int region, long int parameter);
+    double      GetLikeAtValForRegion(double val, long int region, long int parameter);
+    double      GetLikeAtValForReplicate(double val, long int region, long int replicate, long int parameter);
+
+    double      GetMaxForAllRegions(long int parameter);
+    DoubleVec1d GetMaxVecForAllRegions();
+    double      GetLikeAtMaxForAllRegions(long int parameter);
+    double      GetValAtPercentileForAllRegions(double percentile, long int parameter);
+    double      GetPercentileAtValForAllRegions(double val, long int parameter);
+    double      GetLikeAtValForAllRegions(double val, long int parameter);
+
+    double      GetMinParamValFromCurve(long int region, long int parameter);
+    double      GetMaxParamValFromCurve(long int region, long int parameter);
+    double      GetBinWidthFromCurve   (long int region, long int parameter);
+
+    bool        GetIsLog(long int parameter);
+
+    long int    GetNumUniquePoints(long int parameter);
+    double      GetKernelWidth(long int parameter);
+
+    void CalcProfiles(long int region);
+    std::vector <long int> GetNumUniquePointsVec();
+    void WriteCurvesForRegion (long int region);
+
+  private:
+    long int m_currentChain;
+    long int m_currentReplicate;
+    long int m_currentRegion;
+
+    const ForceSummary &m_forcesummary;
+
+    vector <ParamStatus> m_pstats;
+    DoubleVec1d m_startparams;
+
+    //The chain curves are stored in the vector of BayesParamLikes, while
+    // the region and overall curves are stored as BayesCurve member variables.
+    vector < vector <BayesParamLike_1D> >  m_paramlikes;
+    //Dimensions are m_paramlikes[chain][parameter]
+    vector < vector < vector < BayesCurve> > > m_replicatecurves;
+    //Dimensions are [region][replicate][parameter]
+    vector < vector <BayesCurve> >  m_regioncurves;
+    //Dimensions are m_regioncurves[region][parameter]
+    vector <BayesCurve>  m_allregioncurves;
+    //Dimensions are m_allregioncurves[parameter]
+
+    vector <BayesParamLike_1D> m_blankBayesParamVec;
+    //This is created at object creation, initialized with the appropriate
+    // forces in the correct orders, since BayesParamLike_1D's can only be
+    // created knowing what force they're for.
+
+    bool IsVariable(unsigned long int parameter);
+    void CalcProfile(ParamVector::iterator param, long int nparam, long int nregion);
+
+}; // class BayesAnalyzer_1D
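+
+// A sketch of the expected call sequence, inferred from the implementation in
+// bayesanalyzer_1d.cpp (the real driver code lives elsewhere and is not shown;
+// 'paramsumm' stands in for whatever ParamSumm the collection manager supplies):
+//
+//   BayesAnalyzer_1D analyzer;                // reads its parameter list from the registry
+//   analyzer.AnalyzeAndAdd(paramsumm);        // once per chain
+//   analyzer.EndChainsAndAnalyze();           // when a replicate's chains are done
+//   analyzer.EndReplicatesAndAnalyze();       // when a region's replicates are done
+//   analyzer.EndRegionsAndAnalyze();          // once, after the final region
+//   DoubleVec1d mles = analyzer.GetMaxVecForAllRegions();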
+
+#endif // BAYES_ANALYZER_H
+
+//____________________________________________________________________________________
diff --git a/src/bayeslike/bayescurve.cpp b/src/bayeslike/bayescurve.cpp
new file mode 100644
index 0000000..676dc1c
--- /dev/null
+++ b/src/bayeslike/bayescurve.cpp
@@ -0,0 +1,514 @@
+// $Id: bayescurve.cpp,v 1.22 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2004  Lucian Smith, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*
+  A bayes curve is a probability density function, i.e. a curve for which
+  the area under the curve is 1.0.
+
+  It's stored as a vector of doubles, with an offset and a bin width, such
+  that if vector[X]=y, the point in question is [(offset + width*X), y].
+
+  Important functions include the creation of a new bayes curve from a vector
+  of other bayes curves, and integration of the curve.
+*/
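+
+// A small worked example of the indexing convention described above
+// (illustrative numbers only): with m_offset = 2.0 and m_binwidth = 0.5,
+// m_probabilities[3] holds the density at x = 2.0 + 0.5*3 = 3.5, and the
+// total area is approximated as m_binwidth * sum(m_probabilities), which
+// should come to 1.0 for a finalized curve.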
+
+#include <cassert>
+#include <cmath>
+#include <numeric>                      // for std::accumulate
+
+#include "bayescurve.h"
+#include "errhandling.h"
+#include "runreport.h"
+#include "registry.h"
+#include "mathx.h"                      // for PI
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+BayesCurve::BayesCurve()
+    :m_offset(0), m_binwidth(0), m_curr_numpoints(0), m_target_numpoints(0),
+     m_probabilities(), m_integration(), m_islog(false)
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+BayesCurve::BayesCurve(const vector<BayesCurve> srcvec, combine_type ctype)
+    : m_probabilities(), m_integration()
+{
+    if (srcvec.size() == 0)
+    {
+        throw implementation_error("Cannot create a new probability density curve, since there are no original curves to add together!");
+    }
+
+    m_offset = srcvec[0].m_offset;
+    m_binwidth = srcvec[0].m_binwidth;
+    m_curr_numpoints = srcvec[0].m_curr_numpoints;
+    m_target_numpoints = srcvec[0].m_target_numpoints;
+    m_islog = srcvec[0].m_islog;
+
+    if (m_binwidth == 0 && m_offset == 0 && m_curr_numpoints == 0 && m_target_numpoints == 0)
+    {
+        //The curve is uninitialized.  This often happens since we need placeholder
+        // objects for the invalid parameters--just return.
+        return;
+    }
+
+    if (m_binwidth == 0)
+    {
+        throw implementation_error("Tried to create a BayesCurve object from a vector of BayesCurves, but one had a binwidth of zero.");
+    }
+
+    if (srcvec.size() == 1)
+    {
+        m_probabilities = srcvec[0].m_probabilities;
+        m_integration = srcvec[0].m_integration;
+        return;
+    }
+
+    double lastpoint = m_offset + (m_binwidth * srcvec[0].m_probabilities.size());
+    double maxlike = 0;
+    for (unsigned long int curve = 1; curve < srcvec.size(); curve++)
+    {
+        if (srcvec[curve].m_offset < m_offset)
+        {
+            m_offset = srcvec[curve].m_offset;
+        }
+        if (srcvec[curve].m_binwidth != m_binwidth)
+        {
+            throw implementation_error("Cannot add probability density curves with different spacings.");
+        }
+        m_target_numpoints += srcvec[curve].m_target_numpoints;
+        m_curr_numpoints += srcvec[curve].m_curr_numpoints;
+        double thislastpoint = (m_binwidth * srcvec[curve].m_probabilities.size())
+            + srcvec[curve].m_offset;
+        if (thislastpoint > lastpoint)
+        {
+            lastpoint = thislastpoint;
+        }
+        maxlike = max(maxlike, srcvec[curve].GetLikeAtMax());
+    }
+
+    assert (m_curr_numpoints == m_target_numpoints);
+    // This will be true if all original curves were finalized.
+    assert (m_binwidth != 0);
+
+    double effectivezero = min(maxlike/100000, BiweightKernel(.99)/m_target_numpoints);
+
+    unsigned long int veclength = static_cast<unsigned long int>((lastpoint - m_offset)/m_binwidth);
+    switch(ctype)
+    {
+        case ADD:
+            m_probabilities.assign(veclength+1,0.0);
+            break;
+        case MULTIPLY:
+            m_probabilities.assign(veclength+1,1.0);
+            break;
+    }
+
+    //Now, go through the various vectors and add or multiply, cf 'ctype'.
+    for (unsigned long int curve = 0; curve<srcvec.size(); curve++)
+    {
+        double offsetdiff = srcvec[curve].m_offset - m_offset;
+        unsigned long int thisindex = static_cast<unsigned long int>(offsetdiff/m_binwidth);
+
+        if (ctype == MULTIPLY)
+        {
+            //We need to zero out the beginning and end of the vector.
+            for (unsigned long int ind = 0; ind<thisindex; ind++)
+            {
+                m_probabilities[ind] *= effectivezero;
+            }
+            for (unsigned long int ind = thisindex + srcvec[curve].m_probabilities.size() - 1;
+                 ind < m_probabilities.size(); ind++)
+            {
+                m_probabilities[ind] *= effectivezero;
+            }
+        }
+
+        for (unsigned long int srcindex = 0;
+             srcindex<srcvec[curve].m_probabilities.size();
+             thisindex++, srcindex++)
+        {
+            switch (ctype)
+            {
+                case ADD:
+                    m_probabilities[thisindex] +=
+                        srcvec[curve].m_probabilities[srcindex] / srcvec.size();
+                    //Note that this treats all curves as having equal weight.  If we
+                    // wanted to change this, we'd divide by
+                    // (m_target_numpoints / srcvec[curve].m_target_numpoints)
+                    // instead of srcvec.size().
+                    break;
+                case MULTIPLY:
+                    m_probabilities[thisindex] *=
+                        max(srcvec[curve].m_probabilities[srcindex], effectivezero);
+                    break;
+            }
+        }
+    }
+
+    if (ctype == MULTIPLY)
+    {
+        double scalingFactor = accumulate(m_probabilities.begin(), m_probabilities.end(), 0.0);
+        scalingFactor *= m_binwidth;
+        transform(m_probabilities.begin(),
+                  m_probabilities.end(),
+                  m_probabilities.begin(),
+                  bind2nd(divides<double>(), scalingFactor));
+    }
+
+    Integrate();
+}
+
+//------------------------------------------------------------------------------------
+
+BayesCurve::~BayesCurve()
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+void BayesCurve::Initialize(unsigned long int numpoints, double width, bool islog,
+                            double minval, double maxval, double maxkernwidth)
+{
+    m_binwidth = width;
+    m_curr_numpoints = 0;
+    m_target_numpoints = numpoints;
+    m_islog = islog;
+
+    //When using the biweight kernel, find (x-Xi)/h = 1, or:
+    double truemin = minval - maxkernwidth;
+    double low, high;
+    ClosestWidthsTo(truemin, low, high);
+    m_offset = low;
+
+    //Now find highest value and initialize the vectors:
+    double truehigh = maxval + maxkernwidth;
+    ClosestWidthsTo(truehigh, low, high);
+    unsigned long int veclength = static_cast<unsigned long int>(ceil((high - m_offset)/m_binwidth));
+    m_probabilities.assign(veclength+1,0.0);
+}
+
+//------------------------------------------------------------------------------------
+//AddKernel is the heart of the curve smoothing functionality.  It takes
+// a point (center) and transforms it into a probability density function
+//
+//kernelwidth needs to be an argument instead of a member variable
+// because a possible implementation of kernel width is to have it be
+// adaptive, i.e. wider at the edges, and thinner in the middle.
+
+void BayesCurve::AddKernel(double center, unsigned long int num, double kernelwidth)
+{
+    if (m_curr_numpoints + num > m_target_numpoints)
+    {
+        string error = "Can't add " + ToString(num)
+            + " point(s) to this curve, since we've already added "
+            + ToString(m_curr_numpoints) + " points to the curve, while expecting "
+            + ToString(m_target_numpoints) + " points.";
+        throw implementation_error(error);
+    }
+    m_curr_numpoints += num;
+
+    //'start' and 'end' are highly dependent on the particular kernel being
+    // used.  If a gaussian kernel is used, in fact, they'll need to be
+    // arbitrarily wide, since that kernel is unbounded.  For a biweight
+    // kernel, the bounds are +/- 1, so it's fairly simple:
+    double high, low;
+    ClosestWidthsTo(center-kernelwidth, high, low);
+    unsigned long int start = static_cast<unsigned long int> ((high - m_offset) / m_binwidth);
+    ClosestWidthsTo(center+kernelwidth, high, low);
+    unsigned long int end   = static_cast<unsigned long int> ((low - m_offset)  / m_binwidth);
+    for (unsigned long int i = start; i <= end; i++)
+    {
+        double kern_result = BiweightKernel((center - (m_offset + m_binwidth*i)) / kernelwidth);
+        m_probabilities[i] += num * kern_result / (m_target_numpoints * kernelwidth);
+    }
+}
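+
+// For reference: a biweight (quartic) kernel is conventionally defined as
+// K(u) = (15/16) * (1 - u^2)^2 for |u| <= 1 and 0 otherwise.  BiweightKernel
+// itself is defined elsewhere in the source (not shown in this excerpt), so
+// treat that constant as an assumption; what the loop above relies on is only
+// the bounded support, which is why the +/- kernelwidth start/end bounds
+// suffice.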
+
+//------------------------------------------------------------------------------------
+
+void BayesCurve::Integrate(bool warnuser)
+{
+    //To integrate, we'll simply add the heights of the curve throughout.
+    // This means that the integration is accurate halfway between the stored
+    // point and the next point.  Or, to put it another way:
+    // m_probabilities[index] is for x = m_offset + (m_binwidth*index).
+    // m_integration[index] is for x = m_offset + (m_binwidth*(index + 0.5)).
+    m_integration.assign(m_probabilities.size(), 0.0);
+    assert (m_curr_numpoints == m_target_numpoints);
+    //We need to have added the right number of points, or else we're guaranteed
+    // to not add up to 1.0
+
+    m_integration[0] = m_probabilities[0]*m_binwidth;
+    for (unsigned long int i = 1; i < m_integration.size(); ++i)
+    {
+        m_integration[i] = m_integration[i-1] + (m_probabilities[i]*m_binwidth);
+    }
+
+    double total=m_integration[m_integration.size()-1];
+    if (fabs(total-1.0) > .0001)
+    {
+        RunReport& runreport = registry.GetRunReport();
+        //Scale everything so that it actually adds up to 1.0
+        transform(m_integration.begin(),m_integration.end(), m_integration.begin(), bind2nd(divides<double>(),total));
+        if (warnuser)
+        {
+            string message = "Finished integrating a probability density function.  The total area under the curve should be 1.0; it's actually ";
+            message += Pretty(total)
+                + ".  The most likely cause of this is having too few data points; consider longer runtimes while collecting data.";
+            runreport.ReportNormal(message);
+        }
+        runreport.ReportDebug("Scaling this curve so it actually integrates to 1.0; if you're bug-hunting, you might want to turn this feature off (see bayescurve.cpp).");
+    }
+}
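+//For example (illustrative numbers): heights {2.0, 3.0, 5.0} with a bin width
+// of 0.1 give the running sums m_integration = {0.2, 0.5, 1.0}; the final
+// entry is the total area, which is rescaled to 1.0 if it drifts too far.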
+
+//------------------------------------------------------------------------------------
+
+double BayesCurve::GetValAtPercentile(double percentile) const
+{
+    assert(m_integration.size() > 1);
+    unsigned long int i = 1;
+    while ((i < m_integration.size()) && (m_integration[i] < percentile))
+        i++;
+    double fraction;
+    if (i == m_integration.size())
+        fraction = 1;
+    else
+    {
+        double lowperc  = m_integration[i-1];
+        double highperc = m_integration[i];
+        fraction = (percentile - lowperc) / (highperc - lowperc);
+    }
+    double val = m_offset + (m_binwidth*(i + 0.5 + fraction));
+    if (m_islog)
+        val = exp(val);
+    return val;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesCurve::GetPercentileAtVal(double val) const
+{
+    assert(m_integration.size() != 0);
+    if (m_islog)
+        val = log(val);
+
+    long int lowbin = static_cast<long int> (floor((val-m_offset-(m_binwidth*0.5))/m_binwidth));
+    long int highbin = lowbin + 1;
+    if (highbin < 0)
+    {
+        return 0.0;
+    }
+    if (lowbin >= static_cast<long int> (m_integration.size()))
+    {
+        return 1.0;
+    }
+    double lowperc, highperc;
+    double distance;
+    if (lowbin < 0)
+        lowperc = 0;
+    else
+        lowperc = m_integration[lowbin];
+    if (highbin >= static_cast<long int> (m_integration.size()))
+        highperc = 1.0;
+    else
+        highperc = m_integration[highbin];
+
+    distance = val - (m_offset + (m_binwidth*(lowbin + 0.5)));
+    assert (distance <= m_binwidth);
+    return ((highperc-lowperc)/m_binwidth)*distance + lowperc;
+}
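+//For example (illustrative numbers): with m_offset = 0, m_binwidth = 0.1 and
+// val = 0.37, lowbin = floor((0.37 - 0.05)/0.1) = 3 and distance = 0.02; if
+// m_integration[3] = 0.40 and m_integration[4] = 0.50, the interpolated
+// percentile is 0.40 + (0.50 - 0.40)*(0.02/0.1) = 0.42.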
+
+//------------------------------------------------------------------------------------
+
+double BayesCurve::GetLikeAtVal(double val) const
+{
+    //LS NOTE:  These numbers will be slightly low for the bin that
+    // contains the maximum:
+    //                          _,
+    //      *_,                * \              .
+    //     /  \_              /   \             .
+    //    /     *_           /     *_
+    //   /        \_        /        \_
+    //  *           *      *           *
+    //
+    // The ' marks the true maximum; it sits at the same x coordinate in both
+    // sketches but at a different y coordinate.  The *'s represent the known
+    // values (the edges of the bins).  The GetMax functions report the maximum
+    // correctly, but this function behaves as if the first curve were true
+    // (i.e. it draws straight lines between each pair of known points).
+    assert(m_probabilities.size() != 0);
+    if (m_islog)
+        val = log(val);
+    double high, low;
+    ClosestWidthsTo(val, low, high);
+    long int lowbin = static_cast<long int> ((low-m_offset)/m_binwidth);
+    long int highbin = lowbin + 1;
+
+    if (highbin < 0)
+        return 0.0;
+    if (lowbin >= static_cast<long int>(m_integration.size()))
+        return 0.0;
+
+    double lowlike, highlike;
+    double distance = val-low;
+    if (lowbin < 0)
+        lowlike = 0;
+    else
+        lowlike = m_probabilities[lowbin];
+    if (highbin >= static_cast<long int>(m_integration.size()))
+        highlike = 0.0;
+    else
+        highlike = m_probabilities[highbin];
+
+    // EWFIX -- get better epsilon from Lucian
+    assert (distance <= m_binwidth * 1.00000001);
+    return ((highlike-lowlike)/m_binwidth)*distance + lowlike;
+}
+
+//------------------------------------------------------------------------------------
+
+double BayesCurve::GetMax() const
+{
+    if (m_curr_numpoints != m_target_numpoints)
+    {
+        RunReport& runreport = registry.GetRunReport();
+        string error = "Warning:  attempted to get the maximum of a curve with ";
+        error = error + ToString(m_curr_numpoints) + " points when we wanted "
+            + ToString(m_target_numpoints) + ".";
+        runreport.ReportNormal(error);
+    }
+    double retval;
+    if (m_probabilities.size() <= 1)
+        retval = m_offset;
+    else if (m_probabilities.size() == 2)
+    {
+        if (m_probabilities[0] > m_probabilities[1])
+            retval = m_offset;
+        else
+            retval = m_offset + m_binwidth;
+    }
+    else
+    {
+        //There are at least three points, which we can use to interpolate the
+        // location of the maximum.
+        unsigned long int max = 1;
+        for (unsigned long int i = 2; i < m_probabilities.size() - 1; i++)
+        {
+            if (m_probabilities[i] > m_probabilities[max] )
+                max = i;
+        }
+        //If the following is too intensive just do:
+        // return m_offset + (max * m_binwidth);
+
+        //The following interpolates the most likely spot for the max between
+        // the bins.
+        double higher, lower;
+        double maxval = m_probabilities[max];
+        bool firsthigh;
+        if (m_probabilities[max-1] > m_probabilities[max+1])
+        {
+            higher = m_probabilities[max-1];
+            lower  = m_probabilities[max+1];
+            firsthigh = true;
+        }
+        else
+        {
+            higher = m_probabilities[max+1];
+            lower  = m_probabilities[max-1];
+            firsthigh = false;
+        }
+
+        double distance = (m_binwidth/2) * (1-((maxval - higher)/(maxval - lower)));
+        assert (distance < m_binwidth);
+        if (firsthigh)
+            distance *= -1;
+        retval = (m_offset + (max * m_binwidth) + distance);
+    }
+    if (m_islog)
+        retval = exp(retval);
+    return retval;
+}
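+//For example (illustrative numbers): with bin heights 0.8, 1.0, 0.6 at indices
+// max-1, max, max+1 and m_binwidth = 0.1, distance = 0.05*(1 - 0.2/0.4) = 0.025,
+// negated because the taller neighbor is at max-1, so the interpolated mode
+// lies 0.025 (a quarter bin) below the grid point of the highest bin.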
+
+//------------------------------------------------------------------------------------
+
+double BayesCurve::GetLikeAtMax() const
+{
+    assert(m_probabilities.size());
+    double bestlike = m_probabilities[0];
+    for (unsigned long int i = 1; i < m_probabilities.size(); i++)
+    {
+        if (m_probabilities[i] > bestlike )
+            bestlike = m_probabilities[i];
+    }
+    return bestlike;
+    //Note:  This is a slight underestimate, since GetMax() interpolates the
+    // location of the peak between bins, while this simply returns the height
+    // of the highest stored bin.
+}
+
+//------------------------------------------------------------------------------------
+
+DoublePairVec BayesCurve::GetXYvec() const
+{
+    DoublePairVec results;
+    for (unsigned long int i = 0; i < m_probabilities.size(); ++i)
+    {
+        double x = m_offset + (m_binwidth*i);
+        double y = m_probabilities[i];
+        results.push_back(pair<double, double>(x, y));
+    }
+    return results;
+}
+
+//------------------------------------------------------------------------------------
+
+void BayesCurve::ClosestWidthsTo(const double val, double& low, double& high) const
+{
+    double highint = ceil(val/m_binwidth);
+    high = highint*m_binwidth;
+    double lowint = floor(val/m_binwidth);
+    low = lowint*m_binwidth;
+}
+
+//------------------------------------------------------------------------------------
+//Three possible kernel functions; we're most likely to use the biweight.
+
+double BayesCurve::BiweightKernel(double t) const
+{
+    if (fabs(t) > 1.0)
+        return 0.0;
+    return (15.0/16.0) * pow((1.0-pow(t,2)),2);
+}
+
+//------------------------------------------------------------------------------------
+//The problem with the Epanechnikov kernel is that it's a bit more
+// expensive to compute, and its support (|t| <= sqrt(5)) is less convenient.
+
+double BayesCurve::EpanechnikovKernel(double t) const
+{
+    if (pow(t,2) > 5.0)
+        return 0.0;
+    return (3.0/4.0) * (1 - (pow(t,2)/5.0)) / sqrt(5.0);
+}
+
+//------------------------------------------------------------------------------------
+//The main problem with the Gaussian kernel is that its support is unbounded,
+// and it's probably more expensive to compute as well.
+
+double BayesCurve::GaussianKernel(double t) const
+{
+    return (1.0/sqrt(2*PI)) * exp(-.5 * pow(t,2));
+}
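+//Sanity check: each of the three kernels integrates to 1 over its support, as
+// a probability kernel should:
+//   biweight:      integral over [-1, 1] of (15/16)(1-t^2)^2 dt = (15/16)*(16/15) = 1
+//   Epanechnikov:  integral over [-sqrt(5), sqrt(5)] of (3/4)(1 - t^2/5)/sqrt(5) dt = 1
+//   Gaussian:      integral over (-inf, inf) of (1/sqrt(2*PI)) exp(-t^2/2) dt = 1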
+
+//____________________________________________________________________________________
diff --git a/src/bayeslike/bayescurve.h b/src/bayeslike/bayescurve.h
new file mode 100644
index 0000000..5d59852
--- /dev/null
+++ b/src/bayeslike/bayescurve.h
@@ -0,0 +1,80 @@
+// $Id: bayescurve.h,v 1.11 2011/03/07 06:08:39 bobgian Exp $
+
+/*
+  Copyright 2004  Lucian Smith, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*
+  A Bayes curve is a probability density function, i.e. a curve whose area
+  under it is 1.0.
+
+  It's stored as a vector of doubles, with an offset and a bin width.
+
+  Important functions include the creation of a new Bayes curve from a vector
+  of other Bayes curves, and integration of the curve to obtain its cumulative
+  distribution (used to report percentiles).
+*/
+
+#ifndef BAYESCURVE_H
+#define BAYESCURVE_H
+
+#include "vectorx.h"
+
+typedef std::vector < std::pair < double, double> > DoublePairVec;
+
+enum combine_type { ADD, MULTIPLY };
+
+class BayesCurve
+{
+  public:
+    //  Deleted since we never use them, 4/4/06
+    //  BayesCurve(unsigned long numpoints, double width,
+    //             double minval, double maxval, double maxkernwidth);
+    //  BayesCurve(const BayesCurve& bcurve); //We accept the default
+    BayesCurve(); //Must use Initialize later.
+    BayesCurve(const std::vector<BayesCurve> bcurve_vec, combine_type ctype);
+    ~BayesCurve();
+
+    void Initialize(unsigned long numpoints, double width, bool islog,
+                    double minval, double maxval, double maxkernwidth);
+
+    void AddKernel(double center, unsigned long num, double kernelwidth);
+    //kernelwidth needs to be an argument instead of a member variable
+    // because a possible implementation of kernel width is to have it be
+    // adaptive, i.e. wider at the edges, and thinner in the middle.
+    void Integrate(bool warnuser = true);
+    double GetValAtPercentile(double percentile) const;
+    double GetPercentileAtVal(double val) const;
+    double GetLikeAtVal(double val) const;
+    double GetLikeAtMax() const;
+    double GetMax() const;
+
+    DoublePairVec GetXYvec() const;
+    double GetOffset() const {return m_offset;};
+    double GetBinWidth() const {return m_binwidth;};
+    bool IsLog() const {return m_islog;};
+
+  private:
+    double m_offset;
+    double m_binwidth;
+    unsigned long m_curr_numpoints;
+    unsigned long m_target_numpoints;
+    DoubleVec1d m_probabilities;
+    DoubleVec1d m_integration;
+
+    bool m_islog;
+
+    void ClosestWidthsTo(const double val, double& low, double& high) const;
+
+    //Three possible kernel functions; we're most likely to use the biweight.
+    double BiweightKernel(double t) const;
+    double EpanechnikovKernel(double t) const;
+    double GaussianKernel(double t) const;
+};
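+
+//The disabled sketch below illustrates the intended call sequence; the numeric
+// values and the helper's name are purely illustrative.
+#if 0
+inline void ExampleBayesCurveUsage()
+{
+    BayesCurve curve;
+    // 3 sampled points, bin width 0.01, linear (not log) scale,
+    // data range [0.5, 2.0], kernel width 0.25:
+    curve.Initialize(3, 0.01, false, 0.5, 2.0, 0.25);
+    curve.AddKernel(0.5, 1, 0.25);
+    curve.AddKernel(1.0, 1, 0.25);
+    curve.AddKernel(2.0, 1, 0.25);
+    curve.Integrate();                              // build the cumulative distribution
+    double mode   = curve.GetMax();                 // interpolated peak location
+    double median = curve.GetValAtPercentile(0.5);  // value at the 50th percentile
+}
+#endif // 0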
+
+#endif  // BAYESCURVE_H
+
+//____________________________________________________________________________________
diff --git a/src/bayeslike/bayesparamlike_1d.cpp b/src/bayeslike/bayesparamlike_1d.cpp
new file mode 100644
index 0000000..fa21e70
--- /dev/null
+++ b/src/bayeslike/bayesparamlike_1d.cpp
@@ -0,0 +1,167 @@
+// $Id: bayesparamlike_1d.cpp,v 1.20 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2004  Lucian Smith, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <cmath>
+#include <utility>
+
+#include "bayesparamlike_1d.h"
+#include "defaults.h"
+#include "errhandling.h"
+#include "parameter.h"
+#include "mathx.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+BayesParamLike_1D::BayesParamLike_1D (Parameter param)
+    :m_raw_data(),
+     m_numpoints(0),
+     m_smoothed_curve(),
+     m_kernelwidth(0.0)
+{
+    if (param.IsValidParameter())
+    {
+        m_priortype = param.GetPrior().GetPriorType();
+        m_binwidth = param.GetPrior().GetBinwidth();
+        m_isvalid = true;
+    }
+    else
+    {
+        m_priortype = LINEAR;
+        m_binwidth = 0.1;
+        m_isvalid = false;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+BayesParamLike_1D::~BayesParamLike_1D ()
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+void BayesParamLike_1D::AddPoint(double value, long int freq)
+{
+    assert(m_isvalid);
+    if (!m_isvalid) return;
+    if (m_priortype == LOGARITHMIC)
+        value = SafeLog(value);
+    m_numpoints += freq;
+    unsigned long int datasize = m_raw_data.size();
+    if (datasize > 0)
+        if (m_raw_data[datasize-1].first == value)
+        {
+            m_raw_data[datasize-1].second += freq;
+            return;
+        }
+    m_raw_data.push_back(make_pair(value, freq));
+}
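+//For example (illustrative values, linear prior): AddPoint(0.5, 1),
+// AddPoint(0.5, 2), AddPoint(0.7, 1) leaves m_raw_data = {(0.5, 3), (0.7, 1)}
+// and m_numpoints = 4; only consecutive repeats of the same value are merged.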
+
+//------------------------------------------------------------------------------------
+//Sets up m_smoothed_curve
+
+void BayesParamLike_1D::SmoothCurve()
+{
+    assert(m_isvalid);
+    if (!m_isvalid) return;
+    InitializeCurve();
+    for (unsigned long int i = 0; i < m_raw_data.size(); i++)
+    {
+        m_smoothed_curve.AddKernel(m_raw_data[i].first, m_raw_data[i].second, m_kernelwidth);
+        // To do adaptive smoothing, kernelwidth would change instead of being
+        // a constant member variable here.
+    }
+
+    m_smoothed_curve.Integrate();
+
+}
+
+//------------------------------------------------------------------------------------
+//We need to create a new BayesCurve object, and to do that we need to
+// know all of the things it wants, namely:
+//  numpoints:  Total number of points (sum of the freq's in raw_data)
+//  width:      m_binwidth
+//  minval:     Minimum data point (lowest 'value' in raw_data)
+//  maxval:     Maximum data point (highest 'values' in raw_data)
+//  maxkernwidth:  The kernel width.  If we were to do adaptive
+//                 smoothing, we'd need the highest such width here, but we
+//                 don't do that yet.
+//
+//  The kernel width is calculated from the number of points and from
+//   whichever is lower of the standard deviation or the interquartile
+//   range divided by 1.34, floored at five bin widths.
+
+void BayesParamLike_1D::InitializeCurve()
+{
+    sort(m_raw_data.begin(), m_raw_data.end());
+    double minval = m_raw_data[0].first;
+    double maxval = m_raw_data[m_raw_data.size()-1].first;
+
+    unsigned long int tally = 0;
+    double lowquart = 0;
+    double highquart = 0;
+    double average = 0;
+    for (unsigned long int i = 0; i < m_raw_data.size(); i++)
+    {
+        average += (m_raw_data[i].first * m_raw_data[i].second);
+        tally += m_raw_data[i].second;
+        if (lowquart == 0 && tally > (m_numpoints/4) )
+            lowquart = m_raw_data[i].first;
+        if (highquart == 0 && tally > (m_numpoints* 3/4) )
+            highquart = m_raw_data[i].first;
+    }
+    assert (tally == m_numpoints); //AddPoint should make this true.
+    average = average/m_numpoints;
+    double sigma = CalculateStdev(average);
+    double minsig = min(sigma, (highquart-lowquart)/1.34);
+    if (minsig < 5*m_binwidth)
+        minsig = 5*m_binwidth;
+    m_kernelwidth = 2.5 * minsig * pow(m_numpoints, -.2);
+    //2.5 is a little less than 2.78; it's totally ad hoc, but so is Silverman's
+    // rule of thumb, basically.
+    // LS DEBUG: now that we have bayesian summary files, we can
+    // and probably should experiment
+    // with using different values for this constant to see how our data in
+    // particular interacts with the kernel width.
+    bool islog = false;
+    if (m_priortype == LOGARITHMIC)
+        islog = true;
+
+    m_smoothed_curve.Initialize(m_numpoints, m_binwidth, islog, minval, maxval, m_kernelwidth);
+}
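+//In other words, with the floor applied before the power law:
+//   h = 2.5 * max( min(sigma, IQR/1.34), 5*binwidth ) * N^(-1/5)
+// For example (illustrative numbers): sigma = 0.30, IQR = 0.27, binwidth = 0.01
+// and N = 10000 give min(0.30, 0.2015) = 0.2015, 10000^(-0.2) ~= 0.1585, and
+// so h ~= 2.5 * 0.2015 * 0.1585 ~= 0.080.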
+
+//------------------------------------------------------------------------------------
+//Private functions for internal calculations.
+
+//CalculateStdev takes a given value (presumably the average) and calculates
+// the standard deviation of m_raw_data about that value:
+// sqrt( sum(freq * diff^2) / (N-1) ).
+
+double BayesParamLike_1D::CalculateStdev(double average)
+{
+    double sigma = 0.0;
+    if (m_numpoints <= 1) return sigma; //sigma is incalculable in this case
+    for (unsigned long int i = 0; i < m_raw_data.size(); i++)
+        sigma += m_raw_data[i].second * pow(m_raw_data[i].first - average, 2); //weight the squared deviation by the point's frequency
+    sigma = sigma / (m_numpoints - 1);
+    sigma = sqrt(sigma);
+    return sigma;
+}
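+//For example (illustrative numbers): raw data {(1.0, freq 2), (2.0, freq 1)}
+// with N = 3 and mean 4/3 gives sigma^2 = (2*(1/3)^2 + 1*(2/3)^2)/2 = 1/3,
+// so sigma ~= 0.577.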
+
+//------------------------------------------------------------------------------------
+
+long int BayesParamLike_1D::GetNumUniquePoints()
+{
+    return static_cast<long int>(m_raw_data.size());
+}
+
+//____________________________________________________________________________________
diff --git a/src/bayeslike/bayesparamlike_1d.h b/src/bayeslike/bayesparamlike_1d.h
new file mode 100644
index 0000000..78fe22c
--- /dev/null
+++ b/src/bayeslike/bayesparamlike_1d.h
@@ -0,0 +1,52 @@
+// $Id: bayesparamlike_1d.h,v 1.10 2011/03/07 06:08:39 bobgian Exp $
+
+/*
+  Copyright 2004  Lucian Smith, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef BAYES_PARAMLIKE_1D_H
+#define BAYES_PARAMLIKE_1D_H
+
+#include "bayescurve.h"
+
+class Parameter;
+
+class BayesParamLike_1D
+{
+  public:
+    BayesParamLike_1D (Parameter param);
+    ~BayesParamLike_1D ();
+    void AddPoint(double value, long freq);
+    //Initializes m_smoothed_curve
+    void SmoothCurve();
+    BayesCurve GetSmoothedCurve() {return m_smoothed_curve;}
+    priortype GetPriorType() {return m_priortype;}
+    double GetKernelWidth() {return m_kernelwidth;}
+    long   GetNumUniquePoints();
+
+  private:
+    std::vector<std::pair<double, long> > m_raw_data;
+    unsigned long m_numpoints;
+    bool m_isvalid;
+
+    BayesCurve m_smoothed_curve;
+
+    double m_kernelwidth;
+
+    //These values are set on a per-force basis.
+    priortype m_priortype;
+    double m_binwidth;
+
+    //Functions.  Used by SmoothCurve(), generally.
+    void InitializeCurve();
+    double CalculateStdev(const double average); //used when computing the kernel width in InitializeCurve()
+
+}; // class BayesParamLike_1D
+
+#endif // BAYES_PARAMLIKE_1D_H
+
+//____________________________________________________________________________________
diff --git a/src/control/chainmanager.cpp b/src/control/chainmanager.cpp
new file mode 100644
index 0000000..f436408
--- /dev/null
+++ b/src/control/chainmanager.cpp
@@ -0,0 +1,2207 @@
+// $Id: chainmanager.cpp,v 1.217 2013/10/25 17:00:52 mkkuhner Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <cstdlib>
+#include <iostream>
+#include <setjmp.h>
+
+#include "local_build.h"
+
+#include "analyzer.h"
+#include "bayesanalyzer_1d.h"
+#include "cellmanager.h"
+#include "chain.h"
+#include "chainmanager.h"
+#include "chainout.h"
+#include "chainpack.h"
+#include "chainparam.h"
+#include "constants.h"
+#include "datapack.h"
+#include "definitions.h"
+#include "force.h"
+#include "forcesummary.h"
+#include "likelihood.h"
+#include "maximizer.h"
+#include "maximizer_strings.h"
+#include "outputfile.h"
+#include "range.h"                      // For RecRange::BuildBiglinkMap and related objects.
+#include "region.h"
+#include "registry.h"
+#include "runreport.h"
+#include "stringx.h"
+#include "timex.h"
+#include "tree.h"
+#include "treesum.h"
+#include "vector_constants.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+#ifdef DUMP_TREE_COAL_RDATA
+ofstream rdata;                         // JRM debug
+#endif
+
+#ifdef RUN_BIGLINKS
+BiglinkVectormap RecRange::s_biglink_vectormap; // Define RecRange static variable.
+#endif
+
+// Define Range static variable.  Initialized with an "illegal" value so we can detect
+// a call to a Range/RecRange constructor before it gets initialized properly.
+long int Range::s_numRegionSites(FLAGLONG);
+
+//------------------------------------------------------------------------------------
+
+class RegionGammaInfo;
+
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+jmp_buf prewrite;
+#endif
+
+const double TEMPSWAPMIN =  0.1;        // Triggers decrease of temperature interval.
+const double TEMPSWAPMAX = 0.4;         // Triggers increase of temperature interval.
+const double TEMPINCR = 1.1;            // Proportion to increase temperature.
+const double TEMPDECR = 0.9;            // Proportion to decrease temperature.
+const double MAXTEMP  = 1000;           // The maximum temperature we'll allow.
+const long int ADJUSTINTERVAL = 20;     // About how many swaps between adaptive temperature changes.
+const double MAXDIFF = 100.0;           // Maximum difference between temperatures.
+const double MINDIFF = 0.001;           // Minimum difference between temperatures.
+
+//------------------------------------------------------------------------------------
+
+ChainManager::ChainManager(RunReport& runrep, Maximizer& maximizer)
+    : m_chainparam(registry.GetChainParameters()),
+      m_runreport(runrep),
+      m_randomsource(registry.GetRandom()),
+      m_maximizer(maximizer),
+      m_bayesanalyzer(registry.GetBayesAnalyzer_1D()),
+      m_chainpack(),
+      m_nregions(registry.GetDataPack().GetNRegions()),
+      m_nreplicates(m_chainparam.GetNReps()),
+      m_collectionmanager(m_nregions, m_chainparam.GetNRepsAndNProfileReps(registry.GetForceSummary().GetAllNParameters())),
+      m_sumfilehandler(),
+      m_totalsteps(0), m_currentsteps(0),
+      m_logGeyerWeights(),
+      m_writesumfile(false), m_readsumfile(false),
+      m_recoversumfile(false),
+      m_recover_region(0), m_recover_replicate(0), m_recover_chaintype(0),
+      m_recover_chain(0),
+      m_recover_redochain(false),
+      m_recover_redomaximization(false),
+      m_redoreplicatesum(false),
+      m_redoregionsum(false)
+{
+    long int chtype;
+    for(chtype = 0; chtype < NCHAINTYPES; ++chtype)
+    {
+        long int steps = m_chainparam.GetNSamples(chtype) * m_chainparam.GetInterval(chtype) + m_chainparam.GetNDiscard(chtype);
+        m_nsteps.push_back(steps);
+        m_totalsteps += steps * m_nreplicates * m_chainparam.GetNChains(chtype);
+
+        // added profile replicate chains are of type "long int"
+        if (registry.GetChainParameters().RunProfileReps() && chtype == 1)
+        {
+            m_totalsteps += steps *
+                registry.GetChainParameters().GetNProfileReps(registry.GetForceSummary().GetAllNParameters());
+        }
+    }
+
+    m_multitemp = (m_chainparam.GetAllTemperatures().size() > 1);
+    if (m_multitemp)
+    {
+        long int chunk;
+        for(chtype = 0; chtype < NCHAINTYPES; ++chtype)
+        {
+            long int interval = m_chainparam.GetTempInterval();
+            LongVec1d chunks;
+            if (m_nsteps[chtype] < interval)
+                chunks.push_back(m_nsteps[chtype]);
+            else
+            {
+                for(chunk = interval; chunk <= m_nsteps[chtype]; chunk += interval)
+                {
+                    chunks.push_back(interval);
+                    if (chunk != m_nsteps[chtype] && chunk+interval > m_nsteps[chtype])
+                        chunks.push_back(m_nsteps[chtype]-chunk);
+                }
+            }
+            m_chunksize.push_back(chunks);
+        }
+    }
+    else
+    {
+        for(chtype = 0; chtype < NCHAINTYPES; ++chtype)
+        {
+            LongVec1d chunks(1,m_nsteps[chtype]);
+            m_chunksize.push_back(chunks);
+        }
+    }
+
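+    // For example (illustrative numbers): with m_nsteps[chtype] = 100 and a
+    // temperature-swap interval of 30, the multi-temperature branch above
+    // produces the chunk list {30, 30, 30, 10}: full-interval chunks plus a
+    // final remainder chunk, summing to the total steps for that chain type.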
+    vector<pair<ForceParameters, long int> > emptypar;
+    vector<pair<DoubleVec1d, long int> > emptystick;
+
+} // ChainManager::ChainManager
+
+//------------------------------------------------------------------------------------
+
+ChainManager::~ChainManager()
+{
+    // intentionally blank
+} // ChainManager::~ChainManager
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::DoAllChains()
+{
+    // note -- moved writing of xml file into lamarc.cpp
+#ifdef DUMP_TREE_COAL_RDATA
+    // save final tree coalescence times for R to display
+    rdata.open("rdata",ios::app);       // JRM debug
+#endif
+
+    // chainmanager obj seems to be created before user exits menu
+    m_readsumfile         = registry.GetUserParameters().GetReadSumFile();
+    m_writesumfile        = registry.GetUserParameters().GetWriteSumFile();
+    if (m_readsumfile)                  // Open and read in the input summary file.
+    {
+        long int total_chains = 0;
+        for (int i = 0; i < NCHAINTYPES; i++ )
+        {
+            total_chains += registry.GetChainParameters().GetNChains(i);
+        }
+        m_sumfilehandler.ReadInSumFile(m_chainpack, m_collectionmanager, total_chains);
+        ReadInRecover();
+
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+        if (setjmp (prewrite))
+        {
+            CloseSumOut();
+        }
+#endif
+
+        if (m_writesumfile)
+        {
+            m_writesumfile = false;
+            // We'll reset it in DoChainFromSummaryFile(), but for now, it's much
+            //  more efficient to write them all at once, above, than whilst going
+            //  through the DoRegions/DoReplicates/DoChainTypes fandango.
+            m_sumfilehandler.WriteSumFileStart();
+            m_sumfilehandler.WriteWhatWasRead(m_recoversumfile,
+                                              m_recover_region,
+                                              m_recover_replicate,
+                                              m_recover_chaintype,
+                                              m_recover_chain,
+                                              m_recover_redochain,
+                                              m_recover_redomaximization,
+                                              m_nregions,
+                                              m_nreplicates,
+                                              m_chainpack,
+                                              m_collectionmanager);
+        }
+    }
+
+    else if(m_writesumfile)             //Open and start writing to the output summary file.
+    {
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+        if (setjmp (prewrite))
+        {
+            CloseSumOut();
+        }
+        else
+        {
+            m_sumfilehandler.WriteSumFileStart();
+        }
+#else
+        m_sumfilehandler.WriteSumFileStart();
+#endif
+
+        //This is an 'else if' in case the user is reading and writing to the
+        // same file--we want to read in the data before clobbering it.
+    }
+
+    CreateChains();
+    DoRegions();
+
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+    if (setjmp (prewrite))
+    {
+        CloseSumOut();
+    }
+#endif
+
+    if (m_writesumfile)
+        m_sumfilehandler.WriteSumFileEnd(m_chainpack);
+
+#ifndef STATIONARIES
+#ifndef LAMARC_QA_SINGLE_DENOVOS
+    // For LAMARC_QA_SINGLE_DENOVOS, we don't want to
+    // combine any MLE/profile data over regions.
+
+    if (m_chainparam.IsBayesian())
+        m_bayesanalyzer.CalcProfiles(FLAGLONG);
+    else
+        DoOverallProfiles();
+
+    try                                 // JDEBUG--this variable will be created during menu/xml reading
+    {                                   // and stored in the UserParam--eventually...
+        ResultsFile results;
+        results.Display();
+    }
+
+    catch (file_error& e)
+    {
+        throw e;
+    }
+
+#endif // LAMARC_QA_SINGLE_DENOVOS
+#endif // STATIONARIES
+
+#ifdef DUMP_TREE_COAL_RDATA
+    rdata.close(); // JRM debug
+#endif
+
+} // ChainManager::DoAllChains
+
+//------------------------------------------------------------------------------------
+
+//The basic scheme of ReadInRecover is to go through the information in
+// chainpack and check to see what's been set and what hasn't, and to check
+// the collectionmanager to make sure all the tree summaries are accurately
+// stored there, too.  When it reaches something that hasn't been set,
+// LAMARC then knows it needs to re-calculate those values, and sets various
+// m_recover_* variables appropriately.  These are then read by the main
+// program as it goes through its DoRegions/DoReplicates/DoChainTypes/DoChain
+// dance, and are set to trigger normal code execution at the appropriate
+// juncture.  WriteWhatWasRead also uses these variables when writing a new
+// summary file that is intended to match the old one.  The various flags
+// and counters are as follows:
+//
+//   bool m_recoversumfile:  True if the summary file was partial, false if not.
+//   long m_recover_region:  The index of the region where the sumfile stopped,
+//                           or (m_nregions-1) if all regions' trees were read.
+//   long m_recover_replicate:  The index of the replicate where the sumfiles
+//                              stopped, or (m_nreplicates-1) if all replicate
+//                              trees were read.
+//   long m_recover_chaintype:  The index of the chain type where the sumfiles
+//                              stopped, or (NCHAINTYPES-1) if all trees read.
+//   long m_recover_chain:  The index of the chain where the sumfiles stopped.
+//                          Note that this is the chain *for the particular
+//                          chaintype* and not the absolute chain.  Set to
+//                          registry.GetChainParameters().GetNChains(m_recover_chaintype)
+//                          if all trees were read.  Note also that this is
+//                          not the standard '-1' that all the other values
+//                          get--it is not a valid index, merely an indication
+//                          that we're actually done and don't need to
+//                          redo this chain.
+//   bool m_redoreplicatesum:  True if the summary over all replicates of the
+//                             last recorded region was not recorded in the
+//                             sumfile, but the tree summaries were.
+//   bool m_redoregionsum:  True if the summary over all regions was not
+//                          recorded in the sumfile, but the tree summaries
+//                          were.
+
+void ChainManager::ReadInRecover()
+{
+    long int total_chains = 0;
+    for (int i = 0; i < NCHAINTYPES; i++ )
+    {
+        total_chains += registry.GetChainParameters().GetNChains(i);
+    }
+
+    m_recover_region = m_sumfilehandler.GetLastRegion();
+    m_recover_replicate = m_sumfilehandler.GetLastReplicate();
+
+    long int last_chain = m_sumfilehandler.GetLastChain();
+    SetRecoverChaintypeChainsFrom(last_chain);
+
+    long int regionChainSum = m_sumfilehandler.GetLastRegionChainSum();
+    long int replicateChainSum = m_sumfilehandler.GetLastReplicateChainSum();
+
+    if (replicateChainSum > m_recover_replicate)
+    {
+        //There must be only one chain per replicate, and we wrote out the
+        // trees for a replicate but not the chainpack.
+        assert(total_chains == 1);
+        m_recover_region = regionChainSum;
+        m_recover_replicate = replicateChainSum;
+        m_recover_chain = 0;
+        m_recover_redomaximization = true;
+        m_chainpack.EndReplicate();
+    }
+    else if (regionChainSum > m_recover_region)
+    {
+        //There must be only one chain per region, and we wrote out the trees
+        // but not the chainpack.
+        assert(total_chains == 1);
+        m_recover_region = regionChainSum;
+        m_recover_chain = 0;
+        m_recover_redomaximization = true;
+        m_chainpack.EndRegion();
+    }
+    else if (last_chain+1 == total_chains)
+    {
+        //There are three possible omissions here--the final chain summary,
+        // the final replicate summary, and the final region summary.
+        if ((regionChainSum != m_recover_region) || (replicateChainSum != m_recover_replicate))
+        {
+            //No final chainsum.
+            m_recover_redochain = true;
+            m_recover_chain--;
+        }
+        else if ((m_sumfilehandler.GetLastReplicateSummary()-1 != m_recover_region)
+                 && (m_recover_replicate == m_nreplicates-1)
+                 && (m_nreplicates > 1))
+        {
+            //No summary over replicates for this region.
+            m_redoreplicatesum = true;
+        }
+        else if ((!m_sumfilehandler.GetRegionSummary()) &&
+                 (m_recover_region == m_nregions-1) &&
+                 (m_recover_replicate == m_nreplicates-1) &&
+                 (m_nregions > 1))
+        {
+            //No summary over regions.
+            m_redoregionsum = true;
+        }
+        else
+        {
+            //We have everything--advance to the next replicate and/or region
+            if (m_recover_replicate+1 == m_nreplicates)
+            {
+                //On to the next region
+                m_chainpack.EndRegion();
+                m_recover_region++;
+                m_recover_replicate = 0;
+            }
+            else
+            {
+                //On to the next replicate
+                m_chainpack.EndReplicate();
+                m_recover_replicate++;
+            }
+            m_recover_chaintype = 0;
+            m_recover_chain = 0;
+        }
+    }
+    else if (last_chain+2 == total_chains)
+    {
+        //Check to see if we have a chain summary but no chainpack--in this case,
+        // we need to recalculate the maximum on that last chain.
+        if ((regionChainSum == m_recover_region) && (replicateChainSum == m_recover_replicate))
+        {
+            //We have a final chainsum, but no final chainpack
+            m_recover_redomaximization = true;
+            m_recover_chain = registry.GetChainParameters().GetNChains(NCHAINTYPES-1)-1;
+        }
+    }
+
+    m_recoversumfile = !m_sumfilehandler.GetRegionSummary();
+    //We'll assume that if the region summary is there, everything else was OK,
+    // since that's the last thing that gets written.
+
+    TellUserWhereWeAreRestarting();
+} // ReadInRecover
+
+//------------------------------------------------------------------------------------
+
+//Sets the member variables m_recover_chaintype and m_recover_chain from a
+// number that equals the index of the final chain read in.
+
+void ChainManager::SetRecoverChaintypeChainsFrom(long int last_chain)
+{
+    long int read_chains = last_chain+1;
+    //We want the index of the chain *after* the last read chain.
+    long int earlier_chains = 0;
+
+    for (int i = 0; i < NCHAINTYPES; i++)
+    {
+        long int these_chains = registry.GetChainParameters().GetNChains(i);
+        if (read_chains < earlier_chains + these_chains)
+        {
+            m_recover_chaintype = i;
+            m_recover_chain = read_chains - earlier_chains;
+            return;
+        }
+        earlier_chains += these_chains;
+    }
+
+    //If we get here, we've read the last chain of the list, so in case
+    // we still need to do things at this point, set the chaintype/chain
+    // to the last one.
+    m_recover_chaintype = NCHAINTYPES-1;
+    m_recover_chain = registry.GetChainParameters().GetNChains(m_recover_chaintype);
+}
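+//For example (hypothetical chain counts): with NCHAINTYPES = 2, 10 chains of
+// the first type and 2 of the second, last_chain = 10 gives read_chains = 11,
+// which falls within the second block, so m_recover_chaintype = 1 and
+// m_recover_chain = 1.  With last_chain = 11 (everything read), we fall
+// through to the sentinel: m_recover_chaintype = 1, m_recover_chain = 2.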
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::TellUserWhereWeAreRestarting()
+{
+    string msg;
+    if (m_recover_region >= m_nregions)
+    {
+        //Everything was intact
+        msg = "The summary file was complete, so no new chains need to be created."
+            "  Any requested profiling will be calculated at the appropriate place"
+            " in the run.";
+    }
+    else if (m_redoregionsum)
+    {
+        msg = "The summary file was mostly complete, with the exception of the "
+            "calculation of the final summary over all regions.  This will be re-"
+            "calculated, along with any requested profiling.";
+    }
+    else if (m_redoreplicatesum)
+    {
+        msg = "The summary file did not include the summary over replicates for"
+            " region " + ToString(m_recover_region+1) + ", so new calculations will"
+            " begin at that point, along with any requested profiling before then.";
+    }
+    else
+    {
+        msg = "The summary file was incomplete.  After calculating any requested profiles from summarized replicates, chains will resume being calculated from ";
+        if (m_recover_redochain)
+        {
+            msg += "the last chain of region "
+                + ToString(m_recover_region + 1) + ", replicate "
+                + ToString(m_recover_replicate + 1) + ", re-creating the final chain, "
+                "since the summary of that chain was lost.";
+        }
+        else if (m_recover_redomaximization)
+        {
+            msg += "the last chain of region "
+                + ToString(m_recover_region + 1) + ", replicate "
+                + ToString(m_recover_replicate + 1) + ", re-calculating the parameter "
+                "estimates from the data stored in the summary file.";
+        }
+        else
+        {
+            msg += "region " + ToString(m_recover_region + 1)
+                + ", replicate "  + ToString(m_recover_replicate + 1)
+                + ", chain type " + ToString(m_recover_chaintype + 1)
+                + ", and chain "  + ToString(m_recover_chain + 1) + ".";
+        }
+    }
+    m_runreport.ReportNormal(msg);
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::CreateChains()
+{
+    DoubleVec1d temperatures = m_chainparam.GetAllTemperatures();
+    DoubleVec1d::iterator temp;
+
+    for(temp = temperatures.begin(); temp != temperatures.end(); ++temp)
+    {
+        Chain ch(m_randomsource,m_runreport,m_chainparam,m_collectionmanager,*temp);
+        m_temps.push_back(ch);
+    }
+} // ChainManager::CreateChains
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::DoRegions()
+{
+    vector<Chain>::iterator temp;
+    long int region;
+
+#ifndef STATIONARIES
+    ForceSummary & forcesum = registry.GetForceSummary();
+    Analyzer & analyzer = registry.GetAnalyzer();
+#endif // STATIONARIES
+
+    for(region = 0; region < m_nregions; ++region)
+    {
+        // Record region number; used for outputting recombination locations.
+        registry.SetCurrentRegionIndex(region);
+
+        // Clear the DLCell store, since it can generally not be reused between regions.
+        registry.GetCellManager().ClearStore();
+        m_currentsteps = 0;
+
+        Region& curregion = registry.GetDataPack().GetRegion(region);
+
+#ifdef ENABLE_REGION_DUMP
+        // All user input is done, and all processing for the current region is about to start.
+        // Print the parsed input data relevant to analysis of this Region.
+        PrintRegionData(region, curregion);
+#endif  // ENABLE_REGION_DUMP
+
+        // Set the total number of sites for this Region.  This includes sites in all Loci in the Region plus all
+        // non-marker inter-locus sites (sites for which we have no data).  Recombination can happen at any link
+        // between any of these sites, and therefore we must account for all this "space" in both Biglink and emulated
+        // Littlelink models.  They are already treated as recombination-possible links in the Littlelink model.
+        Range::SetNumRegionSites(curregion);
+
+#ifdef RUN_BIGLINKS
+        // If using Biglink optimization, build the map that associates Littlelinks with Biglinks.
+        if (registry.GetForceSummary().CheckForce(force_REC)) // This force includes recombination.
+        {
+            // Called only when underlying data structures (trees, branches, ranges) will be recombinant.
+            RecRange::BuildBiglinkMap(curregion);
+        }
+#endif  // RUN_BIGLINKS
+
+        for(temp = m_temps.begin(); temp != m_temps.end(); ++temp)
+        {
+            Tree* regtr = curregion.CreateTree();
+            temp->StartRegion(regtr);
+            m_regiontrees.push_back(regtr);
+        }
+
+        if (registry.GetUserParameters().GetProgress() != NONE)
+        {
+            m_runreport.ReportUrgent("", false);
+            string msg = "Beginning region: ";
+            msg += curregion.GetRegionName();
+            m_runreport.ReportUrgent(msg);
+        }
+
+        DoReplicates(region);
+        registry.GetUserParameters().ClearCurrentBestLike();
+
+        vector<Tree*>::iterator tree = m_regiontrees.begin();
+        for(temp = m_temps.begin(); temp != m_temps.end(); ++temp, ++tree)
+        {
+            temp->EndRegion();
+            delete *tree;
+        }
+
+        m_regiontrees.clear();
+
+        if (!m_readsumfile)
+        {
+            m_chainpack.EndRegion();
+        }
+
+#ifndef STATIONARIES
+#ifndef LAMARC_QA_SINGLE_DENOVOS
+        // For LAMARC_QA_SINGLE_DENOVOS, we don't want to
+        // combine any MLE/profile data over replicates.
+        if (m_chainparam.IsBayesian())
+        {
+            m_bayesanalyzer.CalcProfiles(region);
+        }
+        else
+        {
+            if (forcesum.GetOverallProfileType() != profile_NONE)
+            {
+                ForceParameters region_fp = m_chainpack.GetRegion(region).GetEstimates();
+                DoubleVec1d region_MLEs = region_fp.GetRegionalParameters();
+                double region_like = m_chainpack.GetRegion(region).GetLlikemle();
+                analyzer.CalcProfiles(region_MLEs, region_like, region);
+            }
+        }
+        ChainOut chout = m_chainpack.GetRegion(region);
+        forcesum.SetRegionMLEs(chout, region);
+        m_runreport.PrognoseAll(m_chainpack, region, m_nregions);
+#endif // LAMARC_QA_SINGLE_DENOVOS
+#endif // STATIONARIES
+    }
+
+#ifdef RUN_BIGLINKS
+    if (registry.GetForceSummary().CheckForce(force_REC)) // This force includes recombination.
+    {
+        // Called only when underlying data structures (trees, branches, ranges)
+        // are potentially recombinant (ie, contain RecRanges, not Ranges).
+        // We are done with the Biglink Vector Map; we can clear it now.
+        RecRange::GetBiglinkVectormap().clear();
+    }
+#endif  // RUN_BIGLINKS
+
+    if (m_redoregionsum)
+    {
+        m_readsumfile = false;
+        m_writesumfile = registry.GetUserParameters().GetWriteSumFile();
+        m_runreport.ReportChat("Recalculating the overall region summary.\n");
+    }
+
+    registry.GetCellManager().ClearStore(); // static function called to clear the data likelihood
+    // memory manager of the final region
+
+#ifndef STATIONARIES
+    if (m_chainparam.IsBayesian())
+    {
+        m_bayesanalyzer.EndRegionsAndAnalyze();
+    }
+
+    if (m_nregions > 1)
+    {
+        m_runreport.ReportNormal("Calculating the best parameter estimates over all regions.");
+        CalculateMLEsOverRegions();
+    }
+#endif // STATIONARIES
+} // ChainManager::DoRegions()
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::CalculateMLEsOverRegions()
+{
+    ForceSummary& forcesum = registry.GetForceSummary();
+    m_maximizer.SetLikelihood(&(registry.GetRegionPostLike()));
+    RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+    double maxlike;
+    ForceParameters fp(global_region);
+    ChainOut regionout;
+    string msg;
+
+    if (pRegionGammaInfo)
+    {
+        if (m_chainparam.IsBayesian())
+            throw implementation_error("ChainManager::DoRegions(), can\'t apply a gamma over regions within a Bayesian analysis.");
+        pRegionGammaInfo->Activate();
+        // Here we need to get the constraint matrix from FS.
+        // Note: we can use its dimensionality to determine the argument for Initialize().
+        m_maximizer.Initialize(forcesum.GetAllNParameters() + 1);
+        m_maximizer.SetConstraints(forcesum.GetIdenticalGroupedParams());
+        m_maximizer.SetLikelihood(&(registry.GetRegionPostLike()));
+        m_maximizer.AppendConstraintOnAlpha(pRegionGammaInfo->GetParamStatus());
+    }
+
+    if (m_chainparam.IsBayesian())
+    {
+#if 0  // EWFIX.BUG.838 -- moved to curvefiles.cpp, called from main Lamarc routine
+        if (registry.GetUserParameters().GetWriteCurveFiles())
+        {
+            m_bayesanalyzer.WriteCurvesForRegion(FLAGLONG);
+        }
+#endif // 0
+
+        DoubleVec1d mleparam = m_bayesanalyzer.GetMaxVecForAllRegions();
+        maxlike = m_bayesanalyzer.GetAvgMaxLikeForAllRegions();
+
+        fp.SetGlobalParameters(mleparam);
+        regionout.SetEstimates(fp);
+        regionout.SetLlikemle(maxlike);
+    }
+    else
+    {                                   // Non-Bayesian
+        registry.GetRegionPostLike().Setup(m_collectionmanager.GetTreeColl(),
+                                           m_logGeyerWeights);
+        CalculateNonBayesMultiRegionMLEs(fp, regionout, maxlike);
+    }
+
+    // Note:  even if we read in estimates from a summary file, we still
+    //  use the new values instead of the old ones.
+    if (m_readsumfile)
+    {
+        CompareAndWarn(regionout.GetEstimates(), m_chainpack.GetOverall().GetEstimates());
+    }
+
+    m_chainpack.SetSummaryOverRegions(regionout);
+    if (pRegionGammaInfo)
+        pRegionGammaInfo->Deactivate();
+    forcesum.SetOverallMLE(regionout);
+    if (pRegionGammaInfo)
+        pRegionGammaInfo->Activate();
+
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+    if (setjmp (prewrite))
+    {
+        CloseSumOut();
+    }
+#endif
+
+    if (m_writesumfile)
+    {
+        m_sumfilehandler.WriteRegionSummary(fp, maxlike);
+    }
+
+    m_runreport.ReportNormal("", false);
+    m_runreport.ReportNormal("Final parameter estimates using data from all regions:", false);
+    m_runreport.DisplayReport(regionout);
+
+    //LS DEBUG:  This is a hack.  It should go away when the gamma force leaves the registry.
+    if (pRegionGammaInfo)
+    {
+        string msg = "Alpha: " + ToString(pRegionGammaInfo->GetMLE());
+        if (pRegionGammaInfo->GetParamStatus().Status() == pstat_constant)
+        {
+            msg += " (held constant)";
+        }
+        m_runreport.ReportUrgent(msg);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::CalculateNonBayesMultiRegionMLEs(ForceParameters& fp, ChainOut& regionout, double& maxlike)
+{
+    RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+    string msg;
+
+    DoubleVec1d params = m_chainpack.OverallMeanParams();
+
+    if (pRegionGammaInfo)
+    {
+        params.push_back(pRegionGammaInfo->GetStartValue());
+    }
+
+    bool retval(false), atLeastOneSearchSucceeded(false);
+    double oneHighAlpha = FLAGDOUBLE;
+    string message = "";
+    DoubleVec1d MLEs(params.size() +1+1+1); // plus lnL, region, retval
+    DoubleVec2d peaks; // peak(s) in the multi-region lnL surface
+    // found by searching from different starting points
+
+    // We have found (November 2004) that adding the single-region
+    // likelihood surfaces together can yield a multi-region likelihood
+    // surface with multiple local peaks.  Hence, we search this composite
+    // surface multiple times, starting from a different point each time.
+
+    // First, try starting from the mean of the single-region MLE vectors.
+    retval = m_maximizer.Calculate(params, maxlike, message);
+    if (message == maxstr::MAX_HIGH_ALPHA_0)
+    {
+        oneHighAlpha = params[params.size()-1];
+    }
+    else if (message != "")
+    {
+        message = "Warning:  When calculating the maximum over all regions starting from the mean"
+            " of the single-region estimates, we received the following warning from the maximizer:  "
+            + message;
+        m_runreport.ReportDebug(message);
+    }
+
+    message = "";
+    // param was set to mean, above
+    // Store these results in a temporary vector.
+    MLEs[0] = maxlike;
+    MLEs[1] = -1.0; // signifies the mean of single-region MLE vectors
+    MLEs[2] = retval ? 1.0 : 0.0;
+    if (retval)
+    {
+        atLeastOneSearchSucceeded = true; // found a true (local) maximum
+    }
+    copy(params.begin(), params.end(), &MLEs[3]);
+    peaks.push_back(MLEs);
+
+    // Next, try starting from each single-region peak.
+    for (long int region = 0; region < m_nregions; region++)
+    {
+        params = m_chainpack.GetRegion(region).GetEstimates().GetGlobalParameters();
+        if (pRegionGammaInfo)
+        {
+            params.push_back(pRegionGammaInfo->GetStartValue());
+        }
+        retval = m_maximizer.Calculate(params, maxlike, message);
+        if (message == maxstr::MAX_HIGH_ALPHA_0)
+        {
+            oneHighAlpha = params[params.size()-1];
+        }
+        else if (message != "")
+        {
+            message = "Warning:  When calculating the maximum over all regions starting from the region "
+                + ToString(region) + " estimates, we received the following warning from the maximizer:  "
+                + message;
+            m_runreport.ReportDebug(message);
+        }
+        message = "";
+        MLEs[0] = maxlike;
+        MLEs[1] = static_cast<double>(region);
+        MLEs[2] = retval ? 1.0 : 0.0;
+        if (retval)
+            atLeastOneSearchSucceeded = true;
+        copy(params.begin(), params.end(), &MLEs[3]);
+        peaks.push_back(MLEs);
+    }
+
+    unsigned long int IndexOfFirstReasonableResult(0);
+    if (atLeastOneSearchSucceeded)
+    {
+        // From the peak(s) we found, determine the highest one;
+        // report on the others when we're in debug mode.
+        sort(peaks.begin(), peaks.end(), greater<DoubleVec1d>());
+        while (IndexOfFirstReasonableResult < peaks.size() &&
+               peaks[IndexOfFirstReasonableResult][0] > DBL_BIG/10.0)
+            IndexOfFirstReasonableResult++;
+
+        msg = "Multi-region maximum-likelihood parameter estimates:\n";
+        for (unsigned long int j = 0; j < peaks.size(); j++)
+        {
+            if (1.0 == peaks[j][2]) // maximizer succeeded
+            {
+                msg += "lnL = " + Pretty(peaks[j][0]) + ", p = ("
+                    + Pretty(peaks[j][3]);
+                for (unsigned long int k = 4; k < peaks[j].size(); k++)
+                    msg +=  ", " + Pretty(peaks[j][k]);
+                msg += "), starting from ";
+            }
+            else
+                msg += "Failed to find a maximum when starting from ";
+            if (-1.0 == peaks[j][1])
+                msg += "the mean over single-region peaks.\n";
+            else
+                msg += "region " + ToString(peaks[j][1]) + "\'s peak.\n";
+        }
+        if (peaks.size() == IndexOfFirstReasonableResult)
+            IndexOfFirstReasonableResult = 0; // no reasonable result found; default to 0
+        maxlike = peaks[IndexOfFirstReasonableResult][0]; // the highest lnL we found
+        if (maxlike > DBL_MAX)
+            maxlike = DBL_MAX;
+        copy(&peaks[IndexOfFirstReasonableResult][3],
+             &(peaks[IndexOfFirstReasonableResult][peaks[IndexOfFirstReasonableResult].size()]),
+             params.begin());
+    }
+    else if (oneHighAlpha != FLAGDOUBLE)
+    {
+        //All our searches failed, but at least one failed due to having a high
+        // alpha.  Try again, constraining alpha at its maximum.
+        pRegionGammaInfo->ConstrainToMax();
+        m_maximizer.AppendConstraintOnAlpha(pRegionGammaInfo->GetParamStatus());
+        msg = maxstr::MAX_BAD_ALPHA_0
+            + ToString(pRegionGammaInfo->GetMaxValue())
+            + maxstr::MAX_BAD_ALPHA_1;
+        registry.GetRunReport().ReportNormal(msg);
+        CalculateNonBayesMultiRegionMLEs(fp, regionout, maxlike);
+        return;
+    }
+    else
+    {
+        msg = maxstr::MAX_NO_MULTI_MAX;
+        maxlike = -DBL_BIG;
+        for (unsigned long int n = 0; n < peaks.size(); n++)
+        {
+            // Find the entry corresponding to the mean
+            // of the single-region MLEs, and propagate this.
+            if (-1.0 == peaks[n][1])
+            {
+                for (unsigned long int i = 3; i < peaks[n].size(); i++)
+                    params[i - 3] = peaks[n][i];
+            }
+            break;
+        }
+    }
+
+    m_runreport.ReportDebug(msg);
+
+    if (pRegionGammaInfo)
+    {
+        pRegionGammaInfo->SetMLE(params[params.size()-1]);
+        params.pop_back(); // remove alpha from this vector
+    }
+
+    fp.SetGlobalParameters(params);
+    regionout.SetEstimates(fp);
+    regionout.SetLlikemle(maxlike);
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::DoReplicates(long int region)
+{
+    const ForceSummary& forcesum = registry.GetForceSummary();
+    long int rep;
+
+    for(rep = 0; rep < m_nreplicates; ++rep)
+    {
+        registry.GetUserParameters().UpdateFileNamesAndSteps(region, rep, m_readsumfile);
+        if (!m_readsumfile)
+        {
+            ResetAllAlphas();
+        }
+
+        vector<Chain>::iterator temp;
+        Region& curregion = registry.GetDataPack().GetRegion(region);
+        if (m_nreplicates > 1)
+        {
+            string msg = "Beginning Replicate ";
+            msg += indexToKey(rep) + " of " + curregion.GetRegionName() + ".";
+            m_runreport.ReportUrgent(msg);
+        }
+
+        for(temp = m_temps.begin(); temp != m_temps.end(); ++temp)
+        {
+            temp->StartReplicate(forcesum, curregion);
+        }
+
+        m_maximizer.SetLikelihood(&(registry.GetSinglePostLike()));
+        DoChainTypes(region, rep);
+
+        for(temp = m_temps.begin(); temp != m_temps.end(); ++temp)
+            temp->EndReplicate();
+
+        if (!m_readsumfile)
+        {
+            m_chainpack.EndReplicate();
+        }
+    }
+
+    registry.GetDataPack().GetRegion(region).WriteAnyMapping();
+#ifndef STATIONARIES
+    if (m_chainparam.IsBayesian())
+    {
+        m_bayesanalyzer.EndReplicatesAndAnalyze();
+
+#if 0  // EWFIX.BUG.838 -- moved to curvefiles.cpp, called from main Lamarc routine
+        if (registry.GetUserParameters().GetWriteCurveFiles())
+            m_bayesanalyzer.WriteCurvesForRegion(region);
+#endif // 0
+
+    }
+#endif // STATIONARIES
+
+    if (m_redoreplicatesum && (m_recover_region==region))
+    {
+        m_readsumfile = false;
+        m_writesumfile = registry.GetUserParameters().GetWriteSumFile();
+        m_runreport.ReportChat("Re-calculating the region summary over replicates.\n");
+    }
+
+    if (m_readsumfile && !m_chainparam.IsBayesian())
+    {
+        RedoMaximization(region);
+        //ReadInNoRedoMax(region);
+        //LS NOTE:  uncomment ReadInNoRedoMax and comment out RedoMaximization for
+        // faster reads from summary files with no checking.
+    }
+
+    if (m_nreplicates > 1)
+    {
+        if (!m_readsumfile)
+        {
+            m_runreport.ReportNormal("Calculating the best parameter estimates over all replicates.");
+            if (!m_chainparam.IsBayesian())
+            {
+                m_maximizer.SetLikelihood(&(registry.GetReplicatePostLike()));
+                registry.GetReplicatePostLike().Setup(m_collectionmanager.GetTreeColl(region));
+                SaveGeyerWeights(registry.GetReplicatePostLike().GetGeyerWeights(), region);
+            }
+
+            double maxlike;
+            ForceParameters fp(region);
+            ChainOut repout = ChainOut();
+
+            if (!m_chainparam.IsBayesian())
+            {
+                DoubleVec1d meanparam = m_chainpack.RegionalMeanParams();
+                string message = "";
+                if (!m_maximizer.Calculate(meanparam, maxlike, message))
+                {
+                    string msg = "Maximization failure when calculating the best ";
+                    msg = msg + "parameters for all replicates in region " +
+                        registry.GetDataPack().GetRegion(region).GetRegionName() +
+                        ".  Using the mean values for all replicates instead.";
+                    m_runreport.ReportUrgent(msg);
+                    m_runreport.ReportNormal("Error from the maximizer:  " + message);
+                    //Note:  This relies on the maximizer leaving 'meanparam' unchanged
+                    // during failure.
+                }
+                else if (message != "")
+                {
+                    m_runreport.ReportNormal("Warning from the maximizer:  " + message);
+                }
+                fp.SetRegionalParameters(meanparam);
+                repout.SetEstimates(fp);
+                repout.SetLlikemle(maxlike);
+            }
+            else
+            {
+                DoubleVec1d mleparam = m_bayesanalyzer.GetMaxVecForRegion(region);
+                maxlike = m_bayesanalyzer.GetAvgMaxLikeForRegion(region);
+                fp.SetGlobalParameters(mleparam);
+                repout.SetEstimates(fp);
+                repout.SetLlikemle(maxlike);
+            }
+
+            m_chainpack.SetSummaryOverReps(repout);
+
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+            if (setjmp (prewrite))
+            {
+                CloseSumOut();
+            }
+#endif
+
+            if (m_writesumfile)
+            {
+                m_sumfilehandler.WriteReplicateSummary(fp, maxlike, m_chainpack);
+            }
+
+            m_runreport.ReportNormal("", false);
+            m_runreport.ReportNormal("Parameter estimates using data from all replicates in this region:", false);
+            m_runreport.DisplayReport(repout);
+
+            //Save the mapping data to the appropriate loci, then report on it.
+            if (registry.GetDataPack().GetRegion(region).GetNumMovingLoci() > 0)
+            {
+                DoubleVec1d logweights(m_nreplicates, 0.0);
+                if (!m_chainparam.IsBayesian())
+                {
+                    logweights = m_logGeyerWeights[region];
+                }
+                registry.GetDataPack().GetRegion(region).SaveMappingInfo(m_collectionmanager.GetMapColl(region), logweights);
+                registry.GetDataPack().GetRegion(region).ReportMappingInfo();
+            }
+        }
+    }
+} // ChainManager::DoReplicates
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::DoChainTypes(long int region, long int rep)
+{
+    const ForceSummary& forcesum = registry.GetForceSummary();
+    ForceParameters chainstart(forcesum.GetStartParameters(),region);
+
+    for(long int chaintype = 0; chaintype < NCHAINTYPES; ++chaintype)
+    {
+        vector<Chain>::iterator temp;
+        for(temp = m_temps.begin(); temp != m_temps.end(); ++temp)
+            temp->SetChainType(chaintype, m_chainparam);
+        for(long int chain = 0; chain < m_chainparam.GetNChains(chaintype); ++chain)
+        {
+            if (m_readsumfile)
+            {
+                DoChainFromSummaryFile(region, rep, chaintype, chain);
+            }
+            else
+            {
+                DoChain(region, rep, chaintype, chain, chainstart);
+            }
+        }
+    }
+
+#ifndef STATIONARIES
+    // In a stationaries run we do not attempt any post-analysis
+    if (m_chainparam.IsBayesian())
+    {
+        if (m_readsumfile)
+        {
+            ChainOut co;
+            ForceParameters fp(region);
+            DoSingleChainBayes(region, rep, co, fp);
+        }
+        m_bayesanalyzer.EndChainsAndAnalyze();
+    }
+#endif // STATIONARIES
+} // ChainManager::DoChainTypes
+
+//------------------------------------------------------------------------------------
+
+// DoChainFromSummaryFile does absolutely nothing unless it's time to
+//  re-start a run from where the summary file broke off.  Its trigger
+//  is set in ReadInRecover() in the form of several 'recover*' member
+//  variables.  When the code here is finally triggered, it runs DoChain
+//  twice; both times with the force parameter values it got from the
+//  last read-in chain (stored in the chainpack).  The first time is just
+//  to get better-than-de-novo trees into the various chains; the parameters
+//  determined from that are thrown away.  The second time is 'for real',
+//  and the member boolean 'm_readsumfile' is turned off so from then on,
+//  DoChain is called instead of this function.
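+//
+// Roughly, the branches below are: (a) not yet at the recovery point -- just
+// re-display the stored ChainOut report; (b) recovery at the very first chain
+// of a region/replicate -- fall through to an ordinary DoChain; (c) only the
+// maximization needs redoing -- run DoSingleChainPosterior/DoSingleChainBayes
+// on the stored summaries; (d) recovery mid-run -- the two-pass DoChain
+// scheme described above.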
+
+void ChainManager::DoChainFromSummaryFile(long int region, long int rep, long int chaintype, long int chain)
+{
+    if (m_recoversumfile
+        && (region==m_recover_region) && (rep == m_recover_replicate)
+        && (chaintype==m_recover_chaintype) && (chain==m_recover_chain))
+    {
+        m_readsumfile  = false; //we're done reading.
+        registry.GetUserParameters().UpdateWriteTraceFile(region, rep);
+        if (chaintype == 0 && chain == 0)
+        {
+            //We've either got nothing at all in the insumfile, or we're starting
+            // up immediately after a region/replicate.  In either case, we only
+            // want to run DoChain, not anything as fancy as below.
+            m_writesumfile = registry.GetUserParameters().GetWriteSumFile();
+            ForceParameters chainstart(registry.GetForceSummary().GetStartParameters(),region);
+            DoChain(region, rep, chaintype, chain, chainstart);
+        }
+        else
+        {
+            ChainOut chout = m_chainpack.GetLastChain();
+            ForceParameters chainstart = chout.GetEstimates();
+            assert (chainstart.GetParamSpace() == known_region);
+
+            if (m_recover_redomaximization)
+            {
+                //We need to only do maximization, not any creating of trees.  This
+                // involves creating a chainout into which we put the resulting
+                // estimates.
+                m_runreport.ReportNormal("Calculating the best parameters from the stored summaries:");
+                chout = ChainOut();
+                if (chain == 0)
+                {
+                    chainstart = ForceParameters(registry.GetForceSummary().GetStartParameters(),region);
+                }
+                if (!m_chainparam.IsBayesian())
+                {
+                    DoSingleChainPosterior(region, rep, chaintype, chain, chout, chainstart);
+                }
+                else
+                {
+                    DoSingleChainBayes(region, rep, chout, chainstart);
+                }
+
+                //The following things are unknown in the current setup, but if we
+                // write out the current tree somewhere, we might store this info
+                // there, too.  --LS NOTE
+
+#if 0
+                chout.SetLlikedata(coldchain.GetCurrentDataLlike());
+                chout.SetSwaprates(swaprates);
+                chout.SetTemperatures(averagetemps);
+#endif // 0
+
+                m_chainpack.SetChain(chout);
+                m_writesumfile = registry.GetUserParameters().GetWriteSumFile();
+
+                // We just set a new chain in the chainpack--write it to a file if needed.
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+                if (setjmp (prewrite))
+                {
+                    CloseSumOut();
+                }
+#endif
+
+                if (m_writesumfile)
+                {
+                    m_sumfilehandler.WriteLastChain(m_chainpack);
+                }
+                m_runreport.PrognoseRegion(m_chainpack, region, m_currentsteps, m_totalsteps);
+                m_runreport.DisplayReport(chout);
+            }
+            else
+            {
+                //We're picking up in the middle of a run.
+                if (m_recover_redochain)
+                {
+                    m_chainpack.RemoveLastChain();
+                }
+                // We need to delete the last set of summary info from chainpack,
+                //  but not before we stick the force parameters into chainstart.
+
+                //First, run a chain of chaintype 0 to try to get decent trees.
+                m_runreport.ReportNormal("Re-running a single Initial Chain to generate a good start tree:");
+                vector<Chain>::iterator temp;
+                for(temp = m_temps.begin(); temp != m_temps.end(); ++temp)
+                {
+                    temp->SetChainType(0,m_chainparam);
+                }
+                //For this DoChain, we need to set 'm_readsumfile' since otherwise
+                // we will run the 'OptimizeDataModels' routine, which currently
+                // only matters for MixedKS, but oh well.  Woo special-case code!  -LS
+                m_readsumfile = true;
+                DoChain(region, rep, 0, 0, chainstart);
+                m_readsumfile = false;
+
+                //Now throw away the chain summary in chainpack, and reset the parameters.
+                // (This is why we saved chout, above.)
+                m_chainpack.RemoveLastChain();
+                chainstart = chout.GetEstimates();
+
+                // Now do the chain we wanted to do in the first place.
+                //  (But with better trees.)
+                m_runreport.ReportNormal("And now picking up where the summary file left off:");
+                m_writesumfile = registry.GetUserParameters().GetWriteSumFile();
+                // We might want to start writing again.  If so, the file should
+                //  already be open.
+                for(temp = m_temps.begin(); temp != m_temps.end(); ++temp)
+                    temp->SetChainType(chaintype,m_chainparam);
+                DoChain(region, rep, chaintype, chain, chainstart);
+            }
+        }
+    }
+    else
+    {
+        //Save the chainout report for output.
+        long int chainnum = chain;
+        string name = "Initial";
+        if (chaintype == 1)
+        {
+            chainnum += registry.GetChainParameters().GetNChains(0);
+            name = "Final";
+        }
+        assert(chaintype < 2); //Above code relies on there only being 2 types.
+        name += " Chain " + ToString(chain+1) + ":";
+        m_runreport.SaveOutput(name, false);
+        ChainOut chout = m_chainpack.GetChain(region, rep, chainnum);
+        m_runreport.MakeReport(chout);
+        StringVec1d chainreport = m_runreport.FormatReport(chout, false, 78);
+        for (unsigned long int line = 0; line < chainreport.size(); ++line)
+        {
+            m_runreport.SaveOutput(chainreport[line], false);
+        }
+    }
+} // DoChainFromSummaryFile
+
+//------------------------------------------------------------------------------------
+
+// DoChain used to loop over all chains for a particular chaintype
+//  itself, but the loop has been moved to DoChainTypes to make it
+//  easier to pick up in the middle of a run.  In any event, this
+//  is the heart of the program.  It creates a chain of trees, then
+//  maximizes the force parameters over that chain.
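+//
+// In outline, the flow below is roughly: StartChain()/StartBayesianChain()
+// on every temperature using 'chainstart'; run the chain in chunks via
+// DoOneChain(), proposing swaps between adjacent temperatures (and optionally
+// adapting the temperature ladder); EndChain() on the cold chain to collect
+// its ChainOut summary; maximize (DoSingleChainPosterior) or analyze
+// (DoSingleChainBayes); then store the result in the chainpack and report it.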
+
+void ChainManager::DoChain(long int region, long int rep, long int chaintype, long int chain, ForceParameters & chainstart)
+{
+    // We can assume that we are generating, not reading, trees here.
+    // However, m_readsumfile is set when we are re-doing initial chain
+    // one, so that we do not run OptimizeDataModels.
+    //  assert(!m_readsumfile);
+
+    volatile bool lastchain = ((chaintype ==  NCHAINTYPES-1 ||
+                                (chaintype ==  NCHAINTYPES-2 && registry.GetChainParameters().GetNChains(NCHAINTYPES-1) == 0)) &&
+                               chain == registry.GetChainParameters().GetNChains(chaintype)-1);
+
+    m_collectionmanager.StartChain(region, rep, lastchain);
+    if (lastchain)
+    {
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+        if (setjmp (prewrite))
+        {
+            CloseSumOut();
+        }
+#endif
+
+        //LS NOTE:  We jump here if we run out of space in the middle of writing
+        // other stuff--have to set things earlier.
+        if (m_writesumfile)
+            m_sumfilehandler.WriteChainSumStart(region, rep, m_collectionmanager);
+    }
+
+    const ForceSummary& forcesum = registry.GetForceSummary();
+
+    bool adapt = m_chainparam.GetTempAdapt();
+
+    unsigned long int numtemps = m_temps.size();
+    DoubleVec1d averagetemps;
+    double nadapts(0.0);
+
+    for(unsigned long int temp = 0; temp < numtemps; ++temp)
+    {
+        // clear stored swap-success stats
+        m_temps[temp].ClearTotalSwaps();
+        if (m_chainparam.IsBayesian())
+            m_temps[temp].StartBayesianChain(chain,chaintype,forcesum);
+        else m_temps[temp].StartChain(chain,chaintype,forcesum,chainstart);
+    }
+
+    if (adapt) averagetemps.assign(numtemps,0.0);
+    else averagetemps = m_chainparam.GetAllTemperatures();
+
+    // Only attempt to "groom" the trees for chains beyond Initial Chain 1.
+    // This is not done in bayesian runs, as they do not suffer from
+    // changing parameter values out from under a tree at chain start.
+    if (!(0 == chain && 0 == chaintype) && !m_chainparam.IsBayesian())
+    {
+        GroomTrees(chainstart);
+    }
+
+    unsigned long int chunk;
+    for(chunk = 0; chunk < m_chunksize[chaintype].size(); ++chunk)
+    {
+        long int steps = m_chunksize[chaintype][chunk];
+        for(unsigned long int temp = 0; temp < m_temps.size(); ++temp)
+            m_temps[temp].DoOneChain(steps,lastchain);
+
+        if (m_multitemp)                // propose a swap
+        {
+            unsigned long int chain1, chain2;
+            ChooseTwoAdjacentChains(chain1, chain2);
+            m_temps[chain1].SwapTemperatureIdentities(m_temps[chain2]);
+            if (adapt) AdjustTemperatures(averagetemps, chunk, nadapts);
+        }
+        m_currentsteps += steps;
+    }
+
+    if (adapt)
+    {
+        // find the mean temperature of each chain
+        // if we never tried an adaptive swap, then set to (presumably unchanged)
+        //    starting temperatures
+        if (nadapts)
+        {
+            transform(averagetemps.begin(), averagetemps.end(),
+                      averagetemps.begin(), bind2nd(divides<double>(), nadapts));
+        }
+        else averagetemps = m_chainparam.GetAllTemperatures();
+    }
+
+    // Postprocessing
+    // Only the cold chain gets postprocessed
+    unsigned long int cold = FindColdChain(m_temps);
+    Chain& coldchain = m_temps[cold];
+    ChainOut chout = coldchain.EndChain();
+    chout.SetNumtemps(numtemps);
+    if (!lastchain)
+    {
+        // adjust the summaries, unless this is the last final chain
+        m_collectionmanager.CorrectForFatalAttraction(region);
+        //Need to send the region number because recombination might be
+        // illegal for some regions.
+    }
+
+#ifndef STATIONARIES
+    if (!lastchain && !m_readsumfile)
+    {
+        // Optimize the data models.  In practice, this means setting the alpha
+        // for any MixedKS models, and doing nothing for anything else.
+        OptimizeDataModels(region);
+    }
+#endif // STATIONARIES
+
+    if (lastchain)
+    {
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+        if (setjmp (prewrite))
+        {
+            CloseSumOut();
+        }
+#endif
+
+        if (m_writesumfile)
+            m_sumfilehandler.WriteChainSumEnd(m_collectionmanager);
+    }
+
+    if (!m_chainparam.IsBayesian())
+        m_collectionmanager.
+            GetTreeColl(region, rep)->SetStartParameters(chainstart);
+
+#ifndef STATIONARIES
+
+    if (!m_chainparam.IsBayesian())
+    {
+        DoSingleChainPosterior(region, rep, chaintype, chain, chout, chainstart);
+    }
+    else
+    {
+        DoSingleChainBayes(region, rep, chout, chainstart);
+    }
+
+    chout.SetLlikedata(coldchain.GetCurrentDataLlike());
+
+    // Compute swap-success statistics.
+    DoubleVec1d swaprates(m_temps.size());
+    vector<pair<double, long int> > sortedtemps =
+        SortChainsByTemperature(m_temps);
+
+    for (unsigned long int temp = 0; temp < sortedtemps.size(); ++temp)
+    {
+        swaprates[temp] = m_temps[sortedtemps[temp].second].GetTotalSwapRate();
+    }
+    chout.SetSwaprates(swaprates);
+    chout.SetTemperatures(averagetemps);
+    m_chainpack.SetChain(chout);
+
+    // We just set a new chain in the chainpack--write it to a file if needed.
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+    if (setjmp (prewrite))
+    {
+        CloseSumOut();
+    }
+#endif
+
+    if (m_writesumfile)
+    {
+        m_sumfilehandler.WriteLastChain(m_chainpack);
+    }
+
+#ifndef LAMARC_QA_SINGLE_DENOVOS
+    // no reporting here -- too nauseating in a tight loop
+    m_runreport.PrognoseRegion(m_chainpack, region, m_currentsteps, m_totalsteps);
+    m_runreport.DisplayReport(chout);
+
+    // Save the mapping data to the appropriate loci, then report on it.
+    if ((lastchain
+         && registry.GetDataPack().GetRegion(region).GetNumMovingLoci() >0)
+        || registry.GetDataPack().GetRegion(region).AnyJumpingAnalyses())
+    {
+        registry.GetDataPack().GetRegion(region).SaveMappingInfo(m_collectionmanager.GetMapColl(region, rep));
+        registry.GetDataPack().GetRegion(region).ReportMappingInfo();
+    }
+#endif // LAMARC_QA_SINGLE_DENOVOS
+#endif // STATIONARIES
+
+} // ChainManager::DoChain
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::GroomTrees(ForceParameters& chainstart)
+{
+    // Because the growth values that yield the maximum likelihood for the trees
+    // of the previous chain can be dramatically different from the growth values
+    // that generated that chain, especially in the presence of migration, a
+    // problem can arise.  For each temperature, including the cold temperature,
+    // its next chain will be generated using the new MLE parameters as desired,
+    // but each chain will commence with a tree topology that is consistent with
+    // the _old_ parameters that were used to generate the previous chain.  This
+    // is especially prone to occurring in the heated chains, in which case these
+    // "extreme" trees (e.g., trees which favor slow or negative growth) can get
+    // swapped into the cold chain and sampled.  Such trees that are extreme for
+    // the current parameters (recall these  parameters will be the starting
+    // values for maximization) will lead to overflow and/or inconsistencies
+    // between the likelihood and its derivatives during maximization, in which
+    // case maximization will fail at the end of the next chain.  The phenotype of
+    // the "extreme tree" is always, or almost always, a pair of populations
+    // dwindling down to one lineage each (k=1), spending a long time interval
+    // with one lineage each, then accepting a migration into the faster-growing
+    // population (k=2 and k=0), followed soon, but not soon enough for the
+    // current growth values, by a coalescence.
+    // Hence, we attempt to retain the topology of each temperature's tree as
+    // we prepare to use each to generate a new chain, but if necessary, we shrink
+    // the length of a potentially-fatal time interval to a length that is
+    // consistent with the expectation value of that interval under the current
+    // parameter values.  This appears to solve the problem (Nov. 2004).
+    // Because the timelist stores time stamps for the start and ending of each
+    // interval, instead of the length of the interval, we can encounter cases
+    // in which the new expectation value impels us to shrink the interval so
+    // small that adding it to "starttime" to get "endtime" yields a result that
+    // is indistinguishable from "starttime," effectively producing a new interval
+    // of zero length.  Zero-length intervals are fatal, so in this case we
+    // copy the cold tree into the offending hot tree.  If the problem is in the
+    // cold tree itself, then we give up, warn the user, and assume that the
+    // next maximization will fail, with the program execution path proceeding
+    // from there.
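+    //
+    // In short, the loop below does the following for each temperature: attempt
+    // GroomForGrowth() (or GroomForLogisticSelection()); if that fails for a hot
+    // chain, overwrite its tree with a copy of the cold tree; if it fails for the
+    // cold chain itself, warn and carry on; and in every case refresh the chain's
+    // "oldtree" copy so the chain state matches the (possibly altered) tree.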
+
+    const vector<double>& growths = chainstart.GetGrowthRates();
+    const vector<double>& logSelVector = chainstart.GetLogisticSelectionCoefficient();
+    unsigned long int cold = FindColdChain(m_temps);
+    bool positiveGrowth = false, logisticSelection = false;
+    for (unsigned long int i = 0; i < growths.size(); i++)
+        if (growths[i] > 0.0)
+            positiveGrowth = true;
+    if (!logSelVector.empty())
+    {
+        if (!growths.empty())
+        {
+            string msg = "Attempted to infer growth and logistic selection ";
+            msg += "simultaneously; currently we can not co-estimate these forces.";
+            throw implementation_error(msg);
+        }
+        logisticSelection = true;
+    }
+
+    //Only groom trees in the presence of positive growth
+    //or logistic selection.
+    if (!positiveGrowth && !logisticSelection)
+    {
+        return;
+    }
+
+    const vector<double>& thetas = chainstart.GetRegionalThetas();
+    double s = (logisticSelection ? logSelVector[0] : 0.0);
+
+    if (positiveGrowth)
+    {
+        for (unsigned long int temp = 0; temp < m_temps.size(); temp++)
+        {
+            // Tree::Groom() shrinks intervals when necessary and possible.
+            // If something goes really horribly wrong inside, it will throw.
+            if (!m_regiontrees[temp]->GroomForGrowth(thetas, growths,
+                                                     m_temps[temp].GetTemperature()))
+            {
+                if (temp != cold)
+                {
+                    m_regiontrees[temp]->CopyTips(m_regiontrees[cold]);
+                    m_regiontrees[temp]->CopyBody(m_regiontrees[cold]);
+                }
+                else
+                {
+                    string msg;
+                    msg += "\nWarning!  Detected an \"extreme\" cold ";
+                    msg += " tree.  It is likely that maximization will fail ";
+                    msg += "for the next chain.\n";
+                    registry.GetRunReport().ReportDebug(msg);
+                    continue;
+                }
+            }
+
+            // Whether we change an interval in the tree or replace it
+            // with a copy of the cold tree, we need to ensure the
+            // "oldtree" copy in chainstate is updated accordingly.
+            m_temps[temp].SetChainStateOldTree(m_regiontrees[temp]);
+        }
+    }
+    else // logistic selection
+    {
+        for (unsigned long int temp = 0; temp < m_temps.size(); temp++)
+        {
+            if (!m_regiontrees[temp]->GroomForLogisticSelection(thetas, s,
+                                                                m_temps[temp].GetTemperature()))
+            {
+                if (temp != cold)
+                {
+                    m_regiontrees[temp]->CopyTips(m_regiontrees[cold]);
+                    m_regiontrees[temp]->CopyBody(m_regiontrees[cold]);
+                }
+                else
+                {
+                    string msg;
+                    msg += "\nWarning!  Detected an \"extreme\" cold ";
+                    msg += " tree.  It is likely that maximization will fail ";
+                    msg += "for the next chain.\n";
+                    registry.GetRunReport().ReportDebug(msg);
+                    continue;
+                }
+            }
+
+            // Whether we change an interval in the tree or replace it
+            // with a copy of the cold tree, we need to ensure the
+            // "oldtree" copy in chainstate is updated accordingly.
+            m_temps[temp].SetChainStateOldTree(m_regiontrees[temp]);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Note:  This method is actually independent of class ChainManager,
+// so it could be removed from the class.
+
+unsigned long int ChainManager::FindColdChain(const vector<Chain>& chains) const
+{
+    unsigned long int chain;
+    for(chain = 0; chain < chains.size(); ++chain)
+    {
+        if (chains[chain].IsCold())
+        {
+            return chain;
+        }
+    }
+    assert(false);  // there must be a cold chain!
+    return 0;
+} // FindColdChain
+
+//------------------------------------------------------------------------------------
+
+// Adjusts the temperatures for the adaptive heating scheme.
+// This scheme assumes that the chains are swapped between
+// adjacent pairs.
+// If swapping rate is less than TEMPSWAPMIN, the temperature
+// difference is multiplied by TEMPDECR (reducing it).  If it
+// is greater than TEMPSWAPMAX, the temperature difference is
+// multiplied by TEMPINCR (increasing it).
+// PB 2002
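+//
+// Illustration (hypothetical numbers, not the shipped defaults): with sorted
+// temperatures {1.0, 1.5, 3.0} the deltas are {0.5, 1.5}.  If the swap rate
+// for the first pair is at or below TEMPSWAPMIN, its delta becomes
+// 0.5*TEMPDECR; if the rate for the second pair exceeds TEMPSWAPMAX, its
+// delta becomes 1.5*TEMPINCR.  New temperatures are then rebuilt upward from
+// the cold chain (which must stay at 1.0), capping each at MAXTEMP.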
+
+void ChainManager::AdjustTemperatures(DoubleVec1d& averagetemps, long int whichswap, double & howoften)
+{
+    // we will adjust temperature when there has been opportunity for
+    // approximately ADJUSTINTERVAL, currently 20, swaps
+    // between each temperature pair.
+    long int trigger = (ADJUSTINTERVAL*(m_temps.size() - 1));
+
+    if(whichswap != 0 && whichswap % trigger == 0)
+    {
+        // The chains are not, and cannot easily be, in correct sorted
+        // order (due to swapping) so we determine the sorted order here.
+        // averagetemps will be set in sorted order, while the chains
+        // themselves will be set in their native order.
+
+        // averagetemps is kept in sorted order for use in runtime
+        // reporting and output, since the unsorted chains would be confusing.
+
+        vector<pair<double, long int> > sorted = SortChainsByTemperature(m_temps);
+
+        // delta holds differences between adjacent temperatures
+        assert(!sorted.empty());
+        DoubleVec1d delta(sorted.size()-1);
+        unsigned long int i;
+        for(i = 0; i < sorted.size()-1; ++i)
+        {
+            delta[i] = sorted[i+1].first - sorted[i].first;
+        }
+        // Adjust the temperature differences
+        for(i = 0; i < sorted.size()-1; i++)
+        {
+            double swaprate = m_temps[sorted[i].second].GetSwapRate();
+            // if we never tried any swaps, we don't adjust the temperatures
+            if (swaprate >= 0.0)
+            {
+                if(swaprate <= TEMPSWAPMIN)
+                {
+                    // don't change them if they are already very similar
+                    /* if (delta[i]*TEMPDECR > MINDIFF) */ delta[i] *= TEMPDECR;
+                }
+                else
+                    if (swaprate > TEMPSWAPMAX)
+                    {
+                        // don't change them if they are already very dissimilar
+                        /* if (delta[i]*TEMPINCR < MAXDIFF) */ delta[i] *= TEMPINCR;
+                    }
+            }
+        }
+        //Set swap counter to zero, do not mix this with the
+        // swap counter for reporting, which is TotalSwaps().
+        for(i = 0; i < sorted.size(); i++)
+        {
+            m_temps[sorted[i].second].ClearSwaps();
+        }
+        // Reset the temperatures according to the adjusted temp diffs
+        if (sorted[0].first != 1)
+        {
+            string msg = "Error:  the lowest temperature is "
+                + ToString(sorted[0].first)
+                + ", which is not 1.0  Other temperatures are: ";
+            for (unsigned long int i = 1; i < sorted.size(); ++i)
+                msg += ToString(sorted[i].first) + "  ";
+            msg  += ".\n";
+            throw data_error(msg);
+        }
+        averagetemps[0] += 1;
+        for (i = 1; i < sorted.size(); ++i)
+        {
+            double newtemp = sorted[i-1].first + delta[i-1];
+            if (newtemp > MAXTEMP)
+                newtemp = MAXTEMP;
+            sorted[i].first = newtemp;
+            averagetemps[i] += newtemp;
+            m_temps[sorted[i].second].SetTemperature(newtemp);
+        }
+        ++howoften;
+    }
+} // AdjustTemperatures
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::ChooseTwoAdjacentChains(unsigned long int & chain1, unsigned long int & chain2)
+{
+    // This routine must sort the chains by temperature to find out which are adjacent!
+    vector <pair<double, long int> > orderedtemps = SortChainsByTemperature(m_temps);
+
+    unsigned long int numchains = m_temps.size();
+    chain1 = m_randomsource.Long(numchains - 1);
+    chain2 = chain1 + 1;
+
+    // return the index associated with each choice
+    chain1 = orderedtemps[chain1].second;
+    chain2 = orderedtemps[chain2].second;
+} // ChooseTwoAdjacentChains
+
+//------------------------------------------------------------------------------------
+
+vector<pair<double, long int> > ChainManager::SortChainsByTemperature(const vector<Chain>& temps)
+{
+    vector <pair<double, long int> > orderedtemps;
+    unsigned long int i;
+    for (i = 0; i < temps.size(); ++i)
+    {
+        orderedtemps.push_back(make_pair<double, long int>(temps[i].GetTemperature(), i));
+    }
+    sort(orderedtemps.begin(), orderedtemps.end());
+    return orderedtemps;
+
+} // SortChainsByTemperature
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::DoSingleChainPosterior(long int region, long int rep, long int chaintype,
+                                          long int chain, ChainOut & chout,
+                                          ForceParameters & chainstart)
+{
+    registry.GetSinglePostLike().Setup(m_collectionmanager.GetTreeColl(region, rep));
+    DoubleVec1d mleparam = chainstart.GetRegionalParameters();
+    double maxlike = 0.0;
+    string message = "";
+
+    bool maximizerCalculateOK = m_maximizer.Calculate(mleparam, maxlike, message);
+
+    if (!maximizerCalculateOK)
+    {
+        m_runreport.ReportUrgent("Warning:  maximization failure for this chain.  Using the parameter estimates from the previous chain and continuing.");
+        if (message != "")
+        {
+            m_runreport.ReportNormal("Error from the maximizer:  " + message);
+        }
+        else
+        {
+            m_runreport.ReportDebug("Whoops!  m_maximizer.Calculate() failed, but did not set a message telling us why it failed.  This needs to be fixed.");
+        }
+    }
+    else if (message != "")
+    {
+        m_runreport.ReportNormal("Warning from the maximizer:  " + message);
+    }
+
+#ifndef LAMARC_QA_SINGLE_DENOVOS
+    // don't want to update starting parameter values if we're testing
+    // denovo generation -- we want to use the start values from the
+    // user again and again
+
+    chainstart.SetRegionalParameters(mleparam);
+    // constrain the parameters, unless this is the last final chain
+    if (chaintype < 1 || chain != m_chainparam.GetNChains(chaintype) - 1)
+    {
+        if (registry.GetForceSummary().ConstrainParameterValues(chainstart))
+        {
+            //The parameters were constrained.
+            m_maximizer.ProfileGuideFixAll();
+            mleparam = chainstart.GetRegionalParameters();
+            m_maximizer.Calculate(mleparam, maxlike, message);
+            //Here we're just setting maxlike; the return value and any message is ignored.
+            m_maximizer.ProfileGuideRestore();
+        }
+    }
+
+    chout.SetLlikemle(maxlike);
+    chout.SetEstimates(chainstart);
+
+#else // LAMARC_QA_SINGLE_DENOVOS
+    // when LAMARC_QA_SINGLE_DENOVOS is defined, print out all
+    // parameter values to the SINGLE_DENOVO_FILE
+    // see config/local_build.h for more info
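+    // Each appended line is tab-separated: one entry per valid parameter
+    // (the MLE, or "-" if maximization failed), followed -- when migration
+    // and/or recombination forces are active -- by the selected migration
+    // counts and the number of recombination events counted on the tree.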
+
+    ofstream denovoFile;
+    denovoFile.open(SINGLE_DENOVO_FILE.c_str(),ios::app);
+
+    const ParamVector pvec(true); // Ugh! This is so we can know which
+                                  // parameters are valid to print (IsValidParameter).
+
+    if(maximizerCalculateOK)
+    {
+        for(size_t mleindex=0; mleindex < mleparam.size(); mleindex++)
+        {
+            if(pvec[mleindex].IsValidParameter())
+            {
+                if(mleindex != 0)
+                {
+                    denovoFile << "\t";
+                }
+                denovoFile << mleparam[mleindex];
+            }
+
+        }
+    }
+    else
+    {
+        for(size_t mleindex=0; mleindex < mleparam.size(); mleindex++)
+        {
+            if(pvec[mleindex].IsValidParameter())
+            {
+                if(mleindex != 0)
+                {
+                    denovoFile << "\t";
+                }
+                denovoFile << "-";
+            }
+
+        }
+        registry.AddDenovoMaxRejectCount(1);
+    }
+
+
+    if(registry.GetForceSummary().CheckForce(force_MIG) ||
+       registry.GetForceSummary().CheckForce(force_DIVMIG))
+    {
+        const Tree & tree = *(m_regiontrees[0]);
+#if 0
+        const TimeList & timeList = tree.GetTimeList();
+        long int migs = timeList.HowMany(btypeMig);
+        migs += timeList.HowMany(btypeDivMig);
+        denovoFile << "\t" << migs;
+#endif
+
+        deque<bool> migsToPrint = registry.GetMigsToPrint();
+        TreeSummary * trsum = tree.SummarizeTree();
+        const vector<double>& nmig = trsum->GetMigSummary()->GetShortPoint();
+        size_t vv = nmig.size();
+        for(size_t index = 0; index < vv; index++)
+        {
+            if(migsToPrint[index])
+            {
+                denovoFile << "\t" << nmig[index];
+            }
+        }
+
+    }
+
+    if(registry.GetForceSummary().CheckForce(force_REC))
+    {
+
+        const Tree & tree = *(m_regiontrees[0]);
+        const TimeList & timeList = tree.GetTimeList();
+        long int recs = timeList.HowMany(btypeRec);
+        // divide by two because HowMany gives the number
+        // of branches, not events
+        recs = recs / 2;
+        denovoFile << "\t" << recs;
+    }
+
+    denovoFile << endl;
+    denovoFile.close();
+
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+} // DoSingleChainPosterior
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::DoSingleChainBayes(long int region, long int rep, ChainOut & chout, ForceParameters & chainstart)
+{
+    const ParamSumm& paramsumm(m_collectionmanager.GetParamColl(region, rep)->GetParamSumm());
+
+    m_bayesanalyzer.ReplaceLastChainAndAnalyze(paramsumm);
+
+    //m_bayesanalyzer.AnalyzeAndAdd(paramsumm); //If we're adding up all chains.
+    DoubleVec1d mleparam = m_bayesanalyzer.GetMaxVecForLastChain();
+    chainstart.SetGlobalParameters(mleparam);
+
+    chout.SetEstimates(chainstart);
+    chout.SetLlikemle(m_bayesanalyzer.GetAvgMaxLikeForLastChain());
+    chout.SetBayesUnique(m_bayesanalyzer.GetNumUniquePointsVec());
+
+} // DoSingleChainBayes
+
+//------------------------------------------------------------------------------------
+
+//ReadInNoRedoMax replaces what DoSingleChainPosterior would have
+// done in the initial run in that the MLEs stored in the maximizer, the
+// analyzer, and the last cold chain are set to their read-in values.  The
+// crucial bit seems to be the maximizer, but it can't hurt to set the other
+// values as well.
+//
+//Note:  resurrected this function to speed up our running from summary
+// files.  Don't release with this called; play it safe with RedoMaximization
+// being called instead.  -LS, 12/14/05
+
+void ChainManager::ReadInNoRedoMax (long int region)
+{
+    m_runreport.ReportUrgent("Warning!  Not redoing maximization over this region!"
+                             "  If you see this message in a release version of LAMARC,"
+                             " this function has been called in error:  please let us know"
+                             " at lamarc@gs.washington.edu.\n");
+
+    ForceParameters read_fp = m_chainpack.GetRegion(region).GetEstimates();
+    DoubleVec1d read_MLEs = read_fp.GetRegionalParameters();
+    ChainOut read_co = m_chainpack.GetRegion(region);
+    double read_maxlike = read_co.GetLlikemle();
+
+    if (m_nreplicates > 1)
+    {
+        registry.GetReplicatePostLike().Setup(m_collectionmanager.GetTreeColl(region));
+        SaveGeyerWeights(registry.GetReplicatePostLike().GetGeyerWeights(), region);
+        m_maximizer.SetLikelihood(&(registry.GetReplicatePostLike()));
+    }
+    else
+    {
+        registry.GetSinglePostLike().Setup(m_collectionmanager.GetTreeColl(region, 0));
+        m_maximizer.SetLikelihood(&(registry.GetSinglePostLike()));
+    }
+
+    if (!m_chainparam.IsBayesian())
+    {
+        ForceParameters chainstart(read_fp,region);
+        unsigned long int cold = FindColdChain(m_temps);
+        Chain& coldchain = m_temps[cold];
+        ChainOut chout = coldchain.EndChain();
+        chout.SetNumtemps(m_temps.size());
+        chout.SetLlikemle(read_maxlike);
+        chout.SetEstimates(chainstart);
+
+        registry.GetAnalyzer().SetMLEs(read_MLEs);
+        m_maximizer.SetMLEs(read_MLEs);
+    }
+    else
+    {
+        m_runreport.ReportUrgent("Sumfile reading not implemented for Bayesian analysis yet.  Please bother your local implementors to change this.");
+        assert(false);
+    }
+} // ReadInNoRedoMax
+
+//------------------------------------------------------------------------------------
+
+//RedoMaximization is used to check to make sure the read-in
+// MLEs can be successfully re-calculated using the starting parameters of
+// the run instead of the parameters from the next-to-the-last chain.
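+//
+// For example (with an illustrative SUMFILE_PRECISION of 10, not necessarily
+// the real value): a re-calculated log likelihood differing from the stored
+// one by more than 0.1% triggers an urgent warning, while a smaller absolute
+// difference above 10^-(10-7) = 0.001 is merely noted.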
+
+void ChainManager::RedoMaximization(long int region)
+{
+    string msg = "Re-calculating best parameter values.  ";
+    msg += "Differences may indicate a true maximum was not found, or that "
+        "an older version of LAMARC was used to create the original summary file."
+        "\n";
+    m_runreport.ReportChat(msg);
+
+    if (m_nreplicates > 1)
+    {
+        registry.GetReplicatePostLike().Setup(m_collectionmanager.GetTreeColl(region));
+        SaveGeyerWeights(registry.GetReplicatePostLike().GetGeyerWeights(), region);
+        m_maximizer.SetLikelihood(&(registry.GetReplicatePostLike()));
+    }
+    else
+    {
+        registry.GetSinglePostLike().Setup(m_collectionmanager.GetTreeColl(region, 0));
+        m_maximizer.SetLikelihood(&(registry.GetSinglePostLike()));
+    }
+
+    ForceSummary&   forcesum      = registry.GetForceSummary();
+    ForceParameters chainstart(forcesum.GetStartParameters(), region);
+    DoubleVec1d     mleparam      = chainstart.GetRegionalParameters();
+    //This bit can be used to re-run the maximizer with the same values
+    // it used the first time.
+    long int chain = 0;
+    for (int i = 0; i < NCHAINTYPES; i++ )
+    {
+        chain += registry.GetChainParameters().GetNChains(i);
+    }
+    chain = chain-2; //-1 for number->index, -1 for next-to-last instead of last.
+    DoubleVec1d setup_MLEs;
+    if (chain == -1)
+    {
+        //There is only one chain total--the next-to-last parameter input is the
+        // default input, not that created from the last chain.
+        setup_MLEs = mleparam;
+    }
+    else
+    {
+        ChainOut co = m_chainpack.GetRegion(region);
+        setup_MLEs = co.GetEstimates().GetRegionalParameters();
+    }
+    double maxlike;
+    string message = "";
+    if (!m_maximizer.Calculate(setup_MLEs, maxlike, message))
+    {
+        string msg = "Maximization failed when re-calculating the estimates ";
+        msg = msg + "for this data set.  If that's what happened the last time, "
+            + "at least it's consistent.  The resulting estimates were obtained "
+            + "from the last successfully-maximized chain.";
+        m_runreport.ReportUrgent(msg);
+        m_runreport.ReportNormal("Error from the maximizer:  " + message);
+    }
+    else if (message != "")
+    {
+        m_runreport.ReportNormal("Warning from the maximizer:  " + message);
+    }
+
+    ChainOut read_co = m_chainpack.GetRegion(region);
+    Region& curregion = registry.GetDataPack().GetRegion(region);
+
+    chainstart.SetRegionalParameters(setup_MLEs);
+    ForceParameters readfp = m_chainpack.GetRegion(region).GetEstimates();
+    CompareAndWarn(chainstart, readfp);
+
+    double read_maxlike = read_co.GetLlikemle();
+    double difference = fabs(read_maxlike - maxlike);
+    double percdiff = fabs(difference/read_maxlike);
+    if (percdiff > .001)
+    {
+        msg = "Warning:  the maximizer was unable to find the same maximum ";
+        msg += "likelihood estimates as before for region \"" +
+            curregion.GetRegionName() +
+            "\".  The parameter values it found have a log likelihood of " +
+            ToString(maxlike) + ", which is significantly different from the" +
+            " maximum likelihood of the old set of parameters, " +
+            ToString(read_maxlike) + " (A difference of " + ToString(difference) +
+            ").  If you were running from older summary files with replication, " +
+            "this is to be expected.  Otherwise, we recommend you " +
+            "re-run this data set with any or all of a) longer run times, b) " +
+            "more replicates, or c) more heating (adaptive or fixed).\n";
+        m_runreport.ReportUrgent(msg);
+    }
+    else if (difference > pow(10.0, -(SUMFILE_PRECISION-7)) )
+    {
+        msg = "The newly-calculated maximum likelihood for ";
+        msg += "region \"" + curregion.GetRegionName() + "\" (" + ToString(maxlike)
+            + ") is different from the maximum likelihood as read in from "
+            + "the summary file (" + ToString(read_maxlike) + ").  "
+            + "(A difference of " + ToString(difference)
+            + ", which exceeds the recommended minimum difference of "
+            + ToString(pow(10.0, -(SUMFILE_PRECISION-7))) + ".)\n";
+        m_runreport.ReportNormal(msg);
+    }
+
+    //We want to use the newly-calculated values, not the read in ones.  This
+    // means changing the ones in the chainpack.
+    ChainOut co;
+    if (m_nreplicates > 1)
+    {
+        co = m_chainpack.GetRegion(region);
+    }
+    else
+    {
+        co = m_chainpack.GetLastChain(region);
+    }
+    co.SetEstimates(chainstart);
+    co.SetLlikemle(maxlike);
+    if (m_nreplicates > 1)
+    {
+        m_chainpack.ResetSummaryOverReps(co, region);
+    }
+    else
+    {
+        m_chainpack.ResetLastChain(co, region);
+    }
+    forcesum.SetRegionMLEs(co, region);
+    //Just to make sure, we set stuff in the analyzer and maximizer, too.  I
+    // don't *think* we need to, but just in case.
+    registry.GetAnalyzer().SetMLEs(chainstart.GetRegionalParameters());
+    m_maximizer.SetMLEs(chainstart.GetRegionalParameters());
+} // RedoMaximization
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::CompareAndWarn(ForceParameters calcfp, ForceParameters readfp)
+{
+    vector<force_type> fvec;
+    // MDEBUG needs updating whenever a Force is added, which is not ideal
+    //  NB: the following are in no particular order!
+    fvec.push_back(force_COAL);
+    fvec.push_back(force_MIG);
+    fvec.push_back(force_DISEASE);
+    fvec.push_back(force_REC);
+    fvec.push_back(force_GROW);
+    fvec.push_back(force_LOGISTICSELECTION);
+    fvec.push_back(force_DIVMIG);
+    fvec.push_back(force_DIVERGENCE);
+
+    for (unsigned long int fvi = 0; fvi < fvec.size(); fvi++)
+    {
+        DoubleVec1d calcnums = calcfp.GetGlobalParametersByTag(fvec[fvi]);
+        DoubleVec1d readnums = readfp.GetGlobalParametersByTag(fvec[fvi]);
+        if (calcnums.size() != readnums.size())
+        {
+            string msg = "The ";
+            msg += ToString(fvec[fvi]) + " forces for the read-in values differ in "
+                + "number from the calculated values, which probably means the read-in "
+                + "data differs from that used to write out the data.  It is strongly "
+                + "recommended that you exit the program now and correct this.\n";
+            m_runreport.ReportUrgent(msg);
+        }
+        else
+        {
+            for (unsigned long int numi = 0; numi<calcnums.size(); ++numi)
+            {
+                double difference = fabs(calcnums[numi] - readnums[numi]);
+                double percdiff = difference/readnums[numi];
+                if (percdiff > .001)
+                {
+                    //pow(10.0, -(SUMFILE_PRECISION-6))) {
+                    string msg = "Warning:  A newly-calculated value for ";
+                    msg += "force \"" + ToString(fvec[fvi]) + "\" ("
+                        + ToString(calcnums[numi])
+                        + ") is different from that read in from "
+                        + "the summary file (" + ToString(readnums[numi]) + ").  "
+                        + "(A difference of " + ToString(difference)
+                        + ", which exceeds the recommended minimum difference of 0.1%).\n";
+                    m_runreport.ReportUrgent(msg);
+                }
+                else if (calcnums[numi] != 0)
+                {
+                    string msg = "The " + ToString(fvec[fvi]) + " force estimate "
+                        + ToString(calcnums[numi])
+                        + " was accurately re-calculated from the summary file data.";
+                    m_runreport.ReportChat(msg);
+                }
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::DoOverallProfiles()
+{
+    Analyzer & analyzer = registry.GetAnalyzer();
+
+    if(m_nregions>1)
+    {
+        ForceParameters overall_fp = m_chainpack.GetOverall().GetEstimates();
+        DoubleVec1d overall_MLEs = overall_fp.GetGlobalParameters();
+        double overall_like = m_chainpack.GetOverall().GetLlikemle();
+        const RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+        if (pRegionGammaInfo)
+            overall_MLEs.push_back(pRegionGammaInfo->GetMLE());
+        analyzer.CalcProfiles(overall_MLEs, overall_like, FLAGLONG);
+    }
+
+    if (m_chainparam.RunProfileReps())
+    {
+        // JDEBUG--not finished yet--additional replicate runs at profile values
+        // setup and run a set of new "replicate" chains in each region
+        long int nparams(registry.GetForceSummary().GetAllNParameters());
+        long int reg;
+        for(reg = 0; reg < m_nregions; ++reg)
+        {
+            long int currrep = m_nreplicates;
+            long int param;
+            for(param = 0; param < nparams; ++param)
+            {
+                // pull the 97.5% param and start
+                // resimulate starting tree???
+                m_collectionmanager.StartChain(reg, currrep++, true);
+                // do upper and lower 95% (97.5)
+                // pull the 2.5% param and start
+                m_collectionmanager.StartChain(reg, currrep++, true);
+            }
+        }
+        // rerun the appropriate analysis
+    }
+} // ChainManager::DoOverallProfiles
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::CloseSumOut()
+{
+    m_sumfilehandler.CloseSumOut();
+    registry.GetUserParameters().SetWriteSumFile(false);
+    m_writesumfile = false;
+} // CloseSumOut
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::SaveGeyerWeights(DoubleVec1d logGeyerWeights, long int region)
+{
+    if (m_logGeyerWeights.size() <= static_cast<unsigned long int>(region))
+    {
+        assert(m_logGeyerWeights.size() == static_cast<unsigned long int>(region));
+        m_logGeyerWeights.push_back(logGeyerWeights);
+    }
+    else
+    {
+        m_logGeyerWeights[region] = logGeyerWeights;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::OptimizeDataModels(long int reg)
+{
+    Region& region = registry.GetDataPack().GetRegion(reg);
+    for (long int loc = 0; loc<region.GetNloci(); loc++)
+    {
+        Locus& locus = region.GetLocus(loc);
+        unsigned long int cold = FindColdChain(m_temps);
+        Tree* tree = m_temps[cold].GetTree();
+        if (locus.GetDataModel()->OptimizeDataModel(tree, locus))
+        {
+            for (unsigned long int temp = 0; temp<m_temps.size(); temp++)
+            {
+                m_temps[temp].RecalculateDataLikes();
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainManager::ResetAllAlphas()
+{
+    for (long int reg = 0; reg<registry.GetDataPack().GetNRegions(); reg++)
+    {
+        Region& region = registry.GetDataPack().GetRegion(reg);
+        for (long int loc = 0; loc<region.GetNloci(); loc++)
+        {
+            region.GetLocus(loc).GetDataModel()->ResetAlpha();
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+const ChainPack &
+ChainManager::GetChainPack() const
+{
+    return m_chainpack;
+}
+
+//____________________________________________________________________________________
diff --git a/src/control/chainmanager.h b/src/control/chainmanager.h
new file mode 100644
index 0000000..af4989c
--- /dev/null
+++ b/src/control/chainmanager.h
@@ -0,0 +1,139 @@
+// $Id: chainmanager.h,v 1.54 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// The ChainManager is one of the big persistent objects in Lamarc.  It
+// is expected to exist from the end of the initial factory phase until
+// after the last output report is displayed to the user.
+//
+// The ChainManager is primarily responsible for providing runtime user
+// output (through a RunReport), for managing the interface between the
+// posterior likelihood calculator/maximizer and the running chains,
+// and for providing end of program user output (through an
+// OutputFile).  It is also responsible for taking the user's input
+// on chain length and refactoring it for use by its stable of Chains.
+//
+// It owns the 2 big collections of chain output: the ChainPack, which
+// goes to user output; and a collection manager that is
+// utilized by the posterior likelihood calculator.
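+//
+// A minimal usage sketch (the real call sequence lives in the main Lamarc
+// driver; the RunReport/Maximizer constructor arguments are elided here):
+//
+//   RunReport runreport(/*...*/);
+//   Maximizer maximizer(/*...*/);
+//   ChainManager chainman(runreport, maximizer);
+//   chainman.DoAllChains();                        // run all regions/replicates/chains
+//   const ChainPack& results = chainman.GetChainPack();   // summaries for reporting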
+
+#ifndef CHAINMAN_H
+#define CHAINMAN_H
+
+#include <vector>
+#include <fstream>
+
+#include "chainpack.h"
+#include "collector.h"
+#include "collmanager.h"
+#include "constants.h"
+#include "sumfilehandler.h"
+#include "vectorx.h"
+
+// this dumps the final coalescence times of each tree
+// to the file rdata
+// JRM Debug 4/10
+//#define DUMP_TREE_COAL_RDATA
+
+#ifdef DUMP_TREE_COAL_RDATA
+// file for data for R to process
+extern std::ofstream rdata; // JRM debug
+#endif
+
+class BayesAnalyzer_1D;
+class Random;
+class RunReport;
+class ChainParameters;
+class Chain;
+class Maximizer;
+
+class ChainManager
+{
+  public:
+    ChainManager(RunReport& runrep, Maximizer& maximize);
+    ~ChainManager();
+    void DoAllChains();
+    const ChainPack & GetChainPack() const;
+
+
+  private:
+    ChainManager();                                   // undefined
+    ChainManager(const ChainManager& src);            // undefined
+    ChainManager& operator=(const ChainManager& src); // undefined
+
+    const ChainParameters& m_chainparam;
+    RunReport& m_runreport;
+    Random& m_randomsource;
+    Maximizer& m_maximizer;
+    BayesAnalyzer_1D& m_bayesanalyzer;
+    ChainPack m_chainpack;
+    long int m_nregions, m_nreplicates;
+    CollectionManager m_collectionmanager;
+    SumFileHandler m_sumfilehandler;
+
+    bool m_multitemp;
+    LongVec1d m_nsteps;            // dim: chaintype
+    LongVec2d m_chunksize;         // dim: chaintype X chunk
+    std::vector<Chain> m_temps;         // dim: temperatures
+    long int m_totalsteps, m_currentsteps; // used for completion-time prognosis
+
+    DoubleVec2d m_logGeyerWeights;
+
+    bool m_writesumfile;
+    bool m_readsumfile;
+    // true if in the process of reading or writing tree summaries to a file.
+
+    bool m_recoversumfile;  //true if picking up in the middle
+    long int m_recover_region;
+    long int m_recover_replicate;
+    long int m_recover_chaintype;
+    long int m_recover_chain;   // All recover_* variables are indices, not counters.
+    bool m_recover_redochain;
+    bool m_recover_redomaximization;
+    bool m_redoreplicatesum;
+    bool m_redoregionsum;
+
+    vector<Tree*> m_regiontrees;  // the trees for the various temperature chains
+
+    void ReadInRecover();
+    void SetRecoverChaintypeChainsFrom(long int last_chain);
+    void TellUserWhereWeAreRestarting();
+    void CreateChains();
+    void DoRegions();
+    void CalculateMLEsOverRegions();
+    void CalculateNonBayesMultiRegionMLEs(ForceParameters & fp, ChainOut & regionout, double & maxlike);
+    void DoReplicates(long int region);
+    void DoChainTypes(long int region, long int rep);
+    // the following handles maximization on a stored-tree chain
+    void DoChainFromSummaryFile(long int region, long int rep, long int chaintype, long int chain);
+    void DoChain(long int region, long int rep, long int chaintype, long int chain, ForceParameters& chainstart);
+    void GroomTrees(ForceParameters& chainstart);
+    unsigned long int FindColdChain(const vector<Chain> & chains) const;
+    void DoSingleChainPosterior(long int region, long int rep, long int chaintype,
+                                long int chain, ChainOut & chout, ForceParameters & chainstart);
+    void DoSingleChainBayes(long int region, long int rep, ChainOut & chout, ForceParameters & chainstart);
+
+    // Adjust temperatures for adaptive heating
+    void AdjustTemperatures(DoubleVec1d & averagetemps, long int step, double & howoften);
+    void ChooseTwoAdjacentChains(unsigned long int & chain1, unsigned long int & chain2);
+    std::vector<std::pair<double, long int> > SortChainsByTemperature(const vector<Chain> & temps);
+
+    void ReadInNoRedoMax(long int region);
+    void RedoMaximization(long int region);
+    void CompareAndWarn(ForceParameters calcfp, ForceParameters readfp);
+    void DoOverallProfiles();
+    void CloseSumOut();
+    void SaveGeyerWeights(DoubleVec1d logGeyerWeights, long int region);
+    void OptimizeDataModels(long int reg);
+    void ResetAllAlphas();
+};
+
+#endif // CHAINMAN_H
+
+//____________________________________________________________________________________
diff --git a/src/control/chainout.cpp b/src/control/chainout.cpp
new file mode 100644
index 0000000..7fdd770
--- /dev/null
+++ b/src/control/chainout.cpp
@@ -0,0 +1,63 @@
+// $Id: chainout.cpp,v 1.13 2011/02/20 04:13:59 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "chainout.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+ChainOut::ChainOut()
+    : m_badtrees(FLAGLONG),
+      m_tinypoptrees(FLAGLONG),
+      m_zerodltrees(FLAGLONG),
+      m_stretchedtrees(FLAGLONG),
+      m_accrate(FLAGDOUBLE),
+      m_numtemps(FLAGLONG),
+      m_swaprates(),
+      m_tempaccrates(),
+      m_temperatures(),
+      m_bayesunique(),
+      m_starttime(0),
+      m_endtime(0),
+      m_estimates(unknown_region),
+      m_llikemle(FLAGDOUBLE),
+      m_llikedata(NEG_MAX)
+{
+    // deliberately blank
+} // ChainOut
+
+//------------------------------------------------------------------------------------
+
+void ChainOut::SetStarttime()
+{
+#ifdef NOTIME_FUNC
+    // The system does not provide a clock.  All times are 0.
+    m_starttime = 0;
+#else
+    // Record the current "real" (system clock) time.
+    m_starttime = time(NULL);
+#endif
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainOut::SetEndtime()
+{
+#ifdef NOTIME_FUNC
+    // The system does not provide a clock.  All times are 0.
+    m_endtime = 0;
+#else
+    // Record the current "real" (system clock) time.
+    m_endtime = time(NULL);
+#endif
+}
+
+//____________________________________________________________________________________
diff --git a/src/control/chainout.h b/src/control/chainout.h
new file mode 100644
index 0000000..1baefee
--- /dev/null
+++ b/src/control/chainout.h
@@ -0,0 +1,120 @@
+// $Id: chainout.h,v 1.17 2011/04/23 02:02:48 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/********************************************************************
+ Class ChainOut contains summary information about the results of
+ a chain, such as the acceptance rate, parameter estimates, and
+ start/end times.  It does *not* contain the summary trees, which
+ are stored separately in a Collector object.
+
+ ChainOut objects are normally stored and organized in the ChainPack.
+ They can reasonably be passed by value as they are fairly small,
+ simple objects.
+
+ ChainOut has ChainPack as a friend, since the two classes are
+ tightly coupled.  This would be fairly easy to change if desired.
+
+ Written by Mary Kuhner
+*********************************************************************/
+
+#ifndef CHAINOUT_H
+#define CHAINOUT_H
+
+#include <ctime>
+#include <map>
+#include <string>
+
+#include "types.h"
+#include "forceparam.h"
+
+class ChainOut
+{
+  private:
+    // chain information
+    long m_badtrees;                // number of trees discarded due to limits
+    long m_tinypoptrees;            // number of trees discarded due to small popsizes
+    long m_zerodltrees;             // number of trees discarded due to 0 data likelihood
+    long m_stretchedtrees;          // number of trees discarded due to long branches
+    double m_accrate;               // overall (cold) acceptance rate
+    ratemap m_rates;                // (cold) acceptance rate per arranger
+    long m_numtemps;                // number of temperatures
+    DoubleVec1d m_swaprates;        // heating swap rates between pairs
+    DoubleVec1d m_tempaccrates;     // acceptance rates per temperature
+    DoubleVec1d m_temperatures;     // average temperatures [adaptive/static]
+    LongVec1d   m_bayesunique;      // per-parameter bayesian acceptances
+
+    // Timing information
+    // Type "time_t" is a C library type which holds time information
+    // (in seconds since 1970)
+    time_t m_starttime;
+    time_t m_endtime;
+
+    // chain MLE's
+    ForceParameters m_estimates;    // Maximum likelihood estimates
+    double m_llikemle;              // posterior lnL at maximum
+    double m_llikedata;             // data lnL of last generated tree in chain
+
+  public:
+    // we accept defaults for copy constructor, operator=, and destructor
+    ChainOut();
+
+    // friendship to allow direct access to member variables
+    // for these two tightly coupled classes
+    friend class ChainPack;
+
+    // Inspector functions
+    long            GetNumBadTrees()   const {return m_badtrees;};
+    long            GetTinyPopTrees()  const {return m_tinypoptrees;};
+    long            GetStretchedTrees() const {return m_stretchedtrees;};
+    long            GetZeroDLTrees()   const {return m_zerodltrees;};
+    double          GetAccrate()       const {return m_accrate;};
+    ratemap         GetAllAccrates()   const {return m_rates;};
+    long            GetNumtemps()      const {return m_numtemps;};
+    DoubleVec1d     GetSwaprates()     const {return m_swaprates;};
+    DoubleVec1d     GetTemperatures()  const {return m_temperatures;};
+    DoubleVec1d     GetTempAccrates()  const {return m_tempaccrates;};
+    double          GetLlikemle()      const {return m_llikemle;};
+    double          GetLlikedata()     const {return m_llikedata;};
+    ForceParameters GetEstimates()     const {return m_estimates;};
+
+    time_t          GetStarttime()     const {return m_starttime;};
+    time_t          GetEndtime()       const {return m_endtime;};
+    vector<long>    GetBayesUnique()   const {return m_bayesunique;};
+
+    // Mutator functions
+    void SetNumBadTrees(const long &nbad)           {m_badtrees = nbad;};
+    void SetNumTinyPopTrees(const long &ntiny)      {m_tinypoptrees = ntiny;};
+    void SetNumStretchedTrees(const long &nstretch) {m_stretchedtrees = nstretch;};
+    void SetNumZeroDLTrees(const long &nzero)       {m_zerodltrees = nzero;};
+    void SetAccrate(const double &r)                {m_accrate = r;};
+    void SetAllAccrates(const ratemap &r)           {m_rates = r;};
+    void SetNumtemps(const long &ntemps)            {m_numtemps = ntemps;};
+    void SetSwaprates(const DoubleVec1d &r)         {m_swaprates = r;};
+    void SetTemperatures(const DoubleVec1d &temps)  {m_temperatures = temps;};
+    void SetTempAccrates(const DoubleVec1d &r)      {m_tempaccrates = r;};
+    void SetLlikemle(const double &src)             {m_llikemle = src;};
+    void SetLlikedata(const double &src)            {m_llikedata = src;};
+    void SetEstimates(const ForceParameters &src)   {m_estimates = src;};
+
+    void SetStarttime(const time_t &src)            {m_starttime = src;};
+    void SetEndtime(const time_t &src)              {m_endtime = src;};
+
+    void SetRates(const ratemap &r)                 {m_rates = r;};
+    void SetBayesUnique(const LongVec1d& b)         {m_bayesunique = b;};
+
+    // The following two overloads set the time to the current time, gotten from the system clock.
+    void SetStarttime();
+    void SetEndtime();
+
+};
+
+#endif // CHAINOUT_H
+
+//____________________________________________________________________________________
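
A minimal usage sketch for the timing accessors above (this is not part of the upstream
sources; the helper name and reporting context are hypothetical, and only the ChainOut
accessors declared in this header are assumed):

    #include <ctime>        // for difftime()
    #include "chainout.h"

    // Hypothetical helper: wall-clock seconds spent in a chain.
    // On systems compiled with NOTIME_FUNC both times are 0, so this returns 0.
    double ChainElapsedSeconds(const ChainOut& chout)
    {
        return difftime(chout.GetEndtime(), chout.GetStarttime());
    }
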
diff --git a/src/control/chainpack.cpp b/src/control/chainpack.cpp
new file mode 100644
index 0000000..8f4b968
--- /dev/null
+++ b/src/control/chainpack.cpp
@@ -0,0 +1,544 @@
+// $Id: chainpack.cpp,v 1.35 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <cstdlib>
+#include <iostream>
+
+#include "chainpack.h"
+#include "constants.h"
+#include "region.h"
+#include "registry.h"
+#include "stringx.h"
+#include "sumfilehandler.h"
+#include "xmlsum_strings.h"             // for xml sumfile handling
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+ChainOut ChainPack::GetChain(long int region, long int rep, long int chain) const
+{
+    return(m_chains[region][rep][chain]);
+} // ChainPack::GetChain
+
+//------------------------------------------------------------------------------------
+
+ChainOut ChainPack::GetLastChain() const
+{
+    long int region = m_chains.size() - 1;
+    if (region != -1)
+    {
+        long int rep = m_chains[region].size() - 1;
+        if (rep != -1)
+        {
+            long int chain  = m_chains[region][rep].size() - 1;
+            if (chain != -1)
+            {
+                return(m_chains[region][rep][chain]);
+            }
+        }
+    }
+    //There was no last chain
+    ChainOut blankchout;
+    if (region == -1) region = 0;
+    ForceParameters fp(registry.GetForceSummary().GetStartParameters(), region);
+    blankchout.SetEstimates(fp);
+    return blankchout;
+
+} // ChainPack::GetLastChain
+
+ChainOut ChainPack::GetLastChain(long int region) const
+{
+    assert(region < static_cast<long int>(m_chains.size()));
+    assert(m_chains[region].size() > 0);
+    unsigned long int rep = m_chains[region].size() - 1;
+    assert(m_chains[region][rep].size() > 0);
+    unsigned long int chain = m_chains[region][rep].size() - 1;
+    return(m_chains[region][rep][chain]);
+} // ChainPack::GetLastChain
+
+//------------------------------------------------------------------------------------
+
+ChainOut ChainPack::GetRegion(long int region) const
+{
+    if (m_regions.size() == 0)          // no sum over replicates
+    {
+        long int last = (m_chains[region][0].size())-1;
+        return(this->GetChain(region, 0, last));
+    }
+    else
+    {
+        return(m_regions[region]);
+    }
+} // ChainPack::GetRegion
+
+//------------------------------------------------------------------------------------
+
+ChainOut ChainPack::GetOverall() const
+{
+    if (m_overall.size() == 0)          // no sum over regions
+    {
+        return(this->GetRegion(0));
+    }
+    else
+    {
+        return(m_overall[0]);
+    }
+} // ChainPack::GetOverall
+
+//------------------------------------------------------------------------------------
+
+// Two functions for the summary file reader to make sure the
+//  region/replicate summaries are being set.
+
+long int ChainPack::GetLenRegionsVec() const
+{
+    return (m_regions.size());
+}
+
+long int ChainPack::GetLenOverallVec() const
+{
+    return (m_overall.size());
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::SetChain(ChainOut& chout, long int region, long int rep, long int chain)
+{
+    if (region > m_currentRegion)
+    {
+        EndRegion();
+        VerifyPosition(region, rep, chain);
+        SetChain(chout);
+        return;
+    }
+    if (rep > m_currentRep)
+    {
+        EndReplicate();
+        VerifyPosition(region, rep, chain);
+        SetChain(chout);
+        return;
+    }
+    VerifyPosition(region, rep, chain);
+    SetChain(chout);
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::ResetLastChain(ChainOut& chout, long int region)
+{
+    long int rep = m_chains[region].size()-1;
+    long int chain = m_chains[region][rep].size()-1;
+    assert (rep >= 0);
+    assert (chain >= 0);
+    m_chains[region][rep][chain] = chout;
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::ResetSummaryOverReps(ChainOut& chout, long int region)
+{
+    assert(region < static_cast<long int>(m_regions.size()));
+    m_regions[region] = chout;
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::SetChain(ChainOut& chout)
+{
+    // The estimates in chout might not have known what region they were for.
+    // If that's the case, this will set it straight.
+    ForceParameters fp(chout.GetEstimates(), m_currentRegion);
+    chout.SetEstimates(fp);
+
+    if (m_currentRep == 0 && m_currentChain == 0) // first entry of its region
+    {
+        vector<ChainOut> v1;
+        v1.push_back(chout);
+        vector<vector<ChainOut> > v2;
+        v2.push_back(v1);
+        m_chains.push_back(v2);
+        m_currentChain++;
+        return;
+    }
+    if (m_currentChain == 0)            // first entry of its replicate
+    {
+        vector<ChainOut> v1;
+        v1.push_back(chout);
+        m_chains[m_currentRegion].push_back(v1);
+        m_currentChain++;
+        return;
+    }
+
+    m_chains[m_currentRegion][m_currentRep].push_back(chout);
+    m_currentChain++;
+} // ChainPack::SetChain
+
+//------------------------------------------------------------------------------------
+// RemoveLastChain is needed by ChainManager in the unlikely event that
+//  we have read in chain summary information for the last chain in a
+//  region/replicate, but have no tree summary information, and thus
+//  need to reproduce that tree, giving us a new chain.
+//                 --Lucian
+
+void ChainPack::RemoveLastChain()
+{
+    if (m_currentChain == 0)
+    {
+        //We called EndRegion and/or EndReplicate too early, and have to back up.
+        if (m_currentRep == 0)
+        {
+            if (m_currentRegion == 0)
+            {
+                //There are no chains to remove--do nothing
+                assert(false); //Why did this happen?
+                return;
+            }
+        }
+        m_currentRegion = m_chains.size()-1;
+        m_currentRep = m_chains[m_currentRegion].size()-1;
+        m_currentChain = m_chains[m_currentRegion][m_currentRep].size()-1;
+    }
+    m_chains[m_currentRegion][m_currentRep].pop_back();
+    m_currentChain--;
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::SetSummaryOverReps(ChainOut& chout)
+{
+    // a convenience variable
+    vector<vector<ChainOut> >& region = m_chains[m_currentRegion];
+
+    // set regional start/end times
+    long int lastrep = (region.size())-1;
+    long int lastchain = (region[0].size())-1;
+    chout.SetStarttime(region[0][0].GetStarttime());
+    chout.SetEndtime(region[lastrep][lastchain].GetEndtime());
+
+    // This information is not helpful for the user, so we'll suppress it:
+    chout.SetNumBadTrees(FLAGLONG);
+    chout.SetNumTinyPopTrees(FLAGLONG);
+    chout.SetNumStretchedTrees(FLAGLONG);
+    chout.SetNumZeroDLTrees(FLAGLONG);
+    chout.SetAccrate(FLAGDOUBLE);
+    chout.SetLlikedata(FLAGDOUBLE);
+
+    // The estimates in chout might not have known what region they were for.
+    // If that's the case, this will set it straight.
+    ForceParameters fp(chout.GetEstimates(), m_currentRegion);
+    chout.SetEstimates(fp);
+    m_regions.push_back(chout);
+
+} // ChainPack::SetSummaryOverReps
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::SetSummaryOverRegions(ChainOut& chout)
+{
+    assert(chout.GetEstimates().GetParamSpace() == global_region);
+    // set overall start/end times
+    long int lastregion = (m_chains.size()) - 1;
+    long int lastrep = (m_chains[0].size()) - 1;
+    long int lastchain = (m_chains[0][0].size()) - 1;
+
+    chout.SetStarttime(m_chains[0][0][0].GetStarttime());
+    chout.SetEndtime(m_chains[lastregion][lastrep][lastchain].GetEndtime());
+
+    // This information is not helpful for the user, so we'll suppress it:
+    chout.SetNumBadTrees(FLAGLONG);
+    chout.SetNumTinyPopTrees(FLAGLONG);
+    chout.SetNumStretchedTrees(FLAGLONG);
+    chout.SetNumZeroDLTrees(FLAGLONG);
+    chout.SetAccrate(FLAGDOUBLE);
+    chout.SetLlikedata(FLAGDOUBLE);
+
+    if (m_overall.size() > 0)
+    {
+        m_overall[0] = chout;
+        //Useful for re-setting this when reading from a summary file.
+    }
+    else
+    {
+        m_overall.push_back(chout);
+    }
+} // ChainPack::SetSummaryOverRegions
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::EndRegionIfNecessary(long int region)
+{
+    if (m_currentRegion > region) return;
+    EndRegion();
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::EndRegionsOrRepsAsNeeded(long int maxreps, long int maxchains)
+{
+    if (m_currentChain >= maxchains)
+    {
+        EndReplicate();
+    }
+
+    if (m_currentRep >= maxreps)
+    {
+        EndRegion();
+    }
+}
+
+/**************************************************
+ The following functions provide summaries of the
+ parameter values from a ChainPack.
+**************************************************/
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ChainPack::RegionalMeanParams() const
+{
+    // summarizes the current region
+    unsigned long int rep;
+    DoubleVec2d values;
+    long int lastchain = m_chains[m_currentRegion][0].size() - 1;
+
+    for (rep = 0; rep < m_chains[m_currentRegion].size(); ++rep)
+    {
+        const ChainOut& chout = GetChain(m_currentRegion, rep, lastchain);
+        values.push_back(chout.GetEstimates().GetRegionalParameters());
+    }
+
+    return CalcMean(values);
+
+} // RegionalMeanParams
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ChainPack::OverallMeanParams() const
+{
+    // summarizes over all regions
+    unsigned long int region;
+    DoubleVec2d values;
+
+    for (region = 0; region < m_chains.size(); ++region)
+    {
+        const ChainOut& chout = GetRegion(region);
+        values.push_back(chout.GetEstimates().GetGlobalParameters());
+    }
+
+    return CalcMean(values);
+
+} // OverallMeanParams
+
+//------------------------------------------------------------------------------------
+
+time_t ChainPack::GetStartTime() const
+{
+    ChainOut firstchain = GetChain(0, 0, 0);
+    return(firstchain.GetStarttime());
+} // GetStartTime
+
+time_t ChainPack::GetStartTime(long int region) const
+{
+    ChainOut firstchain = GetChain(region, 0, 0);
+    return(firstchain.GetStarttime());
+} // GetStartTime
+
+//------------------------------------------------------------------------------------
+
+time_t ChainPack::GetEndTime() const
+{
+    ChainOut lastchain = GetOverall();
+    return(lastchain.GetEndtime());
+    // daniel 040903
+    // Why does lastchain.GetEndtime() sometimes return 0 when the endtime computed below is correct?
+    // See the output file produced when reading in a sumfile.
+    // Bug in the overall vector?
+
+#if 0
+    long int i, j, k;
+    i = m_chains.size() - 1;
+    j = m_chains[i].size() - 1;
+    k = m_chains[i][j].size() - 1;
+    return m_chains[i][j][k].endtime;
+#endif
+} // GetEndTime
+
+//------------------------------------------------------------------------------------
+
+// CalcMean assumes that all subvectors are as long as the first one
+DoubleVec1d ChainPack::CalcMean(const DoubleVec2d& src) const
+{
+    long int index;
+    long int size = src[0].size();
+    DoubleVec2d::const_iterator values;
+    DoubleVec1d mean;
+
+    for(index = 0; index < size; ++index)
+    {
+        double sum = 0.0;
+        for(values = src.begin(); values != src.end(); ++values)
+            sum += (*values)[index];
+        mean.push_back(sum /= src.size());
+    }
+    return mean;
+
+} // ChainPack::CalcMean
+
+//------------------------------------------------------------------------------------
+
+// following setter and getter for resuming with sumfiles
+vector<vector<vector<ChainOut > > > ChainPack::GetAllChains() const
+{ return m_chains; }
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::WriteLastChain ( ofstream& sumout) const
+{
+    if ( sumout.is_open() )
+    {
+        sumout << xmlsum::CHAINPACK_START << endl;
+        sumout << "\t" << xmlsum::NUMBER_START
+               << " " << m_currentRegion << " " << m_currentRep << " " << (m_currentChain -1)
+               << " " << xmlsum::NUMBER_END << endl;
+        WriteChainOut( sumout, m_chains[m_currentRegion][m_currentRep][(m_currentChain-1)] );
+        WriteAlphas(sumout, m_currentRegion, m_currentRep, m_currentChain-1);
+        sumout << xmlsum::CHAINPACK_END << endl;
+    }
+    else
+        SumFileHandler::HandleSumOutFileFormatError("ChainPack::WriteLastChain");
+} // ChainPack::WriteLastChain
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::WriteChain(ofstream& sumout, long int region, long int rep, long int chain) const
+{
+    if ( sumout.is_open() )
+    {
+        sumout << xmlsum::CHAINPACK_START << endl;
+        sumout << "\t" << xmlsum::NUMBER_START
+               << " " << region << " " << rep << " " << chain
+               << " " << xmlsum::NUMBER_END << endl;
+        WriteChainOut( sumout, m_chains[region][rep][chain] );
+        WriteAlphas(sumout, region, rep, chain);
+        sumout << xmlsum::CHAINPACK_END << endl;
+    }
+    else
+        SumFileHandler::HandleSumOutFileFormatError("ChainPack::WriteChain");
+} // ChainPack::WriteChain
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::WriteChainOut( ofstream& sumout, const ChainOut& chout) const
+{
+    if ( sumout.is_open() )
+    {
+        long int badtrees       = chout.GetNumBadTrees();
+        long int tinytrees      = chout.GetTinyPopTrees();
+        long int stretchedtrees = chout.GetStretchedTrees();
+        long int zerodltrees    = chout.GetZeroDLTrees();
+        double accrate      = chout.GetAccrate();
+        double llikemle     = chout.GetLlikemle();
+        double llikedata    = chout.GetLlikedata();
+        time_t starttime    = chout.GetStarttime();
+        time_t endtime      = chout.GetEndtime();
+        ratemap rates       = chout.GetAllAccrates();
+        ForceParameters fp  = chout.GetEstimates();
+        DoubleVec1d temperatures = chout.GetTemperatures();
+        DoubleVec1d swaprates    = chout.GetSwaprates();
+        LongVec1d   bayesunique  = chout.GetBayesUnique();
+
+        sumout << "\t" << xmlsum::CHAINOUT_START << endl;
+        sumout << "\t\t" << xmlsum::BADTREES_START  << " " << badtrees
+               << " " << xmlsum::BADTREES_END << endl;
+        sumout << "\t\t" << xmlsum::TINYTREES_START << " " << tinytrees
+               << " " << xmlsum::TINYTREES_END << endl;
+        sumout << "\t\t" << xmlsum::STRETCHEDTREES_START << " " << stretchedtrees
+               << " " << xmlsum::STRETCHEDTREES_END << endl;
+        sumout << "\t\t" << xmlsum::ZERODLTREES_START << " " << zerodltrees
+               << " " << xmlsum::ZERODLTREES_END << endl;
+        sumout << "\t\t" << xmlsum::ACCRATE_START   << " " << accrate
+               << " " << xmlsum::ACCRATE_END << endl;
+        sumout << "\t\t" << xmlsum::LLIKEMLE_START  << " " << llikemle
+               << " " << xmlsum::LLIKEMLE_END << endl;
+        sumout << "\t\t" << xmlsum::LLIKEDATA_START << " " << llikedata
+               << " " << xmlsum::LLIKEDATA_END << endl;
+        sumout << "\t\t" << xmlsum::STARTTIME_START << " " << starttime
+               << " " << xmlsum::STARTTIME_END << endl;
+        sumout << "\t\t" << xmlsum::ENDTIME_START   << " " << endtime
+               << " " << xmlsum::ENDTIME_END << endl;
+
+        sumout << "\t\t" << xmlsum::RATES_START << " ";
+        map< string, pair<long int, long int> >::iterator rit;
+        for (rit = rates.begin(); rit != rates.end(); ++rit)
+        {
+            sumout << xmlsum::MAP_START << " "; // map<string, pair<long int, long int> >
+            sumout << rit->first << " "
+                   << rit->second.first << " " << rit->second.second << " ";
+            sumout << xmlsum::MAP_END << " ";
+        }
+        sumout << xmlsum::RATES_END << endl;
+
+        long int numtemps = chout.GetNumtemps();
+        if (numtemps > 1)
+        {
+            sumout << "\t\t" << xmlsum::TEMPERATURES_START << " ";
+            SumFileHandler::WriteVec1D(sumout, temperatures);
+            sumout << xmlsum::TEMPERATURES_END << endl;
+
+            sumout << "\t\t" << xmlsum::SWAPRATES_START << " ";
+            SumFileHandler::WriteVec1D(sumout, swaprates);
+            sumout << xmlsum::SWAPRATES_END << endl;
+        }
+        if (bayesunique.size() > 0)
+        {
+            sumout << "\t\t" << xmlsum::BAYESUNIQUE_START << " ";
+            SumFileHandler::WriteVec1D(sumout, bayesunique);
+            sumout << xmlsum::BAYESUNIQUE_END << endl;
+        }
+        fp.WriteForceParameters(sumout, 2);
+        sumout << "\t" << xmlsum::CHAINOUT_END << endl;
+    }
+    else
+        SumFileHandler::HandleSumOutFileFormatError("ChainPack::WriteChainOut");
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::WriteAlphas(ofstream& sumout, long int reg, long int rep, long int chain) const
+{
+    const Region& region = registry.GetDataPack().GetRegion(reg);
+    for (long int loc = 0; loc < region.GetNloci(); loc++)
+    {
+        const Locus & locus = region.GetLocus(loc);
+        locus.GetDataModel()->WriteAlpha(sumout, loc, rep, chain);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ChainPack::VerifyPosition( long int test_reg, long int test_rep, long int test_chain)
+{
+    if ((test_reg != m_currentRegion) || (test_rep != m_currentRep) || (test_chain != m_currentChain))
+    {
+        string msg = "Tried to set the chainpack for region " + ToString(test_reg)
+            + ", replicate " + ToString(test_rep) + ", and chain "
+            + ToString(test_chain) + ", but we needed the chainpack for region "
+            + ToString(m_currentRegion) + ", replicate " + ToString(m_currentRep)
+            + ", and chain " + ToString(m_currentChain) + ".";
+        throw data_error(msg);
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/control/chainpack.h b/src/control/chainpack.h
new file mode 100644
index 0000000..fbb4b78
--- /dev/null
+++ b/src/control/chainpack.h
@@ -0,0 +1,128 @@
+// $Id: chainpack.h,v 1.25 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef CHAINPACK_H
+#define CHAINPACK_H
+
+#include <fstream>
+#include <vector>
+#include <algorithm>
+
+#include "vectorx.h"
+#include "chainout.h"
+#include "constants.h"
+#include "definitions.h"
+
+/************************************************************************
+ This class collects ChainOut objects containing the results of all
+ chains, as well as a summary for each region and an overall summary.
+ It is filled by the chain manager and used by the output report.
+
+ It implements the following rules:
+
+ If only one replicate was run, the regional result is the result
+ from its last chain.  If multiple replicates were run, there is a
+ separate regional result summing all replicates.
+
+ If only one region was run, the overall result is the result from
+ that region.  If multiple regions were run, there is a separate
+ overall result summing all regions.
+
+ ChainPack adds timestamps to each summary object indicating the
+ times of the chains which it summarizes.  The calling program does
+ not need to set timestamps on the summaries, but it does need to set
+ timestamps on the individual chain objects.
+
+ DEBUG There is currently a logic problem involving summary times when there
+ is only one region.
+
+ (MDEBUG:  Mary looked at this 3/23/2005.  The class is badly designed
+ and should be redesigned so that the regional and overall results always
+ exist (removes a lot of nasty special case code and kills the logic
+ bug) but this is a fairly large refactoring and is being deferred till
+ after 2.0.)
+
+ While it is being filled up by the chain manager, the ChainPack
+ is in an inconsistent state (for example, it may have two regions
+ but no overall summary) and should generally not be touched by
+ anyone else.
+
+ Written by:  Mary Kuhner September 2000
+*************************************************************************/
+
+class ChainPack
+{
+  private:
+    ChainPack& operator=(const ChainPack&);          // deliberately not defined
+    ChainPack(const ChainPack &src);                 // deliberately not defined
+
+    vector<vector<vector<ChainOut> > >  m_chains;    // output of each chain
+                                                     // dim: region X rep X chain
+    vector<ChainOut>                    m_regions;   // summed over reps
+                                                     // dim: region
+    vector<ChainOut>                    m_overall;   // summed over regions
+                                                     // dim: really a scalar but kept
+                                                     // as a vector for convenience
+    long int m_currentChain;
+    long int m_currentRep;
+    long int m_currentRegion;
+
+    long int    RegionsSoFar()   const   {return(static_cast<long int>(m_chains.size()));};
+    DoubleVec1d CalcMean(const DoubleVec2d& src)  const;
+
+  public:
+
+    ChainPack() : m_currentChain(0), m_currentRep(0), m_currentRegion(0) {}; ~ChainPack() {};
+
+    // Inspector functions
+    ChainOut  GetChain(long int region, long int rep, long int chain) const;
+    ChainOut  GetLastChain()                                          const;
+    ChainOut  GetLastChain(long int region)                           const;
+    ChainOut  GetRegion(long int region)                              const;
+    ChainOut  GetOverall()                                            const;
+    long int  GetLenRegionsVec()                                      const;
+    long int  GetLenOverallVec()                                      const;
+    time_t    GetStartTime()                                          const;
+    time_t    GetStartTime(long int region)                           const;
+    time_t    GetEndTime()                                            const;
+    DoubleVec1d   RegionalMeanParams()                                const;
+    DoubleVec1d   OverallMeanParams()                                 const;
+
+    vector<vector<vector<ChainOut > > > GetAllChains()                const;
+
+    // Mutator functions
+    // These change their arguments--do not make the arguments const!
+    void SetChain(ChainOut& chout, long int region, long int rep, long int chain);
+    void SetChain(ChainOut& chout);
+    void ResetLastChain(ChainOut& chout, long int region);
+    void RemoveLastChain();
+    void SetSummaryOverReps(ChainOut& chout);
+    void ResetSummaryOverReps(ChainOut& chout, long int region);
+    void SetSummaryOverRegions(ChainOut& chout);
+
+    void EndReplicate() { ++m_currentRep; m_currentChain = 0; };
+    void EndRegion()    { ++m_currentRegion; m_currentRep = 0; m_currentChain = 0;};
+    void EndRegionIfNecessary(long int region);
+    void EndRegionsOrRepsAsNeeded(long int maxreps, long int maxchains);
+
+    //Summary file functions to write out chain info
+    void WriteLastChain   ( std::ofstream& out ) const ;
+    void WriteChain       ( std::ofstream& out, long int, long int, long int ) const;
+    void WriteChainOut    ( std::ofstream& out, const ChainOut& chout) const;
+    void WriteForceParameters ( std::ofstream& out, const ForceParameters& fp ) const;
+    void WriteAlphas(std::ofstream& sumout, long int reg, long int rep, long int chain) const;
+
+    //Function to test whether we're assembling correctly.
+    void VerifyPosition   ( long int test_reg, long int test_rep, long int test_chain) ;
+};
+
+#endif // CHAINPACK_H
+
+//____________________________________________________________________________________
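
A sketch of the fill order that the mutators above imply, as checked by VerifyPosition():
one SetChain() call per chain, EndReplicate() between replicates, EndRegion() between
regions, with the per-region and overall summaries set along the way.  This is not the
upstream driver code (the real sequencing lives in the chain manager); the helpers
RunOneChain(), SummarizeReplicates() and SummarizeRegions() are placeholders invented
for the illustration.

    #include "chainout.h"
    #include "chainpack.h"

    // Hypothetical stand-ins for the real chain-running and summarizing code.
    ChainOut RunOneChain(long region, long rep, long chain);
    ChainOut SummarizeReplicates(long region);
    ChainOut SummarizeRegions();

    void FillChainPack(ChainPack& pack, long nregions, long nreps, long nchains)
    {
        for (long region = 0; region < nregions; ++region)
        {
            for (long rep = 0; rep < nreps; ++rep)
            {
                for (long chain = 0; chain < nchains; ++chain)
                {
                    ChainOut chout = RunOneChain(region, rep, chain);
                    pack.SetChain(chout, region, rep, chain);
                }
                pack.EndReplicate();
            }
            // In LAMARC itself this summary is only set when more than one
            // replicate was run; see the class comment above.
            ChainOut repsummary = SummarizeReplicates(region);
            pack.SetSummaryOverReps(repsummary);
            pack.EndRegion();
        }
        // Likewise only set when more than one region was run; its estimates
        // must be in the global (over-regions) parameter space.
        ChainOut overall = SummarizeRegions();
        pack.SetSummaryOverRegions(overall);
    }
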
diff --git a/src/control/chainparam.cpp b/src/control/chainparam.cpp
new file mode 100644
index 0000000..31f61aa
--- /dev/null
+++ b/src/control/chainparam.cpp
@@ -0,0 +1,378 @@
+// $Id: chainparam.cpp,v 1.42 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <numeric>                      // for std::accumulate in ChainParameters::GetNAllChains()
+
+#include "arranger.h"
+#include "chainparam.h"
+#include "defaults.h"
+#include "errhandling.h"
+#include "stringx.h"                    // for ToXML()
+#include "xml_strings.h"                // for ToXML()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+ChainParameters::ChainParameters(
+    DoubleVec1d temperatures,
+    long        tempinterval,
+    bool        tempadapt,
+    long        nChains_initial,
+    long        nSamples_initial,
+    long        interval_initial,
+    long        nDiscard_initial,
+    long        nChains_final,
+    long        nSamples_final,
+    long        interval_final,
+    long        nDiscard_final,
+    long        nreps,
+    bool        bayesian,
+    double      dropTiming,
+    double      sizeTiming,
+    double      hapTiming,
+    double      probhapTiming,
+    double      bayesTiming,
+    double      locusTiming,
+    double      zilchTiming,
+    double      stairTiming,
+    double      epochsizeTiming
+//  double      epochnudgeTiming  Add this if it passes testing
+    )
+    :  m_arrangers(dropTiming, sizeTiming, hapTiming, probhapTiming, bayesTiming,
+                   locusTiming, zilchTiming, stairTiming, epochsizeTiming, 0.0),
+// JREMOVE -- epochnudge testing code
+// the 0.0 is the testing timing for epochnudge arranger, fix this if we
+// want to actually add this one
+// end JREMOVE
+       m_temperatures(temperatures),
+       m_tempinterval(tempinterval),
+       m_tempadapt(tempadapt),
+       m_nChains_initial(nChains_initial),
+       m_nSamples_initial(nSamples_initial),
+       m_interval_initial(interval_initial),
+       m_nDiscard_initial(nDiscard_initial),
+       m_nChains_final(nChains_final),
+       m_nSamples_final(nSamples_final),
+       m_interval_final(interval_final),
+       m_nDiscard_final(nDiscard_final),
+       m_nreps(nreps),
+       m_bayesian(bayesian),
+       m_runprofilereps(false /* JDEBUG--get from menu/xml! */)
+{
+    SortAndNormalizeTemperatures();
+
+    // Everything that is being asserted here should be prevented by logic in the user-menus and xml
+    // reading part of the code.  So, if you get an assert here, you need to add something there.
+
+    assert(!(m_arrangers.empty()));     // at least one arranger
+    assert(ValidTemperatures());        // at least one temperature,
+                                        // lowest temperature == 1.0
+                                        // temperatures ascending
+    assert(m_tempinterval >= 0L);       // tempinterval non-negative
+
+    assert(m_nChains_initial >=  0L);   // initial chains non-negative
+    assert(m_nSamples_initial >= 0L);   // initial samples non-negative
+    assert(m_interval_initial >  0L);   // initial sampling interval positive
+    assert(m_nDiscard_initial >= 0L);   // initial discard count non-negative
+    assert(m_nChains_final   >=  0L);   // final chains non-negative
+    assert(m_nSamples_final   >= 0L);   // final samples non-negative
+    assert(m_interval_final   >  0L);   // final sampling interval positive
+    assert(m_nDiscard_final   >= 0L);   // final discard count non-negative
+    assert(m_nChains_initial + m_nChains_final > 0);    // at least one chain
+
+    assert(m_nreps > 0);                // at least one replicate
+
+}
+
+//------------------------------------------------------------------------------------
+
+ChainParameters::~ChainParameters()
+{
+    // intentionally blank
+} // ChainParameters::~ChainParameters
+
+//------------------------------------------------------------------------------------
+
+void
+ChainParameters::SortAndNormalizeTemperatures()
+{
+    if(!(m_temperatures.empty()))
+    {
+        sort(m_temperatures.begin(),m_temperatures.end());
+        double lowestTemp = m_temperatures[0];
+        assert(lowestTemp > 0.0);
+        transform(m_temperatures.begin(),m_temperatures.end(),
+                  m_temperatures.begin(),
+                  bind2nd(divides<double>(),lowestTemp));
+        // m_temperatures[0] should now be 1.0, but let's
+        // eliminate any problems with rounding
+        m_temperatures[0] = 1.0;
+
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+double ChainParameters::GetArrangerTiming(const string& atype) const
+{
+    return m_arrangers.GetArrangerTiming(atype);
+}
+
+StringVec1d ChainParameters::GetAllStringsForActiveArrangers() const
+{
+    return m_arrangers.GetAllStringsForActiveArrangers();
+}
+
+//------------------------------------------------------------------------------------
+
+ArrangerVec ChainParameters::CloneArrangers() const
+{
+    return m_arrangers;
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d ChainParameters::ToXML(unsigned long nspaces) const
+{
+    // JDEBUG--need to add m_runprofilereps to this!
+    StringVec1d xmllines;
+    string line = MakeIndent(MakeTag(xmlstr::XML_TAG_CHAINS),nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+    // replicates
+    string mytag(MakeTag(xmlstr::XML_TAG_REPLICATES));
+    line = MakeIndent(mytag,nspaces) + ToString(GetNReps()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    // bayesianism
+    mytag = MakeTag(xmlstr::XML_TAG_BAYESIAN_ANALYSIS);
+    line = MakeIndent(mytag,nspaces) + ToString(IsBayesian()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    // heating
+    line = MakeIndent(MakeTag(xmlstr::XML_TAG_HEATING),nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+    mytag = MakeTag(xmlstr::XML_TAG_HEATING_STRATEGY);
+    line = MakeIndent(mytag,nspaces) + ToStringTF(GetTempAdapt()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_TEMPERATURES);
+    line = MakeIndent(mytag,nspaces) + ToString(GetAllTemperatures(),4) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_SWAP_INTERVAL);
+    line = MakeIndent(mytag,nspaces) + ToString(GetTempInterval()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    nspaces -= INDENT_DEPTH;
+
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_HEATING),nspaces);
+    xmllines.push_back(line);
+
+    // strategy
+    line = MakeIndent(MakeTag(xmlstr::XML_TAG_STRATEGY),nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+
+    // MDEBUG needs updating every time an Arranger is added, not ideal!
+    mytag = MakeTag(xmlstr::XML_TAG_RESIMULATING);
+    line = MakeIndent(mytag,nspaces) + ToString(GetArrangerTiming(arrangerstrings::DROP)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_TREESIZE);
+    line = MakeIndent(mytag,nspaces) + ToString(GetArrangerTiming(arrangerstrings::TREESIZE)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_HAPLOTYPING);
+    line = MakeIndent(mytag,nspaces) + ToString(GetArrangerTiming(arrangerstrings::HAP)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_LOCUSARRANGER);
+    line = MakeIndent(mytag,nspaces) + ToString(GetArrangerTiming(arrangerstrings::LOCUS)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_EPOCHSIZEARRANGER);
+    line = MakeIndent(mytag,nspaces) + ToString(GetArrangerTiming(arrangerstrings::EPOCHSIZE)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    if (IsBayesian())
+    {
+        mytag = MakeTag(xmlstr::XML_TAG_BAYESIAN);
+        line = MakeIndent(mytag,nspaces) + ToString(GetArrangerTiming(arrangerstrings::BAYES)) + MakeCloseTag(mytag);
+        xmllines.push_back(line);
+    }
+    // DENOVO deliberately left out
+    nspaces -= INDENT_DEPTH;
+
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_STRATEGY),nspaces);
+    xmllines.push_back(line);
+
+    // initial chains
+    line = MakeIndent(MakeTag(xmlstr::XML_TAG_INITIAL),nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+    mytag = MakeTag(xmlstr::XML_TAG_NUMBER);
+    line = MakeIndent(mytag,nspaces) + ToString(GetNChains(0L)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_SAMPLES);
+    line = MakeIndent(mytag,nspaces) + ToString(GetNSamples(0L)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_DISCARD);
+    line = MakeIndent(mytag,nspaces) + ToString(GetNDiscard(0L)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_INTERVAL);
+    line = MakeIndent(mytag,nspaces) + ToString(GetInterval(0L)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    nspaces -= INDENT_DEPTH;
+
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_INITIAL),nspaces);
+    xmllines.push_back(line);
+
+    // final chains
+    line = MakeIndent(MakeTag(xmlstr::XML_TAG_FINAL),nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+    mytag = MakeTag(xmlstr::XML_TAG_NUMBER);
+    line = MakeIndent(mytag,nspaces) + ToString(GetNChains(1L)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_SAMPLES);
+    line = MakeIndent(mytag,nspaces) + ToString(GetNSamples(1L)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_DISCARD);
+    line = MakeIndent(mytag,nspaces) + ToString(GetNDiscard(1L)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_INTERVAL);
+    line = MakeIndent(mytag,nspaces) + ToString(GetInterval(1L)) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    nspaces -= INDENT_DEPTH;
+
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_FINAL),nspaces);
+    xmllines.push_back(line);
+    nspaces -= INDENT_DEPTH;
+
+    // ending close tag
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_CHAINS),nspaces);
+    xmllines.push_back(line);
+
+    return xmllines;
+}
+
+//------------------------------------------------------------------------------------
+
+long
+ChainParameters::GetNChains(long i) const
+{
+    if(i == defaults::initial)    return m_nChains_initial;
+    if(i == defaults::final)      return m_nChains_final;
+    assert(false);
+    throw implementation_error("bad chain type");
+}
+
+long
+ChainParameters::GetNSamples(long i) const
+{
+    if(i == defaults::initial)    return m_nSamples_initial;
+    if(i == defaults::final)      return m_nSamples_final;
+    assert(false);
+    throw implementation_error("bad chain type");
+}
+
+long
+ChainParameters::GetInterval(long i) const
+{
+    if(i == defaults::initial)    return m_interval_initial;
+    if(i == defaults::final)      return m_interval_final;
+    assert(false);
+    throw implementation_error("bad chain type");
+}
+
+long
+ChainParameters::GetNDiscard(long i) const
+{
+    if(i == defaults::initial)    return m_nDiscard_initial;
+    if(i == defaults::final)      return m_nDiscard_final;
+    assert(false);
+    throw implementation_error("bad chain type");
+}
+
+//------------------------------------------------------------------------------------
+
+bool ChainParameters::ValidTemperatures() const
+{
+    if(m_temperatures.empty())
+    {
+        return false;
+    }
+    if(m_temperatures[0] != 1.0)
+    {
+        return false;
+    }
+    DoubleVec1d::const_iterator iter;
+    for(iter = m_temperatures.begin(); iter != m_temperatures.end(); iter++)
+    {
+        if(*iter < 1.0)
+        {
+            return false;
+        }
+        DoubleVec1d::const_iterator nextIter = iter+1;
+        if(nextIter != m_temperatures.end())
+        {
+            if(*iter >= *nextIter)
+            {
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+long ChainParameters::GetNAllChains() const
+{
+    return m_nChains_initial + m_nChains_final;
+} // GetNAllChains
+
+//------------------------------------------------------------------------------------
+
+bool ChainParameters::RunProfileReps() const
+{
+    return ((IsBayesian()) ? false : m_runprofilereps);
+} // RunProfileReps
+
+//------------------------------------------------------------------------------------
+
+long ChainParameters::GetNProfileReps(long nparams) const
+{
+    long nreps(0);
+
+    if (RunProfileReps())
+    {
+        // for each parameter, 1 replicate at 5% and 1 at 95%.
+        nreps = 2 * nparams;
+    }
+
+    return nreps;
+
+} // GetNProfileReps
+
+//------------------------------------------------------------------------------------
+
+long ChainParameters::GetNRepsAndNProfileReps(long nparams) const
+{
+    return GetNReps() + GetNProfileReps(nparams);
+} // GetNRepsAndNProfileReps
+
+//____________________________________________________________________________________
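
To make the temperature handling above concrete: SortAndNormalizeTemperatures() sorts
the user-supplied temperatures and divides every entry by the smallest one, so a list
such as {3.0, 1.5, 6.0} becomes {1.0, 2.0, 4.0}, which then satisfies the conditions
ValidTemperatures() checks (lowest exactly 1.0, nothing below 1.0, strictly ascending).
The same transformation, written as a standalone sketch outside the class purely for
illustration:

    #include <algorithm>
    #include <vector>

    // Mirrors the idea of ChainParameters::SortAndNormalizeTemperatures().
    void NormalizeTemperatures(std::vector<double>& temps)
    {
        if (temps.empty()) return;
        std::sort(temps.begin(), temps.end());
        const double lowest = temps[0];     // assumed positive, as asserted above
        for (double& t : temps)
            t /= lowest;
        temps[0] = 1.0;                     // guard against rounding error
    }
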
diff --git a/src/control/chainparam.h b/src/control/chainparam.h
new file mode 100644
index 0000000..b8edb6a
--- /dev/null
+++ b/src/control/chainparam.h
@@ -0,0 +1,133 @@
+// $Id: chainparam.h,v 1.35 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/********************************************************************
+ ChainParameters is a collection class used for internal communication
+ throughout Lamarc.
+
+ ChainParameters contains information needed to run chains, including
+ managing the different arrangers.
+
+ Written by Jim Sloan, revised by Mary Kuhner
+
+ reworked by Elizabeth Walkup 2004/08/06 to mesh with
+ front-end/back-end separation
+********************************************************************/
+
+#ifndef CHAINPARAMETERS_H
+#define CHAINPARAMETERS_H
+
+#include <vector>
+#include <stdlib.h>
+
+#include "vectorx.h"
+#include "constants.h"
+#include "arrangervec.h"
+
+class Arranger;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class ChainParameters
+{
+  private:
+    ArrangerVec  m_arrangers;    // we own this
+    DoubleVec1d  m_temperatures; // sorted in ascending order
+    long         m_tempinterval; // how often to swap between heated chains
+    bool         m_tempadapt;    // adaptive change of the temperatures
+    long         m_nChains_initial;
+    long         m_nSamples_initial;
+    long         m_interval_initial;
+    long         m_nDiscard_initial;
+    long         m_nChains_final;
+    long         m_nSamples_final;
+    long         m_interval_final;
+    long         m_nDiscard_final;
+    long         m_nreps;
+    bool         m_bayesian;     // are we doing Bayesian or frequentist?
+    bool         m_runprofilereps; // if true run additional replicates
+                                   // to improve generation of
+                                   // profile-likelihood boundaries
+                                   // ignored if the run is bayesian
+
+    bool ValidTemperatures() const;
+
+    ChainParameters();                                      // undefined
+    ChainParameters(const ChainParameters& src);            // undefined
+    ChainParameters& operator=(const ChainParameters& src); // undefined
+
+  protected:
+    void SortAndNormalizeTemperatures();
+
+  public:
+    ChainParameters(
+        DoubleVec1d temperatures,
+        long        tempInterval,
+        bool        tempAdapt,
+        long        nChains_initial,
+        long        nSamples_initial,
+        long        interval_initial,
+        long        nDiscard_initial,
+        long        nChains_final,
+        long        nSamples_final,
+        long        interval_final,
+        long        nDiscard_final,
+        long        nreps,
+        bool        bayesian,
+        double      dropTiming,
+        double      sizeTiming,
+        double      hapTiming,
+        double      probhapTiming,
+        double      bayesTiming,
+        double      locusTiming,
+        double      zilchTiming,
+        double      stairTiming,
+        double      epochsizeTiming
+        );
+    ~ChainParameters();
+
+    // Get Functions
+    double GetArrangerTiming(const string& atype) const;
+    ArrangerVec CloneArrangers() const;
+    StringVec1d GetAllStringsForActiveArrangers() const;
+
+    StringVec1d ToXML(unsigned long nspaces) const;
+
+    long GetTempInterval()             const { return m_tempinterval; };
+    bool GetTempAdapt()                const { return m_tempadapt; };
+    double GetTemperature(long i)      const { return m_temperatures[i]; };
+    DoubleVec1d GetAllTemperatures()   const { return m_temperatures; };
+    long GetNChains(long i)            const;
+    long GetNSamples(long i)           const;
+    long GetInterval(long i)           const;
+    long GetNDiscard(long i)           const;
+    long GetNReps()                    const { return m_nreps; };
+    bool IsBayesian()                  const { return m_bayesian; };
+    // getter used by CollectionManager ctor
+    long GetNAllChains()               const;
+    bool RunProfileReps()              const;
+    long GetNProfileReps(long nparams) const;
+    long GetNRepsAndNProfileReps(long nparams)    const;
+
+    // Validation
+    bool IsValidChain()                const;
+
+};
+
+// This free function throws an exception of type data_error
+// containing the specified message.  It's meant only as
+// a convenience.
+
+void DoError(const string& what);
+
+#endif // CHAINPARAMETERS_H
+
+//____________________________________________________________________________________
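
As a small illustration of how the per-phase getters above can be combined (this helper
is hypothetical, not part of the LAMARC sources), the total number of recorded samples
per replicate follows directly from the chain counts and per-chain sample counts:

    #include "chainparam.h"
    #include "defaults.h"   // for defaults::initial and defaults::final

    // Hypothetical convenience: samples recorded per replicate, summed over
    // the initial and final chain phases (the sampling interval is not a factor here).
    long TotalSamplesPerReplicate(const ChainParameters& cp)
    {
        return cp.GetNChains(defaults::initial) * cp.GetNSamples(defaults::initial)
             + cp.GetNChains(defaults::final)   * cp.GetNSamples(defaults::final);
    }
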
diff --git a/src/control/constants.cpp b/src/control/constants.cpp
new file mode 100644
index 0000000..877794a
--- /dev/null
+++ b/src/control/constants.cpp
@@ -0,0 +1,112 @@
+// $Id: constants.cpp,v 1.28 2013/10/25 17:00:52 mkkuhner Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "constants.h"
+
+const int lamarccodes::cleanReturn      = 0;
+const int lamarccodes::badAllocation    = 1;
+const int lamarccodes::fileError        = 2;
+const int lamarccodes::optionError      = 4;
+const int lamarccodes::denovoCompileError = 8;
+const int lamarccodes::unknownError     = 16;
+
+const std::string lamarcstrings::COAL = "coalesce";
+const std::string lamarcstrings::MIG = "migrate";
+const std::string lamarcstrings::DISEASE = "disease";
+const std::string lamarcstrings::REC = "recombine";
+const std::string lamarcstrings::GROW = "grow";
+const std::string lamarcstrings::REGION_GAMMA = "gamma over regions";
+const std::string lamarcstrings::INVALID = "invalid";
+const std::string lamarcstrings::EXPGROWSTICK = "stick grow exponential";
+const std::string lamarcstrings::LOGISTICSELECTION = "logistic selection";
+const std::string lamarcstrings::LOGSELECTSTICK = "stick logistic selection";
+const std::string lamarcstrings::DIVERGENCE = "divergence";
+const std::string lamarcstrings::DIVMIG = "divmigrate";
+
+const std::string lamarcstrings::STICK = "stick";
+const std::string lamarcstrings::TIP = "tip";
+const std::string lamarcstrings::BASE = "base";
+
+const std::string lamarcstrings::SNP = "SNP";
+const std::string lamarcstrings::DNA = "DNA";
+const std::string lamarcstrings::NUC = "NUC";
+const std::string lamarcstrings::MICROSAT = "MICROSAT";
+
+const std::string lamarcstrings::PANEL = "Panel";
+const std::string lamarcstrings::STUDY = "Study";
+const std::string lamarcstrings::EMPTY = "";
+
+const std::string lamarcstrings::F84 = "F84";
+const std::string lamarcstrings::GTR = "GTR";
+const std::string lamarcstrings::STEPWISE = "Stepwise";
+const std::string lamarcstrings::BROWNIAN = "Brownian";
+const std::string lamarcstrings::KALLELE = "KAllele";
+const std::string lamarcstrings::MIXEDKS = "MixedKS";
+
+const std::string lamarcstrings::ELECTRO = "ELECTRO";
+
+const std::string lamarcstrings::longNameUSER = "USER";
+const std::string lamarcstrings::longNamePROGRAMDEFAULT = "PROGRAMDEFAULT";
+const std::string lamarcstrings::longNameFST = "FST";
+const std::string lamarcstrings::longNameWATTERSON = "WATTERSON";
+
+const std::string lamarcstrings::shortNameUSER = "USR";
+const std::string lamarcstrings::shortNamePROGRAMDEFAULT = "DEF";
+const std::string lamarcstrings::shortNameFST = "FST";
+const std::string lamarcstrings::shortNameWATTERSON = "WAT";
+
+const std::string lamarcstrings::longBrownianName = "Brownian";
+const std::string lamarcstrings::longF84Name = "Felsenstein '84";
+const std::string lamarcstrings::longGTRName = "General Time Reversible";
+const std::string lamarcstrings::longKAlleleName = "K Allele";
+const std::string lamarcstrings::longStepwiseName = "Stepwise";
+const std::string lamarcstrings::longMixedKSName = "Mixed KAllele-Stepwise";
+
+const std::string lamarcstrings::shortBrownianName = "Brownian";
+const std::string lamarcstrings::shortF84Name = "F84";
+const std::string lamarcstrings::shortGTRName = "GTR";
+const std::string lamarcstrings::shortKAlleleName = "KAllele";
+const std::string lamarcstrings::shortStepwiseName = "Stepwise";
+const std::string lamarcstrings::shortMixedKSName = "MixedKS";
+
+const std::string lamarcstrings::shortCurveName =   "CURVE";
+
+// ignoring case, lamarcstrings::shortStickExpName must be the same as
+// xmlstr::XML_ATTRVALUE_STICK for use in
+// stringx.cpp::StringMatchesGrowthType() used by
+// stringx.cpp::ProduceGrowthTypeOrBarf()
+const std::string lamarcstrings::shortStickExpName = "STICK-EXP";
+const std::string lamarcstrings::shortStickName = "STICK";
+const std::string lamarcstrings::longCurveName  =   "Analytical (Curve) Growth Approx.";
+const std::string lamarcstrings::longStickExpName  = "Constant (Stick) Exponential Growth Approx.";
+const std::string lamarcstrings::longStickName  = "Linear (Stick) Growth Approx.";
+
+const std::string lamarcstrings::longExpName = "Exponential";
+const std::string lamarcstrings::shortExpName = "Exponential";
+const std::string lamarcstrings::longStairStepName = "Stair Step";
+const std::string lamarcstrings::shortStairStepName = "StairStep";
+
+const std::string lamarcstrings::longDSelectionName = "Log-Deterministic Selection";
+const std::string lamarcstrings::longSSelectionName = "Stochastic (Stick) Selection";
+const std::string lamarcstrings::shortDSelectionName = "Deterministic Selection";
+const std::string lamarcstrings::shortSSelectionName = "Stochastic Selection";
+
+//____________________________________________________________________________________
+
+bool IsMigrationLike(force_type f)
+{
+  return (f==force_MIG || f==force_DIVMIG);
+}
+
+bool IsLocalPartForce(force_type f)
+{
+  return (f==force_DISEASE);
+}
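
A brief, hypothetical usage sketch for the two query functions above (not taken from the
LAMARC sources): they let calling code branch on a force's category rather than on the
individual force_type values.

    #include "constants.h"

    // Hypothetical dispatch on force category.
    const char* DescribeForce(force_type f)
    {
        if (IsMigrationLike(f))  return "migration-like force";
        if (IsLocalPartForce(f)) return "local-partition force";
        return "other force";
    }
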
diff --git a/src/control/constants.h b/src/control/constants.h
new file mode 100644
index 0000000..309cd55
--- /dev/null
+++ b/src/control/constants.h
@@ -0,0 +1,287 @@
+// $Id: constants.h,v 1.108 2013/10/25 17:00:52 mkkuhner Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef CONSTANTS_H
+#define CONSTANTS_H
+
+#include <string>
+
+// Defines various symbols used to control debugging of experimental code blocks.
+#include "local_build.h"
+
+#include "definitions.h"
+
+using std::string;
+
+/***************************************************************
+  This file contains constants which control the behavior of
+the program.  They are divided into sections:
+
+(1)  Constants which the user may wish to change, in order
+     to adapt the program to his/her needs
+(2)  Debugging constants which the user should probably not
+     change unless sure of his/her reasons
+(3)  Internal constants which should not be changed at all
+(4)  The tag library (relating tags to literals) which should
+     not be changed except to translate the program to a
+     different language
+
+If you change anything in this file you must 'make clean' the
+entire project.  Unix Make utilities may not think you need to
+go so far; but they lie.
+
+****************************************************************/
+
+class Registry;
+
+extern Registry registry;
+
+//------------------------------------------------------------------------------------
+//  User-changeable constants
+//------------------------------------------------------------------------------------
+
+// Only for Metrowerks compiles; see the Makefile for other platforms.
+// If you do not want to use the menu at all, set this constant
+// to 0.  The program will then read from 'infile' and
+// write to 'outfile' and no menu will be displayed.  Be sure
+// that all necessary information is present in 'infile' if
+// you use this option.
+#ifdef __MWERKS__
+#define MENU 1
+#endif
+
+// Enumerations
+enum verbosity_type {CONCISE, NORMAL, VERBOSE, NONE};
+enum paramlistcondition { paramlist_YES, paramlist_MIX, paramlist_NO };
+enum likelihoodtype { ltype_ssingle, ltype_replicate, ltype_region,
+                      ltype_gammaregion };
+enum proftype { profile_PERCENTILE, profile_FIX, profile_NONE };
+enum noval {noval_none}; //used when I need a null instantiation for a template
+
+// if you edit model_type, also edit numPossibleDataModels and allDataModels()
+// in defaults.cpp
+enum model_type {F84,Brownian,Stepwise,KAllele,GTR,MixedKS};
+enum method_type {method_PROGRAMDEFAULT, method_USER, method_FST, method_WATTERSON};
+enum priortype {LINEAR, LOGARITHMIC};
+enum pstatus { pstat_invalid, pstat_unconstrained, pstat_constant, pstat_identical, pstat_identical_head,
+               pstat_multiplicative, pstat_multiplicative_head };
+/* Invalid--nonexistent parameter such as diagonal migration value
+   Unconstrained--ordinary parameter
+   Constant--constrained to constant value
+   Identical_head--first member of group constrained to be identical (in ParamVec order)
+   Identical--any other member of group constrained to be identical
+   Multiplicative_head--first member of multiplicative group
+   Multiplicative--any other member of multiplicative group
+   Epochtime--boundary time of an epoch (handled specially because can't be Bayes-arranged)
+*/
+/* Deleted (for now?) pstatus values:  valid, symmetricNm, symmetricM
+   (use 'mean' instead of symmetricM, 'standard' instead of valid, and don't
+   use symmetricNm unless we allow our general migration model to vary based
+   on population size.)
+*/
+
+enum growth_type {growth_CURVE, growth_STICK, growth_STICKEXP};
+enum growth_scheme {growth_EXP, growth_STAIRSTEP};
+enum selection_type {selection_DETERMINISTIC, selection_STOCHASTIC};
+
+// for marking study samples vs panel sources
+enum data_source {dsource_study, dsource_panel };
+
+enum force_type { force_COAL, force_MIG, force_DISEASE, force_REC,
+                  force_GROW, force_REGION_GAMMA, force_EXPGROWSTICK,
+                  force_LOGISTICSELECTION, force_LOGSELECTSTICK, force_DIVERGENCE,
+                  force_DIVMIG, force_NONE };
+
+// Query functions for force_type
+bool IsMigrationLike(force_type f);
+bool IsLocalPartForce(force_type f); 
+
+//---------------------------------------------------------------
+// Debugging constants
+// (you had better know what you are doing before changing any of
+// these)
+//---------------------------------------------------------------
+
+// When STATIONARIES is defined, the program will run without use of
+// data, producing a report of the stationary distribution of the sampler
+// This is a useful debugging tool and can also be used to obtain
+// stationaries of otherwise difficult distributions via Monte Carlo.
+// NEVER turn this on if you mean to analyze your data--it will
+// cause the data to be totally ignored!
+#ifdef STATIONARIES
+const string INTERVALFILE = "interval.out";
+const string MIGFILE = "migcount";
+const string DISFILE = "discount";
+const string RECFILE = "reccount";
+const string SELECTFILE = "selectstuff";
+const string EPOCHFILE = "epochtimes";
+#endif // STATIONARIES
+
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+// When LAMARC_QA_SINGLE_DENOVOS is defined, the program generates
+// a slew of denovo trees and calculates the likely parameter
+// values of each separately. See lamarc/config/local_build.h
+// for more info
+const std::string SINGLE_DENOVO_INFO = "denovo_info.txt";
+const std::string SINGLE_DENOVO_FILE = "denovo_params.txt";
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+// When true, track data likelihoods into file 'like1'
+#define LIKETRACK 0
+
+//---------------------------------------------------------------
+//  Internal program constants
+//  (you had better know *exactly* what you are doing if you
+//  change any of these--the program may not survive)
+//---------------------------------------------------------------
+
+const double MAX_LENGTH = 200.0;     // maximum branch length
+                                     // this must be less than
+                                     // DBL_MAX because of use in
+                                     // Active/InactiveStairStickCoal
+                                     // events.
+
+const long BASES = 4;                // number of nucleotides
+const long baseA = 0;                // codes for nucleotides
+const long baseC = 1;
+const long baseG = 2;
+const long baseT = 3;
+const long baseEnd = 4;              // allows one-past-the-end use
+
+const int  INVARIANTS   = 4;         // possible invariant sites
+const std::string SINGLEBASES[INVARIANTS] = {"A","C","G","T"};
+
+const long markerCell     = 0;       // indicates a marker dlcell
+const long invariantCell  = 1;       // indicates an invariant dlcell
+
+const long FLAGLONG     = -99;       // arbitrary flag values
+const double FLAGDOUBLE = -99.0;
+const long FLAGINVAR    = -999;
+
+const double DF         = 2.0;       // degrees of freedom for
+                                     // likelihood ratio test
+const long NCHAINTYPES = 2;          // how many kinds of chains?
+const long IDSIZE = 2;               // size of branch ID number
+const long NELEM = 2;                // allowed parents or children
+                                     // of a branch
+
+const long XML_RANDOM_NAME_LENGTH   = 100000;
+
+// The following should be provided by the compiler, but
+// we have found this to be unportable, so we define them
+// ourselves.
+
+const double NEG_MAX = -999999999.9; // the smallest reasonable double
+
+//------------------------------------------------------------------------------------
+// Registry of constant string tags
+// (you might wish to change these if changing language)
+//------------------------------------------------------------------------------------
+
+const unsigned long INDENT_DEPTH = 2;  // indentation depth used when echoing the input XML to output
+
+// The precision of the numbers written to the summary output file.
+// NOTE:  When writing tree summaries with growth, the precision *must*
+// be at least as large as the minimum timestep allowable.
+const int SUMFILE_PRECISION = 18;
+
+class lamarccodes
+{
+  public:
+    static const int cleanReturn;
+    static const int badAllocation;
+    static const int fileError;
+    static const int optionError;
+    static const int denovoCompileError;
+    static const int unknownError;
+};
+
+class lamarcstrings
+{
+  public:
+    static const std::string COAL       ;
+    static const std::string MIG        ;
+    static const std::string DISEASE    ;
+    static const std::string REC        ;
+    static const std::string GROW       ;
+    static const std::string LOGISTICSELECTION;
+    static const std::string DIVERGENCE ;
+    static const std::string DIVMIG     ;
+
+    static const std::string REGION_GAMMA;
+    static const std::string INVALID    ;
+    static const std::string STICK      ;
+    static const std::string TIP        ;
+    static const std::string BASE       ;
+    static const std::string SNP        ;
+    static const std::string DNA        ;
+    static const std::string NUC        ;
+    static const std::string MICROSAT   ;
+    static const std::string EXPGROWSTICK;
+    static const std::string LOGSELECTSTICK;
+
+    static const std::string PANEL;
+    static const std::string STUDY;
+    static const std::string EMPTY;
+
+    static const std::string F84;
+    static const std::string GTR;
+    static const std::string STEPWISE;
+    static const std::string BROWNIAN;
+    static const std::string KALLELE;
+    static const std::string MIXEDKS;
+
+    static const std::string ELECTRO    ;
+
+    static const std::string longNameUSER;
+    static const std::string longNamePROGRAMDEFAULT;
+    static const std::string longNameFST;
+    static const std::string longNameWATTERSON;
+
+    static const std::string shortNameUSER;
+    static const std::string shortNamePROGRAMDEFAULT;
+    static const std::string shortNameFST;
+    static const std::string shortNameWATTERSON;
+
+    static const std::string longBrownianName;
+    static const std::string longF84Name;
+    static const std::string longGTRName;
+    static const std::string longKAlleleName;
+    static const std::string longStepwiseName;
+    static const std::string longMixedKSName;
+
+    static const std::string shortBrownianName;
+    static const std::string shortF84Name;
+    static const std::string shortGTRName;
+    static const std::string shortKAlleleName;
+    static const std::string shortStepwiseName;
+    static const std::string shortMixedKSName;
+
+    static const std::string longCurveName;
+    static const std::string longStickExpName;
+    static const std::string longStickName;
+    static const std::string shortCurveName;
+    static const std::string shortStickExpName;
+    static const std::string shortStickName;
+
+    static const std::string longExpName;
+    static const std::string longStairStepName;
+    static const std::string shortExpName;
+    static const std::string shortStairStepName;
+
+    static const std::string longDSelectionName;
+    static const std::string longSSelectionName;
+    static const std::string shortDSelectionName;
+    static const std::string shortSSelectionName;
+};
+
+#endif // CONSTANTS_H
+
+//____________________________________________________________________________________
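
As an illustration (not from the upstream sources) of how a precision constant such as
SUMFILE_PRECISION is typically applied, the sketch below pushes it onto an output stream
before writing a small timestep; the file name is the default tree-summary output name
from defaults.cpp below, and the real summary writer lives elsewhere in the tree:

    #include <fstream>
    #include <iomanip>

    const int SUMFILE_PRECISION = 18;   // value copied from constants.h above

    int main()
    {
        std::ofstream sumfile("outsumfile.xml");
        // Streams default to 6 significant digits, which would collapse the tiny
        // timesteps the comment in constants.h warns about; 18 keeps them distinct.
        sumfile << std::setprecision(SUMFILE_PRECISION) << 1.0e-12 << '\n';
        return 0;
    }
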
diff --git a/src/control/defaults.cpp b/src/control/defaults.cpp
new file mode 100644
index 0000000..0569f83
--- /dev/null
+++ b/src/control/defaults.cpp
@@ -0,0 +1,368 @@
+// $Id: defaults.cpp,v 1.72 2012/06/30 01:32:39 bobgian Exp $
+
+#include <string>
+#include "local_build.h"
+#include "defaults.h"
+
+using std::string;
+
+const bool defaults::convert_output_to_eliminate_zeroes = true;
+
+// minimal mutation rate -- used in XpartitionEvent::ThrowIfPopSizeTiny
+const double defaults::minMuRate          = 1e-10;
+
+// parameters -- default to these when a reasonable value is not provided or
+// calculable
+const double defaults::theta              = 0.01;
+const double defaults::migration          = 100.0;
+const double defaults::divMigration       = 100.0;
+const double defaults::disease            = 1.0;
+const double defaults::recombinationRate  = 0.01;
+const double defaults::growth             = 1.0;
+const double defaults::gammaOverRegions   = 1.0; // = exponential dist.
+const double defaults::logisticSelection  = 1.0/defaults::theta;
+const double defaults::epochtime          = defaults::theta;
+
+// disease location
+const long defaults::diseaseLocation    = 1L;
+
+// growth approximation type
+const growth_type defaults::growType    = growth_CURVE;
+
+// selection approximation type
+const selection_type defaults::selectionType = selection_DETERMINISTIC;
+
+// maximum events
+const long defaults::coalEvents           = 100000;
+const long defaults::migEvents            = 10000;
+const long defaults::diseaseEvents        = 1000;
+const long defaults::recEvents            = 1000;
+const long defaults::growEvents           = 0; // no such "event" possible
+const long defaults::lselectEvents        = 0; // no such "event" possible
+const long defaults::epochEvents          = 1000;  // might be a bit small!
+
+// methods
+const method_type defaults::thetaMethod        = method_PROGRAMDEFAULT;
+const method_type defaults::migMethod          = method_PROGRAMDEFAULT;
+const method_type defaults::divMigMethod       = method_PROGRAMDEFAULT;
+const method_type defaults::diseaseMethod      = method_PROGRAMDEFAULT;
+const method_type defaults::recMethod          = method_PROGRAMDEFAULT;
+const method_type defaults::growMethod         = method_PROGRAMDEFAULT;
+const method_type defaults::lselectMethod      = method_PROGRAMDEFAULT;
+const method_type defaults::divMethod          = method_PROGRAMDEFAULT;
+
+// default boundaries for bayesian prior
+const double defaults::lowerboundTheta    = 0.00001;
+const double defaults::upperboundTheta    = 10.0;
+const double defaults::lowerboundMig      = 0.01;
+const double defaults::upperboundMig      = 1000.0;
+const double defaults::lowerboundDivMig   = 0.01;
+const double defaults::upperboundDivMig   = 1000.0;
+const double defaults::lowerboundDisease  = 0.0001;
+const double defaults::upperboundDisease  = 1000.0;
+const double defaults::lowerboundRec      = 0.00001;
+const double defaults::upperboundRec      = 10.0;
+const double defaults::lowerboundGrowth   = -500.0;
+const double defaults::upperboundGrowth   = 1000.0;
+const double defaults::lowerboundLSelect  = -1000.0;
+const double defaults::upperboundLSelect  = 5000.0;
+const double defaults::lowerboundEpoch    = 0.0001;
+const double defaults::upperboundEpoch    = 10.0;
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+const long   defaults::samplingRate       = 1;
+#endif
+
+// default prior types
+const priortype defaults::priortypeTheta   = LOGARITHMIC;
+const priortype defaults::priortypeMig     = LOGARITHMIC;
+const priortype defaults::priortypeDivMig  = LOGARITHMIC;
+const priortype defaults::priortypeDisease = LOGARITHMIC;
+const priortype defaults::priortypeRec     = LOGARITHMIC;
+const priortype defaults::priortypeGrowth  = LINEAR;
+const priortype defaults::priortypeLSelect = LINEAR;
+const priortype defaults::priortypeEpoch   = LINEAR;
+
+// Min/Max allowable values for bayesian priors.  Used both in
+//  prior_interface.cpp and ui_vars_prior.cpp
+const double defaults::minboundTheta       = 1e-10;
+const double defaults::maxboundTheta       = 100;
+const double defaults::minboundMig         = 1e-10;
+const double defaults::maxboundMig         = 10000;
+const double defaults::minboundDivMig      = 1e-10;
+const double defaults::maxboundDivMig      = 10000;
+const double defaults::minboundDisease     = 1e-10;
+const double defaults::maxboundDisease     = 10000;
+const double defaults::minboundRec         = 1e-10;
+const double defaults::maxboundRec         = 100;
+const double defaults::minboundGrowth      = -5000;
+const double defaults::maxboundGrowth      = 15000;
+const double defaults::minboundLSelect     = -5000;
+const double defaults::maxboundLSelect     = 15000;
+const double defaults::minboundEpoch       = 1e-10;
+const double defaults::maxboundEpoch       = 1000;
+
+// maximum allowable values for initial parameter estimates
+// for any chain.
+const double defaults::maxTheta           = 100.0;
+const double defaults::minTheta           =   1e-10;
+const double defaults::maxMigRate         = 10000.0;
+const double defaults::minMigRate         =     0.0;
+const double defaults::maxDivMigRate      = 10000.0;
+const double defaults::minDivMigRate      =     0.0;
+const double defaults::maxDiseaseRate     = 10000.0;
+const double defaults::minDiseaseRate     =     0.0;
+const double defaults::maxRecRate         = 100.0;
+const double defaults::minRecRate         =   0.0;
+const double defaults::maxGrowRate        = 15000.0;
+const double defaults::minGrowRate        = -5000.0;
+const double defaults::maxLSelectCoeff    = 15000.0;
+const double defaults::minLSelectCoeff    = -5000.0;
+const double defaults::maxGammaOverRegions = 100.0;
+const double defaults::minGammaOverRegions =   1e-4;
+const double defaults::maxEpoch           = 1000.0;
+const double defaults::minEpoch           =   1e-10;
+
+// maximum allowable values during parameter estimation (maximization)
+const double defaults::maximization_maxTheta           = 1500.0;
+const double defaults::maximization_minTheta           =   1e-12;
+const double defaults::maximization_maxMigRate         = 10000.0;
+const double defaults::maximization_minMigRate         = 0.0;
+const double defaults::maximization_maxDiseaseRate     = 10000.0;
+const double defaults::maximization_minDiseaseRate     = 0.0;
+const double defaults::maximization_maxRecRate         = 500.0;
+const double defaults::maximization_minRecRate         = 0.0;
+const double defaults::maximization_maxGrowRate        = 25000.0;
+const double defaults::maximization_minGrowRate        = -5000.0;
+const double defaults::maximization_maxLSelectCoeff    = 25000.0;
+const double defaults::maximization_minLSelectCoeff    = -5000.0;
+const double defaults::maximization_maxGammaOverRegions = 100.0;
+const double defaults::maximization_minGammaOverRegions =   1e-4;
+const double defaults::maximization_maxEpoch           = 1500.0;
+const double defaults::maximization_minEpoch           =   1e-12;
+
+// arrangers
+const double defaults::dropArrangerTiming         = 1.0;
+const double defaults::sizeArrangerTiming         = 0.2;
+const double defaults::haplotypeArrangerTiming    = 0.0;
+const double defaults::probhapArrangerTiming      = 0.2;
+const double defaults::bayesianArrangerTiming     = 0.0;
+const bool   defaults::useBayesianAnalysis        = false;
+
+// temperatures
+const long defaults::temperatureInterval                  = 10;
+const bool defaults::useAdaptiveTemperatures              = false;
+const double defaults::minTemperature                     = 1.0;
+const double defaults::secondTemperature                  = 1.1;
+
+// chains
+const long defaults::initial              = 0;
+const long defaults::initNChains          = 10;
+const long defaults::initNSamples         = 500;
+const long defaults::initInterval         = 20;
+const long defaults::initDiscard          = 1000;
+
+const long defaults::final                = 1;
+const long defaults::finalNChains         = 2;
+const long defaults::finalNSamples        = 10000;
+const long defaults::finalInterval        = 20;
+const long defaults::finalDiscard         = 1000;
+
+// replicates
+const long defaults::replicates           = 1;
+const long defaults::geyeriters           = 100000;
+
+//Max num heated chains.
+const long defaults::maxNumHeatedChains   = 30;
+
+// error conditions
+const long defaults::tooManyDenovo        = 20;
+
+// default user parameters for profiling
+const bool defaults::doProfile = true;
+const proftype defaults::profileType = profile_PERCENTILE;
+
+// default group parameter status
+const pstatus defaults::groupPstat = pstat_identical;
+
+//Key parameters for profiling estimation algorithms.
+const double defaults::highvalTheta     = 10;
+const double defaults::lowvalTheta      = .0001;
+const double defaults::highmultTheta    = 10;
+const double defaults::lowmultTheta     = .1;
+
+const double defaults::highvalMig       = 1000;
+const double defaults::lowvalMig        = 1;
+const double defaults::highmultMig      = 10;
+const double defaults::lowmultMig       = .1;
+
+const double defaults::highvalDisease   = 100;
+const double defaults::lowvalDisease    = 1;
+const double defaults::highmultDisease  = 10;
+const double defaults::lowmultDisease   = .1;
+
+const double defaults::highvalRec       = .1;
+const double defaults::lowvalRec        = .0001;
+const double defaults::highmultRec      = 5;
+const double defaults::lowmultRec       = .1;
+
+const double defaults::highvalGrowth    = 1000;
+const double defaults::lowvalGrowth     = -10;
+const double defaults::highmultGrowth   = 10;
+const double defaults::lowmultGrowth    = 10;
+
+const double defaults::highvalLSelect   = 1000;
+const double defaults::lowvalLSelect    = -100;
+const double defaults::highmultLSelect  = 10;
+const double defaults::lowmultLSelect   = 10;
+
+const double defaults::highvalGammaOverRegions  = 100;
+const double defaults::lowvalGammaOverRegions   = .0001;
+const double defaults::highmultGammaOverRegions = 1;
+const double defaults::lowmultGammaOverRegions  = .1;
+
+const double defaults::highvalEpoch     = 100;
+const double defaults::lowvalEpoch      = .0001;
+const double defaults::highmultEpoch    = 10;
+const double defaults::lowmultEpoch     = .1;
+
+// defaults for regions
+const double defaults::effpopsize       = 1.0;
+
+// defaults for datamodels
+const long defaults::nucleotideBins     = 4;
+
+// default number of bins for allelic and microsat stepwise models
+const long defaults::bins               = 100;
+
+// default number of bins for brownian model
+const long defaults::brownianBins       = 3; //mean, variance, cumulative total
+
+// default maximum separation (functionally) allowable between any two alleles
+const long defaults::threshhold           = 20;
+
+// Default external ranges for the model to explore.  In the stepwise
+// model we want a wider range than in the mixedks model: the former can
+// tolerate some wandering at the edges, but in the latter we don't want
+// to spend a lot of time bouncing around in unused space.
+//
+// These may eventually be settable in the UI, but if so we would need a
+// clear way to explain to the user what they are setting.
+const long defaults::step_allowance       = 5;
+const long defaults::mixedks_allowance    = 1;
+
+const string defaults::startMethod        = "user";
+
+// default values for userparams
+
+const string defaults::curvefileprefix      = "curvefile";
+const string defaults::mapfileprefix        = "mapfile";
+const string defaults::reclocfileprefix     = "reclocfile";
+const string defaults::tracefileprefix      = "tracefile";
+const string defaults::newicktreefileprefix = "newick";
+#ifdef LAMARC_QA_TREE_DUMP
+const string defaults::argfileprefix        = "ARGs";
+#endif
+const string defaults::datafilename         = "infile.xml";
+const string defaults::resultsfilename      = "outfile.txt";
+const string defaults::treesuminfilename    = "insumfile.xml";
+const string defaults::treesumoutfilename   = "outsumfile.xml";
+const string defaults::xmloutfilename       = "menusettings_infile.xml";
+const string defaults::xmlreportfilename    = "report.xml";
+const string defaults::profileprefix        = "profile";
+
+const verbosity_type defaults::verbosity    = NORMAL;
+const verbosity_type defaults::progress     = NORMAL;
+
+const time_t defaults::programstarttime     = -1;
+const long   defaults::randomseed           = -1L;      // bogus value
+const bool   defaults::hasoldrandomseed     = false;
+
+const bool defaults::plotpost               = false;
+const bool defaults::readsumfile            = false;
+const bool defaults::useoldrandomseed       = true;
+const bool defaults::usesystemclock         = true;
+const bool defaults::writecurvefiles        = true;
+const bool defaults::writereclocfiles       = false;
+const bool defaults::writetracefiles        = true;
+const bool defaults::writenewicktreefiles   = false;
+#ifdef LAMARC_QA_TREE_DUMP
+const bool defaults::writeargfiles          = false;
+const bool defaults::writemanyargs          = false;
+#endif //  LAMARC_QA_TREE_DUMP
+const bool defaults::writesumfile           = false;
+
+// default values for data models
+const double            defaults::minLegalFrequency     = 0.00001;
+
+const ModelTypeVec1d    defaults::allDataModels()
+{
+    ModelTypeVec1d possibleModels;  // built fresh on each call; a static vector here would keep growing with repeated calls
+    //Models for DNA/SNP data:
+    possibleModels.push_back(F84);
+    possibleModels.push_back(GTR);
+    //Models for microsat/electrophoretic/phenotype data:
+    possibleModels.push_back(KAllele);
+    //Models for microsat-only data:
+    possibleModels.push_back(Stepwise);
+    possibleModels.push_back(MixedKS);
+    possibleModels.push_back(Brownian);
+    return possibleModels;
+}
+
+const model_type    defaults::dataModelType     = F84;
+const bool          defaults::doNormalize       = false;
+const double        defaults::autoCorrelation   = 1.0;
+const double        defaults::ttratio           = 2.0;
+const bool          defaults::calcFreqsFromData = true;
+const double        defaults::categoryProbability = 1.0;
+
+const DoubleVec1d   defaults::categoryProbabilities()
+{
+    static const DoubleVec1d catProbs(1,defaults::categoryProbability);
+    return catProbs;
+}
+
+const double        defaults::categoryRate      = 1.0;
+const double        defaults::categoryRateMultiple = 2.0;
+
+const DoubleVec1d   defaults::categoryRates()
+{
+    static const DoubleVec1d catRates(1,defaults::categoryRate);
+    return catRates;
+}
+
+const DoubleVec1d   defaults::chainTemperatures()
+{
+    static const DoubleVec1d chainTemps(1,defaults::minTemperature);
+    return chainTemps;
+}
+
+const double        defaults::baseFrequencyA    = 0.25;
+const double        defaults::baseFrequencyC    = 0.25;
+const double        defaults::baseFrequencyG    = 0.25;
+const double        defaults::baseFrequencyT    = 0.25;
+const double        defaults::gtrRateAC         = 0.1;
+const double        defaults::gtrRateAG         = 0.1;
+const double        defaults::gtrRateAT         = 0.1;
+const double        defaults::gtrRateCG         = 0.1;
+const double        defaults::gtrRateCT         = 0.1;
+const double        defaults::gtrRateGT         = 0.1;
+const double        defaults::relativeMuRate    = 1.0;
+const double        defaults::KS_alpha          = 0.5;
+const bool          defaults::optimize_KS_alpha = false;
+const long          defaults::numCategories     = 1;
+
+const long          defaults::maxNumCategories  = 10;
+
+const double        defaults::per_base_error_rate= 0.0;
+
+const double        defaults::perThetaChange = 0.01;
+
+const long          defaults::numSemiUniqueBranches = 1000000;
+
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+const long          defaults::numDenovos = 500000;
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+//____________________________________________________________________________________
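
The function-style defaults above (categoryProbabilities, categoryRates, and
chainTemperatures) build their vectors inside the function rather than as file-scope
static data members, which sidesteps static-initialization-order problems between
translation units. A stripped-down sketch of the construct-on-first-use pattern,
assuming DoubleVec1d is a std::vector<double> (the real typedef lives in vectorx.h):

    #include <vector>

    typedef std::vector<double> DoubleVec1d;   // assumed; the real typedef is in vectorx.h

    struct demo_defaults
    {
        static const double categoryRate;
        // Construct-on-first-use: the vector is built the first time the function
        // runs, so code in other translation units never sees it uninitialized.
        static const DoubleVec1d categoryRates()
        {
            static const DoubleVec1d catRates(1, categoryRate);
            return catRates;
        }
    };

    const double demo_defaults::categoryRate = 1.0;

    int main()
    {
        DoubleVec1d rates = demo_defaults::categoryRates();   // one entry, 1.0
        return rates.size() == 1 ? 0 : 1;
    }
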
diff --git a/src/control/defaults.h b/src/control/defaults.h
new file mode 100644
index 0000000..9aa038c
--- /dev/null
+++ b/src/control/defaults.h
@@ -0,0 +1,355 @@
+// $Id: defaults.h,v 1.63 2012/06/30 01:32:39 bobgian Exp $
+
+#ifndef DEFAULTS_H
+#define DEFAULTS_H
+
+#include <string>
+#include "local_build.h"
+#include "constants.h"
+#include "vectorx.h"
+
+using std::string;
+
+class defaults
+{
+    // Default values -- you can change these, but it is easier to
+    // code your defaults into your input data file.
+
+  public:
+
+    // Whether or not the user expects 'site 0' in their output
+    static const bool convert_output_to_eliminate_zeroes;
+
+    // minimal mutation rate
+    static const double minMuRate;
+
+    // parameters
+    static const double theta;
+    static const double migration;
+    static const double divMigration;
+    static const double disease;
+    static const double recombinationRate;
+    static const double growth;
+    static const double logisticSelection;
+    static const double gammaOverRegions; // equals alpha, which equals the
+    // scaled shape parameter of a gamma distribution over regions
+    // of the background mutation rate
+    static const double epochtime;
+
+    // disease location
+    static const long diseaseLocation;
+
+    // growth approximation type
+    static const growth_type growType;
+    // selection approximation type
+    static const selection_type selectionType;
+
+    // maximum events
+    static const long coalEvents;
+    static const long migEvents;
+    static const long divMigEvents;
+    static const long diseaseEvents;
+    static const long recEvents;
+    static const long growEvents;
+    static const long lselectEvents;
+    static const long epochEvents;
+
+    // methods
+    static const method_type thetaMethod;
+    static const method_type migMethod;
+    static const method_type divMigMethod;
+    static const method_type diseaseMethod;
+    static const method_type recMethod;
+    static const method_type growMethod;
+    static const method_type lselectMethod;
+    static const method_type divMethod;
+
+    // Do a bayesian analysis
+    static const bool   useBayesianAnalysis;
+
+    // bayesian prior default boundaries
+    static const double lowerboundTheta;
+    static const double upperboundTheta;
+    static const double lowerboundMig;
+    static const double upperboundMig;
+    static const double lowerboundDivMig;
+    static const double upperboundDivMig;
+    static const double lowerboundDisease;
+    static const double upperboundDisease;
+    static const double lowerboundRec;
+    static const double upperboundRec;
+    static const double lowerboundGrowth;
+    static const double upperboundGrowth;
+    static const double lowerboundLSelect;
+    static const double upperboundLSelect;
+    static const double lowerboundEpoch;
+    static const double upperboundEpoch;
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    // default relative sampling rate
+    static const long   samplingRate;
+#endif
+
+    // bayesian prior default types
+    static const priortype priortypeTheta;
+    static const priortype priortypeMig;
+    static const priortype priortypeDivMig;
+    static const priortype priortypeDisease;
+    static const priortype priortypeRec;
+    static const priortype priortypeGrowth;
+    static const priortype priortypeLSelect;
+    static const priortype priortypeEpoch;
+
+    // Min/Max allowable values for bayesian priors.  Used both in
+    //  prior_interface.cpp and ui_vars_prior.cpp
+    static const double minboundTheta;
+    static const double maxboundTheta;
+    static const double minboundMig;
+    static const double maxboundMig;
+    static const double minboundDivMig;
+    static const double maxboundDivMig;
+    static const double minboundDisease;
+    static const double maxboundDisease;
+    static const double minboundRec;
+    static const double maxboundRec;
+    static const double minboundGrowth;
+    static const double maxboundGrowth;
+    static const double minboundLSelect;
+    static const double maxboundLSelect;
+    static const double minboundEpoch;
+    static const double maxboundEpoch;
+
+    // maximum allowable values for initial parameter estimates
+    static const double maxTheta;
+    static const double minTheta;
+    static const double maxMigRate;
+    static const double minMigRate;
+    static const double maxDivMigRate;
+    static const double minDivMigRate;
+    static const double maxDiseaseRate;
+    static const double minDiseaseRate;
+    static const double maxRecRate;
+    static const double minRecRate;
+    static const double maxGrowRate;
+    static const double minGrowRate;
+    static const double maxLSelectCoeff;
+    static const double minLSelectCoeff;
+    static const double minGammaOverRegions;
+    static const double maxGammaOverRegions; // equals alpha, which equals
+    // the shape parameter of a gamma distribution over regions
+    // of the background mutation rate
+    static const double minEpoch;
+    static const double maxEpoch;
+
+    // maximum allowable values during parameter estimation (maximization)
+    static const double maximization_maxTheta;
+    static const double maximization_minTheta;
+    static const double maximization_maxMigRate;
+    static const double maximization_minMigRate;
+    static const double maximization_maxDivMigRate;
+    static const double maximization_minDivMigRate;
+    static const double maximization_maxDiseaseRate;
+    static const double maximization_minDiseaseRate;
+    static const double maximization_maxRecRate;
+    static const double maximization_minRecRate;
+    static const double maximization_maxGrowRate;
+    static const double maximization_minGrowRate;
+    static const double maximization_maxLSelectCoeff;
+    static const double maximization_minLSelectCoeff;
+    static const double maximization_minGammaOverRegions;
+    static const double maximization_maxGammaOverRegions; // equals alpha,
+    // which equals the shape parameter of a gamma distribution over regions
+    // of the background mutation rate
+    static const double maximization_minEpoch;
+    static const double maximization_maxEpoch;
+
+    // arrangers
+    static const double dropArrangerTiming;
+    static const double sizeArrangerTiming;
+    static const double haplotypeArrangerTiming;
+    static const double probhapArrangerTiming;
+    static const double bayesianArrangerTiming;
+
+    // temperature
+    static const long temperatureInterval;
+    static const bool useAdaptiveTemperatures;
+    static const double minTemperature;
+    static const double secondTemperature;
+
+    // chains
+    static const long initial;
+    static const long initNChains;
+    static const long initNSamples;
+    static const long initInterval;
+    static const long initDiscard;
+
+    static const long final;
+    static const long finalNChains;
+    static const long finalNSamples;
+    static const long finalInterval;
+    static const long finalDiscard;
+    // replicates
+    static const long replicates;
+    static const long geyeriters;
+
+    //Max num heated chains.
+    static const long maxNumHeatedChains;
+
+    // error conditions
+    static const long tooManyDenovo;  // we give up; we can't make a denovo tree
+
+    // default user params for profiling
+    static const bool doProfile;
+    static const proftype profileType;
+
+    // default group parameter status
+    static const pstatus groupPstat;
+
+    //Parameters for profiling estimation
+    static const double highvalTheta;
+    static const double lowvalTheta;
+    static const double highmultTheta;
+    static const double lowmultTheta;
+
+    static const double highvalMig;
+    static const double lowvalMig;
+    static const double highmultMig;
+    static const double lowmultMig;
+
+    static const double highvalDisease;
+    static const double lowvalDisease;
+    static const double highmultDisease;
+    static const double lowmultDisease;
+
+    static const double highvalRec;
+    static const double lowvalRec;
+    static const double highmultRec;
+    static const double lowmultRec;
+
+    static const double highvalGrowth;
+    static const double lowvalGrowth;
+    static const double highmultGrowth;
+    static const double lowmultGrowth;
+
+    static const double highvalLSelect;
+    static const double lowvalLSelect;
+    static const double highmultLSelect;
+    static const double lowmultLSelect;
+
+    static const double highvalGammaOverRegions;
+    static const double lowvalGammaOverRegions;
+    static const double highmultGammaOverRegions;
+    static const double lowmultGammaOverRegions; // equals alpha, which equals
+    // the shape parameter of a gamma distribution over regions
+    // of the background mutation rate
+
+    static const double highvalEpoch;
+    static const double lowvalEpoch;
+    static const double highmultEpoch;
+    static const double lowmultEpoch;
+
+    // defaults for datamodels
+    static const long nucleotideBins;
+    // default number of bins for allelic and microsat stepwise models
+    static const long bins;
+    // default number of bins for brownian model
+    static const long brownianBins;
+    // default maximum separation (functionally) allowed between any
+    // two alleles in a microsat stepwise model
+    static const long threshhold;
+    static const long step_allowance;
+    static const long mixedks_allowance;
+
+    static const string startMethod;
+
+    // default values for userparams
+    static const string curvefileprefix;
+    static const string mapfileprefix;
+    static const string reclocfileprefix;
+    static const string tracefileprefix;
+    static const string newicktreefileprefix;
+#ifdef LAMARC_QA_TREE_DUMP
+    static const string argfileprefix;
+#endif
+
+    static const string datafilename;
+    static const string profileprefix;
+    static const string resultsfilename;
+    static const string treesuminfilename;
+    static const string treesumoutfilename;
+    static const string xmloutfilename;
+    static const string xmlreportfilename;
+
+    static const verbosity_type verbosity;
+    static const verbosity_type progress;
+
+    static const time_t programstarttime;
+    static const long randomseed;
+    static const bool hasoldrandomseed;
+
+    static const bool plotpost;
+    static const bool readsumfile;
+    static const bool useoldrandomseed;
+    static const bool usesystemclock;
+    static const bool writecurvefiles;
+    static const bool writereclocfiles;
+    static const bool writetracefiles;
+    static const bool writenewicktreefiles;
+#ifdef LAMARC_QA_TREE_DUMP
+    static const bool writeargfiles;
+    static const bool writemanyargs;
+#endif // LAMARC_QA_TREE_DUMP
+    static const bool writesumfile;
+
+    // default values for regions
+    static const double effpopsize;
+
+    // default values for data models
+
+    static const double       minLegalFrequency;
+
+    static const ModelTypeVec1d allDataModels();
+    static const model_type   dataModelType;
+    static const bool         doNormalize;
+    static const double       autoCorrelation;
+    static const double       ttratio;
+    static const bool         calcFreqsFromData;
+    static const double       categoryProbability;
+    static const DoubleVec1d  categoryProbabilities();
+    static const double       categoryRate;
+    static const double       categoryRateMultiple;
+    static const DoubleVec1d  categoryRates();
+    static const DoubleVec1d  chainTemperatures();
+    static const double       baseFrequencyA;
+    static const double       baseFrequencyC;
+    static const double       baseFrequencyG;
+    static const double       baseFrequencyT;
+    static const double       gtrRateAC;
+    static const double       gtrRateAG;
+    static const double       gtrRateAT;
+    static const double       gtrRateCG;
+    static const double       gtrRateCT;
+    static const double       gtrRateGT;
+    static const double       relativeMuRate;
+    static const double       KS_alpha;
+    static const bool         optimize_KS_alpha;
+    static const long         numCategories;
+
+    static const long         maxNumCategories;
+
+    static const double       per_base_error_rate;
+
+    // default stick-joint length
+    static const double       perThetaChange;
+
+    // default maximum number of "semi-unique" concurrent branches in
+    // all trees
+    static const long         numSemiUniqueBranches;
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+    static const long         numDenovos;
+#endif // LAMARC_QA_SINGLE_DENOVOS
+};
+
+#endif // DEFAULTS_H
+
+//____________________________________________________________________________________
diff --git a/src/control/definitions.h b/src/control/definitions.h
new file mode 100644
index 0000000..d22d31f
--- /dev/null
+++ b/src/control/definitions.h
@@ -0,0 +1,79 @@
+// $Id: definitions.h,v 1.21 2011/04/23 02:02:48 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef DEFINITIONS_H
+#define DEFINITIONS_H
+
+#include <cmath>                        // for exp() or log()
+#include <limits>
+
+// DEFINED stuff
+//
+// Peter Beerli
+//
+#ifndef DBL_MAX
+//#define DBL_MAX (static_cast<double>(1.7976931348623157e308))
+const double DBL_MAX = std::numeric_limits<double>::max();
+#endif
+
+#ifndef EPSILON
+#define EPSILON         0.000001
+#endif
+
+const double GROWTH_EPSILON = 0.0001; // trigger special code when |g| is less than this
+const double LOGISTIC_SELECTION_COEFFICIENT_EPSILON = 0.0001; // ditto
+
+#ifndef DBL_EPSILON
+#define DBL_EPSILON 2.2204460492503131e-14
+#endif
+
+#ifndef NEGMAX
+//#define NEGMAX -DBL_MAX
+const double NEGMAX = -DBL_MAX;
+#endif
+
+#ifndef DBL_BIG
+//#define DBL_BIG (static_cast<double>(1.0e300))
+const double DBL_BIG = pow(10.0, floor(0.97*log10(DBL_MAX)));
+#endif
+
+#ifndef EXPMAX
+const double EXPMAX = log(DBL_BIG); //about 688 on our machines
+#endif
+
+#ifndef EXPMIN //We want roughly -EXPMAX; -687 on our machines.
+//We actually only want a different definition for Microsoft VC++, but the only
+// way to get that is by testing _MSC_VER, which Metrowerks also defines
+// sometimes.  So we have to define EXPMIN in three places; twice the same way.
+// (we need a different version for VC++ because there is no min())
+#ifdef __MWERKS__
+const double EXPMIN = 0.97*log(std::numeric_limits<double>::min());
+#elif defined _MSC_VER
+const double EXPMIN = -0.97*EXPMAX;
+#else
+const double EXPMIN = 0.97*log(std::numeric_limits<double>::min());
+#endif
+#endif
+
+const double EXP_OF_EXPMAX = exp(EXPMAX);
+const double EXP_OF_EXPMIN = exp(EXPMIN);
+
+#ifndef MAXLONG
+//#define MAXLONG (static_cast<long>(32000))
+const long MAXLONG = std::numeric_limits<long>::max();
+#endif
+
+#include "conf.h"
+
+#define MAX(A,B) (((A) > (B)) ? (A) : (B))
+
+#endif // DEFINITIONS_H
+
+//____________________________________________________________________________________
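
EXPMAX and EXPMIN above bound the arguments for which exp() stays finite and nonzero.
A sketch of the kind of clamp these constants make possible; the SafeExp helper is
hypothetical and not taken from the LAMARC sources:

    #include <cmath>
    #include <limits>

    // Recomputed locally so the sketch is self-contained; the expressions mirror
    // the DBL_BIG / EXPMAX / EXPMIN definitions in definitions.h above.
    const double kDblBig = std::pow(10.0, std::floor(0.97 * std::log10(std::numeric_limits<double>::max())));
    const double kExpMax = std::log(kDblBig);                                    // roughly +688
    const double kExpMin = 0.97 * std::log(std::numeric_limits<double>::min());  // roughly -687

    // Exponentiate a log-scale quantity without overflowing to infinity
    // or underflowing all the way to zero.
    double SafeExp(double x)
    {
        if (x > kExpMax) x = kExpMax;
        if (x < kExpMin) x = kExpMin;
        return std::exp(x);
    }

    int main()
    {
        return (SafeExp(10000.0) < std::numeric_limits<double>::infinity()) ? 0 : 1;
    }
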
diff --git a/src/control/dynatracer.cpp b/src/control/dynatracer.cpp
new file mode 100644
index 0000000..754d9b2
--- /dev/null
+++ b/src/control/dynatracer.cpp
@@ -0,0 +1,592 @@
+// $Id: dynatracer.cpp,v 1.11 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+ * Copyright 2009-2010 Bob Giansiracusa, Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+ *
+ * This software is distributed free of charge for non-commercial use
+ * and is copyrighted.  Of course, we do not guarantee that the software
+ * works, and are not responsible for any damage you may cause or have.
+ */
+
+// The name of this file should be changed to "dynameter.cpp", but CVS makes that *so* difficult ...
+
+//------------------------------------------------------------------------------------
+
+#include <iomanip>
+#include <iostream>
+#include "timex.h"                      // for GetTime()
+
+#include "local_build.h"
+#include "dynatracer.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+// If DYNAMETER_LEVEL is now defined, compile the rest of the file.  But if not, skip to the end.
+// This file is included in the build, but when DYNAMETER_LEVEL is not defined, the rest is skipped
+// (except for the definition of rdtsc() at the very end).
+#ifdef DYNAMETER_LEVEL
+
+//------------------------------------------------------------------------------------
+// Experimental overloaded definitions for new and delete.
+
+#ifdef MEMORYTRACE                      // Under construction - not running in current distribution.
+
+void * operator new (size_t size)
+{
+    void * ptr = malloc(size);
+#ifdef MEMORYPRINT
+    printf("operator new: %lu bytes at %p\n", size, ptr);
+#endif // MEMORYPRINT
+    if(!ptr)
+        printf("operator new: OUT OF MEMORY\n");
+    return ptr;
+}
+
+//------------------------------------------------------------------------------------
+
+void * operator new [] (size_t size)
+{
+    void * ptr = malloc(size);
+#ifdef MEMORYPRINT
+    printf("operator new []: %lu bytes at %p\n", size, ptr);
+#endif // MEMORYPRINT
+    if(!ptr)
+    {
+        printf("operator new []: OUT OF MEMORY\n");
+    }
+    return ptr;
+}
+
+//------------------------------------------------------------------------------------
+
+void operator delete (void * ptr)
+{
+#ifdef MEMORYPRINT
+    printf("operator delete: %p\n", ptr);
+#endif // MEMORYPRINT
+    if(ptr)
+        free(ptr);
+    else
+        printf("operator delete: NULL POINTER\n");
+}
+
+//------------------------------------------------------------------------------------
+
+void operator delete [] (void * ptr)
+{
+#ifdef MEMORYPRINT
+    printf("operator delete []: %p\n", ptr);
+#endif // MEMORYPRINT
+    if(ptr)
+        free(ptr);
+    else
+        printf("operator delete []: NULL POINTER\n");
+}
+
+//------------------------------------------------------------------------------------
+
+void operator delete (void * ptr, size_t size)
+{
+#ifdef MEMORYPRINT
+    printf("operator delete: %lu bytes at %p\n", size, ptr);
+#endif // MEMORYPRINT
+    if(ptr)
+        free(ptr);
+    else
+        printf("operator delete: NULL POINTER\n");
+}
+
+//------------------------------------------------------------------------------------
+
+void operator delete [] (void * ptr, size_t size)
+{
+#ifdef MEMORYPRINT
+    printf("operator delete []: %lu bytes at %p\n", size, ptr);
+#endif // MEMORYPRINT
+    if(ptr)
+        free(ptr);
+    else
+        printf("operator delete []: NULL POINTER\n");
+}
+
+//------------------------------------------------------------------------------------
+
+void * operator new (size_t size, char * file, unsigned int line)
+{
+    void * ptr = malloc(size);
+    printf("operator NEW: %lu bytes at %p, File: %s, Line: %u\n", size, ptr, file, line);
+    if(!ptr)
+        printf("operator NEW: OUT OF MEMORY\n");
+    return ptr;
+}
+
+//------------------------------------------------------------------------------------
+
+void * operator new [] (size_t size, char * file, unsigned int line)
+{
+    void * ptr = malloc(size);
+    printf("operator NEW []: %lu bytes at %p, File: %s, Line: %u\n", size, ptr, file, line);
+    if(!ptr)
+        printf("operator NEW []: OUT OF MEMORY\n");
+    return ptr;
+}
+
+#endif // MEMORYTRACE - End of experimental code - not running in current distribution.
+
+//------------------------------------------------------------------------------------
+// Constructor and expansion of StartDynameter().
+
+LocalDynameter::LocalDynameter(const char * const srcFunctionName, // Supplied by __PRETTY_FUNCTION__ macro
+                               const char * const srcFileName,     // Supplied by __FILE__ macro
+                               const unsigned int srcLineNumber,   // Supplied by __LINE__ macro
+                               // Next argument supplied by macro expansion as: DYNACOUNTER_START + __COUNTER__ .
+                               // DYNACOUNTER_START must be defined at the beginning of each file that uses it
+                               // to the starting range of indices for that file to use.  Then __COUNTER__ increments
+                               // automatically through the range.  Count the number of calls in each file so the next
+                               // file to use tracing/metering can initialize its range to start with the next value
+                               // (<last-in-current-file> + 1).
+                               const unsigned int globalDynameterIdx)
+    :
+    m_fcnEntryTime(rdtsc()),            // Clock count at initial function entry.
+    m_srcFunctionName(srcFunctionName), // Function name decorated with class and argument signature.
+    m_srcFileName(srcFileName),         // Source file name.
+#if (DYNAMETER_LEVEL >= 3u)
+    m_tracePrintoutOK(true),            // Controls suppression of trace printout (elision).
+#endif // (DYNAMETER_LEVEL >= 3u)
+    m_calleeRuntime(0u),                // Accumulated runtime of callees.
+    m_globalDynameterPtr(NULL),         // Pointer to GlobalDynameter object in GlobalDynameterArray.
+    m_srcLineNumber(srcLineNumber)      // Source line number.
+{
+    // Clocks since last clock tracing/metering sample before current call.
+    const unsigned long long int sinceLastBeforeCall = m_fcnEntryTime - s_lastClock;
+
+    if(s_currentMeterPtr)
+    {
+        // If call to this function was made from another - ie, not the top-level invocation -
+        // set my "parent Meter pointer" to that parent's LocalDynameter object and bump trace level.
+        m_parentMeterPtr = s_currentMeterPtr;
+#if (DYNAMETER_LEVEL >= 3u)
+        m_tracePrintoutOK = s_currentMeterPtr->m_tracePrintoutOK;
+        m_traceLevel = s_currentMeterPtr->m_traceLevel + 1u;
+#endif // (DYNAMETER_LEVEL >= 3u)
+    }
+    else
+    {
+        // If this is first call - ie, at toplevel - set a few variables.
+        m_parentMeterPtr = NULL;
+
+#if (DYNAMETER_LEVEL >= 3u)
+        m_traceLevel = 0u;
+        cout << "Trace Count Limit (max num calls to same function): ";
+        cin >> s_traceCountLimit;
+        cout << endl;
+#endif // (DYNAMETER_LEVEL >= 3u)
+
+        // Set slots of GlobalDynameterArray to NULL here, in case implicit initialization doesn't.
+        for(unsigned int fcn_idx = 0u; fcn_idx < DYNAMETER_ARRAYSIZE; ++fcn_idx)
+        {
+            s_GlobalDynameterArray[fcn_idx] = NULL;
+        }
+
+        // Print opening trace messages no matter the range to be traced dynamically.
+        s_traceOut.open("TracePrint.out", ios::out);
+
+        s_traceOut <<     "Tracing output started:    " << PrintTime(GetTime(), "%c")
+                   << "\n\nDynameter Version:         " << DYNAMETER_LEVEL
+#if (DYNAMETER_LEVEL >= 3u)
+                   <<   "\nTrace max funcall limit:   " << s_traceCountLimit
+#endif // (DYNAMETER_LEVEL >= 3u)
+                   << "\n\nSize of int:               " << sizeof(int)
+                   <<   "\nSize of long int:          " << sizeof(long int)
+                   <<   "\nSize of long long int:     " << sizeof(long long int)
+                   <<   "\nSize of float:             " << sizeof(float)
+                   <<   "\nSize of double:            " << sizeof(double)
+                   <<   "\nSize of long double:       " << sizeof(long double)
+                   <<   "\nSize of int pointer:       " << sizeof(int *)
+                   <<   "\nSize of void pointer:      " << sizeof(void *) << "\n\n";
+    }
+
+    // And set the "global" current Meter Object pointer to ME (ie, I'M the function currently running).
+    s_currentMeterPtr = this;
+
+    DebugAssert(globalDynameterIdx < DYNAMETER_ARRAYSIZE, "GlobalDynameterArray index overflow");
+    m_globalDynameterPtr = s_GlobalDynameterArray[globalDynameterIdx];
+
+    if(m_globalDynameterPtr)
+    {
+        // If already allocated (second or later call), update these data members.
+        if(sinceLastBeforeCall > m_globalDynameterPtr->m_maxSinceLastBeforeCall)
+            m_globalDynameterPtr->m_maxSinceLastBeforeCall = sinceLastBeforeCall;
+        ++m_globalDynameterPtr->m_selfNumberOfCalls;
+    }
+    else
+    {
+        // If GlobalDynameter object has not yet been allocated and entered into GlobalDynameterArray, do so now.
+    // Note that these objects may be entered into the array in non-sequential index order,
+        // which is why we must keep track of total number as well as NULL/non-NULLness of each entry.
+        // Increment global count of GlobalDynameter objects allocated and use value as fcn's order-of-first-call.
+        m_globalDynameterPtr = new GlobalDynameter(srcFunctionName, srcFileName, ++s_GlobalDynameterCount,
+                                                   0u, 0u, sinceLastBeforeCall);
+        s_GlobalDynameterArray[globalDynameterIdx] = m_globalDynameterPtr;
+    }
+
+    // Increment the total number of traced calls so far - used to print total count at end and
+    // (more importantly) to restrict printing of dynamic tracing/metering information to a subset of all traced calls.
+    ++s_totalNumberOfCalls;
+
+#if (DYNAMETER_LEVEL >= 3u)
+    if(m_tracePrintoutOK && (m_globalDynameterPtr->m_selfNumberOfCalls <= s_traceCountLimit))
+    {
+        if(s_ellipsisCounter > 0u)
+            print_ellipsis(s_traceOut);
+
+        print_indentation(s_traceOut);
+        s_traceOut << ">> " << srcFunctionName << " in " << m_srcFileName << ", Line " << srcLineNumber << '\n';
+        print_indentation(s_traceOut);
+
+        s_traceOut << "|  " << m_fcnEntryTime - s_startClock << " clocks now.  "
+                   << sinceLastBeforeCall << " clocks before.  "
+                   << m_globalDynameterPtr->m_selfNumberOfCalls << " self, "
+                   << s_totalNumberOfCalls << " total.\n";
+
+        print_indentation(s_traceOut);
+        s_traceOut << "|\n";
+    }
+    else
+    {
+        ++s_ellipsisCounter;
+        ++s_printoutsElided;
+        m_tracePrintoutOK = false;
+    }
+#endif // (DYNAMETER_LEVEL >= 3u)
+
+    s_lastClock = m_fcnEntryTime;
+    if(sinceLastBeforeCall > s_maxSinceLastBeforeCall)
+        s_maxSinceLastBeforeCall = sinceLastBeforeCall;
+}
+
+//------------------------------------------------------------------------------------
+// Destructor.  Automatically provides functionality of trace-printing on leaving function.
+
+LocalDynameter::~LocalDynameter()
+{
+    // Set on each call (ie, updated to current time).
+    const unsigned long long int nowClock = rdtsc();
+    const unsigned long long int myTotalRuntime = nowClock - m_fcnEntryTime;
+    const unsigned long long int mySelfRuntime = myTotalRuntime - m_calleeRuntime;
+
+#if (DYNAMETER_LEVEL >= 3u)
+    if(m_tracePrintoutOK && (m_globalDynameterPtr->m_selfNumberOfCalls <= s_traceCountLimit))
+    {
+        if(s_ellipsisCounter > 0u)
+        {
+            // The trace printout elided just before this call was probably indented one level deeper.
+            ++m_traceLevel;
+            print_ellipsis(s_traceOut);
+            --m_traceLevel;
+        }
+
+        print_indentation(s_traceOut);
+        // Member variables used since all values printed here must be stored; destructor takes no arguments.
+        s_traceOut << "<- " << m_srcFunctionName << " in " << m_srcFileName << ", Line " << m_srcLineNumber << '\n';
+        print_indentation(s_traceOut);
+
+        s_traceOut << "   "
+                   << nowClock - s_startClock << " clocks now.  "
+                   << nowClock - s_lastClock << " clocks before.  "
+                   << m_calleeRuntime << " clocks callees.  "
+                   << mySelfRuntime << " clocks self.  "
+                   << myTotalRuntime << " clocks total.  "
+                   << m_globalDynameterPtr->m_selfNumberOfCalls << " self, "
+                   << s_totalNumberOfCalls << " total.\n";
+
+        print_indentation(s_traceOut);
+        s_traceOut << '\n';
+    }
+    else
+    {
+        ++s_ellipsisCounter;
+        ++s_printoutsElided;
+    }
+#endif // (DYNAMETER_LEVEL >= 3u)
+
+    s_lastClock = nowClock;
+
+    // Insert the information accumulated in this dynamic object into the permanent structure holding global data.
+    // The GlobalDynameter object is assumed to exist (m_globalDynameterPtr is non-NULL) because otherwise 'new' would
+    // have thrown a 'bad_alloc' exception.
+    m_globalDynameterPtr->m_calleeRuntime += m_calleeRuntime;
+    m_globalDynameterPtr->m_selfRuntime += mySelfRuntime;
+
+    // Pop the metering tool stack: set current object pointer to my parent.
+    s_currentMeterPtr = m_parentMeterPtr;
+
+    if(m_parentMeterPtr)
+    {
+        // If parent is non-NULL, increment his kid's runtime by my own.
+        m_parentMeterPtr->m_calleeRuntime += myTotalRuntime;
+    }
+    else
+    {
+        // If we've popped back to top-level, ie, the toplevel destructor, write trailer and close file.
+        // Print closing trace messages no matter the range to be traced dynamically.
+        // First write the summary data stored in the GlobalDynameterArray objects.
+        s_traceOut << "\nTrace information on "
+                   << s_GlobalDynameterCount
+                   << " functions traced of "
+                   << DYNAMETER_ARRAYSIZE
+                   << " function slots available.\n";
+
+        // "Squish" all populated entries down to contiguous set at "low" end of array.
+        // All functions will have slots reserved at compile time for themselves, but
+        // functions not called during this run will contain empty slots in this array.
+        unsigned int target = 0u, source = 0u;
+        while(source < DYNAMETER_ARRAYSIZE)
+        {
+            if(s_GlobalDynameterArray[source]) // Source populated.
+            {
+                if(s_GlobalDynameterArray[target]) // Source populated; Target populated.
+                {
+                    if(source == target) // If tracking same slot, bump both indices.
+                        ++source;        // Otherwise bump only target.
+                    ++target;
+                }
+                else                    // Source populated; Target empty.
+                {
+                    s_GlobalDynameterArray[target] = s_GlobalDynameterArray[source]; // Move source into target.
+                    s_GlobalDynameterArray[source] = NULL;       // Clear the vacated source slot.
+                    ++target;                                    // This pair now OK; bump both indices.
+                    ++source;
+                }
+            }
+            else                        // Source empty; target may be empty or populated.
+            {
+                ++source;               // Bump only source (look for populated slot).
+            }
+            DebugAssert(target <= source, "Bug in GlobalDynameterArray squisher");
+        }
+        DebugAssert((source == DYNAMETER_ARRAYSIZE) && (target == s_GlobalDynameterCount),
+                    "Bug in GlobalDynameterArray compactor");
+
+        TablePrinter("Traced functions unordered:",
+                     "FcnsUnsorted.txt");
+
+        sort(s_GlobalDynameterArray, s_GlobalDynameterArray + s_GlobalDynameterCount, NumSelfCalls);
+        TablePrinter("Traced functions ordered by number of calls:",
+                     "FcnsNumCalls.txt");
+
+        sort(s_GlobalDynameterArray, s_GlobalDynameterArray + s_GlobalDynameterCount, MaxSinceLastBefore);
+        TablePrinter("Traced functions ordered by max time since last before call:",
+                     "FcnsMaxTimeBefore.txt");
+
+        sort(s_GlobalDynameterArray, s_GlobalDynameterArray + s_GlobalDynameterCount, CalleeRuntimePerCall);
+        TablePrinter("Traced functions ordered by callee runtime per call:",
+                     "FcnsCalleeTimePerCall.txt");
+
+        sort(s_GlobalDynameterArray, s_GlobalDynameterArray + s_GlobalDynameterCount, CalleeRuntimeAggregate);
+        TablePrinter("Traced functions ordered by callee runtime, aggregate:",
+                     "FcnsCalleeTimeAggregate.txt");
+
+        sort(s_GlobalDynameterArray, s_GlobalDynameterArray + s_GlobalDynameterCount, SelfRuntimePerCall);
+        TablePrinter("Traced functions ordered by self runtime (excluding callees) per call:",
+                     "FcnsSelfTimePerCall.txt");
+
+        sort(s_GlobalDynameterArray, s_GlobalDynameterArray + s_GlobalDynameterCount, SelfRuntimeAggregate);
+        TablePrinter("Traced functions ordered by self runtime (excluding callees), aggregate:",
+                     "FcnsSelfTimeAggregate.txt");
+
+        sort(s_GlobalDynameterArray, s_GlobalDynameterArray + s_GlobalDynameterCount, TotalRuntimePerCall);
+        TablePrinter("Traced functions ordered by total runtime (self plus callees) per call:",
+                     "FcnsTotalTimePerCall.txt");
+
+        sort(s_GlobalDynameterArray, s_GlobalDynameterArray + s_GlobalDynameterCount, TotalRuntimeAggregate);
+        TablePrinter("Traced functions ordered by total runtime (self plus callees), aggregate:",
+                     "FcnsTotalTimeAggregate.txt");
+
+        s_traceOut <<   "\nTracing output done:       " << PrintTime(GetTime(), "%c")
+                   <<   "\nTotal number of calls:     " << s_totalNumberOfCalls
+#if (DYNAMETER_LEVEL >= 3u)
+                   <<   "\nTrace max funcall limit:   " << s_traceCountLimit
+                   <<   "\nTrace print calls elided:  " << s_printoutsElided
+#endif // (DYNAMETER_LEVEL >= 3u)
+                   << "\n\nDynameter Version:         " << DYNAMETER_LEVEL
+                   << "\n\nSize of int:               " << sizeof(int)
+                   <<   "\nSize of long int:          " << sizeof(long int)
+                   <<   "\nSize of long long int:     " << sizeof(long long int)
+                   <<   "\nSize of float:             " << sizeof(float)
+                   <<   "\nSize of double:            " << sizeof(double)
+                   <<   "\nSize of long double:       " << sizeof(long double)
+                   <<   "\nSize of int pointer:       " << sizeof(int *)
+                   <<   "\nSize of void pointer:      " << sizeof(void *)
+                   << "\n\nMax since last trace:      "
+                   << LocalDynameter::s_maxSinceLastBeforeCall << " clocks (over all function calls).\n";
+
+        s_traceOut.close();
+    }
+}
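
As an aside, the compaction loop above shifts the populated entries to the front of the array while preserving their order, which is the same effect std::remove gives for a pointer array.  A minimal sketch of that equivalence, assuming <algorithm> is available (the file already calls sort); unlike the hand-rolled loop, std::remove leaves the vacated tail slots unspecified rather than NULL:

    // Compact all non-NULL pointers to the front of the array, preserving their relative order.
    GlobalDynameter ** newEnd =
        std::remove(s_GlobalDynameterArray,
                    s_GlobalDynameterArray + DYNAMETER_ARRAYSIZE,
                    static_cast<GlobalDynameter *>(NULL));
    // The populated prefix should match the number of traced functions:
    // newEnd - s_GlobalDynameterArray == s_GlobalDynameterCount.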
+
+//------------------------------------------------------------------------------------
+// Private utility member function.
+
+#if (DYNAMETER_LEVEL >= 3u)
+
+void LocalDynameter::print_indentation(ofstream & stream)
+{
+    stream << ' ';
+    for(unsigned int trace_idx = 0u; trace_idx < m_traceLevel; ++trace_idx)
+        stream << "| ";
+}
+
+#endif // (DYNAMETER_LEVEL >= 3u)
+
+//------------------------------------------------------------------------------------
+// Private utility member function.
+
+#if (DYNAMETER_LEVEL >= 3u)
+
+void LocalDynameter::print_ellipsis(ofstream & stream)
+{
+    print_indentation(stream);
+    stream << "... " << s_ellipsisCounter << " trace printouts elided.\n";
+    print_indentation(stream);
+    stream << '\n';
+    s_ellipsisCounter = 0u;
+}
+
+#endif // (DYNAMETER_LEVEL >= 3u)
+
+//------------------------------------------------------------------------------------
+// Private utility member function.
+
+void LocalDynameter::TablePrinter(const string heading, const string filename)
+{
+    ofstream os;
+    os.open(filename.c_str(), ios::out);
+    os << heading << '\n';
+
+    for(unsigned int fcn_idx = 0u; fcn_idx < s_GlobalDynameterCount; ++fcn_idx)
+    {
+        GlobalDynameter * pGlobalDynameter = s_GlobalDynameterArray[fcn_idx];
+        DebugAssert(pGlobalDynameter, "Bug in GlobalDynameterArray display");
+
+        const unsigned long long int selfNumCalls = pGlobalDynameter->m_selfNumberOfCalls;
+        const unsigned long long int calleeRuntime = pGlobalDynameter->m_calleeRuntime;
+        const unsigned long long int selfRuntime = pGlobalDynameter->m_selfRuntime;
+        const unsigned long long int maxSinceLastBeforeCall = pGlobalDynameter->m_maxSinceLastBeforeCall;
+
+        os << '\n' << fcn_idx
+           << ":\t" << pGlobalDynameter->m_srcFunctionName
+           << " in file " << pGlobalDynameter->m_srcFileName
+           << "\n\t" << selfNumCalls << " calls (order "
+           << pGlobalDynameter->m_ordinalCount << ")  "
+           << maxSinceLastBeforeCall << " clocks before.  "
+           << calleeRuntime / selfNumCalls << '/' << calleeRuntime << " callees.  "
+           << selfRuntime / selfNumCalls << '/' << selfRuntime << " self.  "
+           << (selfRuntime + calleeRuntime) / selfNumCalls << '/' << selfRuntime + calleeRuntime << " total.\n";
+    }
+
+    os.close();
+}
+
+//------------------------------------------------------------------------------------
+// Constructor for GlobalDynameter called by LocalDynameter constructor to store per-function results.
+
+GlobalDynameter::GlobalDynameter(const char * const srcFunctionName,
+                                 const char * const srcFileName,
+                                 const unsigned int ordinalCount,
+                                 const unsigned long long int calleeRuntime,
+                                 const unsigned long long int selfRuntime,
+                                 const unsigned long long int maxSinceLastBeforeCall)
+    :
+    m_srcFunctionName(srcFunctionName),               // Function name decorated with class and argument signature.
+    m_srcFileName(srcFileName),                       // Source file name.
+    m_ordinalCount(ordinalCount),                     // Ordinal count of this function's call of all traced.
+    m_calleeRuntime(calleeRuntime),                   // Accumulated runtime of callees.
+    m_selfRuntime(selfRuntime),                       // Accumulated runtime of this function (not including callees).
+    m_maxSinceLastBeforeCall(maxSinceLastBeforeCall), // Max time since last timing measurement (CPU cycles).
+    m_selfNumberOfCalls(1u)                           // Count of number of times this function called.
+{ }
+
+//------------------------------------------------------------------------------------
+// Sorts by number of self calls (decreasing order).
+
+bool NumSelfCalls(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr)
+{
+    return x_ptr->m_selfNumberOfCalls > y_ptr->m_selfNumberOfCalls;
+}
+
+//------------------------------------------------------------------------------------
+// Sorts by max time since last trace probe before current call (decreasing order).
+
+bool MaxSinceLastBefore(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr)
+{
+    return x_ptr->m_maxSinceLastBeforeCall > y_ptr->m_maxSinceLastBeforeCall;
+}
+
+//------------------------------------------------------------------------------------
+// Sorts by callee runtime per call (total over all callees) (decreasing order).
+
+bool CalleeRuntimePerCall(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr)
+{
+    return (x_ptr->m_calleeRuntime / x_ptr->m_selfNumberOfCalls)
+        > (y_ptr->m_calleeRuntime / y_ptr->m_selfNumberOfCalls);
+}
+
+//------------------------------------------------------------------------------------
+// Sorts by callee runtime aggregate (summed over all calls) (decreasing order).
+
+bool CalleeRuntimeAggregate(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr)
+{
+    return x_ptr->m_calleeRuntime > y_ptr->m_calleeRuntime;
+}
+
+//------------------------------------------------------------------------------------
+// Sorts by self runtime per call (excluding all callees) (decreasing order).
+
+bool SelfRuntimePerCall(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr)
+{
+    return (x_ptr->m_selfRuntime / x_ptr->m_selfNumberOfCalls) > (y_ptr->m_selfRuntime / y_ptr->m_selfNumberOfCalls);
+}
+
+//------------------------------------------------------------------------------------
+// Sorts by self runtime aggregate (excluding callees, summed over all calls) (decreasing order).
+
+bool SelfRuntimeAggregate(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr)
+{
+    return x_ptr->m_selfRuntime > y_ptr->m_selfRuntime;
+}
+
+//------------------------------------------------------------------------------------
+// Sorts by total runtime per call (sum of self plus callee runtime) (decreasing order).
+
+bool TotalRuntimePerCall(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr)
+{
+    return
+        ((x_ptr->m_selfRuntime + x_ptr->m_calleeRuntime) / x_ptr->m_selfNumberOfCalls)
+        >
+        ((y_ptr->m_selfRuntime + y_ptr->m_calleeRuntime) / y_ptr->m_selfNumberOfCalls);
+}
+
+//------------------------------------------------------------------------------------
+// Sorts by total runtime aggregate (self plus callee, summed over all calls) (decreasing order).
+
+bool TotalRuntimeAggregate(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr)
+{
+    return (x_ptr->m_selfRuntime + x_ptr->m_calleeRuntime) > (y_ptr->m_selfRuntime + y_ptr->m_calleeRuntime);
+}
+
+//------------------------------------------------------------------------------------
+// CPU cycle counter used by tracing/metering timing functions.
+
+unsigned long long int rdtsc()
+{
+    unsigned int a, d;
+    __asm__ volatile("rdtsc" : "=a" (a), "=d" (d));
+    return (static_cast<unsigned long long int>(a)) | ((static_cast<unsigned long long int>(d)) << 32);
+}
+
+#endif // DYNAMETER_LEVEL
+
+//____________________________________________________________________________________
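
The rdtsc() reader above is x86-specific inline assembly.  On compilers that provide the <x86intrin.h> header (GCC and Clang on x86/x86-64), the same 64-bit time-stamp counter can be read through the __rdtsc() intrinsic; a hedged sketch, not part of the upstream build:

    #include <x86intrin.h>   // __rdtsc(); GCC/Clang on x86/x86-64 only

    // Same contract as the hand-rolled rdtsc() above: return the CPU's
    // time-stamp counter as a 64-bit cycle count.
    unsigned long long int rdtsc_intrinsic()
    {
        return __rdtsc();
    }

Either way, RDTSC is not a serializing instruction, so very short intervals can be blurred by out-of-order execution.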
diff --git a/src/control/dynatracer.h b/src/control/dynatracer.h
new file mode 100644
index 0000000..b890bab
--- /dev/null
+++ b/src/control/dynatracer.h
@@ -0,0 +1,366 @@
+// $Id: dynatracer.h,v 1.11 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+ * Copyright 2009-2012 Bob Giansiracusa, Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+ *
+ * This software is distributed free of charge for non-commercial use
+ * and is copyrighted.  Of course, we do not guarantee that the software
+ * works, and are not responsible for any damage you may cause or have.
+ */
+
+// The name of this file should be changed to "dynameter.h", but CVS makes that *so* difficult ...
+#ifndef DYNATRACER_H
+#define DYNATRACER_H
+
+//------------------------------------------------------------------------------------
+// "local_build.h" must be included before DYNAMETER_LEVEL is tested for definedness, because that header
+// defines the symbol.  It also defines some other compile-time symbols used to control expansion of the
+// debugging macros defined in this file.
+
+#include <cassert>                      // Required for assert(), used by the DebugAssert macros below.
+#include <iostream>
+#include "local_build.h"
+
+//------------------------------------------------------------------------------------
+
+#ifdef DYNAMETER_LEVEL
+#include <fstream>                      // Required to define std::ofstream.
+#include "dynacount.h"                  // Required to define DYNAMETER_ARRAYSIZE.
+
+//------------------------------------------------------------------------------------
+
+// Used in experimental code - not used in current distribution.
+// #define MEMORYTRACE
+// #define MEMORYPRINT
+
+//------------------------------------------------------------------------------------
+// Definition of experimental class for testing overloading of new and delete.
+
+#ifdef MEMORYTRACE                      // Under construction - not running in current distribution.
+
+class S
+{
+  private:
+    int i[100];
+  public:
+    S() { puts("S::S()"); }
+    ~S() { puts("S::~S()"); }
+};
+
+#endif // MEMORYTRACE - End of experimental code - not running in current distribution.
+
+//------------------------------------------------------------------------------------
+// Experimental test of overloading global and class new.
+
+#ifdef MEMORYTRACE                      // Under construction - not running in current distribution.
+
+#define NEW new(__FILE__, __LINE__)
+#define DELETE printf("delete: %s at %u\n", __FILE__, __LINE__) ; delete
+
+extern void * operator new (size_t size, char * file, unsigned int line);
+extern void * operator new [] (size_t size, char * file, unsigned int line);
+
+#endif // MEMORYTRACE - End of experimental code - not running in current distribution.
+
+//------------------------------------------------------------------------------------
+// Experimental test of overloading global and class delete.
+
+#ifdef MEMORYTRACE                      // Under construction - not running in current distribution.
+
+extern void operator delete (void * ptr);
+extern void operator delete [] (void * ptr);
+
+extern void operator delete (void * ptr, size_t size);
+extern void operator delete [] (void * ptr, size_t size);
+
+#endif // MEMORYTRACE - End of experimental code - not running in current distribution.
+
+//------------------------------------------------------------------------------------
+// Class which defines accumulation of non-dynamic tracing/metering information (total calls/timing, etc).
+
+class GlobalDynameter
+{
+    friend class LocalDynameter;
+
+    // Utility sorting functions.
+    friend bool NumSelfCalls(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+    friend bool MaxSinceLastBefore(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+    friend bool CalleeRuntimePerCall(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+    friend bool CalleeRuntimeAggregate(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+    friend bool SelfRuntimePerCall(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+    friend bool SelfRuntimeAggregate(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+    friend bool TotalRuntimePerCall(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+    friend bool TotalRuntimeAggregate(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+
+  private:
+    const char * const m_srcFunctionName;            // Function name decorated with class and argument signature.
+    const char * const m_srcFileName;                // Source file name.
+    const unsigned int m_ordinalCount;               // This function's order of first call (relative to all others).
+    unsigned long long int m_calleeRuntime;          // Accum runtime of functions called by this one (callees).
+    unsigned long long int m_selfRuntime;            // Accum runtime of this function (not including callees).
+    unsigned long long int m_maxSinceLastBeforeCall; // Max ticks since last metering tool access before current call.
+    unsigned long long int m_selfNumberOfCalls;      // Num traced calls to this function (not necessarily printed).
+
+    // Constructor for GlobalDynameter called by LocalDynameter constructor to store per-function results.
+    GlobalDynameter(const char * const srcFunctionName,
+                    const char * const srcFileName,
+                    const unsigned int ordinalCount,
+                    const unsigned long long int calleeRuntime,
+                    const unsigned long long int selfRuntime,
+                    const unsigned long long int maxSinceLastBeforeCall);
+
+    // Default constructor - should never get called.
+    GlobalDynameter();
+
+}; // class GlobalDynameter
+
+//------------------------------------------------------------------------------------
+// Class which defines dynamic tracing/metering activity (per-call information, dynamic call chain, etc).
+
+class LocalDynameter
+{
+  private:
+    const unsigned long long int m_fcnEntryTime; // Clock count at initial function entry.
+    const char * const m_srcFunctionName;        // Function name decorated with class and argument signature.
+    const char * const m_srcFileName;            // Source file name.
+#if (DYNAMETER_LEVEL >= 3u)
+    bool m_tracePrintoutOK;                 // Flag indicating state of trace printout suppression.
+    unsigned int m_traceLevel;              // "Local" value (per class object - ie, at given fcn level).
+#endif // (DYNAMETER_LEVEL >= 3u)
+    unsigned long long int m_calleeRuntime; // Accumulated runtime of functions called by this one (callees).
+    GlobalDynameter * m_globalDynameterPtr; // Pointer to GlobalDynameter object in GlobalDynameterArray.
+    LocalDynameter * m_parentMeterPtr;      // Pointer to my parent (or NULL).
+    unsigned int m_srcLineNumber;           // Source line number where constructor called.
+
+    // "Global" pointer to LocalDynameter object of function currently on top of run-time stack.
+    static LocalDynameter * s_currentMeterPtr;
+
+    // Accumulates max time from previous trace recording to current call, over all calls to all functions.
+    static unsigned long long int s_maxSinceLastBeforeCall;
+
+    // Total number of function calls traced so far (all functions).
+    static unsigned long long int s_totalNumberOfCalls;
+
+    static std::ofstream s_traceOut;    // File stream for tracing/metering output.
+
+    // Array of pointers to GlobalDynameter objects holding trace data.
+    static GlobalDynameter * s_GlobalDynameterArray[DYNAMETER_ARRAYSIZE];
+
+    // Number of GlobalDynameter objects populating above array - ie, number of functions traced so far this run.
+    // This is used as an ordinal count for GlobalDynameter objects (gives order of each function's first call).
+    static unsigned int s_GlobalDynameterCount;
+
+#if (DYNAMETER_LEVEL >= 3u)
+
+    // User-settable limit to depth of tracing based on number of calls to same function.
+    static unsigned int s_traceCountLimit;
+
+    // Flag/counter to control whether (and how) ellipsis indication is printed when dynamic tracing is suppressed.
+    static unsigned int s_ellipsisCounter;
+
+    // Count of total number of elided tracing printouts.
+    static unsigned int s_printoutsElided;
+
+    // Utility function.  Prints marks indicating level of function-call nesting.
+    void print_indentation(std::ofstream & stream);
+
+    // Utility function.  Prints message indicating number of elided tracing printouts.
+    void print_ellipsis(std::ofstream & stream);
+
+#endif // (DYNAMETER_LEVEL >= 3u)
+
+    // Utility function.  Prints summary statistics on all traced functions.
+    void TablePrinter(const std::string heading, const std::string filename);
+
+  public:
+    // Set at first call and used as zero-point of elapsed time for run.
+    static unsigned long long int s_startClock;
+
+    // Set at first call and updated at end of each call, irrespective of trace level at call.
+    static unsigned long long int s_lastClock;
+
+    // Constructor/Printer.
+    // Note: If macro __PRETTY_FUNCTION__ (which includes signature and class name in string) is unavailable,
+    // use more generally available macro __FUNCTION__ instead (no signature or class name in string).
+    LocalDynameter(const char * const srcFunctionName,     // Supplied by __PRETTY_FUNCTION__ macro
+                   const char * const srcFileName,         // Supplied by __FILE__ macro
+                   const unsigned int srcLineNumber,       // Supplied by __LINE__ macro
+                   const unsigned int globalDynameterIdx); // Supplied by DYNACOUNTER_START + __COUNTER__
+
+    // Destructor/Printer.
+    ~LocalDynameter();
+
+}; // class LocalDynameter
+
+//------------------------------------------------------------------------------------
+// Utility functions for sorting.
+// All comparison functions yield sorts in decreasing (non-increasing) order.
+
+// Utility function.  Sorts by number of self calls.
+extern bool NumSelfCalls(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+
+// Utility function.  Sorts by max time since last trace probe before current call.
+extern bool MaxSinceLastBefore(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+
+// Utility function.  Sorts by callee runtime per call (total over all callees).
+extern bool CalleeRuntimePerCall(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+
+// Utility function.  Sorts by callee runtime aggregate (summed over all calls).
+extern bool CalleeRuntimeAggregate(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+
+// Utility function.  Sorts by self runtime per call (excluding all callees).
+extern bool SelfRuntimePerCall(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+
+// Utility function.  Sorts by self runtime aggregate (excluding callees, summed over all calls).
+extern bool SelfRuntimeAggregate(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+
+// Utility function.  Sorts by total runtime per call (sum of self plus callee runtime).
+extern bool TotalRuntimePerCall(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+
+// Utility function.  Sorts by total runtime aggregate (sum of self plus callee, summed over all calls).
+extern bool TotalRuntimeAggregate(const GlobalDynameter * const x_ptr, const GlobalDynameter * const y_ptr);
+
+//------------------------------------------------------------------------------------
+
+#endif  // DYNAMETER_LEVEL
+
+//------------------------------------------------------------------------------------
+// External function declarations.
+
+#ifdef DYNAMETER_LEVEL
+
+// Timer utility function (CPU cycle counter).
+// This function is used by the Dynameter constructors (in expansion of the StartDynameter() macro)
+// and corresponding destructors.  It is also called in a few other places to "reset" the tracing/metering
+// clock (ie, after human interaction finishes).
+//
+extern unsigned long long int rdtsc();
+
+#endif
+
+//------------------------------------------------------------------------------------
+// Macros which invoke tracing/metering functionality.
+//
+// Note: If predefined macro __PRETTY_FUNCTION__ (which includes signature and class name in string) is unavailable,
+// use predefined (and more generally available) macro __FUNCTION__ instead (no signature or class name in string).
+//
+// Note that these macros expand into something other than ordinary function calls (ie, into object declarations)
+// when DYNAMETER_LEVEL is defined, which is why inline function definitions would not suffice.  If DYNAMETER_LEVEL
+// is NOT defined, they expand into whitespace (and therefore disappear entirely from non-tracing/metering-mode compilations).
+
+#ifdef DYNAMETER_LEVEL
+
+#define StartDynameter()                                         \
+    LocalDynameter traceObj(__PRETTY_FUNCTION__,                 \
+                            __FILE__,                            \
+                            __LINE__,                            \
+                            DYNACOUNTER_START + __COUNTER__)
+
+#else // DYNAMETER_LEVEL
+
+#define StartDynameter()
+
+#endif // DYNAMETER_LEVEL
+
+//------------------------------------------------------------------------------------
+// Extensions to ASSERT macro with equivalent functionality but extra argument printout capability.
+// These are independent of the Dynameter tool and are useful for debugging anywhere.
+
+#ifndef NDEBUG                          // In Debug mode, these all take effect.
+
+#define DebugAssert(condition, msg)                                     \
+    if(!(condition))                                                    \
+    {                                                                   \
+        std::cerr << std::endl                                          \
+                  << "DebugAssert: " << msg << std::endl                \
+                  << "Condition:   " << #condition << std::endl         \
+                  << "In function: " << __PRETTY_FUNCTION__ << std::endl \
+                  << "In filename: " << __FILE__ << " (line " << __LINE__ << ")" << std::endl << std::endl; \
+        assert(condition);                                              \
+    }
+
+#define DebugAssert2(condition, arg1, arg2)                             \
+    if(!(condition))                                                    \
+    {                                                                   \
+        std::cerr << std::endl                                          \
+                  << "DebugAssert2: " << #condition << std::endl        \
+                  << #arg1 << " : " << (arg1) << std::endl              \
+                  << #arg2 << " : " << (arg2) << std::endl              \
+                  << "In function:  " << __PRETTY_FUNCTION__ << std::endl \
+                  << "In filename:  " << __FILE__ << " (line " << __LINE__ << ")" << std::endl << std::endl; \
+        assert(condition);                                              \
+    }
+
+#define DebugAssert3(condition, arg1, arg2, arg3)                       \
+    if(!(condition))                                                    \
+    {                                                                   \
+        std::cerr << std::endl                                          \
+                  << "DebugAssert3: " << #condition << std::endl        \
+                  << #arg1 << " : " << (arg1) << std::endl              \
+                  << #arg2 << " : " << (arg2) << std::endl              \
+                  << #arg3 << " : " << (arg3) << std::endl              \
+                  << "In function:  " << __PRETTY_FUNCTION__ << std::endl \
+                  << "In filename:  " << __FILE__ << " (line " << __LINE__ << ")" << std::endl << std::endl; \
+        assert(condition);                                              \
+    }
+
+#define DebugAssert4(condition, arg1, arg2, arg3, arg4)                 \
+    if(!(condition))                                                    \
+    {                                                                   \
+        std::cerr << std::endl                                          \
+                  << "DebugAssert4: " << #condition << std::endl        \
+                  << #arg1 << " : " << (arg1) << std::endl              \
+                  << #arg2 << " : " << (arg2) << std::endl              \
+                  << #arg3 << " : " << (arg3) << std::endl              \
+                  << #arg4 << " : " << (arg4) << std::endl              \
+                  << "In function:  " << __PRETTY_FUNCTION__ << std::endl \
+                  << "In filename:  " << __FILE__ << " (line " << __LINE__ << ")" << std::endl << std::endl; \
+        assert(condition);                                              \
+    }
+
+#define DebugPrint1(arg1)                                               \
+    if(true)                                                            \
+    {                                                                   \
+        std::cerr << std::endl                                          \
+                  << #arg1 << " : " << (arg1) << std::endl              \
+                  << "In function:  " << __PRETTY_FUNCTION__ << std::endl \
+                  << "In filename:  " << __FILE__ << " (line " << __LINE__ << ")" << std::endl << std::endl; \
+    }
+
+#define DebugPrint2(arg1, arg2)                                         \
+    if(true)                                                            \
+    {                                                                   \
+        std::cerr << std::endl                                          \
+                  << #arg1 << " : " << (arg1) << std::endl              \
+                  << #arg2 << " : " << (arg2) << std::endl              \
+                  << "In function:  " << __PRETTY_FUNCTION__ << std::endl \
+                  << "In filename:  " << __FILE__ << " (line " << __LINE__ << ")" << std::endl << std::endl; \
+    }
+
+#define DebugPrint3(arg1, arg2, arg3)                                   \
+    if(true)                                                            \
+    {                                                                   \
+        std::cerr << std::endl                                          \
+                  << #arg1 << " : " << (arg1) << std::endl              \
+                  << #arg2 << " : " << (arg2) << std::endl              \
+                  << #arg3 << " : " << (arg3) << std::endl              \
+                  << "In function:  " << __PRETTY_FUNCTION__ << std::endl \
+                  << "In filename:  " << __FILE__ << " (line " << __LINE__ << ")" << std::endl << std::endl; \
+    }
+
+#else  // In non-debug mode, they are all defined as NO-OPs.
+
+#define DebugAssert(condition, msg)
+#define DebugAssert2(condition, arg1, arg2)
+#define DebugAssert3(condition, arg1, arg2, arg3)
+#define DebugAssert4(condition, arg1, arg2, arg3, arg4)
+#define DebugPrint1(arg1)
+#define DebugPrint2(arg1, arg2)
+#define DebugPrint3(arg1, arg2, arg3)
+
+#endif // NDEBUG
+
+//------------------------------------------------------------------------------------
+
+#endif // DYNATRACER_H
+
+//____________________________________________________________________________________
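
To show how the pieces above are meant to be wired together: a function to be metered drops StartDynameter() at its top, so the LocalDynameter constructor records entry and the destructor, firing when the object goes out of scope, records exit and accumulates the timings into the matching GlobalDynameter slot.  The function name below is hypothetical, purely for illustration:

    #include "dynatracer.h"

    // Hypothetical traced function; not part of LAMARC itself.
    void DoTracedWork(int n)
    {
        StartDynameter();   // expands to a LocalDynameter declaration when DYNAMETER_LEVEL
                            // is defined, and to whitespace otherwise
        DebugAssert(n >= 0, "n must be non-negative");   // no-op when NDEBUG is defined
        // ... body of the function being traced/metered ...
    }

One caveat worth noting: because the DebugAssert/DebugPrint family expands to a bare if-statement rather than the conventional do { ... } while(0) wrapper, using one of them as the unbraced body of an if that has an else clause runs into the usual stray-semicolon/dangling-else macro pitfall.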
diff --git a/src/control/errhandling.cpp b/src/control/errhandling.cpp
new file mode 100644
index 0000000..e2b397d
--- /dev/null
+++ b/src/control/errhandling.cpp
@@ -0,0 +1,290 @@
+// $Id: errhandling.cpp,v 1.6 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "errhandling.h"
+
+//------------------------------------------------------------------------------------
+// Data reading exceptions
+//------------------------------------------------------------------------------------
+
+data_error::data_error(const std::string& wh): _what (wh) { };
+data_error::~data_error() throw() {};
+
+const char*
+data_error::what () const throw() { return _what.c_str (); };
+
+const std::string&
+data_error::whatString() const { return _what;};
+
+//------------------------------------------------------------------------------------
+
+file_error::file_error(const std::string& wh) : data_error(wh) { };
+file_error::~file_error() throw() {};
+
+//------------------------------------------------------------------------------------
+
+incorrect_data::incorrect_data(const std::string& wh) : data_error(wh) { };
+incorrect_data::~incorrect_data() throw() {};
+
+//------------------------------------------------------------------------------------
+
+incorrect_xml::incorrect_xml(const std::string& wh) : data_error(wh) { };
+incorrect_xml::~incorrect_xml() throw() {};
+
+incorrect_xml_extra_tag::incorrect_xml_extra_tag(const std::string& wh, const std::string& tag) : incorrect_xml(wh), _tag(tag) {};
+incorrect_xml_extra_tag::~incorrect_xml_extra_tag() throw() {};
+const std::string& incorrect_xml_extra_tag::tag() {return _tag;};
+
+incorrect_xml_missing_tag::incorrect_xml_missing_tag(const std::string& wh, const std::string& tag) : incorrect_xml(wh), _tag(tag) {};
+incorrect_xml_missing_tag::~incorrect_xml_missing_tag() throw() {};
+const std::string& incorrect_xml_missing_tag::tag() {return _tag;};
+
+incorrect_xml_not_double::incorrect_xml_not_double(const std::string& wh, const std::string& text) : incorrect_xml(wh), _text(text) {};
+incorrect_xml_not_double::~incorrect_xml_not_double() throw() {};
+const std::string& incorrect_xml_not_double::text() {return _text;};
+
+incorrect_xml_not_long::incorrect_xml_not_long(const std::string& wh, const std::string& text) : incorrect_xml(wh), _text(text) {};
+incorrect_xml_not_long::~incorrect_xml_not_long() throw() {};
+const std::string& incorrect_xml_not_long::text() {return _text;};
+
+incorrect_xml_not_size_t::incorrect_xml_not_size_t(const std::string& wh, const std::string& text) : incorrect_xml(wh), _text(text) {};
+incorrect_xml_not_size_t::~incorrect_xml_not_size_t() throw() {};
+const std::string& incorrect_xml_not_size_t::text() {return _text;};
+
+//------------------------------------------------------------------------------------
+
+invalid_sequence::invalid_sequence(const std::string& wh) : data_error(wh) { };
+invalid_sequence::~invalid_sequence() throw() {};
+
+//------------------------------------------------------------------------------------
+
+unrecognized_tag_error::unrecognized_tag_error(const std::string& wh, int where) : data_error(wh), m_where(where) { };
+unrecognized_tag_error::~unrecognized_tag_error() throw() {};
+int
+unrecognized_tag_error::where() const {return m_where;};
+
+#if 0
+
+//------------------------------------------------------------------------------------
+// Reject newly generated tree events
+//------------------------------------------------------------------------------------
+
+enum reject_type{TINYPOP, OVERRUN, ZERODL, STRETCHED};
+
+class rejecttree_error : public std::exception
+{
+  private:
+    std::string _what;
+  public:
+    virtual reject_type GetType() const = 0;
+    rejecttree_error(const std::string& wh) : _what(wh) {};
+    virtual ~rejecttree_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+// Reject for improbable population size
+
+class tinypopulation_error : public rejecttree_error
+{
+  public:
+    tinypopulation_error(const std::string& wh) : rejecttree_error(wh) {};
+    virtual ~tinypopulation_error() throw() {};
+    virtual reject_type GetType() const {return TINYPOP;};
+};
+
+// Reject for too-many-events (the tree has gotten bloated)
+
+class overrun_error : public rejecttree_error
+{
+  public:
+    overrun_error(const std::string& wh) : rejecttree_error(wh) { };
+    virtual ~overrun_error() throw() {};
+    virtual reject_type GetType() const {return OVERRUN;};
+};
+
+// Reject for zero data likelihood (the tree doesn't fit the data at all)
+class zero_dl_error : public rejecttree_error
+{
+  public:
+    zero_dl_error(const std::string& wh) : rejecttree_error(wh) {};
+    virtual ~zero_dl_error() throw() {};
+    virtual reject_type GetType() const {return ZERODL;};
+};
+
+// Reject for improbably long branch lengths  (the tree has gotten
+// stretched)
+class stretched_error : public rejecttree_error
+{
+  public:
+    stretched_error(const std::string& wh) : rejecttree_error(wh) { };
+    virtual ~stretched_error() throw() {};
+    virtual reject_type GetType() const {return STRETCHED;};
+};
+
+//------------------------------------------------------------------------------------
+
+//LS NOTE:  coal_overrun no longer used--we'll catch this error in
+// rec_overrun instead, and the user can't set max coal events.
+
+//------------------------------------------------------------------------------------
+
+class rec_overrun : public overrun_error
+{
+  public:
+    rec_overrun(const std::string& wh = "Too many recombinations") :
+        overrun_error(wh) { };
+    virtual ~rec_overrun() throw() {};
+};
+
+//------------------------------------------------------------------------------------
+
+class mig_overrun : public overrun_error
+{
+  public:
+    mig_overrun(const std::string& wh = "Too many migrations") :
+        overrun_error(wh) { };
+    virtual ~mig_overrun() throw() {};
+};
+
+//------------------------------------------------------------------------------------
+
+class dis_overrun : public overrun_error
+{
+  public:
+    dis_overrun(const std::string& wh = "Too many disease mutations") :
+        overrun_error(wh) { };
+    virtual ~dis_overrun() throw() {};
+};
+
+//------------------------------------------------------------------------------------
+// The denovo tree cannot be constructed (probably due to deeply
+// unreasonable parameter values)
+//------------------------------------------------------------------------------------
+
+class denovo_failure : public overrun_error
+{
+  private:
+    std::string _what;
+  public:
+    denovo_failure(const std::string& wh = "Can't generate denovo tree") :
+        overrun_error(wh) { };
+    virtual ~denovo_failure() throw() {};
+};
+
+//------------------------------------------------------------------------------------
+// Errors that should never happen
+//------------------------------------------------------------------------------------
+
+class impossible_error : public std::exception
+{
+  private:
+    std::string _what;
+  public:
+    impossible_error(const std::string& wh): _what ("impossible error:"+wh) { };
+    virtual ~impossible_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+//------------------------------------------------------------------------------------
+// Errors indicating code that needs to be written
+//------------------------------------------------------------------------------------
+
+class implementation_error : public std::exception
+{
+  private:
+    std::string _what;
+  public:
+    implementation_error(const std::string& wh): _what ("implementation error:"+wh) { };
+    virtual ~implementation_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+//------------------------------------------------------------------------------------
+// Errors showing that deprecation is not complete
+//------------------------------------------------------------------------------------
+
+class deprecation_error : public std::exception
+{
+  private:
+    std::string _what;
+  public:
+    deprecation_error(const std::string& wh): _what (wh) { };
+    virtual ~deprecation_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+//------------------------------------------------------------------------------------
+// Errors showing that a dynamic allocation request was not successful
+//------------------------------------------------------------------------------------
+
+class alloc_error : public std::exception
+{
+  private:
+    std::string _what;
+  public:
+    alloc_error(const std::string& wh) : _what(wh) { };
+    virtual ~alloc_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const std::string& whatString() { return _what;};
+};
+
+//------------------------------------------------------------------------------------
+// Errors that indicate a probable failure in data-likelihood calculation
+//------------------------------------------------------------------------------------
+
+class datalike_error : public std::exception
+{
+  private:
+    std::string _what;
+  public:
+    datalike_error(const std::string& wh) : _what(wh) { };
+    virtual ~datalike_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const std::string& whatString() { return _what;};
+};
+
+//------------------------------------------------------------------------------------
+// Use this datalike_error when the failure can be fixed by turning on
+// normalization.
+//------------------------------------------------------------------------------------
+
+class datalikenorm_error : public datalike_error
+{
+  public:
+    datalikenorm_error(const std::string& wh) : datalike_error(wh) { };
+    virtual ~datalikenorm_error() throw() {};
+    virtual const char* what () const throw() { return datalike_error::what(); };
+    virtual const std::string& whatString() { return datalike_error::whatString(); };
+};
+
+//------------------------------------------------------------------------------------
+
+class insufficient_variability_over_regions_error : public std::exception
+{
+  private:
+    //std::string _what;
+    double m_oldAlpha;
+    double m_newAlpha;
+  public:
+    //insufficient_variability_over_regions_error(const std::string& wh): _what ("error:"+wh) { };
+    insufficient_variability_over_regions_error(const double& oldAlpha, const double& newAlpha):
+        m_oldAlpha(oldAlpha),m_newAlpha(newAlpha) {};
+    virtual ~insufficient_variability_over_regions_error() throw() {};
+    double GetOldAlpha() const { return m_oldAlpha; };
+    double GetNewAlpha() const { return m_newAlpha; };
+    //virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+#endif
+
+tixml_error::tixml_error(const std::string& wh) : data_error(wh) { };
+tixml_error::~tixml_error() throw() {};
+
+//____________________________________________________________________________________
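
Since the exception classes above are thin wrappers, a usage sketch may help; the parsing routine, call site, and tag name here are hypothetical, not taken from the upstream sources.  The point is to attach the offending tag at the throw site and to catch by reference so the derived type is not sliced (as the header comment below also advises):

    #include <iostream>
    #include "errhandling.h"

    // Hypothetical parser step that notices a required tag is absent.
    void ParseSomething()
    {
        throw incorrect_xml_missing_tag("required tag absent", "<coordinates>");
    }

    int main()
    {
        try
        {
            ParseSomething();
        }
        catch (incorrect_xml_missing_tag& e)   // by reference, so the object is not sliced
        {
            std::cerr << e.what() << ": " << e.tag() << std::endl;
        }
        return 0;
    }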
diff --git a/src/control/errhandling.h b/src/control/errhandling.h
new file mode 100644
index 0000000..37f53dc
--- /dev/null
+++ b/src/control/errhandling.h
@@ -0,0 +1,390 @@
+// $Id: errhandling.h,v 1.24 2011/03/07 06:08:47 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/**************************************************************
+ This file contains all exception classes defined for this
+ program.  For all of these exceptions, the public function
+ what() can be called to get a string describing the error.
+
+ Please remember to catch exceptions by reference, not by value,
+ lest you slice them.
+
+ Mary Kuhner      March 18, 2001
+**************************************************************/
+
+#ifndef ERRHANDLING_H
+#define ERRHANDLING_H
+
+#include <stdexcept>
+#include <string>
+
+//------------------------------------------------------------------------------------
+// Data reading exceptions
+//------------------------------------------------------------------------------------
+
+class data_error : public std::exception
+{
+  private:
+    std::string _what;
+
+  public:
+    data_error(const std::string& wh);
+    virtual ~data_error() throw();
+    virtual const char* what () const throw();
+    virtual const std::string& whatString() const;
+};
+
+//------------------------------------------------------------------------------------
+
+class file_error : public data_error
+{
+    // failure to read a necessary file
+  public:
+    file_error(const std::string& wh);
+    virtual ~file_error() throw();
+};
+
+//------------------------------------------------------------------------------------
+
+class incorrect_data : public data_error
+{
+    // XML did not contain required information
+  public:
+    incorrect_data(const std::string& wh);
+    virtual ~incorrect_data() throw();
+};
+
+//------------------------------------------------------------------------------------
+
+class incorrect_xml : public data_error
+{
+    // XML was syntactically incorrect
+  public:
+    incorrect_xml(const std::string& wh);
+    virtual ~incorrect_xml() throw();
+};
+
+class incorrect_xml_extra_tag : public incorrect_xml
+{
+    std::string _tag;
+
+  public:
+    incorrect_xml_extra_tag(const std::string& wh,const std::string& tag);
+    virtual ~incorrect_xml_extra_tag() throw();
+    const std::string& tag();
+};
+
+class incorrect_xml_missing_tag : public incorrect_xml
+{
+    std::string _tag;
+
+  public:
+    incorrect_xml_missing_tag(const std::string& wh,const std::string& tag);
+    virtual ~incorrect_xml_missing_tag() throw();
+    const std::string& tag();
+};
+
+class incorrect_xml_not_double : public incorrect_xml
+{
+    std::string _text;
+
+  public:
+    incorrect_xml_not_double(const std::string& wh,const std::string& text);
+    virtual ~incorrect_xml_not_double() throw();
+    const std::string& text();
+};
+
+class incorrect_xml_not_long : public incorrect_xml
+{
+    std::string _text;
+
+  public:
+    incorrect_xml_not_long(const std::string& wh,const std::string& text);
+    virtual ~incorrect_xml_not_long() throw();
+    const std::string& text();
+};
+
+class incorrect_xml_not_size_t : public incorrect_xml
+{
+    std::string _text;
+
+  public:
+    incorrect_xml_not_size_t(const std::string& wh,const std::string& text);
+    virtual ~incorrect_xml_not_size_t() throw();
+    const std::string& text();
+};
+
+//------------------------------------------------------------------------------------
+
+class invalid_sequence : public data_error
+{
+    // invalid genetic data
+  public:
+    invalid_sequence(const std::string& wh);
+    virtual ~invalid_sequence() throw();
+};
+
+class unrecognized_tag_error : public data_error
+{
+    // tag not in schema
+  private:
+    int     m_where;
+
+  public:
+    unrecognized_tag_error(const std::string& wh, int where);
+    virtual ~unrecognized_tag_error() throw();
+    int where() const;
+};
+
+//------------------------------------------------------------------------------------
+// Reject newly generated tree events
+//------------------------------------------------------------------------------------
+
+enum reject_type{TINYPOP, OVERRUN, ZERODL, STRETCHED, EPOCHBOUNDARYCROSS};
+
+class rejecttree_error : public std::exception
+{
+  private:
+    std::string _what;
+
+  public:
+    virtual reject_type GetType() const = 0;
+    rejecttree_error(const std::string& wh) : _what(wh) {};
+    virtual ~rejecttree_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+// Reject for improbable population size
+class tinypopulation_error : public rejecttree_error
+{
+  public:
+    tinypopulation_error(const std::string& wh) : rejecttree_error(wh) {};
+    virtual ~tinypopulation_error() throw() {};
+    virtual reject_type GetType() const {return TINYPOP;};
+};
+
+// Reject for too-many-events (the tree has gotten bloated)
+class overrun_error : public rejecttree_error
+{
+  public:
+    overrun_error(const std::string& wh) : rejecttree_error(wh) { };
+    virtual ~overrun_error() throw() {};
+    virtual reject_type GetType() const {return OVERRUN;};
+};
+
+// Reject for zero data likelihood (the tree doesn't fit the data at all)
+class zero_dl_error : public rejecttree_error
+{
+  public:
+    zero_dl_error(const std::string& wh) : rejecttree_error(wh) {};
+    virtual ~zero_dl_error() throw() {};
+    virtual reject_type GetType() const {return ZERODL;};
+};
+
+// Reject for improbably long branch lengths  (the tree has gotten stretched)
+class stretched_error : public rejecttree_error
+{
+  public:
+    stretched_error(const std::string& wh) : rejecttree_error(wh) { };
+    virtual ~stretched_error() throw() {};
+    virtual reject_type GetType() const {return STRETCHED;};
+};
+
+// Reject for improper crossing of epoch boundaries
+class epoch_error : public rejecttree_error
+{
+  public:
+    epoch_error(const std::string& wh) : rejecttree_error(wh) {};
+    virtual ~epoch_error() throw() {};
+    virtual reject_type GetType() const {return EPOCHBOUNDARYCROSS;};
+};
+
+//------------------------------------------------------------------------------------
+
+//LS NOTE:  coal_overrun no longer used--we'll catch this error in
+// rec_overrun instead, and the user can't set max coal events.
+
+//------------------------------------------------------------------------------------
+
+class rec_overrun : public overrun_error
+{
+  public:
+    rec_overrun(const std::string& wh = "Too many recombinations") :
+        overrun_error(wh) { };
+    virtual ~rec_overrun() throw() {};
+};
+
+//------------------------------------------------------------------------------------
+
+class mig_overrun : public overrun_error
+{
+  public:
+    mig_overrun(const std::string& wh = "Too many migrations") :
+        overrun_error(wh) { };
+    virtual ~mig_overrun() throw() {};
+};
+
+//------------------------------------------------------------------------------------
+
+class dis_overrun : public overrun_error
+{
+  public:
+    dis_overrun(const std::string& wh = "Too many disease mutations") :
+        overrun_error(wh) { };
+    virtual ~dis_overrun() throw() {};
+};
+
+//------------------------------------------------------------------------------------
+
+class epoch_overrun : public overrun_error
+{
+  public:
+    epoch_overrun(const std::string& wh = "Too many epochs") :
+        overrun_error(wh) { };
+    virtual ~epoch_overrun() throw() {};
+};
+
+//------------------------------------------------------------------------------------
+// The denovo tree cannot be constructed (probably due to deeply
+// unreasonable parameter values)
+//------------------------------------------------------------------------------------
+
+class denovo_failure : public overrun_error
+{
+  private:
+    std::string _what;
+
+  public:
+    denovo_failure(const std::string& wh = "Can't generate denovo tree") :
+        overrun_error(wh) { };
+    virtual ~denovo_failure() throw() {};
+};
+
+//------------------------------------------------------------------------------------
+// Errors that should never happen
+//------------------------------------------------------------------------------------
+
+class impossible_error : public std::exception
+{
+  private:
+    std::string _what;
+
+  public:
+    impossible_error(const std::string& wh): _what ("impossible error:"+wh) { };
+    virtual ~impossible_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+//------------------------------------------------------------------------------------
+// Errors indicating code that needs to be written
+//------------------------------------------------------------------------------------
+
+class implementation_error : public std::exception
+{
+  private:
+    std::string _what;
+
+  public:
+    implementation_error(const std::string& wh): _what ("implementation error:"+wh) { };
+    virtual ~implementation_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+//------------------------------------------------------------------------------------
+// Errors showing that deprecation is not complete
+//------------------------------------------------------------------------------------
+
+class deprecation_error : public std::exception
+{
+  private:
+    std::string _what;
+
+  public:
+    deprecation_error(const std::string& wh): _what (wh) { };
+    virtual ~deprecation_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+//------------------------------------------------------------------------------------
+// Errors showing that a dynamic allocation request was not successful
+//------------------------------------------------------------------------------------
+
+class alloc_error : public std::exception
+{
+  private:
+    std::string _what;
+
+  public:
+    alloc_error(const std::string& wh) : _what(wh) { };
+    virtual ~alloc_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const std::string& whatString() { return _what;};
+};
+
+//------------------------------------------------------------------------------------
+// Errors that indicate a probable failure in data-likelihood calculation
+//------------------------------------------------------------------------------------
+
+class datalike_error : public std::exception
+{
+  private:
+    std::string _what;
+
+  public:
+    datalike_error(const std::string& wh) : _what(wh) { };
+    virtual ~datalike_error() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const std::string& whatString() { return _what;};
+};
+
+//------------------------------------------------------------------------------------
+// Use this datalike_error when the failure can be fixed by turning on
+// normalization.
+//------------------------------------------------------------------------------------
+
+class datalikenorm_error : public datalike_error
+{
+  public:
+    datalikenorm_error(const std::string& wh) : datalike_error(wh) { };
+    virtual ~datalikenorm_error() throw() {};
+    virtual const char* what () const throw() { return datalike_error::what(); };
+    virtual const std::string& whatString() { return datalike_error::whatString(); };
+};
+
+//------------------------------------------------------------------------------------
+
+class insufficient_variability_over_regions_error : public std::exception
+{
+  private:
+    //std::string _what;
+    double m_oldAlpha;
+    double m_newAlpha;
+
+  public:
+    //insufficient_variability_over_regions_error(const std::string& wh): _what ("error:"+wh) { };
+    insufficient_variability_over_regions_error(const double& oldAlpha, const double& newAlpha):
+        m_oldAlpha(oldAlpha),m_newAlpha(newAlpha) {};
+    virtual ~insufficient_variability_over_regions_error() throw() {};
+    double GetOldAlpha() const { return m_oldAlpha; };
+    double GetNewAlpha() const { return m_newAlpha; };
+    //virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+class tixml_error : public data_error
+{
+    // complaint from tixml parser
+  public:
+    tixml_error(const std::string& wh);
+    virtual ~tixml_error() throw();
+};
+
+#endif // ERRHANDLING_H
+
+//____________________________________________________________________________________
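
The rejecttree_error hierarchy above is built around GetType(), so a rejected tree proposal can be handled uniformly while still distinguishing the reason.  A sketch under the assumption of a hypothetical sampler step (AttemptRearrangement and OneSamplerStep are not real LAMARC functions):

    #include "errhandling.h"

    void AttemptRearrangement();              // hypothetical; may throw a rejecttree_error subclass

    void OneSamplerStep()
    {
        try
        {
            AttemptRearrangement();
        }
        catch (rejecttree_error& e)           // by reference, per the header comment above
        {
            switch (e.GetType())
            {
                case OVERRUN:            break;  // too many events in the proposed tree
                case ZERODL:             break;  // proposed tree has zero data likelihood
                case TINYPOP:            break;  // improbably small population size
                case STRETCHED:          break;  // improbably long branch lengths
                case EPOCHBOUNDARYCROSS: break;  // improper crossing of an epoch boundary
            }
            // whatever the reason, the proposal would be rejected and the previous tree kept
        }
    }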
diff --git a/src/control/lamarc.cpp b/src/control/lamarc.cpp
new file mode 100644
index 0000000..db077a5
--- /dev/null
+++ b/src/control/lamarc.cpp
@@ -0,0 +1,815 @@
+// $Id: lamarc.cpp,v 1.163 2013/11/07 22:56:31 jyamato Exp $
+
+/*
+ * Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ * This software is distributed free of charge for non-commercial use
+ * and is copyrighted.  Of course, we do not guarantee that the software
+ * works, and are not responsible for any damage you may cause or have.
+ */
+
+#include <cassert>
+#include <iostream>
+#include <string>
+#include <vector>
+
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+#include <setjmp.h>
+#include <signal.h>
+#endif
+
+#include "local_build.h"
+#include "dynatracer.h"
+
+#include "analyzer.h"
+#include "arranger.h"
+#include "bayesanalyzer_1d.h"
+#include "chainmanager.h"
+#include "chainout.h"
+#include "chainparam.h"
+#include "constants.h"
+#include "curvefiles.h"
+#include "datafilenamedialog.h"
+#include "datapack.h"
+#include "datatype.h"
+#include "dialog.h"
+#include "display.h"
+#include "dlmodel.h"
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+#include "force.h"
+#endif //  LAMARC_QA_SINGLE_DENOVOS
+#include "forcesummary.h"
+#include "front_end_warnings.h"
+#include "lamarc.h"
+#include "lamarcheaderdialog.h"
+#include "lamarcmenu.h"
+#include "likelihood.h"
+#include "maximizer.h"
+#include "newmenuitems.h"
+#include "nomenufilereaddialog.h"
+#include "outputfile.h"
+#include "parameter.h"
+#include "parsetreeschema.h"
+#include "parsetreetodata.h"
+#include "parsetreetosettings.h"
+#include "plotstat.h"
+#include "region.h"
+#include "registry.h"
+#include "runreport.h"
+#include "spreadsheet.h"
+#include "stringx.h"
+#include "treesum.h"
+#include "ui_interface.h"
+#include "ui_vars.h"
+#include "userparam.h"
+#include "xml.h"
+#include "xml_report.h"
+#include "xml_strings.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+// These "globals" (actually class statics) are relevant ONLY when DYNAMETER_LEVEL is defined,
+// but when so, they must be defined before any other globals.
+
+#ifdef DYNAMETER_LEVEL
+
+// Pointer to current "toplevel" LocalDynameter object.
+LocalDynameter * LocalDynameter::s_currentMeterPtr(NULL);
+
+// Set at first call and used as zero-point of elapsed time for run.
+unsigned long long int LocalDynameter::s_startClock(rdtsc());
+
+// Set at first call and updated at end of each call, irrespective of trace level at call.
+unsigned long long int LocalDynameter::s_lastClock(s_startClock);
+
+// Accumulates maximum "Clocks since last trace before current call" time, over all calls to all functions.
+unsigned long long int LocalDynameter::s_maxSinceLastBeforeCall(0ULL);
+
+// Total number of function calls traced so far (all functions).
+unsigned long long int LocalDynameter::s_totalNumberOfCalls(0ULL);
+
+// Trace output file - opened by the LocalDynameter toplevel constructor and closed by the toplevel destructor.
+ofstream LocalDynameter::s_traceOut;
+
+// Array of pointers to GlobalDynameter objects holding data globally (updated by dynamic LocalDynameter objects).
+// All slots are explicitly set to NULL by toplevel call to StartDynameter().
+GlobalDynameter * LocalDynameter::s_GlobalDynameterArray[DYNAMETER_ARRAYSIZE];
+
+// Number of GlobalDynameter objects populating the above array - i.e., the number of functions traced so far this run.
+// This is used as an ordinal count for GlobalDynameter objects (gives order of each function's first call).
+unsigned int LocalDynameter::s_GlobalDynameterCount(0u);
+
+#if (DYNAMETER_LEVEL >= 3u)
+
+// User-settable limit to depth of tracing based on number of calls to same function.
+unsigned int LocalDynameter::s_traceCountLimit(0u);
+
+// Flag/counter to control whether (and how) ellipsis indication is printed when dynamic tracing is suppressed.
+unsigned int LocalDynameter::s_ellipsisCounter(0u);
+
+// Count of total number of elided tracing printouts.
+unsigned int LocalDynameter::s_printoutsElided(0u);
+
+#endif // (DYNAMETER_LEVEL >= 3u)
+
+// If using Dynameter, the first call must come before any other global data objects are allocated.
+StartDynameter();
+
+#endif // ifdef DYNAMETER_LEVEL
+
+//------------------------------------------------------------------------------------
+
+class TiXmlElement;
+
+//------------------------------------------------------------------------------------
+// The Registry singleton is global because everyone in the world needs to use it.
+
+Registry registry;
+
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+extern jmp_buf prewrite;
+// We attempt to deal with cases of a too-big output file, but
+// we don't know how to make this work on Windows yet!
+#endif
+
+//------------------------------------------------------------------------------------
+// This routine parses the command-line options.
+
+void ParseOptions(long int argc, char** argv, bool* isBatch, bool* explicitExit, bool* hasFile, string & fileName)
+{
+    *isBatch=false;
+    *explicitExit=false;
+    *hasFile=false;
+    for (int arg = 1; arg < argc; arg++)
+    {
+        string option(argv[arg]);
+        if (option == "-b" || option == "--batch")
+        {
+            *isBatch = true;
+        }
+        else if (option == "-x" || option == "--explicit-exit")
+        {
+            *explicitExit = true;
+        }
+        else if (option == "-d" || option == "--denovo-sim-count")
+        {
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+            arg++;
+            string numSimString = argv[arg];
+            long numSim = static_cast<long>(atoi(numSimString.c_str()));
+            registry.SetDenovoCount(numSim);
+#else
+            cerr << "Command line option \"-d\" only applies to LAMARC_QA_SINGLE_DENOVOS test. ";
+            cerr << "Exiting." << endl;
+            exit(lamarccodes::optionError);
+#endif // LAMARC_QA_SINGLE_DENOVOS
+        }
+        else if (argv[arg][0] == '-')
+        {
+            // this is an error -- no such option
+            cerr << "Unknown command line option \"" + option + "\"" << endl;
+            cerr << "Exiting." << endl;
+            exit(lamarccodes::optionError);
+        }
+        else if (*hasFile)
+        {
+            // this is an error -- don't know how to read two files
+            cerr << "Cannot process argument \"" + option + "\"" << endl;
+            cerr << "Already read \"" + fileName + "\" as file argument" << endl;
+            cerr << "Exiting." << endl;
+            exit(lamarccodes::optionError);
+        }
+        else
+        {
+            *hasFile = true;
+            fileName = option;
+        }
+    }
+}
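+
+// Illustrative invocations as handled by ParseOptions above and DoUserInput below
+// (a sketch, not an exhaustive list of supported usage):
+//
+//   lamarc                      // interactive: prompt for an input file, then menu
+//   lamarc infile.xml           // interactive: read infile.xml, then menu
+//   lamarc -b infile.xml        // batch: no menu, run straight from infile.xml
+//   lamarc -b -x infile.xml     // batch, and wait for Enter before exiting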
+
+//------------------------------------------------------------------------------------
+// This routine controls the menu, if any, and other startup issues.
+
+bool DoUserInput(bool batchmode, bool infileprovided, string infilename)
+{
+    // DEFAULTS and SETUP
+    // DataPack & datapack = registry.GetDataPack();
+    BranchTag::BeginBranchIDs(defaults::numSemiUniqueBranches);
+
+    // Menu Subsystem
+
+    ScrollingDisplay display;
+    LamarcHeaderDialog header;
+    header.InvokeMe(display);
+
+    LamarcSchema        schema;
+    FrontEndWarnings    warnings;
+    XmlParser           parser(schema,warnings);
+
+    if (!infileprovided)
+    {
+        if (batchmode)
+        {
+            // advises user that inputFileName will be read
+            NoMenuFileReadDialog noMenuFileReadDialog(parser);
+            noMenuFileReadDialog.InvokeMe(display);
+        }
+        else
+        {
+            // allows user to change inputFileName
+            DataFileNameDialog dataDialog(parser);
+            dataDialog.InvokeMe(display);
+        }
+    }
+    else
+    {
+        parser.ParseFileData(infilename);
+    }
+
+    // print out any warnings that occurred during parsing
+    display.Warn(warnings.GetAndClearWarnings());
+
+    // read the datapack portion of the input file
+    ParseTreeToData dataParser(parser,registry.GetDataPack());
+    dataParser.ProcessFileData();
+
+    // read the settings portion of the input file
+    UIInterface uiInterface(warnings,registry.GetDataPack(),parser.GetFileName());
+    uiInterface.SetUndoRedoMode(undoRedoMode_FILE);
+    ParseTreeToSettings settingsParser(parser,uiInterface);
+    bool runProgram = true;
+
+    // If we're in batch mode, default to no output (verbosity="none").  However,
+    //  we allow this to be overwritten by the input file.
+    if (batchmode)
+    {
+        uiInterface.GetCurrentVars().userparams.SetProgress(NONE);
+        if (registry.GetARGfound())
+        {
+            if (!dataParser.CheckARGtree(uiInterface.doGetUIIdVec1d(uistr::validForces), batchmode))
+            {
+                runProgram = false;
+            }
+        }
+    }
+
+    settingsParser.ProcessFileSettings();
+
+#ifndef LAMARC_QA_SINGLE_DENOVOS
+    if (!batchmode)
+    {
+        // main menu: all menus and displays, setting of parameters
+        uiInterface.SetUndoRedoMode(undoRedoMode_USER);
+        LamarcMainMenu mainmenu(uiInterface);
+        bool stayInLoop = true;
+        while (stayInLoop)
+        {
+            menu_return_type menuCmd = mainmenu.InvokeMe(display);
+            switch(menuCmd)
+            {
+                case menu_RUN:
+                    uiInterface.AddWarning("(Already at top-level menu.  Type '.' to run LAMARC.)");
+                    stayInLoop = false;
+                    // if there were any ARG trees in the input file, make sure they are still valid
+                    if (registry.GetARGfound())
+                    {
+                        // make sure ARG trees are still valid
+                        if (!dataParser.CheckARGtree(uiInterface.doGetUIIdVec1d(uistr::validForces), batchmode))
+                        {
+                            stayInLoop = true;
+                        }
+                    }
+                    break;
+                case menu_QUIT:
+                    stayInLoop = false;
+                    runProgram = false;
+                    break;
+                default:
+                    break;
+            }
+        }
+    }
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+    FinishRegistry(uiInterface);
+
+    // read in the ARG trees and link them up
+    if (registry.GetARGfound())
+    {
+        if (!dataParser.DoARGtree())
+        {
+            runProgram = false;
+        }
+    }
+
+    return runProgram;
+
+} // DoUserInput
+
+//------------------------------------------------------------------------------------
+
+//***********************************************************
+// This routine constructs and registers objects which
+// require user-generated information to create:
+//    the proto-tree
+//    the runtime reporter
+//    the maximizer and its associated likelihoods
+//************************************************************
+
+void FinishRegistry(UIInterface & uiInterface)
+{
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+
+    ////////////////////////////////////////////////////
+    // Check that we've only got one region and one replicate.
+    long numReg = uiInterface.GetCurrentVars().datapackplus.GetNumRegions();
+    long numRep = uiInterface.doGetLong(uistr::replicates);
+    if(numReg != 1)
+    {
+        cerr << "Cannot do a denovo QA test run with other than one region. Exiting";
+        exit(lamarccodes::unknownError);
+    }
+    if(numRep != 1)
+    {
+        cerr << "Cannot do a denovo QA test run with other than one replicate. Exiting";
+        exit(lamarccodes::unknownError);
+    }
+
+    // set user parameters the way we know they need to be
+    // for doing the denovo generation test
+
+    // turn off bayesian -- we want likelihood so we can
+    // get a single-tree profile out
+    uiInterface.doSet(uistr::bayesian,"false",NO_ID());
+
+    // this probably isn't necessary, as we do one rearrangement
+    // but always reject
+    uiInterface.doSet(uistr::dropArranger,"1",NO_ID());
+    uiInterface.doSet(uistr::hapArranger,"0",NO_ID());
+    uiInterface.doSet(uistr::sizeArranger,"0",NO_ID());
+
+    // turn off heating
+    uiInterface.doSet(uistr::heatedChainCount,"1",NO_ID());
+
+    // adjust chain parameters
+    uiInterface.doSet(uistr::initialChains,"0",NO_ID());
+    string countStr = ToString(registry.GetDenovoCount());
+    uiInterface.doSet(uistr::finalChains,countStr,NO_ID());
+    uiInterface.doSet(uistr::finalInterval,"1",NO_ID());
+    uiInterface.doSet(uistr::finalDiscard,"0",NO_ID());
+    uiInterface.doSet(uistr::finalSamples,"1",NO_ID());
+
+    // turn off profiling
+    uiInterface.doSet(uistr::allProfilesOff,"",NO_ID());
+
+    // turn off verbose output
+    uiInterface.doSet(uistr::progress,"none",NO_ID());
+    uiInterface.doSet(uistr::verbosity,"none",NO_ID());
+
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+    // build structures from uiInterface data -- ORDER MATTERS HERE
+    registry.InstallUserParameters(uiInterface);
+    registry.InstallForcesAllOverThePlace(uiInterface);
+    registry.InstallChainParameters(uiInterface); //needs a ForceSummary, above
+    registry.InstallDataModels(uiInterface);
+    registry.FinalizeDataPack(uiInterface); //Needs installed datamodels
+
+    ForceSummary & forcesummary = registry.GetForceSummary();
+
+    Tree * prototree = forcesummary.CreateProtoTree();
+    registry.Register(prototree);
+
+    // create and register the runtime reporter
+    // MUST DO THIS BEFORE MAKING THE ANALYZER
+    verbosity_type progress = registry.GetUserParameters().GetProgress();
+
+    RunReport* prunreport = new RunReport(forcesummary, progress);
+
+    registry.Register(prunreport);
+
+    // create and register the posterior likelihood objects
+
+    // retrieve user-defined values from collection objects
+    // obsolete? StringVec1d forcestr = forcesummary.GetForceString();
+    long int nregs = registry.GetDataPack().GetNRegions();
+    long int nreps = registry.GetChainParameters().GetNReps();
+    long int paramsize = forcesummary.GetAllNParameters();
+
+    // the maximizer itself
+    Maximizer* pmax = new Maximizer(paramsize);
+    pmax->SetConstraints(forcesummary.GetIdenticalGroupedParams());
+    //The above could probably be moved into the maximizer constructor.
+    registry.Register(pmax);
+
+    const ParamVector params(true);
+    if (!params.NumVariableParameters() &&
+        registry.GetChainParameters().IsBayesian() &&
+        !registry.GetDataPack().AnyMapping())
+    {
+        string msg = "All parameters are fixed in value, which in a Bayesian run "
+            "means no estimation is possible.  This only makes sense if you are "
+            "mapping a trait.  Please check your xml input file.";
+        prunreport->ReportUrgent(msg);
+        exit(0);
+    }
+
+    // single likelihood
+    SinglePostLike* plsingle = new SinglePostLike(forcesummary, nregs, nreps, paramsize);
+    registry.Register(plsingle);
+    pmax->GradientGuideSetup(params, plsingle);
+
+    // replicate likelihood
+    ReplicatePostLike* plrep = new ReplicatePostLike(forcesummary, nregs, nreps, paramsize);
+    pmax->GradientGuideSetup(params, plrep);
+    registry.Register(plrep);
+
+    // region likelihood
+    // create the vector to handle parameter rescaling
+    DoubleVec2d paramscalars(registry.GetDataPack().CreateParamScalarVector());
+    RegionPostLike* plreg = new RegionPostLike(forcesummary, nregs, nreps, paramsize, paramscalars);
+    pmax->GradientGuideSetup(params, plreg);
+    registry.Register(plreg);
+
+    // multi-region likelihood
+    // with background mu rates varying over regions via a gamma distribution
+    GammaRegionPostLike* plgammareg = new GammaRegionPostLike(forcesummary, nregs, nreps, paramsize, paramscalars);
+    pmax->GradientGuideSetup(params, plgammareg);
+    registry.Register(plgammareg);
+
+    // Setup Analyzer subsystem {will do plots and profiles}
+    Analyzer *analyzer = new Analyzer(forcesummary, params, pmax);
+    registry.Register(analyzer);
+
+    // But we might be doing a Bayesian analysis, in which case we need a
+    // bayesian analyzer instead.  For now, just do a 1D analysis.
+
+    BayesAnalyzer_1D *bayesanalyzer = new BayesAnalyzer_1D();
+    registry.Register(bayesanalyzer);
+} // FinishRegistry
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+int main(int argc, char **argv)
+{
+    // We attempt to deal with cases of a too-big output file, but
+    // we don't know how to make this work on Windows yet!
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+    if (! setjmp(prewrite))
+    {
+        signal (SIGXFSZ, & CatchFileTooBigSignal);
+    }
+    else
+    {
+        cerr << "Something went unexpectedly wrong with file writing. Exiting.";
+        exit(lamarccodes::unknownError);
+    }
+#endif
+
+    int return_code = lamarccodes::cleanReturn;
+    bool isbatch;
+    bool explicitExit;
+
+#ifdef STATIONARIES
+    // clear out files used for stationaries record-keeping
+    ofstream of;
+    of.open(INTERVALFILE.c_str(),ios::trunc);
+    of.close();
+    of.open(MIGFILE.c_str(),ios::trunc);
+    of.close();
+    of.open(DISFILE.c_str(),ios::trunc);
+    of.close();
+    of.open(RECFILE.c_str(),ios::trunc);
+    of.close();
+#endif // STATIONARIES
+
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+    // empty out file we'll write data to
+    ofstream of_denovo;
+    ofstream of_denovo_info;
+
+    of_denovo.open(SINGLE_DENOVO_FILE.c_str(),ios::trunc);
+    of_denovo.close();
+    of_denovo_info.open(SINGLE_DENOVO_INFO.c_str(),ios::trunc);
+    of_denovo_info.close();
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+    try
+    {
+        // handle user input including menu and data file
+        bool hasfile;
+        string filename;
+        ParseOptions(argc, argv, & isbatch, &explicitExit, & hasfile, filename);
+
+        bool runProgram = DoUserInput(isbatch,hasfile,filename);
+
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+        // create header for denovo_params.txt
+        // we do it after user input because we need to know
+        // which params are valid
+        //
+        // also show only the simulation parameters for the info file
+        of_denovo.open(SINGLE_DENOVO_FILE.c_str(),ios::out | ios::app);
+        of_denovo_info.open(SINGLE_DENOVO_INFO.c_str(),ios::trunc);
+
+        const ParamVector pvec(true);
+        const ForceParameters & fp = registry.GetForceSummary().GetStartParameters();
+        DoubleVec1d sv = fp.GetGlobalParameters();
+
+        StringVec1d names;
+        DoubleVec1d startValues;
+
+        ParamVector::const_iterator pi;
+        size_t startI;
+
+        for(pi = pvec.begin(), startI=0; pi != pvec.end() && startI < sv.size(); pi++,startI++)
+        {
+            if(pi->IsValidParameter())
+            {
+                names.push_back(pi->GetShortName());
+                startValues.push_back(sv[startI]);
+            }
+        }
+
+        assert(names.size() == startValues.size());
+
+        for(startI=0; startI < names.size(); startI++)
+        {
+            if(startI != 0)
+            {
+                of_denovo << "\t";
+                of_denovo_info << "\t";
+            }
+            of_denovo << names[startI];
+            of_denovo_info << names[startI];
+        }
+        // carriage return for info file only, still more in denovo_params.txt
+        of_denovo_info << endl;
+
+        ////////////////////////////////////////////////////////////////
+        // these are for the denovo_params file only
+        //
+
+        if(registry.GetForceSummary().CheckForce(force_MIG) ||
+           registry.GetForceSummary().CheckForce(force_DIVMIG))
+        {
+            deque<bool> migsToPrint;
+            for(pi = pvec.begin(), startI=0; pi != pvec.end() && startI < sv.size(); pi++,startI++)
+            {
+                if(pi->IsForce(force_MIG) || pi->IsForce(force_DIVMIG))
+                {
+                    if(pi->IsValidParameter())
+                    {
+                        of_denovo << "\t" << pi->GetShortName() << "Count";
+                        migsToPrint.push_back(true);
+                    }
+                    else
+                    {
+                        migsToPrint.push_back(false);
+                    }
+                }
+            }
+            registry.SetMigsToPrint(migsToPrint);
+        }
+
+        if(registry.GetForceSummary().CheckForce(force_REC))
+        {
+            of_denovo << "\t" << "RecCount";
+        }
+
+        of_denovo << endl;
+        of_denovo.close();
+
+        ////////////////////////////////////////////////////////////////
+        // these are for the denovo_info file only
+        //
+        for(startI=0; startI < startValues.size(); startI++)
+        {
+            if(startI != 0)
+            {
+                of_denovo_info << "\t";
+            }
+            of_denovo_info << startValues[startI];
+        }
+        of_denovo_info << endl;
+        of_denovo_info << "==============";
+        of_denovo_info << endl;
+        of_denovo_info.close();
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+#ifdef JSIM  // call with true to get separate files by region and population
+        registry.GetDataPack().WritePopulationXMLFiles(false);
+        string ofname("flucinfile");
+        registry.GetDataPack().WriteFlucFile(ofname);
+#endif // JSIM
+
+        // Run the chainmanager.
+        Maximizer & maximizer = registry.GetMaximizer();
+
+        // Prepare for the results.
+        RunReport & runreport = registry.GetRunReport();
+
+        if (runProgram)
+        {
+            try
+            {
+                XMLOutfile xmlout;
+                xmlout.Display();
+            }
+            catch (file_error & e)
+            {
+                throw e;
+            }
+
+#ifdef DYNAMETER_LEVEL
+            // This controls whether user-interaction startup time is to be excluded from the global measures or not.
+            // If tracing/metering, restart the "epoch" timers when user interaction is done and "real" processing starts.
+            LocalDynameter::s_lastClock = LocalDynameter::s_startClock = rdtsc();
+#endif // DYNAMETER_LEVEL
+
+            // Run the main program.
+            ChainManager chainmanager(runreport, maximizer);
+            chainmanager.DoAllChains();
+
+            // EWFIX -- does this belong in chainmanager.DoAllChains() ??
+
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+            // next to no reporting for LAMARC_QA_SINGLE_DENOVOS
+            ofstream of;
+            of.open(SINGLE_DENOVO_INFO.c_str(), ios::app);
+
+            of << setiosflags ( ios_base::right ) << setw ( 10 );
+            of << registry.GetDenovoTreeRejectCount() << " denovo trees failed during construction" << endl;
+            of << setiosflags ( ios_base::right ) << setw ( 10 );
+            of << registry.GetDenovoCount() << " denovo trees successfully constructed" << endl;
+            of << setiosflags ( ios_base::right ) << setw ( 10 );
+            of << registry.GetDenovoMaxRejectCount() << " constructed trees failed in maximizer " << endl;
+            of.close();
+#else
+#ifndef STATIONARIES
+            string xmlReportFileName = registry.GetUserParameters().GetXMLReportFileName();
+            assert (!xmlReportFileName.empty());
+            XMLReport xmlReportFile(xmlReportFileName);
+            TiXmlElement * topElem = xmlReportFile.Write(chainmanager);
+            string profilePrefix = registry.GetUserParameters().GetProfilePrefix();
+            WriteProfileSpreads(profilePrefix,topElem);
+            bool writeCurves = registry.GetUserParameters().GetWriteCurveFiles();
+            if(writeCurves && registry.GetChainParameters().IsBayesian())
+            {
+                string curvefilePrefix = registry.GetUserParameters().GetCurveFilePrefix();
+                BayesAnalyzer_1D & bayesAnalyzer = registry.GetBayesAnalyzer_1D();
+                WriteConsolidatedCurveFiles(curvefilePrefix,bayesAnalyzer);
+            }
+#endif // STATIONARIES
+
+            if (registry.GetUserParameters().GetProgress() != NONE)
+            {
+                string msg = "Output written to ";
+                msg += registry.GetUserParameters().GetResultsFileName();
+                runreport.ReportUrgent(msg);
+                if (registry.GetUserParameters().GetWriteSumFile())
+                {
+                    msg = "Output summary file written to ";
+                    msg += registry.GetUserParameters().GetTreeSumOutFileName();
+                    runreport.ReportUrgent(msg);
+                }
+
+                StringVec1d profilenames = registry.GetUserParameters().GetProfileNames();
+                if (profilenames.size() > 0)
+                {
+                    msg = "Wrote to profile file(s):  " + profilenames[0];
+                    for (unsigned long i=1; i<profilenames.size(); i++)
+                    {
+                        msg += ", " + profilenames[i];
+                    }
+                    runreport.ReportUrgent(msg);
+                }
+
+                StringVec1d curvefilenames = registry.GetUserParameters().GetCurveFileNames();
+                if (curvefilenames.size() > 0)
+                {
+                    //LS NOTE: not GetWriteCurveFiles() because if not bayesian, that
+                    // parameter is meaningless.  This version is much safer.
+                    msg = "Wrote to curve file(s):  " + curvefilenames[0];
+                    for (unsigned long int i = 1; i < curvefilenames.size(); i++)
+                    {
+                        msg += ", " + curvefilenames[i];
+                    }
+                    runreport.ReportUrgent(msg);
+                }
+
+                StringVec1d mapfilenames = registry.GetUserParameters().GetMapFileNames();
+                if (mapfilenames.size() > 0)
+                {
+                    msg = "Wrote to mapping file(s):  " + mapfilenames[0];
+                    for (unsigned long int i = 1; i < mapfilenames.size(); i++)
+                    {
+                        msg += ", " + mapfilenames[i];
+                    }
+                    runreport.ReportUrgent(msg);
+                }
+
+                set<string> tracefilenames = registry.GetUserParameters().GetTraceFileNames();
+                if (tracefilenames.size() > 0)
+                {
+                    msg = "Wrote to Tracer file(s):";
+                    set<string>::iterator tname = tracefilenames.begin();
+                    msg += " " + *tname;
+                    for (tname++; tname != tracefilenames.end(); tname++)
+                    {
+                        msg += ", " + *tname;;
+                    }
+                    runreport.ReportUrgent(msg);
+                }
+
+                runreport.ReportUrgent("\nProgram completed.", 0);
+                runreport.ReportUrgent("You may now exit manually if we haven't already exited automatically.", 0);
+                runreport.ReportUrgent("In either case, you may now examine your results.", 0);
+                runreport.ReportUrgent("For assistance interpreting your results, see "
+                                       "http://evolution.gs.washington.edu/lamarc/documentation/"
+                                       " (or your local copy of the documentation).",
+                                       0);
+            }
+#endif // !LAMARC_QA_SINGLE_DENOVOS
+        }
+    }
+
+    catch (const bad_alloc &)
+    {
+        cerr << endl << endl
+             << "LAMARC has terminated because an insufficient amount of memory"
+             << endl
+             << "is available.  If you are running additional programs"
+             << endl
+             << "simultaneously on your computer, try terminating them and"
+             << endl
+             << "re-executing LAMARC.  Or, try running LAMARC on a different"
+             << endl << "computer that has more memory available."
+             << endl << endl;
+        return_code = return_code | lamarccodes::badAllocation;
+    }
+
+    catch(const unrecognized_tag_error & e)
+    {
+        cerr <<  xmlstr::XML_IERR_NO_TAG_0
+            + e.what()
+            + xmlstr::XML_IERR_NO_TAG_1
+            + ToString(e.where())
+            + xmlstr::XML_IERR_NO_TAG_2
+             << endl ;
+        return_code = return_code | lamarccodes::fileError;
+    }
+
+    catch (exception & ex)
+    {
+        cerr << ex.what() << endl;
+        return_code = return_code | lamarccodes::unknownError;
+    }
+
+    // needed for windows clickable executables
+    // Mac handles this with an "-x" in the invoking apple script
+#if defined(LAMARC_COMPILE_MSWINDOWS)
+    GetCloseConfirmation();
+#else
+    if(explicitExit)
+    {
+        GetCloseConfirmation();
+    }
+#endif
+
+    return return_code;
+
+} // LAMARC main routine
+
+//------------------------------------------------------------------------------------
+
+void GetCloseConfirmation()
+{
+    cout << "Press Enter to Quit" << endl;
+    string dummyString;
+    MyCinGetline(dummyString);
+}
+
+//------------------------------------------------------------------------------------
+
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+
+void CatchFileTooBigSignal(int signal)
+{
+    //The kernel has sent a signal that the file size limit has been exceeded.
+    //We're going to assume that this was for the summary file writing, since
+    // that's the biggest file we write.
+    // Stop writing to the summary file.
+    registry.GetRunReport().ReportUrgent("File size exceeded for summary file writing--continuing,"
+                                         " but without writing to the summary file any more.  "
+                                         "Try the command 'unlimit filesize' if on a UNIX system.");
+    longjmp(prewrite, 1);
+    //We set this jumppoint in ChainManager at a point where we can close
+    // the summary file and stop writing to it.
+}
+
+#endif
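+
+// The mechanism above in miniature (an illustrative sketch, independent of the
+// LAMARC types): main() records a jump point with setjmp() before installing the
+// handler, and the handler longjmp()s back when the kernel raises SIGXFSZ, so the
+// program can stop writing the oversized file instead of being killed by the
+// default signal action.  WriteNextChunk() is an assumed placeholder.
+//
+//   #include <setjmp.h>
+//   #include <signal.h>
+//
+//   static jmp_buf tooBig;
+//   static bool keepWriting = true;
+//   extern "C" void OnTooBig(int) { longjmp(tooBig, 1); }
+//
+//   int main()
+//   {
+//       if (setjmp(tooBig) == 0)
+//           signal(SIGXFSZ, &OnTooBig);
+//       else
+//           keepWriting = false;    // limit hit: stop writing, keep running
+//
+//       while (keepWriting)
+//           WriteNextChunk();       // writer that may trigger SIGXFSZ
+//   }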
+
+//____________________________________________________________________________________
diff --git a/src/control/lamarc.h b/src/control/lamarc.h
new file mode 100644
index 0000000..bdd8a6d
--- /dev/null
+++ b/src/control/lamarc.h
@@ -0,0 +1,43 @@
+// $Id: lamarc.h,v 1.25 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/***************************************************************
+ This file (not a class at this time) contains the main driver
+ routines for LAMARC.  It does the following:
+
+ (1) Set up registry (main)
+ (2) Accept user input (DoUserInput)
+ (3) Finalize registry (FinishRegistry)
+ (4) Launch chain manager (main)
+
+ Mary Kuhner (based on work by Jon Yamato) April 2001
+***************************************************************/
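+
+/*
+ A compressed sketch of that flow as main() in lamarc.cpp drives it (simplified;
+ batch handling, QA builds, and error recovery are omitted).  Steps (2) and (3)
+ both happen inside DoUserInput(), which calls FinishRegistry() before returning.
+
+   ParseOptions(argc, argv, &isBatch, &explicitExit, &hasFile, fileName);
+   bool run = DoUserInput(isBatch, hasFile, fileName);
+   if (run)
+   {
+       Maximizer& maximizer = registry.GetMaximizer();
+       RunReport& runreport = registry.GetRunReport();
+       ChainManager chainmanager(runreport, maximizer);
+       chainmanager.DoAllChains();
+   }
+*/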
+
+#ifndef LAMARC_H
+#define LAMARC_H
+
+#include <string>
+
+class UIInterface;
+
+extern void GetCloseConfirmation();
+extern void ParseOptions(long argc, char** argv, bool* isBatch, bool* explicitExit, bool* hasFile, std::string & infileName);
+extern bool DoUserInput(bool batchmode, bool infileprovided, std::string infilename);
+extern void FinishRegistry(UIInterface&);
+
+#if defined(LAMARC_COMPILE_LINUX) || defined(LAMARC_COMPILE_MACOSX)
+extern void CatchFileTooBigSignal(int signal);
+#endif
+
+extern int main(int, char **);
+
+//------------------------------------------------------------------------------------
+
+#endif // LAMARC_H
+
+//____________________________________________________________________________________
diff --git a/src/control/regiongammainfo.cpp b/src/control/regiongammainfo.cpp
new file mode 100644
index 0000000..3d31850
--- /dev/null
+++ b/src/control/regiongammainfo.cpp
@@ -0,0 +1,146 @@
+// $Id: regiongammainfo.cpp,v 1.8 2011/03/07 06:08:47 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, Eric Rynes and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "regiongammainfo.h"
+#include "force.h"
+#include "stringx.h"
+#include "xml_strings.h"
+
+//------------------------------------------------------------------------------------
+
+RegionGammaInfo::RegionGammaInfo(double startValue, ParamStatus paramStatus,
+                                 bool doProfile, proftype profType,
+                                 std::string profTypeSummaryDescription,
+                                 RegionGammaForce *pRegionGammaForce)
+    : m_startValue(startValue),
+      m_pstatus(paramStatus),
+      m_doProfile(doProfile),
+      m_proftype(profType),
+      m_proftypeSummaryDescription(profTypeSummaryDescription),
+      m_CurrentlyPerformingAnalysisOverRegions(false),
+      m_HaveMLE(false),
+      m_HaveProfile(false),
+      m_pRegionGammaForce(pRegionGammaForce)
+{
+}
+
+RegionGammaInfo::~RegionGammaInfo()
+{
+    delete m_pRegionGammaForce;
+}
+
+proftype RegionGammaInfo::GetProfType(void) const
+{
+    if (m_doProfile)
+    {
+        return m_proftype;
+    }
+    return profile_NONE;
+}
+
+double RegionGammaInfo::GetLowValue(void) const
+{
+    if (!m_pRegionGammaForce)
+        throw implementation_error("RegionGammaInfo is missing m_pRegionGammaForce");
+    return m_pRegionGammaForce->GetLowVal();
+}
+
+double RegionGammaInfo::GetHighValue(void) const
+{
+    if (!m_pRegionGammaForce)
+        throw implementation_error("RegionGammaInfo is missing m_pRegionGammaForce");
+    return m_pRegionGammaForce->GetHighVal();
+}
+
+double RegionGammaInfo::GetLowMultiplier(void) const
+{
+    if (!m_pRegionGammaForce)
+        throw implementation_error("RegionGammaInfo is missing m_pRegionGammaForce");
+    return m_pRegionGammaForce->GetLowMult();
+}
+
+double RegionGammaInfo::GetHighMultiplier(void) const
+{
+    if (!m_pRegionGammaForce)
+        throw implementation_error("RegionGammaInfo is missing m_pRegionGammaForce");
+    return m_pRegionGammaForce->GetHighMult();
+}
+
+double RegionGammaInfo::GetMaxValue(void) const
+{
+    if (!m_pRegionGammaForce)
+        throw implementation_error("RegionGammaInfo is missing m_pRegionGammaForce");
+    return m_pRegionGammaForce->GetMaximizerMaxVal();
+}
+
+double RegionGammaInfo::GetMinValue(void) const
+{
+    if (!m_pRegionGammaForce)
+        throw implementation_error("RegionGammaInfo is missing m_pRegionGammaForce");
+    return m_pRegionGammaForce->GetMaximizerMinVal();
+}
+
+void RegionGammaInfo::ConstrainToMax()
+{
+    if (m_pstatus.Status()==pstat_constant)
+    {
+        assert(false);
+        throw implementation_error("Tried to constrain alpha to its maximum value when it was already constrained.");
+    }
+    m_pstatus = ParamStatus(pstat_constant);
+    m_doProfile = false;
+    m_HaveProfile = false; // possibly unnecessary
+    m_startValue = GetMaxValue();
+}
+
+StringVec1d RegionGammaInfo::ToXML(long nspaces) const
+{
+    StringVec1d xmllines;
+    string line = MakeIndent(MakeTag(xmlstr::XML_TAG_REGION_GAMMA),nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+    string mytag(MakeTag(xmlstr::XML_TAG_START_VALUES));
+    DoubleVec1d startvalues;
+    startvalues.push_back(m_startValue);
+    line = MakeIndent(mytag,nspaces) + ToString(startvalues,5)
+        + " " + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    mytag = MakeTag(xmlstr::XML_TAG_PROFILES);
+    string profile_st;
+    if (m_doProfile)
+    {
+        profile_st = ToString(m_proftype);
+    }
+    else
+    {
+        profile_st = ToString(profile_NONE);
+    }
+    line = MakeIndent(mytag,nspaces) + " " + profile_st
+        + " " + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    mytag = MakeTag(xmlstr::XML_TAG_CONSTRAINTS);
+    line = MakeIndent(mytag,nspaces) + " " + ToString(m_pstatus)
+        + " " + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    //The gamma doesn't have groups or a prior.  Change this if we give it some!
+
+    nspaces -= INDENT_DEPTH;
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_REGION_GAMMA),nspaces);
+    xmllines.push_back(line);
+    return xmllines;
+}
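+
+// The lines returned above form an XML fragment shaped roughly like the following.
+// The tag names come from the xmlstr constants used above; the literal strings and
+// values shown here are illustrative guesses, and the indentation comes from
+// MakeIndent().
+//
+//   <region-gamma>
+//     <start-values> 1.00000 </start-values>
+//     <profiles> none </profiles>
+//     <constraints> constant </constraints>
+//   </region-gamma>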
+
+//____________________________________________________________________________________
diff --git a/src/control/regiongammainfo.h b/src/control/regiongammainfo.h
new file mode 100644
index 0000000..203ec51
--- /dev/null
+++ b/src/control/regiongammainfo.h
@@ -0,0 +1,72 @@
+// $Id: regiongammainfo.h,v 1.10 2011/03/08 19:21:53 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef REGIONGAMMAINFO_H
+#define REGIONGAMMAINFO_H
+
+#include "constants.h"
+#include "plotstat.h"
+#include "paramstat.h"
+
+class RegionGammaForce;
+
+class RegionGammaInfo
+{
+  private:
+    double m_startValue;
+    ParamStatus m_pstatus;
+    bool m_doProfile;
+    proftype m_proftype;
+    std::string m_proftypeSummaryDescription;
+    bool m_CurrentlyPerformingAnalysisOverRegions;
+    double m_MLE;
+    bool m_HaveMLE;
+    ProfileStruct m_OverallProfile;
+    bool m_HaveProfile;
+    RegionGammaForce *m_pRegionGammaForce;
+    RegionGammaInfo();
+    RegionGammaInfo(const RegionGammaInfo& src);
+
+  public:
+    RegionGammaInfo(double startValue, ParamStatus paramStatus,
+                    bool doProfile, proftype profType,
+                    std::string profTypeSummaryDescription,
+                    RegionGammaForce *pRegionGammaForce);
+    ~RegionGammaInfo();
+    double GetStartValue(void) const { return m_startValue; };
+    ParamStatus GetParamStatus(void) const { return m_pstatus; };
+    bool DoProfile(void) const { return m_doProfile; };
+    proftype GetProfType(void) const;
+    std::string GetProftypeSummaryDescription(void) const
+    { return m_proftypeSummaryDescription; };
+    void Activate(void) { m_CurrentlyPerformingAnalysisOverRegions = true; };
+    void Deactivate(void) { m_CurrentlyPerformingAnalysisOverRegions = false; };
+    bool CurrentlyPerformingAnalysisOverRegions(void) const
+    { return m_CurrentlyPerformingAnalysisOverRegions; };
+    void SetMLE(double value) { m_MLE = value; m_HaveMLE = true; };
+    bool HaveMLE(void) const { return m_HaveMLE; };
+    double GetMLE(void) const { return m_MLE; };
+    double GetLowValue(void) const;
+    double GetHighValue(void) const;
+    double GetLowMultiplier(void) const;
+    double GetHighMultiplier(void) const;
+    double GetMaxValue(void) const;
+    double GetMinValue(void) const;
+    void AddProfile(const ProfileStruct& profile)
+    { m_OverallProfile = profile; m_HaveProfile = true; };
+    bool HaveProfile(void) const { return m_HaveProfile; };
+    const ProfileStruct& GetProfile(void) const { return m_OverallProfile; };
+    void ConstrainToMax();
+    StringVec1d ToXML(long nspaces) const;
+};
+
+#endif // REGIONGAMMAINFO_H
+
+//____________________________________________________________________________________
diff --git a/src/control/registry.cpp b/src/control/registry.cpp
new file mode 100644
index 0000000..9c3c5c7
--- /dev/null
+++ b/src/control/registry.cpp
@@ -0,0 +1,1122 @@
+// $Id: registry.cpp,v 1.103 2013/10/17 21:36:15 mkkuhner Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>
+
+#include "local_build.h"
+
+#include "arranger.h"
+#include "analyzer.h"
+#include "bayesanalyzer_1d.h"
+#include "defaults.h"
+#include "dlmodel.h"
+#include "errhandling.h"
+#include "force.h"
+#include "likelihood.h"
+#include "maximizer.h"
+#include "region.h"
+#include "regiongammainfo.h"
+#include "registry.h"
+#include "runreport.h"
+#include "tree.h"
+#include "treesum.h"
+#include "ui_interface.h"
+#include "ui_regid.h"
+#include "ui_vars.h"
+#include "cellmanager.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+class RegionGammaInfo;
+
+//------------------------------------------------------------------------------------
+
+Registry::Registry()
+    :
+    protoTree(NULL),
+    protoTreeSummary(NULL),
+    chainparams(NULL),
+    userparams(NULL),
+    forcesummary(NULL),
+    random(NULL),
+    runreport(NULL),
+    maximizer(NULL),
+    PLsingle(NULL),
+    PLreplicate(NULL),
+    PLregion(NULL),
+    PLgammaRegion(NULL),
+    pRegionGammaForceInfo(NULL),
+    bayesanalyzer(NULL),
+    analyzer(NULL),
+    m_currRegion(-1),
+    m_ARGfound(false),
+    m_convert_output_to_eliminate_zeroes(defaults::convert_output_to_eliminate_zeroes)
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+    ,
+    m_denovoTreeRejectCount(0),
+    m_denovoMaxRejectCount(0),
+    m_lastDenovoGood(false),
+    m_defaultBranchCount(0),
+    m_denovoCount(defaults::numDenovos)
+#endif // LAMARC_QA_SINGLE_DENOVOS
+{
+    cellmanager = new CellManager;
+} // Registry constructor
+
+//------------------------------------------------------------------------------------
+
+Registry::~Registry()
+{
+    delete protoTree;
+    delete protoTreeSummary;
+    delete PLsingle;
+    delete PLreplicate;
+    delete PLregion;
+    delete PLgammaRegion;
+    delete runreport;
+    delete maximizer;
+    delete bayesanalyzer;
+    delete analyzer;
+    delete chainparams;
+    delete userparams;
+    delete random;
+    delete forcesummary;
+    delete pRegionGammaForceInfo;
+} // Registry destructor
+
+//------------------------------------------------------------------------------------
+
+const Tree& Registry::GetProtoTree() const
+{
+    if (protoTree == NULL) ThrowBadPrototype();
+    return(*protoTree);
+} // GetProtoTree
+
+//------------------------------------------------------------------------------------
+
+const TreeSummary& Registry::GetProtoTreeSummary() const
+{
+    if (protoTreeSummary == NULL) ThrowBadPrototype();
+    return (*protoTreeSummary);
+} // GetProtoTreeSummary
+
+//------------------------------------------------------------------------------------
+
+RunReport& Registry::GetRunReport()
+{
+    if (runreport == NULL) ThrowBadPrototype();
+    return (*runreport);
+} // Registry::GetRunReport()
+
+//------------------------------------------------------------------------------------
+
+const RunReport& Registry::GetRunReport() const
+{
+    if (runreport == NULL) ThrowBadPrototype();
+    return (*runreport);
+} // Registry::GetRunReport()
+
+//------------------------------------------------------------------------------------
+
+Maximizer& Registry::GetMaximizer()
+{
+    if (maximizer == NULL) ThrowBadPrototype();
+    return (*maximizer);
+} // Registry::GetMaximizer
+
+Maximizer * Registry::GetMaximizerPtr()
+{
+    if (maximizer == NULL) ThrowBadPrototype();
+    return (maximizer);
+} // Registry::GetMaximizerPtr
+
+//------------------------------------------------------------------------------------
+
+BayesAnalyzer_1D& Registry::GetBayesAnalyzer_1D()
+{
+    if (bayesanalyzer == NULL) ThrowBadPrototype();
+    return (*bayesanalyzer);
+} // Registry::GetBayesAnalyzer_1D
+
+BayesAnalyzer_1D * Registry::GetBayesAnalyzer_1DPtr()
+{
+    if (bayesanalyzer == NULL) ThrowBadPrototype();
+    return (bayesanalyzer);
+} // Registry::GetBayesAnalyzer_1DPtr
+
+//------------------------------------------------------------------------------------
+
+SinglePostLike& Registry::GetSinglePostLike()
+{
+    if (PLsingle == NULL) ThrowBadPrototype();
+    return (*PLsingle);
+} // Registry::GetSinglePostLike
+
+//------------------------------------------------------------------------------------
+
+ReplicatePostLike& Registry::GetReplicatePostLike()
+{
+    if (PLreplicate == NULL) ThrowBadPrototype();
+    return (*PLreplicate);
+} // Registry::GetReplicatePostLike
+
+//------------------------------------------------------------------------------------
+
+RegionPostLike& Registry::GetRegionPostLike()
+{
+    if (GetRegionGammaInfo())
+    {
+        if (PLgammaRegion == NULL) ThrowBadPrototype();
+        return (*PLgammaRegion);
+    }
+    else
+    {
+        if (PLregion == NULL) ThrowBadPrototype();
+        return (*PLregion);
+    }
+} // Registry::GetRegionPostLike
+
+//------------------------------------------------------------------------------------
+
+Analyzer& Registry::GetAnalyzer()
+{
+    if (analyzer == NULL) ThrowBadPrototype();
+    return (*analyzer);
+} // Registry::GetAnalyzer
+
+//------------------------------------------------------------------------------------
+
+void Registry::Register(Tree* tree)
+{
+    delete protoTree;
+    protoTree = tree;
+} // Register (Tree)
+
+//------------------------------------------------------------------------------------
+
+void Registry::Register(RunReport* report)
+{
+    delete runreport;
+    runreport = report;
+
+} // Registry::Register(RunReport)
+
+//------------------------------------------------------------------------------------
+
+void Registry::Register(Maximizer* maxim)
+{
+    delete maximizer;
+    maximizer = maxim;
+} // Registry::Register (Maximizer)
+
+//------------------------------------------------------------------------------------
+
+void Registry::Register(BayesAnalyzer_1D* bayesan)
+{
+    delete bayesanalyzer;
+    bayesanalyzer = bayesan;
+} // Registry::Register (BayesAnalyzer_1D)
+
+//------------------------------------------------------------------------------------
+
+void Registry::Register(SinglePostLike* singlel)
+{
+    delete PLsingle;
+    PLsingle = singlel;
+} // Registry::Register (SinglePostLike)
+
+//------------------------------------------------------------------------------------
+
+void Registry::Register(ReplicatePostLike* replicate)
+{
+    delete PLreplicate;
+    PLreplicate = replicate;
+} // Registry::Register (ReplicatePostLike)
+
+//------------------------------------------------------------------------------------
+
+void Registry::Register(RegionPostLike* region)
+{
+    delete PLregion;
+    PLregion = region;
+} // Registry::Register (RegionPostLike)
+
+//------------------------------------------------------------------------------------
+
+void Registry::Register(GammaRegionPostLike* gammaRegion)
+{
+    delete PLgammaRegion;
+    PLgammaRegion = gammaRegion;
+} // Registry::Register (GammaRegionPostLike)
+
+//------------------------------------------------------------------------------------
+
+void Registry::Register(RegionGammaInfo* pRGFI)
+{
+    delete pRegionGammaForceInfo;
+    pRegionGammaForceInfo = pRGFI;
+} // Registry::Register (RegionGammaInfo)
+
+//------------------------------------------------------------------------------------
+
+void Registry::Register(Analyzer *thisanalyzer)
+{
+    delete analyzer;
+    analyzer = thisanalyzer;
+} // Registry::Register (Analyzer)
+
+//------------------------------------------------------------------------------------
+
+void Registry::Register(TreeSummary* treesum)
+{
+    delete protoTreeSummary;
+    protoTreeSummary = treesum;
+} // Registry::Register (TreeSummary)
+
+//------------------------------------------------------------------------------------
+
+void Registry::SetCurrentRegionIndex(long regionIndex)
+{
+    assert( regionIndex >= 0);
+    assert( regionIndex < datapack.GetNRegions());
+    m_currRegion = regionIndex;
+}
+
+//------------------------------------------------------------------------------------
+
+long Registry::GetCurrentReclocOffset() const
+{
+    assert( m_currRegion >= 0);
+    assert( m_currRegion < datapack.GetNRegions());
+    const Region& reg = datapack.GetRegion(m_currRegion);
+    long globalStart = reg.GetGlobalMapPosition(0);
+    return globalStart;
+}
+
+//------------------------------------------------------------------------------------
+
+void Registry::ThrowBadPrototype() const
+{
+    logic_error e("Attempt to use unregistered prototype");
+    throw e;
+}
+
+//------------------------------------------------------------------------------------
+
+void Registry::FinalizeDataPack(UIInterface& ui)
+{
+    //Set the effective population sizes
+    DoubleVec1d effectivePopSizes
+        = ui.GetCurrentVars().datapackplus.GetEffectivePopSizes();
+    long nRegions = datapack.GetNRegions();
+    assert(static_cast<long>(effectivePopSizes.size()) == nRegions);
+    for (long regnum=0; regnum < nRegions; regnum++)
+    {
+        datapack.GetRegion(regnum).SetEffectivePopSize(effectivePopSizes[regnum]);
+    }
+
+    //Now set up all the trait information and whether to simulate
+    for (long regnum=0; regnum < nRegions; regnum++)
+    {
+        long nLoci = datapack.GetRegion(regnum).GetNloci();
+        long regoffset = datapack.GetRegion(regnum).GetSiteSpan().first;
+        for (long locusnum=0; locusnum < nLoci; locusnum++)
+        {
+            Locus& locus = datapack.GetRegion(regnum).GetLocus(locusnum);
+            locus.SetShouldSimulate(ui.GetCurrentVars().datapackplus.GetSimulateData(regnum, locusnum));
+            if (locus.IsMovable())
+            {
+                UIRegId regID(regnum, locusnum, ui.GetCurrentVars());
+                const UIVarsTraitModels& traits = ui.GetCurrentVars().traitmodels;
+                locus.SetName(traits.GetName(regID));
+                locus.SetAnalysisType(traits.GetAnalysisType(regID));
+                locus.SetAllowedRange(traits.GetRange(regID), regoffset);
+                locus.SetGlobalMapPosition(traits.GetInitialMapPosition(regID));
+                //And set up the phenotypes (for simulation, possibly)
+                locus.SetPhenotypes(traits.GetPhenotypes(regID));
+            }
+        }
+        // This call sets up the moving and non-moving loci, gets them
+        // assigned to their correct positions on the overall map of the
+        // region, and creates their DLCells and DLCalculators.
+        datapack.GetRegion(regnum).SetupAndMoveAllLoci();
+    }
+
+    // If the user asked for recombination and at least one region lacks
+    // the needed variable sites, abort now.
+    if (forcesummary->CheckForce(force_REC))
+    {
+        for (long regnum = 0; regnum < nRegions; regnum++)
+        {
+            bool okay = datapack.GetRegion(regnum).RecombinationCanBeEstimated();
+            if (!okay)
+            {
+                incorrect_data e("***ERROR:  2+ variable sites in every region required to use recombination");
+                throw e;
+            }
+        }
+    }
+
+} // FinalizeDataPack
+
+//------------------------------------------------------------------------------------
+
+void Registry::InstallChainParameters(UIInterface& ui)
+{
+    UIVarsChainParameters & chains = ui.GetCurrentVars().chains;
+
+    double dropTiming = chains.GetDropArrangerRelativeTiming();
+    double sizeTiming = chains.GetSizeArrangerRelativeTiming();
+    double stairTiming = chains.GetStairArrangerRelativeTiming();
+    double hapTiming =
+        chains.GetHaplotypeArrangerPossible()
+        ? chains.GetHaplotypeArrangerRelativeTiming()
+        : 0.0 ;
+    double probhapTiming =
+        chains.GetProbHapArrangerPossible()
+        ? chains.GetProbHapArrangerRelativeTiming()
+        : 0.0 ;
+    double bayesTiming =
+        chains.GetDoBayesianAnalysis()
+        ? chains.GetBayesianArrangerRelativeTiming()
+        : 0.0 ;
+    double locusTiming =
+        ui.GetCurrentVars().traitmodels.AnyJumpingAnalyses()
+        ? chains.GetLocusArrangerRelativeTiming()
+        : 0.0 ;
+    double zilchTiming =
+        ui.GetCurrentVars().datapackplus.AnySimulation()
+        ? chains.GetZilchArrangerRelativeTiming()
+        : 0.0 ;
+    double epochsizeTiming =
+        chains.GetDoBayesianAnalysis()
+        ? chains.GetEpochSizeArrangerRelativeTiming()
+        : 0.0 ;
+
+    chainparams = new ChainParameters(
+        chains.GetChainTemperatures(),
+        chains.GetTemperatureInterval(),
+        chains.GetAdaptiveTemperatures(),
+        chains.GetInitialNumberOfChains(),
+        chains.GetInitialNumberOfSamples(),
+        chains.GetInitialChainSamplingInterval(),
+        chains.GetInitialNumberOfChainsToDiscard(),
+        chains.GetFinalNumberOfChains(),
+        chains.GetFinalNumberOfSamples(),
+        chains.GetFinalChainSamplingInterval(),
+        chains.GetFinalNumberOfChainsToDiscard(),
+        chains.GetNumberOfReplicates(),
+        chains.GetDoBayesianAnalysis(),
+        dropTiming,
+        sizeTiming,
+        hapTiming,
+        probhapTiming,
+        bayesTiming,
+        locusTiming,
+        zilchTiming,
+        stairTiming,
+        epochsizeTiming
+        );
+}
+
+//------------------------------------------------------------------------------------
+
+void Registry::InstallUserParameters(UIInterface& ui)
+{
+    UIVarsUserParameters & uparams = ui.GetCurrentVars().userparams;
+
+    userparams = new UserParameters(
+        uparams.GetCurveFilePrefix(),
+        uparams.GetMapFilePrefix(),
+        uparams.GetReclocFilePrefix(),
+        uparams.GetTraceFilePrefix(),
+        uparams.GetNewickTreeFilePrefix(),
+#ifdef LAMARC_QA_TREE_DUMP
+        uparams.GetArgFilePrefix(),
+#endif // LAMARC_QA_TREE_DUMP
+        uparams.GetDataFileName(),
+        uparams.GetProfilePrefix(),
+        uparams.GetResultsFileName(),
+        uparams.GetTreeSumInFileName(),
+        uparams.GetTreeSumOutFileName(),
+        uparams.GetXMLOutFileName(),
+        uparams.GetXMLReportFileName(),
+        uparams.GetVerbosity(),
+        uparams.GetProgress(),
+        uparams.GetPlotPost(),
+        uparams.GetUseSystemClock(),
+        uparams.GetReadSumFile(),
+        uparams.GetWriteSumFile(),
+        uparams.GetWriteCurveFiles(),
+        uparams.GetWriteReclocFiles(),
+        uparams.GetWriteTraceFiles(),
+        uparams.GetWriteNewickTreeFiles(),
+#ifdef LAMARC_QA_TREE_DUMP
+        uparams.GetWriteArgFiles(),
+        uparams.GetWriteManyArgs(),
+#endif // LAMARC_QA_TREE_DUMP
+        uparams.GetRandomSeed(),
+        uparams.GetProgramStartTime()
+        );
+
+    random = new Random(uparams.GetRandomSeed());
+}
+
+//------------------------------------------------------------------------------------
+
+DataModel * Registry::CreateDataModel(UIVarsDataModels& modelVars, const Locus& locus,
+                                      const UIVarsDataPackPlus& dataPackPlus, UIRegId regionId)
+{
+    data_type thisDataType = locus.GetDataType();
+    model_type modelType = modelVars.GetDataModelType(regionId);
+    if(!(ModelTypeAcceptsDataType(modelType,thisDataType)))
+    {
+        // If we reach this point, we have a data model for an inappropriate
+        // region/locus pair.  This should be avoided.  --LS NOTE
+        assert(false);
+        modelType = DefaultModelForDataType(thisDataType);
+    }
+
+    switch(modelType)
+    {
+        case F84:
+        {
+            F84Model * model
+                = new F84Model(
+                    locus.GetNmarkers(),
+                    modelVars.GetNumCategories(regionId),
+                    modelVars.GetCategoryRates(regionId),
+                    modelVars.GetCategoryProbabilities(regionId),
+                    modelVars.GetAutoCorrelation(regionId),
+                    modelVars.GetNormalization(regionId),
+                    modelVars.GetRelativeMuRate(regionId),
+                    modelVars.GetFrequencyA(regionId),
+                    modelVars.GetFrequencyC(regionId),
+                    modelVars.GetFrequencyG(regionId),
+                    modelVars.GetFrequencyT(regionId),
+                    modelVars.GetTTRatio(regionId),
+                    modelVars.GetCalcFreqsFromData(regionId),
+                    modelVars.GetPerBaseErrorRate(regionId)
+                    );
+            return model;
+        }
+        break;
+        case Brownian:
+        {
+            BrownianModel * model
+                = new BrownianModel(
+                    locus.GetNmarkers(),
+                    modelVars.GetNumCategories(regionId),
+                    modelVars.GetCategoryRates(regionId),
+                    modelVars.GetCategoryProbabilities(regionId),
+                    modelVars.GetAutoCorrelation(regionId),
+                    modelVars.GetNormalization(regionId),
+                    modelVars.GetRelativeMuRate(regionId)
+                    );
+            return model;
+        }
+        break;
+        case Stepwise:
+        {
+            StepwiseModel * model
+                = new StepwiseModel(
+                    locus.GetNmarkers(),
+                    dataPackPlus.GetUniqueAlleles(regionId.GetRegion(),
+                                                  regionId.GetLocus()),
+                    modelVars.GetNumCategories(regionId),
+                    modelVars.GetCategoryRates(regionId),
+                    modelVars.GetCategoryProbabilities(regionId),
+                    modelVars.GetAutoCorrelation(regionId),
+                    modelVars.GetNormalization(regionId),
+                    modelVars.GetRelativeMuRate(regionId)
+                    );
+            return model;
+        }
+        break;
+        case KAllele:
+        {
+            //If we have trait data with unknown haplotypes, we might have
+            // some possible alleles that are not represented in the tip data.
+            // In this case, we need to get those strings out of the individuals.
+            KAlleleModel * model
+                = new KAlleleModel(
+                    locus.GetNmarkers(),
+                    dataPackPlus.GetUniqueAlleles(regionId.GetRegion(),
+                                                  regionId.GetLocus()),
+                    modelVars.GetNumCategories(regionId),
+                    modelVars.GetCategoryRates(regionId),
+                    modelVars.GetCategoryProbabilities(regionId),
+                    modelVars.GetAutoCorrelation(regionId),
+                    modelVars.GetNormalization(regionId),
+                    modelVars.GetRelativeMuRate(regionId)
+                    );
+            return model;
+        }
+        break;
+        case GTR:
+        {
+            GTRModel * model
+                = new GTRModel(
+                    locus.GetNmarkers(),
+                    modelVars.GetNumCategories(regionId),
+                    modelVars.GetCategoryRates(regionId),
+                    modelVars.GetCategoryProbabilities(regionId),
+                    modelVars.GetAutoCorrelation(regionId),
+                    modelVars.GetNormalization(regionId),
+                    modelVars.GetRelativeMuRate(regionId),
+                    modelVars.GetFrequencyA(regionId),
+                    modelVars.GetFrequencyC(regionId),
+                    modelVars.GetFrequencyG(regionId),
+                    modelVars.GetFrequencyT(regionId),
+                    modelVars.GetGTR_AC(regionId),
+                    modelVars.GetGTR_AG(regionId),
+                    modelVars.GetGTR_AT(regionId),
+                    modelVars.GetGTR_CG(regionId),
+                    modelVars.GetGTR_CT(regionId),
+                    modelVars.GetGTR_GT(regionId),
+                    modelVars.GetPerBaseErrorRate(regionId)
+                    );
+            return model;
+        }
+        break;
+        case MixedKS:
+        {
+            MixedKSModel * model
+                = new MixedKSModel(
+                    locus.GetNmarkers(),
+                    dataPackPlus.GetUniqueAlleles(regionId.GetRegion(),
+                                                  regionId.GetLocus()),
+                    modelVars.GetNumCategories(regionId),
+                    modelVars.GetCategoryRates(regionId),
+                    modelVars.GetCategoryProbabilities(regionId),
+                    modelVars.GetAutoCorrelation(regionId),
+                    modelVars.GetNormalization(regionId),
+                    modelVars.GetRelativeMuRate(regionId),
+                    modelVars.GetAlpha(regionId),
+                    modelVars.GetOptimizeAlpha(regionId)
+                    );
+            return model;
+        }
+        break;
+    }
+    throw implementation_error("Registry::CreateDataModel needs to know how to build a "
+                               + ToString(modelType) + " model.");
+}
+
+//------------------------------------------------------------------------------------
+
+void Registry::InstallDataModels(UIInterface& ui)
+{
+    UIVars & vars = ui.GetCurrentVars();
+
+    // install models for each region
+    for(long regionId = 0; regionId < vars.datapackplus.GetNumRegions() ; regionId++)
+    {
+        Region & thisRegion = datapack.GetRegion(regionId);
+
+        for(long locusId = 0; locusId < thisRegion.GetNloci(); locusId++)
+        {
+            Locus & thisLocus = thisRegion.GetLocus(locusId);
+            UIRegId regId(regionId, locusId, vars);
+
+            DataModel * modelForThisLocus
+                = CreateDataModel(vars.datamodel, thisLocus, vars.datapackplus, regId);
+
+            // whatever model we've created, attach it to this locus
+            thisLocus.SetDataModelOnce(DataModel_ptr(modelForThisLocus));
+
+            // let's make sure we've done this right
+            string errString;
+            if(!thisLocus.IsValidLocus(errString))
+            {
+                throw implementation_error("Inconsistent region "+errString);
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Registry::InstallForcesAllOverThePlace(UIInterface& ui)
+{
+
+    UIVars& vars = ui.GetCurrentVars(); //Not const because of FixGroups().
+
+    // get list of forces to create and install into the forcesummary
+    ForceTypeVec1d activeForceTypes = vars.forces.GetPhase2ActiveForces();
+    LongVec1d activeForceSizes = vars.forces.GetForceSizes();
+    ForceTypeVec1d::iterator iter;
+    LongVec1d::iterator forceSize_iter;
+    // MFIX -- need to get the multipliers from something....
+    const DoubleVec1d multipliers;
+
+    if (vars.forces.GetForceOnOff(force_REGION_GAMMA))
+    {
+        vector<Parameter> emptyParamVector;
+        RegionGammaForce *pRegionGammaForce =
+            new RegionGammaForce(emptyParamVector,
+                                 vars.forces.GetIdentGroups(force_REGION_GAMMA),
+                                 vars.forces.GetMultGroups(force_REGION_GAMMA),
+                                 multipliers,
+                                 vars.forces.GetDefaultPrior(force_REGION_GAMMA));
+        RegionGammaInfo *pRegionGammaInfo =
+            new RegionGammaInfo(vars.forces.GetStartValue(force_REGION_GAMMA, 0),
+                                vars.forces.GetParamstatus(force_REGION_GAMMA, 0).Status(),
+                                vars.forces.GetDoProfile(force_REGION_GAMMA, 0),
+                                vars.forces.GetProfileType(force_REGION_GAMMA),
+                                vars.forces.GetProfileTypeSummaryDescription(
+                                    force_REGION_GAMMA),
+                                pRegionGammaForce);
+        Register(pRegionGammaInfo);
+
+        // Prevent this force from being added to forceparams.
+        for (iter = activeForceTypes.begin(), forceSize_iter = activeForceSizes.begin();
+             iter != activeForceTypes.end() && *iter != force_REGION_GAMMA;
+             iter++, forceSize_iter++)
+        {
+            // purposely empty loop body
+        }
+
+        if (activeForceTypes.end() == iter || activeForceSizes.end() == forceSize_iter)
+        {
+            string msg = "Registry::InstallForcesAllOverThePlace(), ";
+            msg += "vars.forces.GetForceOnOff(";
+            msg += "force_REGION_GAMMA) claimed this force was ";
+            msg += "active, but vars.forces.";
+            msg += "GetActiveForces() did not return the label ";
+            msg += "for this force.";
+            throw implementation_error(msg);
+        }
+        activeForceTypes.erase(iter);
+        activeForceSizes.erase(forceSize_iter);
+    }
+
+    ForceParameters forceparams(global_region, activeForceTypes, activeForceSizes);
+
+    unsigned long paramvecIndex = 0;
+    ForceVec allforces;
+    bool logisticSelectionIsOn(vars.forces.GetForceOnOff(force_LOGISTICSELECTION));
+    CoalesceLogisticSelectionPL *pCoalLogisticSelectionPL(NULL);
+    DiseaseLogisticSelectionPL *pDiseaseLogisticSelectionPL(NULL);
+
+    for(iter = activeForceTypes.begin(); iter != activeForceTypes.end(); iter++)
+    {
+        const force_type thisForceType = *iter;
+        vars.forces.FixGroups(thisForceType);
+        if (!vars.forces.GetForceZeroesValidity(thisForceType))
+        {
+            string err = "Invalid settings for force ";
+            err += ToString(thisForceType) + ".  Too many parameters are set ";
+            err += "invalid or have a start value of 0.0.";
+            throw data_error(err);
+        }
+        // build parameters for this force
+        long nParamsForThisForce = vars.forces.GetNumParameters(thisForceType);
+        vector<Parameter> parameters;
+        for(long paramIndex = 0; paramIndex < nParamsForThisForce;
+            paramIndex++, paramvecIndex++)
+        {
+            bool thisParamValid = vars.forces.GetParamValid(thisForceType,paramIndex);
+            ParamStatus mystatus=vars.forces.GetParamstatus(thisForceType,paramIndex);
+#if 0 // JDEBUG -- assumes stochastic selection is in play, used for
+      // early testing, remove once UI is handled correctly
+            if (thisForceType == force_COAL)
+            {
+                if (paramIndex == 0) pstat=pstat_multiplicative_head;
+                else pstat = pstat_multiplicative;
+            }
+#endif
+            proftype ptype=vars.forces.GetProfileType(thisForceType,paramIndex);
+            if(thisParamValid)
+            {
+                force_type phase2type(vars.forces.GetPhase2Type(thisForceType));
+                parameters.push_back
+                    (Parameter
+                     (mystatus,
+                      paramvecIndex,
+                      vars.datapackplus.GetParamName(phase2type,paramIndex,false),
+                      vars.datapackplus.GetParamName(phase2type,paramIndex,true),
+                      phase2type,
+                      vars.forces.GetStartMethod(thisForceType,paramIndex),
+                      ptype,
+                      vars.forces.GetPrior(thisForceType,paramIndex),
+                      vars.forces.GetTrueValue(thisForceType,paramIndex)
+                         ));
+            }
+            else
+            {
+                parameters.push_back(Parameter(ParamStatus(pstat_invalid), paramvecIndex));
+                assert (!mystatus.Valid()); //Warn the user about their input? -LS
+            }
+        }
+        Force * newForce = NULL;
+        long maxEvents = vars.forces.GetMaxEvents(thisForceType);
+
+        // JDEBUG--this is wrong! need to get UI multiplicative interface....
+        const vector<ParamGroup> multgroups = vars.forces.
+            GetMultGroups(thisForceType);
+        // JDEBUG--this is wrong! need to get UI multiplicative interface....
+        const DoubleVec1d paramgroupmultipliers;
+
+#if 0 // JDEBUG -- assumes stochastic selection is in play, used for
+      // early testing, remove once UI is handled correctly
+        vector<ParamGroup> multgroups = vars.forces.GetMultGroups(thisForceType);
+        DoubleVec1d paramgroupmultipliers;
+        if (thisForceType == force_COAL)
+        {
+            multgroups.clear();
+            LongVec1d multindices;
+            multindices.push_back(0);
+            multindices.push_back(1);
+            ParamGroup mults = make_pair(pstat_multiplicative,multindices);
+            multgroups.push_back(mults);
+            paramgroupmultipliers.push_back(99.0);
+        }
+#endif
+
+        const vector<ParamGroup> identgroups =
+            vars.forces.GetIdentGroups(thisForceType);
+
+        for (unsigned long gnum=0; gnum<identgroups.size(); gnum++)
+        {
+            if (identgroups[gnum].first.Status() == (pstat_identical) ||
+                identgroups[gnum].first.Status() == pstat_multiplicative)
+            {
+                //We need to re-name one parameter for every such group.
+                long pindex = identgroups[gnum].second[0];
+                assert(parameters[pindex].GetStatus().Status() == pstat_identical_head ||
+                       parameters[pindex].GetStatus().Status() == pstat_multiplicative_head);
+                // MFIX probably need to handle multiplicatives here
+                parameters[pindex].SetShortName(vars.GetParamNameWithConstraint(thisForceType, pindex, false));
+                parameters[pindex].SetName(vars.GetParamNameWithConstraint(thisForceType, pindex, true));
+            }
+        }
+
+        switch(thisForceType)
+        {
+            case force_COAL:
+                newForce = new CoalForce(
+                    parameters,
+                    maxEvents,
+                    vars.forces.GetForceOnOff(force_GROW),
+                    logisticSelectionIsOn,
+                    identgroups,
+                    multgroups,
+                    paramgroupmultipliers,
+                    vars.forces.GetDefaultPrior(thisForceType));
+                if (logisticSelectionIsOn)
+                {
+                    pCoalLogisticSelectionPL =
+                        dynamic_cast<CoalesceLogisticSelectionPL*>
+                        (newForce->GetPLForceFunction());
+                    if (!pCoalLogisticSelectionPL)
+                    {
+                        string msg = "Registry::InstallForcesAllOverThePlace(), ";
+                        msg += "detected that ";
+                        msg += "the logistic selection force is on, but failed to ";
+                        msg += "obtain a ";
+                        msg += "CoalesceLogisticSelectionPL object from a ";
+                        msg += "CoalForce object.";
+                        throw implementation_error(msg);
+                    }
+                }
+                break;
+
+            case force_MIG:
+                //cerr << "create force_MIG" << endl;
+                newForce = new MigForce
+                    (
+                        parameters,
+                        maxEvents,
+                        vars.datapackplus.GetNPartitionsByForceType(thisForceType),
+                        identgroups,
+                        multgroups,
+                        paramgroupmultipliers,
+                        vars.forces.GetDefaultPrior(thisForceType));
+                break;
+
+            case force_DIVMIG:
+                //cerr << "create force_DIVMIG" << endl;
+                newForce = new DivMigForce
+                    (
+                        parameters,
+                        maxEvents,
+                        vars.datapackplus.GetNPartitionsByForceType(thisForceType),
+                        identgroups,
+                        multgroups,
+                        paramgroupmultipliers,
+                        vars.forces.GetDefaultPrior(thisForceType));
+                break;
+
+            case force_DISEASE:
+                newForce = new DiseaseForce(
+                    parameters,
+                    maxEvents,
+                    logisticSelectionIsOn,
+                    vars.datapackplus.GetNPartitionsByForceType(thisForceType),
+                    vars.forces.GetDiseaseLocation(),
+                    identgroups,
+                    multgroups,
+                    paramgroupmultipliers,
+                    vars.forces.GetDefaultPrior(thisForceType));
+                if (logisticSelectionIsOn)
+                {
+                    pDiseaseLogisticSelectionPL =
+                        dynamic_cast<DiseaseLogisticSelectionPL*>
+                        (newForce->GetPLForceFunction());
+                    if (!pDiseaseLogisticSelectionPL)
+                    {
+                        string msg = "Registry::InstallForcesAllOverThePlace(), ";
+                        msg += "detected that ";
+                        msg += "the logistic selection force is on, but ";
+                        msg += "failed to obtain a ";
+                        msg += "DiseaseLogisticSelectionPL object from a ";
+                        msg += "DiseaseForce object.";
+                        throw implementation_error(msg);
+                    }
+                }
+                break;
+
+            case force_REC:
+                newForce = new RecForce(
+                    parameters,
+                    maxEvents,
+                    identgroups,
+                    multgroups,
+                    paramgroupmultipliers,
+                    vars.forces.GetDefaultPrior(thisForceType));
+                break;
+
+            case force_DIVERGENCE:
+            {
+                newForce = new DivForce(
+                    parameters,
+                    maxEvents,
+                    identgroups,
+                    multgroups,
+                    paramgroupmultipliers,
+                    vars.forces.GetDefaultPrior(thisForceType),
+                    vars.forces.GetNewPops(),
+                    vars.forces.GetAncestors(),
+                    registry.GetDataPack()); // const access, in order to call GetPartitionNumber
+                break;
+            }
+
+            case force_EXPGROWSTICK:
+            case force_GROW:
+                if (vars.forces.GetGrowthType() == growth_STICKEXP)
+                {
+                    // stick exponential model
+                    newForce = new StickExpGrowForce(
+                        parameters,
+                        maxEvents,
+                        identgroups,
+                        multgroups,
+                        paramgroupmultipliers,
+                        vars.forces.GetDefaultPrior(thisForceType));
+                }
+                else                    // default growth model
+                {
+                    newForce = new GrowthForce(
+                        parameters,
+                        maxEvents,
+                        identgroups,
+                        multgroups,
+                        paramgroupmultipliers,
+                        vars.forces.GetDefaultPrior(thisForceType));
+                }
+                break;
+
+            case force_LOGISTICSELECTION:
+                newForce = new LogisticSelectionForce(
+                    parameters,
+                    maxEvents,
+                    paramvecIndex - 1, // -1 to counteract "for" loop
+                    identgroups,
+                    multgroups,
+                    paramgroupmultipliers,
+                    vars.forces.GetDefaultPrior(thisForceType));
+
+                // temporary HACK:  Assumes force_COAL processed before force_LOGISTICSELECTION.
+                if (!pCoalLogisticSelectionPL)
+                {
+                    string msg = "Registry::InstallForcesAllOverThePlace(), ";
+                    msg += "while processing ";
+                    msg += "the logistic selection force, expected to ";
+                    msg += "have access to a ";
+                    msg += "CoalesceLogisticSelectionPL object, but ";
+                    msg += "this object was not found.";
+                    throw implementation_error(msg);
+                }
+                pCoalLogisticSelectionPL->
+                    SetSelectionCoefficientLocation(paramvecIndex - 1);
+                if (pDiseaseLogisticSelectionPL)   // set only when a disease force was created
+                    pDiseaseLogisticSelectionPL->
+                        SetSelectionCoefficientLocation(paramvecIndex - 1);
+                // -1 because paramvecIndex gets incremented one extra time
+                // when the "for" loop stops
+                break;
+
+            case force_LOGSELECTSTICK:
+                newForce = new StickSelectForce(parameters,maxEvents,
+                                                identgroups,multgroups,paramgroupmultipliers,
+                                                vars.forces.GetDefaultPrior(thisForceType));
+                // NB: unlike other forces, the StickSelectForce does not create
+                // a PLForces object on creation.  Instead, one will be created
+                // when the PLForces are installed into the PostLike objects (we
+                // need special case code, like ModifyEvent(), to deal with Stick
+                // Selection anyway, so we consolidate it all there).
+                break;
+
+            case force_REGION_GAMMA:
+            {
+                string msg = "Registry::InstallForcesAllOverThePlace() is using ";
+                msg += "force_REGION_GAMMA in two conflicting ways.";
+                throw implementation_error(msg);
+                break;
+            }
+
+            case force_NONE:
+            {
+                string msg = "Registry::InstallForcesAllOverThePlace() is trying to make ";
+                msg += "a NONE force.";
+                throw implementation_error(msg);
+                break;
+            }
+        }
+
+        assert(newForce != NULL);
+        allforces.push_back(newForce);
+        forceparams.SetGlobalParametersByTag(thisForceType, vars.forces.GetStartValues(thisForceType));
+    }
+
+#if 0    // JTEMP create a DivergenceForce and a DivMigForce
+    // build parameters for Divergence force
+    long nParamsForThisForce = 9L;
+    vector<Parameter> parameters;
+    for(long paramIndex = 0; paramIndex < nParamsForThisForce;
+        paramIndex++, paramvecIndex++)
+    {
+        bool thisParamValid = false;
+        paramstatus pstat=pstat_constant;
+        proftype ptype=profile_NONE;
+        parameters.push_back(Parameter(pstat_invalid, paramvecIndex));
+    }
+    Force * newForce = NULL;
+    long maxEvents = 1000L;
+
+    vector<Epoch> epochs;
+    vector<long> here, departing;
+    here.push_back(0);
+    here.push_back(1);
+    Epoch firstepoch(here,departing,0L);
+    epochs.push_back(firstepoch);
+    departing = here;
+    here.clear();
+    here.push_back(2);
+    Epoch secondepoch(here,departing,2L);
+
+    newForce = new DivForce(
+        parameters,
+        maxEvents,
+        identgroups,
+        multgroups,
+        paramgroupmultipliers,
+        vars.forces.GetDefaultPrior(thisForceType),
+        epochs);
+
+#if 0
+    if(thisParamValid)
+    {
+        force_type phase2type(vars.forces.GetPhase2Type(thisForceType));
+        parameters.push_back
+            (Parameter
+             (pstat,
+              paramvecIndex,
+              vars.datapackplus.GetParamName(phase2type,paramIndex,false),
+              vars.datapackplus.GetParamName(phase2type,paramIndex,true),
+              phase2type,
+              vars.forces.GetStartMethod(thisForceType,paramIndex),
+              ptype,
+              vars.forces.GetPrior(thisForceType,paramIndex),
+              vars.forces.GetTrueValue(thisForceType,paramIndex)
+                 ));
+    }
+    else
+    {
+        parameters.push_back(Parameter(pstat_invalid, paramvecIndex));
+        assert (pstat==pstat_invalid); //Warn the user about their input? -LS
+    }
+#endif
+
+#endif // end JTEMP
+
+    // install forces into the forcesummary.
+    forcesummary = new ForceSummary(allforces, forceparams, GetDataPack());
+    assert (forcesummary->IsValidForceSummary());
+
+    // Create and register a ProtoTree using the new forcesummary object.
+    Register(forcesummary->CreateProtoTreeSummary());
+}
+
+//------------------------------------------------------------------------------------
+
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+
+//------------------------------------------------------------------------------------
+
+void Registry::AddDenovoMaxRejectCount(long addMe)
+{
+    m_denovoMaxRejectCount += addMe;
+}
+
+//------------------------------------------------------------------------------------
+
+long Registry::GetDenovoMaxRejectCount()
+{
+    return m_denovoMaxRejectCount;
+}
+
+//------------------------------------------------------------------------------------
+
+void Registry::AddDenovoTreeRejectCount(long addMe)
+{
+    m_denovoTreeRejectCount += addMe;
+}
+
+//------------------------------------------------------------------------------------
+
+long Registry::GetDenovoTreeRejectCount()
+{
+    return m_denovoTreeRejectCount;
+}
+
+//------------------------------------------------------------------------------------
+
+long Registry::GetDenovoCount()
+{
+    return m_denovoCount;
+}
+
+//------------------------------------------------------------------------------------
+
+void Registry::SetDenovoCount(long count)
+{
+    m_denovoCount = count;
+}
+
+//------------------------------------------------------------------------------------
+
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+//____________________________________________________________________________________
diff --git a/src/control/registry.h b/src/control/registry.h
new file mode 100644
index 0000000..51368c0
--- /dev/null
+++ b/src/control/registry.h
@@ -0,0 +1,204 @@
+// $Id: registry.h,v 1.37 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef REGISTRY_H
+#define REGISTRY_H
+
+#include <deque>
+#include <stdlib.h>
+
+#include "local_build.h"
+
+#include "constants.h"
+#include "defaults.h"
+#include "chainparam.h"
+#include "userparam.h"
+#include "datapack.h"
+#include "forcesummary.h"
+#include "random.h"
+#include "errhandling.h"
+#include "regiongammainfo.h"
+
+/***************************************************************
+ The Registry has two functions:
+
+ (1)  It holds (most of) the Singleton objects of the program
+ and provides access to them.
+
+ (2)  It holds prototypes of objects which must be created in
+ a specific way for each run.  For example, it holds a
+ prototypical Tree.  Once the program knows what kind of Tree will
+ be wanted, a prototype is put in the Registry and all further
+ Tree creation is done by Clone() of the prototype.
+
+ The Registry is itself a Singleton.  The single instance is
+ global (it's in lamarc.cpp) and it's extern everywhere (via
+ constants.h).
+
+ Written by Mary Kuhner
+*******************************************************************/
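+
+/*
+ A minimal usage sketch of point (2) above, as it appears elsewhere in this
+ release (for example in SumFileHandler::ReadInChainSum): once the prototype
+ TreeSummary has been registered, per-chain copies are made by cloning it
+ rather than by calling a constructor directly:
+
+     TreeSummary* ts = registry.GetProtoTreeSummary().Clone();
+     ts->ReadInTreeSummary(m_sumin);   // fill in the fresh copy
+
+ Here "registry" is the single global Registry instance mentioned above
+ (declared extern via constants.h).
+*/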
+
+class Analyzer;
+class BayesAnalyzer_1D;
+class DataModel;
+class Locus;
+class Maximizer;
+class RegionPostLike;
+class GammaRegionPostLike;
+class ReplicatePostLike;
+class RunReport;
+class SinglePostLike;
+class Tree;
+class TreeSummary;
+class UIInterface;
+class UIVarsDataModels;
+class UIVarsDataPackPlus;
+class UIRegId;
+class CellManager;
+
+//------------------------------------------------------------------------------------
+
+class Registry
+{
+  public:
+    Registry();
+    ~Registry();
+
+    // Initialization -- to foil static global initialization bug
+    void Init();
+
+    // Getters
+    const Tree& GetProtoTree()                  const;
+    const TreeSummary& GetProtoTreeSummary()    const;
+    const ChainParameters& GetChainParameters() const {return *chainparams;};
+    const UserParameters& GetUserParameters()   const {return *userparams;};
+    const DataPack& GetDataPack()               const {return datapack;};
+    const ForceSummary& GetForceSummary()       const {return *forcesummary;};
+    long GetCurrentReclocOffset()               const;
+
+    ChainParameters& GetChainParameters()       {return *chainparams;};
+    UserParameters& GetUserParameters()         {return *userparams;};
+    DataPack& GetDataPack()                     {return datapack;};
+    ForceSummary& GetForceSummary()             {return *forcesummary;};
+    Random& GetRandom()                         {return *random;};
+    RunReport& GetRunReport();
+    const RunReport& GetRunReport()             const;
+    Maximizer& GetMaximizer();
+    Maximizer * GetMaximizerPtr();
+    BayesAnalyzer_1D& GetBayesAnalyzer_1D();
+    BayesAnalyzer_1D * GetBayesAnalyzer_1DPtr();
+    SinglePostLike& GetSinglePostLike();
+    ReplicatePostLike& GetReplicatePostLike();
+    RegionPostLike& GetRegionPostLike();
+    const RegionGammaInfo* GetRegionGammaInfo() const {return pRegionGammaForceInfo;}; // NULL is allowed
+    RegionGammaInfo* GetRegionGammaInfo() {return pRegionGammaForceInfo;}; // NULL is allowed
+    Analyzer& GetAnalyzer();
+    CellManager& GetCellManager()               {return *cellmanager; };
+    bool GetConvertOutputToEliminateZeroes()    {return m_convert_output_to_eliminate_zeroes;};
+    bool GetARGfound()                          {return m_ARGfound;}
+
+    // Setters
+    void Register(Tree* tree);
+    void Register(TreeSummary* treesum);
+    void Register(RunReport* report);
+    void Register(Maximizer* maxim);
+    void Register(BayesAnalyzer_1D* bayesan);
+    void Register(SinglePostLike* singlel);
+    void Register(ReplicatePostLike* replicate);
+    void Register(RegionPostLike* region);
+    void Register(GammaRegionPostLike* gammaRegion);
+    void Register(RegionGammaInfo* regionGammaForceInfo);
+    void Register(Analyzer * thisanalyzer);
+    void SetCurrentRegionIndex(long regionIndex);
+    void SetConvertOutputToEliminateZeroes(bool conv) {m_convert_output_to_eliminate_zeroes = conv;};
+    void SetARGfound(bool val) {m_ARGfound = val;};
+
+    // build structures from UI data
+    void FinalizeDataPack(UIInterface&);
+    void InstallChainParameters(UIInterface&);
+    void InstallDataModels(UIInterface&);
+    void InstallForcesAllOverThePlace(UIInterface&);
+    void InstallUserParameters(UIInterface&);
+
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+    void AddDenovoTreeRejectCount(long);
+    long GetDenovoTreeRejectCount();
+    void AddDenovoMaxRejectCount(long);
+    long GetDenovoMaxRejectCount();
+    long GetDenovoCount();
+    void SetDenovoCount(long);
+    void SetMigsToPrint(std::deque<bool> mtp) {m_migsToPrint = mtp;};
+    std::deque<bool> GetMigsToPrint() { return m_migsToPrint;};
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+  protected:
+
+    DataModel * CreateDataModel(UIVarsDataModels &, const Locus&,
+                                const UIVarsDataPackPlus&, UIRegId);
+
+  private:
+    Registry(const Registry&);       // not defined
+    Registry& operator=(const Registry&);  // not defined
+
+    // the prototypes
+    Tree* protoTree;
+    TreeSummary* protoTreeSummary;
+
+    // the singletons
+    ChainParameters *chainparams;
+    UserParameters *userparams;
+    DataPack datapack;
+    ForceSummary *forcesummary;
+    Random *random;
+    CellManager *cellmanager;  // we deliberately don't delete this!
+
+    // the runtime reporter
+    RunReport* runreport;
+
+    // the posterior-likelihood drivers
+    Maximizer* maximizer;
+    SinglePostLike* PLsingle;
+    ReplicatePostLike* PLreplicate;
+    RegionPostLike* PLregion;
+    GammaRegionPostLike* PLgammaRegion;
+
+    // the pseudo-hack
+    RegionGammaInfo* pRegionGammaForceInfo;
+
+    BayesAnalyzer_1D* bayesanalyzer;
+
+    // the profile and plot driver
+    Analyzer *analyzer;
+
+    // this is dreadful, but we need it to make GetCurrentReclocOffset work
+    long m_currRegion;
+
+    // another dreadful hack - we need to know in phase2 that an ARG tree was found in the input XML
+    bool m_ARGfound;
+
+    //Whether the user wants zeroes in the output
+    bool m_convert_output_to_eliminate_zeroes;
+
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+    long m_denovoTreeRejectCount;
+    long m_denovoMaxRejectCount;
+    bool m_lastDenovoGood;
+    long m_defaultBranchCount;
+    long m_denovoCount;
+    std::deque<bool> m_migsToPrint;
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+    // error handling
+    void ThrowBadPrototype() const;
+};
+
+#endif // REGISTRY_H
+
+//____________________________________________________________________________________
diff --git a/src/control/sumfilehandler.cpp b/src/control/sumfilehandler.cpp
new file mode 100644
index 0000000..bf01716
--- /dev/null
+++ b/src/control/sumfilehandler.cpp
@@ -0,0 +1,868 @@
+// $Id: sumfilehandler.cpp,v 1.20 2013/11/07 22:46:06 mkkuhner Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <iostream>
+
+#include "sumfilehandler.h"
+#include "forceparam.h"
+#include "chainout.h"
+#include "chainpack.h"
+#include "collmanager.h"
+#include "registry.h"
+#include "region.h"
+#include "runreport.h"
+#include "stringx.h"
+#include "timex.h"
+#include "treesum.h"
+#include "xmlsum_strings.h"  // for xml sumfile handling
+
+using namespace std;
+
+SumFileHandler::SumFileHandler()
+    : m_sumin(), m_sumout(),  m_lastRegion(0), m_lastReplicate(0),
+      m_lastChain(-1),m_lastRegionChainSum(-1), m_lastReplicateChainSum(-1),
+      m_lastReplicateSummary(0), m_regionSummary(false)
+{
+}
+
+/****************************************************************************
+ *
+ * Sumfile Reading functions
+ *
+ ****************************************************************************/
+
+// should be the only reading fxn that reads to EOF; all others should not
+void SumFileHandler::ReadInSumFile(ChainPack& chainpack,
+                                   CollectionManager& collectionmanager,
+                                   long int numchains)
+{
+    string infilename = registry.GetUserParameters().GetTreeSumInFileName();
+    m_sumin.open(infilename.c_str(), ios::in );
+    if (!m_sumin)
+    {
+        string err_string =  "Could not open \"" + infilename + "\".  "
+            + "Please check that this file:\n"
+            + "       1) exists in the directory that lamarc is being run from,\n"
+            + "       2) is read enabled, and\n"
+            + "       3) is not in use by another program.\n";
+        throw file_error( err_string );
+    }
+
+    string tag;
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInSumFile", xmlsum::SUMFILE_START, tag );
+    m_sumin >> tag;
+
+    while ( (!m_sumin.eof()) && (tag != xmlsum::SUMFILE_END))
+    {
+        if( tag == xmlsum::COMMENT_START )
+        {
+            SkipComments();
+        }
+        else if( tag == xmlsum::CHAINPACK_START )
+        {
+            ReadInChainPack(chainpack);
+        }
+        else if( tag == xmlsum::CHAINSUM_START )
+        {
+            ReadInChainSum(chainpack, collectionmanager, numchains);
+        }
+        else if( tag == xmlsum::REPLICATE_SUMMARY_START )
+        {
+            ReadInReplicateSummary(chainpack);
+        }
+        else if( tag == xmlsum::END_REGION_START )
+        {
+            ReadInEndRegion(chainpack); //This no longer does anything.
+        }
+        else if( tag == xmlsum::REGION_SUMMARY_START )
+        {
+            ReadInRegionSummary(chainpack);
+        }
+        else
+        {
+            // The file is formatted incorrectly.
+            ReadInCheckFileFormat("ReadInSumFile", "a top-level sumfile tag", tag );
+        }
+
+        m_sumin >> tag;
+    }
+    m_sumin.close();
+} // SumFileHandler::ReadInSumFile
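+
+// The dispatch loop above implies the following top-level layout for a summary
+// file, sketched here in terms of the xmlsum tag constants rather than their
+// literal spellings (which live in xmlsum_strings.h):
+//
+//   SUMFILE_START
+//     ( COMMENT_START ... COMMENT_END
+//     | CHAINPACK_START ... CHAINPACK_END
+//     | CHAINSUM_START ... CHAINSUM_END
+//     | REPLICATE_SUMMARY_START ... REPLICATE_SUMMARY_END
+//     | END_REGION_START END_REGION_END
+//     | REGION_SUMMARY_START ... REGION_SUMMARY_END )*
+//   SUMFILE_END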
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// handles nested comments by recursion (see below), except for a nested
+// comment that immediately follows the opening tag
+// precondition:  last tag read in from stream in:   xmlsum::COMMENT_START
+// postcondition: last string read in from stream in: xmlsum::COMMENT_END
+void SumFileHandler::SkipComments()
+{
+    string comment;
+    m_sumin >> comment;
+    while ( !m_sumin.eof() && comment != xmlsum::COMMENT_END )
+    {
+        m_sumin >> comment;
+        if (comment == xmlsum::COMMENT_START) //for nested comments.
+        {
+            SkipComments();
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// populate ChainManager's chainpack with a chain summary.
+// precondition:  last tag read in from stream in: xmlsum::CHAINPACK_START
+// postcondition: last tag read in from stream in: xmlsum::CHAINPACK_END
+void SumFileHandler::ReadInChainPack(ChainPack& chainpack)
+{
+    string tag;
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInChainPack", xmlsum::NUMBER_START, tag );
+    m_sumin >> m_lastRegion;
+    m_sumin >> m_lastReplicate;
+    m_sumin >> m_lastChain;
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInChainPack", xmlsum::NUMBER_END, tag );
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInChainPack", xmlsum::CHAINOUT_START, tag );
+
+    ChainOut co;
+    ReadInChainOut(co);
+    chainpack.SetChain(co, m_lastRegion, m_lastReplicate, m_lastChain);
+
+    m_sumin >> tag;
+    while (tag==xmlsum::ALPHA_START1)
+    {
+        m_sumin >> tag;
+        ReadInCheckFileFormat( "ReadInChainPack", xmlsum::ALPHA_START2, tag );
+        long int loc;
+        m_sumin >> loc;
+        m_sumin >> tag;
+        ReadInCheckFileFormat( "ReadInChainPack", xmlsum::ALPHA_START3, tag );
+        double alpha;
+        m_sumin >> alpha;
+        registry.GetDataPack().GetRegion(m_lastRegion).GetLocus(loc).GetDataModel()->
+            SetAlpha(alpha, m_lastReplicate, m_lastChain+1);
+        m_sumin >> tag;
+        ReadInCheckFileFormat( "ReadInChainPack", xmlsum::ALPHA_END, tag );
+        m_sumin >> tag;
+    }
+    ReadInCheckFileFormat( "ReadInChainPack", xmlsum::CHAINPACK_END, tag );
+}
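+
+// For reference, the chainpack record parsed above has this shape, again in
+// terms of the tag constants rather than their literal spellings:
+//
+//   CHAINPACK_START
+//     NUMBER_START region replicate chain NUMBER_END
+//     CHAINOUT_START ... CHAINOUT_END                    (see ReadInChainOut)
+//     [ ALPHA_START1 ALPHA_START2 locus ALPHA_START3 alpha ALPHA_END ]*
+//   CHAINPACK_END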
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+void SumFileHandler::ReadInReplicateSummary(ChainPack& chainpack)
+{
+    m_lastReplicateSummary++;
+    string tag;
+    ForceParameters fp(global_region);
+
+    m_sumin>>tag;
+    ReadInCheckFileFormat("ReadInReplicateSummary", xmlsum::ESTIMATES_START, tag );
+    ReadInForceParameters(fp);
+
+    double maxlike;
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInReplicateSummary", xmlsum::MAXLIKE_START, tag );
+    m_sumin >> maxlike;
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInReplicateSummary", xmlsum::MAXLIKE_END, tag );
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInReplicateSummary", xmlsum::REPLICATE_SUMMARY_END, tag );
+
+    ChainOut repout;
+    repout.SetEstimates(fp);
+    repout.SetLlikemle(maxlike);
+    chainpack.SetSummaryOverReps(repout);
+
+} // ReadInReplicateSummary
+
+void SumFileHandler::ReadInEndRegion(ChainPack& chainpack)
+{
+    //This tag is only present when there is no Replicate Summary information.
+    string tag;
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInEndRegion", xmlsum::END_REGION_END, tag );
+}
+
+void SumFileHandler::ReadInRegionSummary(ChainPack& chainpack)
+{
+    m_regionSummary = true;
+    string tag;
+    ForceParameters fp(global_region);
+
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInRegionSummary", xmlsum::ESTIMATES_START, tag );
+    ReadInForceParameters(fp);
+
+    double maxlike;
+
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInRegionSummary", xmlsum::MAXLIKE_START, tag );
+    m_sumin >> maxlike;
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInRegionSummary", xmlsum::MAXLIKE_END, tag );
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInRegionSummary", xmlsum::REGION_SUMMARY_END, tag );
+
+    ForceSummary& forcesum = registry.GetForceSummary();
+    ChainOut regionout;
+
+    regionout.SetEstimates(fp);
+    regionout.SetLlikemle(maxlike);
+    chainpack.SetSummaryOverRegions(regionout);
+    forcesum.SetOverallMLE(regionout);
+} // ReadInRegionSummary
+
+// populate chainout object c with data from the sumfile
+// precondition:  last string read in from stream in: xmlsum::CHAINOUT_START
+// postcondition: last string read in from stream in: xmlsum::CHAINOUT_END
+void SumFileHandler::ReadInChainOut(ChainOut &c)
+{
+    string tag, srate;
+    long int p1, p2;
+    long int badtrees, stretchedtrees, tinytrees, zerodltrees;
+    double accrate, llikemle, llikedata;
+    time_t starttime, endtime;
+    ForceParameters fp(global_region);
+
+    do{
+        m_sumin >> tag;
+        if ( tag == xmlsum::BADTREES_START )
+        {
+            m_sumin >> badtrees;
+            c.SetNumBadTrees( badtrees );
+            m_sumin >> tag;
+            ReadInCheckFileFormat( "ReadInChainOut", xmlsum::BADTREES_END, tag );
+        }
+        if ( tag == xmlsum::STRETCHEDTREES_START )
+        {
+            m_sumin >> stretchedtrees;
+            c.SetNumStretchedTrees( stretchedtrees );
+            m_sumin >> tag;
+            ReadInCheckFileFormat( "ReadInChainOut", xmlsum::STRETCHEDTREES_END, tag );
+        }
+        if ( tag == xmlsum::TINYTREES_START )
+        {
+            m_sumin >> tinytrees;
+            c.SetNumTinyPopTrees( tinytrees );
+            m_sumin >> tag;
+            ReadInCheckFileFormat( "ReadInChainOut", xmlsum::TINYTREES_END, tag );
+        }
+        if ( tag == xmlsum::ZERODLTREES_START )
+        {
+            m_sumin >> zerodltrees;
+            c.SetNumZeroDLTrees( zerodltrees );
+            m_sumin >> tag;
+            ReadInCheckFileFormat( "ReadInChainOut", xmlsum::ZERODLTREES_END, tag );
+        }
+        if ( tag == xmlsum::ACCRATE_START )
+        {
+            m_sumin >> accrate;
+            c.SetAccrate( accrate );
+            m_sumin >> tag;
+            ReadInCheckFileFormat( "ReadInChainOut", xmlsum::ACCRATE_END, tag );
+        }
+        if ( tag == xmlsum::LLIKEMLE_START )
+        {
+            m_sumin >> llikemle;
+            c.SetLlikemle( llikemle );
+            m_sumin >> tag;
+            ReadInCheckFileFormat( "ReadInChainOut", xmlsum::LLIKEMLE_END, tag );
+        }
+        if ( tag == xmlsum::LLIKEDATA_START )
+        {
+            m_sumin >> llikedata;
+            c.SetLlikedata( llikedata );
+            m_sumin >> tag;
+            ReadInCheckFileFormat( "ReadInChainOut", xmlsum::LLIKEDATA_END, tag );
+        }
+        if ( tag == xmlsum::STARTTIME_START )
+        {
+            m_sumin >> starttime;
+            c.SetStarttime( starttime );
+            m_sumin >> tag;
+            ReadInCheckFileFormat( "ReadInChainOut", xmlsum::STARTTIME_END, tag );
+        }
+        if ( tag == xmlsum::ENDTIME_START )
+        {
+            m_sumin >> endtime;
+            c.SetEndtime( endtime );
+            m_sumin >> tag;
+            ReadInCheckFileFormat( "ReadInChainOut", xmlsum::ENDTIME_END, tag );
+        }
+        if ( tag == xmlsum::RATES_START )
+        {
+            m_sumin >> tag;
+            ratemap r;
+            while ( tag == xmlsum::MAP_START )
+            {
+                m_sumin >> srate;
+                m_sumin >> p1;
+                m_sumin >> p2;
+                m_sumin >> tag;
+                pair<long int, long int> rpair(p1, p2);
+                r.insert( make_pair(srate, rpair) );
+                c.SetRates( r );
+                ReadInCheckFileFormat( "ReadInChainOut", xmlsum::MAP_END, tag );
+                m_sumin >> tag;
+            }
+            ReadInCheckFileFormat( "ReadInChainOut", xmlsum::RATES_END, tag );
+        }
+        if ( tag == xmlsum::ESTIMATES_START )
+        {
+            ReadInForceParameters(fp);
+            c.SetEstimates(fp);
+        }
+        if ( tag == xmlsum::TEMPERATURES_START)
+        {
+            DoubleVec1d temperatures;
+            if ( ReadInVec1D( temperatures, xmlsum::TEMPERATURES_END ) )
+            {
+                c.SetTemperatures( temperatures );
+                c.SetNumtemps(temperatures.size());
+            }
+        }
+        if ( tag == xmlsum::SWAPRATES_START)
+        {
+            DoubleVec1d swaprates;
+            if ( ReadInVec1D( swaprates, xmlsum::SWAPRATES_END ) )
+                c.SetSwaprates( swaprates );
+        }
+        if ( tag == xmlsum::BAYESUNIQUE_START)
+        {
+            LongVec1d bayesunique;
+            if ( ReadInVec1D( bayesunique, xmlsum::BAYESUNIQUE_END ) )
+                c.SetBayesUnique( bayesunique );
+        }
+
+    } while (tag != xmlsum::CHAINOUT_END);
+} // ReadInChainOut
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// populate forceparameter object fp with data from the sumfile
+// precondition:  last string read in: xmlsum::ESTIMATES_START
+// postcondition: last string read in: xmlsum::ESTIMATES_END
+void SumFileHandler::ReadInForceParameters( ForceParameters& fp )
+{
+    vector<double> vd;
+    string tag;
+
+    m_sumin >> tag;
+    while ( tag != xmlsum::ESTIMATES_END )
+    {
+        if ( tag == xmlsum::THETAS_START )
+        {
+            if ( ReadInVec1D( vd, xmlsum::THETAS_END ) )
+                fp.SetGlobalThetas( vd );
+        }
+        if ( tag == xmlsum::MIGRATES_START )
+        {
+            if ( ReadInVec1D( vd, xmlsum::MIGRATES_END ) )
+                fp.SetMigRates( vd );
+        }
+        if ( tag == xmlsum::DIVMIGRATES_START )
+        {
+            if ( ReadInVec1D( vd, xmlsum::DIVMIGRATES_END ) )
+                fp.SetMigRates( vd );
+        }
+        if ( tag == xmlsum::RECRATES_START )
+        {
+            if ( ReadInVec1D( vd, xmlsum::RECRATES_END ) )
+                fp.SetRecRates( vd );
+        }
+        if ( tag == xmlsum::GROWTHRATES_START )
+        {
+            if ( ReadInVec1D( vd, xmlsum::GROWTHRATES_END ) )
+                fp.SetGrowthRates( vd );
+        }
+        if ( tag == xmlsum::LOGISTICSELECTION_START )
+        {
+            if ( ReadInVec1D( vd, xmlsum::LOGISTICSELECTION_END ) )
+                fp.SetLogisticSelectionCoefficient( vd );
+        }
+        if ( tag == xmlsum::EPOCHTIMES_START )
+        {
+            if ( ReadInVec1D( vd, xmlsum::EPOCHTIMES_END ) )
+                fp.SetEpochTimes( vd );
+        }
+        if ( tag == xmlsum::DISEASERATES_START )
+        {
+            if ( ReadInVec1D( vd, xmlsum::DISEASERATES_END ) )
+                fp.SetDiseaseRates( vd );
+        }
+        if ( tag == xmlsum::GAMMAOVERREGIONS_START )
+        {
+            if ( ReadInVec1D( vd, xmlsum::GAMMAOVERREGIONS_END ) )
+            {
+                if ( !registry.GetRegionGammaInfo() )
+                {
+                    string msg = "Warning!  You are reading summary-file data ";
+                    msg += "from a run in which you allowed the mutation rate ";
+                    msg += "to vary over genomic regions according to a gamma ";
+                    msg += "distribution, and in which the scaled shape parameter ";
+                    msg += "of this distribution was estimated to be ";
+                    msg += ToString(vd[0]);
+                    msg += ".  In the present analysis, you have the gamma ";
+                    msg += "distribution turned OFF, which will produce different ";
+                    msg += "results.  You may wish to re-start LAMARC, and ";
+                    msg += "turn the gamma distribution on, using the evolutionary ";
+                    msg += "forces menu.";
+                    registry.GetRunReport().ReportUrgent(msg);
+                }
+            }
+        }
+        m_sumin >> tag;
+    }
+    registry.GetForceSummary().ValidateForceParamOrBarf(fp);
+    ReadInCheckFileFormat( "SumFileHandler::ReadInForceParameters", xmlsum::ESTIMATES_END, tag);
+} // ReadInForceParameters
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// populate vector vd with data from sumfile
+// clears vd before adding doubles to it
+// if vd not empty, return true, else return false
+bool SumFileHandler::ReadInVec1D( vector<double>& vd, string endtag )
+{
+    string sdouble;
+    m_sumin >> sdouble;
+    vd.clear();
+    while ( sdouble != endtag )
+    {
+        vd.push_back( atof(sdouble.c_str()) );
+        m_sumin >> sdouble;
+    }
+
+    return !vd.empty();
+} // ReadInVec1D
+
+bool SumFileHandler::ReadInVec1D( vector<long int> & vd, string endtag )
+{
+    string slong;
+    m_sumin >> slong;
+    vd.clear();
+    while ( slong != endtag )
+    {
+        vd.push_back( atol(slong.c_str()) );
+        m_sumin >> slong;
+    }
+
+    return !vd.empty();
+} // ReadInVec1D
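+
+// A self-contained sketch of the sentinel-terminated token parsing used by the
+// two ReadInVec1D overloads above.  The "<values>"/"</values>" tags below are
+// placeholders chosen purely for illustration; the real tags come from
+// xmlsum_strings.h.
+//
+//   #include <cstdlib>
+//   #include <iostream>
+//   #include <sstream>
+//   #include <string>
+//   #include <vector>
+//
+//   int main()
+//   {
+//       std::istringstream in("<values> 1.5 2.5 3.5 </values>");
+//       std::string tok;
+//       std::vector<double> vals;
+//       in >> tok;                       // consume the start tag
+//       in >> tok;                       // first value, or the end tag
+//       while (tok != "</values>")
+//       {
+//           vals.push_back(std::atof(tok.c_str()));
+//           in >> tok;
+//       }
+//       std::cout << vals.size() << " values read\n";   // prints "3 values read"
+//       return 0;
+//   }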
+
+//------------------------------------------------------------------------------------
+
+void SumFileHandler::ReadInChainSum(ChainPack& chainpack, CollectionManager& collectionmanager, long int numchains)
+{
+    long int region, replicate;
+    string tag;
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInChainSum", xmlsum::REG_REP_START, tag );
+    m_sumin >> region;
+    m_sumin >> replicate;
+    m_sumin >> tag;
+    ReadInCheckFileFormat("ReadInChainSum", xmlsum::REG_REP_END, tag );
+    m_sumin >> tag;
+
+    m_lastRegionChainSum = region;
+    m_lastReplicateChainSum = replicate;
+
+    collectionmanager.StartChain(region, replicate, true);
+    while (((tag == xmlsum::TREESUM_START) ||
+            (tag == xmlsum::PARAM_SUMMARY_START))
+           && (!m_sumin.eof()))
+    {
+        if (tag==xmlsum::TREESUM_START)
+        {
+            TreeSummary* ts = registry.GetProtoTreeSummary().Clone();
+            ts->ReadInTreeSummary(m_sumin);
+            collectionmanager.GetTreeColl(region, replicate)->AddTreeSummary(ts);
+        }
+        else if (tag==xmlsum::PARAM_SUMMARY_START)
+        {
+            m_sumin >> tag;
+            ReadInCheckFileFormat("ReadInChainSum", xmlsum::NCOPY_START, tag );
+            long int ncopy;
+            m_sumin >> ncopy;
+            m_sumin >> tag;
+            ReadInCheckFileFormat("ReadInChainSum", xmlsum::NCOPY_END, tag );
+            m_sumin >> tag;
+            ReadInCheckFileFormat("ReadInChainSum", xmlsum::ESTIMATES_START, tag );
+            ForceParameters fp(region);
+            ReadInForceParameters(fp);
+            collectionmanager.GetParamColl(region, replicate)->AddParamSummary(fp, ncopy);
+            m_sumin >> tag;
+            ReadInCheckFileFormat("ReadInChainSum", xmlsum::PARAM_SUMMARY_END, tag );
+
+        }
+        else
+        {
+            string eitheror = "either " + xmlsum::TREESUM_START + " or "
+                + xmlsum::PARAM_SUMMARY_START;
+            ReadInCheckFileFormat("ReadInChainSum", eitheror, tag);
+        }
+        m_sumin >> tag;
+    }
+
+    if (collectionmanager.GetSampleTrees())
+    {
+        ForceParameters fp(unknown_region);
+        if (numchains >=2 )
+        {
+            fp = chainpack.GetChain(region, replicate, numchains-2).GetEstimates();
+            //-2 is -1 for size->index, and -1 for next-to-last.
+        }
+        else if (numchains == 1)
+        {
+            ForceParameters fpstart(registry.GetForceSummary().GetStartParameters(), region);
+            fp = fpstart;
+        }
+        else
+        {
+            //numchains is negative or zero?
+            throw implementation_error
+                ("The number of chains seems to be zero or negative.  This should be impossible.");
+        }
+        collectionmanager.GetTreeColl(region, replicate)->SetStartParameters(fp);
+
+        //The old format had estimates here, but now they're optional.
+        if (tag == xmlsum::ESTIMATES_START)
+        {
+            ForceParameters fp(region);
+            ReadInForceParameters(fp);
+            collectionmanager.GetTreeColl(region, replicate)->SetStartParameters(fp);
+            m_sumin >> tag;
+        }
+    }
+
+    ReadInCheckFileFormat("ReadInChainSum", xmlsum::CHAINSUM_END, tag );
+    //We don't need AdjustSummaries since it was called if needed originally.
+
+} // ReadInChainSum
+
+// ReadInCheckFileFormat:  checks the file format one tag at a time
+// args:
+//   callingfxn  - should include class calling fxn belongs to
+//   expectedtag - expected xml tag in sumfile
+//   readtag     - xml tag just read in by the calling fxn
+void SumFileHandler::ReadInCheckFileFormat(string callingfxn, string expectedtag, string readtag)
+{
+    if (readtag != expectedtag)
+    {
+        string sumfile =  registry.GetUserParameters().GetTreeSumInFileName();
+        string err_string = callingfxn + " - " + sumfile + " has a syntax error.";
+        err_string += " Expected " + expectedtag + " but read " + readtag + "\n";
+        throw incorrect_xml(err_string);
+    }
+} // ReadInCheckFileFormat
+
+/****************************************************************************
+ *
+ * Sumfile Writing functions
+ *
+ ****************************************************************************/
+
+void SumFileHandler::WriteSumFileStart()
+{
+    // open file
+    m_sumout.open(registry.GetUserParameters().GetTreeSumOutFileName().c_str(), ios::out );
+    if (!m_sumout)
+    {
+        HandleSumOutFileFormatError("SumFileHandler::WriteSumFileStart");
+    }
+
+    // write preamble
+    m_sumout << xmlsum::SUMFILE_START << endl;
+    m_sumout << xmlsum::COMMENT_START << " Lamarc v. "
+             << VERSION << endl
+             << "     Please do not modify. " << xmlsum::COMMENT_END  << endl;
+
+    // set the precision of the data (should be greater than that of outfile)
+    m_sumout.precision(SUMFILE_PRECISION);
+} // WriteSumFileStart
+
+void SumFileHandler::WriteSumFileEnd(const ChainPack& chainpack)
+{
+    if ( m_sumout.is_open() )
+    {
+        // finish up
+        m_sumout << xmlsum::COMMENT_START << " End summary file" << endl
+                 << "\t Generated from a run that started at: "
+                 << PrintTime(chainpack.GetStartTime(), "%c") << endl
+                 << "\t and ended at: " << PrintTime(chainpack.GetEndTime(), "%c")
+                 << " " << xmlsum::COMMENT_END << endl;
+        m_sumout << xmlsum::SUMFILE_END << endl;
+        m_sumout.close();
+    }
+    else
+        HandleSumOutFileFormatError("WriteSumFileEnd");
+} // WriteSumFileEnd
+
+void SumFileHandler::WriteLastChain(const ChainPack& chainpack)
+{
+    chainpack.WriteLastChain(m_sumout);
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// currently called after tree generation/processing is done
+// currently outputs the data needed so a later run can skip tree generation and go straight to calculating MLEs
+
+void SumFileHandler::WriteChainSumStart( long int whichregion, long int whichreplicate,
+                                         const CollectionManager& collectionmanager )
+{
+    if ( m_sumout.is_open() )
+    {
+        m_sumout << xmlsum::CHAINSUM_START << endl
+                 << "\t" << xmlsum::REG_REP_START << " " << whichregion << " "
+                 << whichreplicate << " " << xmlsum::REG_REP_END << endl;
+
+        collectionmanager.WriteThisChainsCollections(&m_sumout);
+    }
+    else
+        HandleSumOutFileFormatError("WriteChainSumStart");
+} // WriteChainSumStart
+
+void SumFileHandler::WriteChainSumEnd(const CollectionManager& collectionmanager )
+{
+    if (m_sumout.is_open())
+    {
+        collectionmanager.WriteLastSummaries();
+        m_sumout << xmlsum::CHAINSUM_END << endl;
+    }
+    else
+        HandleSumOutFileFormatError("WriteChainSumEnd");
+}
+
+//Write summary info if more than one region is summarized.
+void SumFileHandler::WriteRegionSummary(ForceParameters& fp, double maxlike)
+{
+    if ( m_sumout.is_open() )
+    {
+        m_sumout << xmlsum::REGION_SUMMARY_START << endl;
+        fp.WriteForceParameters(m_sumout, 1);
+        m_sumout << "\t" << xmlsum::MAXLIKE_START
+                 << " " << maxlike << " "
+                 << xmlsum::MAXLIKE_END << endl;
+        m_sumout << xmlsum::REGION_SUMMARY_END << endl;
+    }
+    else
+        HandleSumOutFileFormatError("WriteRegionSummary");
+} // WriteRegionSummary
+
+//Write summary info if more than one replicate is summarized.
+void SumFileHandler::WriteReplicateSummary( ForceParameters& fp, double maxlike, const ChainPack& chainpack)
+{
+    if ( m_sumout.is_open() )
+    {
+        m_sumout << xmlsum::REPLICATE_SUMMARY_START << endl;
+        fp.WriteForceParameters(m_sumout, 1);
+        m_sumout << "\t" << xmlsum::MAXLIKE_START
+                 << " " << maxlike << " "
+                 << xmlsum::MAXLIKE_END << endl;
+        m_sumout << xmlsum::REPLICATE_SUMMARY_END << endl;
+    }
+    else
+        HandleSumOutFileFormatError("WriteReplicateSummary");
+} // WriteReplicateSummary
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// purpose: called if the sumfile is closed when a summarizing fxn expected it
+//    to be open.
+// causes:  the user could have done something to the sumfile, or lamarc was
+//   unable to open it in WriteSumFileStart.  Now also called by
+//   non-Summarize fxns, e.g. TreeSummary::ReadInTreeSummary.
+void SumFileHandler::HandleSumOutFileFormatError(string callingfxn)
+{
+    if (registry.GetUserParameters().GetWriteSumFile())
+    {
+        string sumfile    =  registry.GetUserParameters().GetTreeSumOutFileName();
+        string err_string =  "In function " + callingfxn + ", the file " + sumfile
+            + " unexpectedly closed or could not be opened and is no longer valid."
+            + "  Possible causes of this problem include, but are not limited to: "
+            + "1) read permissions for this file are not or are no longer enabled; "
+            + "2) this file was unexpectedly moved, renamed, or deleted. "
+            + "Continuing, but without writing to the summary file any more.\n";
+        registry.GetRunReport().ReportUrgent(err_string);
+        registry.GetUserParameters().SetWriteSumFile(false);
+    }
+} // HandleSumOutFileFormatError
+
+// WriteWhatWasRead is called when the user is both reading and writing to a
+//  summary file.  This function writes everything at once, instead of doing
+//  it piecemeal within DoChain/DoReplicate/etc. as would normally happen.
+//
+//  This function relies on ReadInRecover working properly and setting
+//  the recover* member variables if the read summary file was not complete.
+void SumFileHandler::WriteWhatWasRead(bool recoversumfile,
+                                      long int recover_region,
+                                      long int recover_replicate,
+                                      long int recover_chaintype,
+                                      long int recover_chain,
+                                      bool recover_redochain,
+                                      bool recover_redomaximization,
+                                      long int nregions,
+                                      long int nreplicates,
+                                      const ChainPack& chainpack,
+                                      const CollectionManager& collectionmanager)
+{
+    if (m_sumout.is_open())
+    {
+        m_sumout << xmlsum::COMMENT_START
+                 << "  This summary file should match the input summary file "
+                 << registry.GetUserParameters().GetTreeSumInFileName()
+                 << ",\n";
+        if (recover_redochain)
+        {
+            m_sumout << "      with the exception of the final chainpack, which was re-created. ";
+        }
+        else
+            m_sumout << "      up until that file's end. ";
+        m_sumout << xmlsum::COMMENT_END  << endl;
+
+        long int total_chains = 0 ;
+        for (int i=0; i<NCHAINTYPES; i++ )
+        {
+            total_chains += registry.GetChainParameters().GetNChains(i);
+        }
+
+        long int last_region = recover_region + 1;
+        for (long int rw_region = 0; rw_region < last_region; rw_region++ )
+        {
+            long int last_replicate = nreplicates;
+            if (rw_region == last_region-1)
+            {
+                //This is the last region, so the number of replicates might be
+                // different than the full number.
+                last_replicate = recover_replicate+1;
+            }
+            for (long int rw_replicate = 0; rw_replicate < last_replicate; rw_replicate++ )
+            {
+                long int last_chain = total_chains;
+                if ((rw_replicate == last_replicate-1) &&
+                    (rw_region == last_region-1))
+                {
+                    //This is the last replicate, so the number of chains might be
+                    // different than the full number.
+                    last_chain = 0;
+                    for (int i = 0; i < recover_chaintype; i++ )
+                    {
+                        last_chain += registry.GetChainParameters().GetNChains(i);
+                    }
+                    last_chain += recover_chain;
+                    if (recover_redomaximization)
+                    {
+                        //We have chain summaries we need to write
+                        last_chain++;
+                    }
+                }
+                for (long int rw_chain = 0; rw_chain < last_chain; rw_chain++)
+                {
+                    bool last_loop = false;
+                    if ((rw_region == last_region-1) &&
+                        (rw_replicate == last_replicate-1) &&
+                        (rw_chain == last_chain-1))
+                    {
+                        last_loop = true;
+                    }
+                    if ((rw_chain == total_chains-1) &&
+                        (!last_loop || !recover_redochain))
+                    {
+                        //Write out the chain summary.
+                        WriteChainSumStart(rw_region, rw_replicate, collectionmanager);
+                        collectionmanager.WriteAllSummaries(rw_region, rw_replicate, m_sumout);
+                        m_sumout << xmlsum::CHAINSUM_END << endl;
+                    }
+                    if (!last_loop || !recover_redomaximization)
+                    {
+                        //Write out the chainpack.
+                        chainpack.WriteChain(m_sumout, rw_region, rw_replicate, rw_chain);
+                    }
+                }
+            }
+            if (chainpack.GetLenRegionsVec() - 1 >= rw_region)
+            {
+                //Write the summary over replicates
+                ChainOut rw_chainout = chainpack.GetRegion(rw_region);
+                double rw_maxlike = rw_chainout.GetLlikemle();
+                ForceParameters rw_fp = rw_chainout.GetEstimates();
+                WriteReplicateSummary(rw_fp, rw_maxlike, chainpack);
+            }
+        }
+        if (chainpack.GetLenOverallVec() != 0)
+        {
+            //Write the summary over regions
+            ChainOut rw_chainout = chainpack.GetOverall();
+            double rw_maxlike = rw_chainout.GetLlikemle();
+            ForceParameters rw_fp = rw_chainout.GetEstimates();
+            WriteRegionSummary(rw_fp, rw_maxlike);
+        }
+
+        if (recoversumfile)
+        {
+            m_sumout << xmlsum::COMMENT_START
+                     << "  New information past this point. "
+                     << xmlsum::COMMENT_END  << endl;
+        }
+        else
+            WriteSumFileEnd(chainpack);
+    }
+    else
+        HandleSumOutFileFormatError("SumFileHandler::WriteWhatWasRead");
+} // WriteWhatWasRead
+
+void SumFileHandler::WriteVec1D( ofstream& sumout, vector<double> &vd)
+{
+    vector<double>::iterator itstart = vd.begin();
+    vector<double>::iterator itend   = vd.end();
+    if ( sumout.is_open() )
+    {
+        for( ; itstart != itend; ++itstart )
+            sumout << *itstart << " ";
+    }
+    else
+        SumFileHandler::HandleSumOutFileFormatError("WriteVec1D");
+} // WriteVec1D
+
+void SumFileHandler::WriteVec1D( ofstream& sumout, vector<long int> & vd)
+{
+    vector<long int>::iterator itstart = vd.begin();
+    vector<long int>::iterator itend   = vd.end();
+    if ( sumout.is_open() )
+    {
+        for( ; itstart != itend; ++itstart )
+            sumout << *itstart << " ";
+    }
+    else
+        SumFileHandler::HandleSumOutFileFormatError("WriteVec1D");
+} // WriteVec1D
+
+void SumFileHandler::CloseSumOut()
+{
+    m_sumout.close();
+}
+
+//____________________________________________________________________________________
diff --git a/src/control/sumfilehandler.h b/src/control/sumfilehandler.h
new file mode 100644
index 0000000..8b405a8
--- /dev/null
+++ b/src/control/sumfilehandler.h
@@ -0,0 +1,107 @@
+// $Id: sumfilehandler.h,v 1.7 2011/03/07 06:08:47 bobgian Exp $
+
+/*
+  Copyright 2005 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// The SumFileHandler handles reading and writing of summary files.
+// Earlier versions of these functions lived directly in chainmanager; they
+// were pulled out into this class because they were taking up too much
+// space there.
+
+// In general, it is owned by and called from the chainmanager object.  It
+// modifies (by reference) other chainmanager member variables when called
+// upon to read a summary file, and reads (by reference) other chainmanager
+// member variables when called upon to write a summary file.
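+
+// A minimal caller sketch (an assumed pattern for illustration only, not
+// verbatim chainmanager code; 'region', 'replicate', 'collmgr', and
+// 'chainpack' are placeholder variables):
+//
+//   SumFileHandler handler;
+//   handler.WriteSumFileStart();                             // open the output sumfile
+//   handler.WriteChainSumStart(region, replicate, collmgr);  // begin one chain summary
+//   handler.WriteChainSumEnd(collmgr);                       // close that chain summary
+//   handler.WriteSumFileEnd(chainpack);                      // write trailer and close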
+
+#ifndef SUMFILEHANDLER_H
+#define SUMFILEHANDLER_H
+
+#include <fstream>
+#include "vectorx.h"
+
+class ForceParameters;
+class ChainOut;
+class ChainPack;
+class CollectionManager;
+
+class SumFileHandler
+{
+  private:
+    std::ifstream m_sumin;        // use when reading from a summary file
+    std::ofstream m_sumout;       // use when writing to a summary file
+
+    long int m_lastRegion;
+    long int m_lastReplicate;
+    long int m_lastChain;
+    long int m_lastRegionChainSum;
+    long int m_lastReplicateChainSum;
+    long int m_lastReplicateSummary;
+    bool m_regionSummary;
+
+  public:
+    SumFileHandler();
+    ~SumFileHandler() {};
+    SumFileHandler(const SumFileHandler& src);            // undefined
+    SumFileHandler& operator=(const SumFileHandler& src); // undefined
+
+    long int GetLastRegion() {return m_lastRegion;};
+    long int GetLastReplicate() {return m_lastReplicate;};
+    long int GetLastChain() {return m_lastChain;};
+    long int GetLastRegionChainSum() {return m_lastRegionChainSum;};
+    long int GetLastReplicateChainSum() {return m_lastReplicateChainSum;};
+    long int GetLastReplicateSummary() {return m_lastReplicateSummary;};
+    bool GetRegionSummary() {return m_regionSummary;};
+
+    void ReadInChainPack          (ChainPack& chainpack);
+    void ReadInChainSum           (ChainPack& chainpack,
+                                   CollectionManager& collectionmanager, long int numchains);
+    void SkipComments             ();
+    void ReadInChainOut           (ChainOut &c );
+    void ReadInSumFile            (ChainPack& chainpack,
+                                   CollectionManager& collectionmanager, long int numchains);
+    void ReadInReplicateSummary   (ChainPack& chainpack);
+    void ReadInRegionSummary      (ChainPack& chainpack);
+    bool ReadInVec1D              (DoubleVec1d& vd, string endtag );
+    bool ReadInVec1D              (LongVec1d& vd, string endtag );
+    void ReadInEndRegion          (ChainPack& chainpack);
+    void ReadInRecover            (ChainPack& chainpack);
+    // public only so TreeSummary::ReadInTreeSummary() can use, change if merge
+    void ReadInForceParameters    (ForceParameters& fp );
+    static void HandleSumOutFileFormatError( string callingfxn );
+    static void ReadInCheckFileFormat( string callingfxn, string expectedtag, string readtag );
+
+    void WriteSumFileStart        ();
+    void WriteSumFileEnd          (const ChainPack& chainpack);
+    void WriteChainSumStart       (long int whichregion, long int whichreplicate,
+                                   const CollectionManager& collectionmanager);
+    void WriteChainSumEnd         (const CollectionManager& collectionmanager);
+    void WriteChainPack           (ChainOut& chainout,const ChainPack& chainpack);
+    void WriteLastChain           (const ChainPack& chainpack);
+    void WriteRegionSummary       (ForceParameters& fp, double maxlike);
+    void WriteReplicateSummary    (ForceParameters& fp, double maxlike,
+                                   const ChainPack& chainpack);
+    void WriteWhatWasRead         (bool recoversumfile,
+                                   long int recover_region,
+                                   long int recover_replicate,
+                                   long int recover_chaintype,
+                                   long int recover_chain,
+                                   bool recover_redochain,
+                                   bool recover_redomaximization,
+                                   long int nregions,
+                                   long int nreplicates,
+                                   const ChainPack& chainpack,
+                                   const CollectionManager& collectionmanager);
+    static void WriteVec1D ( std::ofstream& out, vector<double>& vd );
+    static void WriteVec1D ( std::ofstream& out, vector<long int>& vd );
+    void CloseSumOut();
+
+};
+
+#endif // SUMFILEHANDLER_H
+
+//____________________________________________________________________________________
diff --git a/src/control/types.h b/src/control/types.h
new file mode 100644
index 0000000..89c7909
--- /dev/null
+++ b/src/control/types.h
@@ -0,0 +1,69 @@
+// $Id: types.h,v 1.24 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef TYPES_H
+#define TYPES_H
+
+#include <list>
+#include <string>
+#include <map>
+#include <memory>
+
+#include "vectorx.h"
+#include "shared_ptr.hpp"
+
+class Branch;
+class DataType;
+class DataModel;
+class Cell;
+class triplet;
+class DLCalculator;
+
+typedef boost::shared_ptr<Cell> Cell_ptr;
+typedef std::pair<long,long>   wakestats;
+
+typedef std::pair<std::string,long> TagLine;
+
+typedef std::map<std::string,DoubleVec2d> percentmap;     // DoubleVec2d dim: pop X reg
+typedef std::map<std::string,DoubleVec1d> overpercentmap; // DoubleVec1d dim: by pop
+
+typedef std::map<std::string, std::pair<long, long> > ratemap; // acceptance rate per arranger
+
+typedef boost::shared_ptr<Branch>  Branch_ptr;
+typedef boost::weak_ptr<Branch>    weakBranch_ptr;
+typedef std::list<Branch_ptr>      Branchlist;
+typedef Branchlist::iterator       Branchiter;
+typedef Branchlist::const_iterator Branchconstiter;
+
+typedef double*** cellarray;
+typedef std::multimap<triplet, cellarray> FreeStore;
+
+typedef boost::shared_ptr<DataType> DataType_ptr;
+typedef boost::shared_ptr<DataModel> DataModel_ptr;
+
+typedef std::pair<double, double> centilepair;
+
+typedef boost::shared_ptr<DLCalculator> DLCalc_ptr;
+
+typedef std::map<force_type, long>  FPartMap;
+typedef std::map<force_type, long>::iterator  FPartMapiter;
+
+typedef short xpart_t;  // type representing a number of partitions or
+                        // cross-partitions.  Change this if you expect to
+                        // have too many population x disease state x whatever
+                        // combos to fit in the current size.
+typedef std::vector<xpart_t> XPartVec1d;
+typedef std::vector<std::vector<xpart_t> > XPartVec2d;
+
+#endif // TYPES_H
+
+//____________________________________________________________________________________
diff --git a/src/control/userparam.cpp b/src/control/userparam.cpp
new file mode 100644
index 0000000..65c4697
--- /dev/null
+++ b/src/control/userparam.cpp
@@ -0,0 +1,397 @@
+// $Id: userparam.cpp,v 1.51 2012/07/10 21:24:25 jmcgill Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <iostream>
+#include <fstream>
+
+#include "local_build.h"
+
+#include "errhandling.h"
+#include "force.h"
+#include "region.h"
+#include "registry.h"
+#include "stringx.h"
+#include "timex.h"
+#include "userparam.h"
+#include "xml_strings.h"  // for ToXML()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using std::string;
+using std::vector;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+string UserParameters::BuildFileName(string prefix, string regionname, long repCount, string ext) const
+{
+    string rname = SpacesToUnderscores(regionname);
+    return prefix + "_" + rname + "_" + ToString(repCount+1) + "." + ext;
+}
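+
+// Illustrative result (hypothetical arguments): BuildFileName("tracefile",
+// "Region 1", 0, "txt") would yield "tracefile_Region_1_1.txt" -- spaces in
+// the region name become underscores and repCount is reported 1-based.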
+
+//------------------------------------------------------------------------------------
+
+UserParameters::UserParameters(string curvefileprefix,
+                               string mapfileprefix,
+                               string reclocfileprefix,
+                               string tracefileprefix,
+                               string newicktreefileprefix,
+#ifdef LAMARC_QA_TREE_DUMP
+                               string argfileprefix,
+#endif // LAMARC_QA_TREE_DUMP
+                               string datafilename,
+                               string profileprefix,
+                               string resultsfilename,
+                               string treesuminfilename,
+                               string treesumoutfilename,
+                               string xmloutfilename,
+                               string xmlreportfilename,
+                               verbosity_type   verbosity,
+                               verbosity_type   progress,
+                               bool   plotPost,
+                               bool   usesystemclock,
+                               bool   readsumfile,
+                               bool   writesumfile,
+                               bool   writecurvefiles,
+                               bool   writereclocfiles,
+                               bool   writetracefiles,
+                               bool   writenewicktreefiles,
+#ifdef LAMARC_QA_TREE_DUMP
+                               bool   writeargfiles,
+                               bool   writemanyargs,
+#endif // LAMARC_QA_TREE_DUMP
+                               long   randomSeed,
+                               time_t programstarttime)
+    :
+    m_curvefileprefix(curvefileprefix),
+    m_mapfileprefix(mapfileprefix),
+    m_reclocfileprefix(reclocfileprefix),
+    m_tracefileprefix(tracefileprefix),
+    m_newicktreefileprefix(newicktreefileprefix),
+#ifdef LAMARC_QA_TREE_DUMP
+    m_argfileprefix(argfileprefix),
+#endif // LAMARC_QA_TREE_DUMP
+    m_datafilename(datafilename),
+    m_profileprefix(profileprefix),
+    m_resultsfilename(resultsfilename),
+    m_treesuminfilename(treesuminfilename),
+    m_treesumoutfilename(treesumoutfilename),
+    m_xmloutfilename(xmloutfilename),
+    m_xmlreportfilename(xmlreportfilename),
+    m_verbosity(verbosity),
+    m_progress(progress),
+    m_plotPost(plotPost),
+    m_usesystemclock(usesystemclock),
+    m_readsumfile(readsumfile),
+    m_writesumfile(writesumfile),
+    m_writecurvefiles(writecurvefiles),
+    m_writereclocfiles(writereclocfiles),
+    m_writetracefiles(writetracefiles),
+    m_writenewicktreefiles(writenewicktreefiles),
+#ifdef LAMARC_QA_TREE_DUMP
+    m_writeargfiles(writeargfiles),
+    m_writemanyargs(writemanyargs),
+#endif // LAMARC_QA_TREE_DUMP
+    m_randomSeed(randomSeed),
+    m_programstarttime(programstarttime)
+{
+    // We establish defaults for the user parameters here.
+    // They can be overridden both by the data file and by
+    // the menu.
+
+    //LS DEBUG:  move to the output manager when one exists.
+    string regionname = registry.GetDataPack().GetRegion(0).GetRegionName();
+    m_currentReclocFileName     = BuildFileName(m_reclocfileprefix,    regionname,-1, "txt");
+    m_currentTraceFileName      = BuildFileName(m_tracefileprefix,     regionname,-1, "txt");
+    m_currentNewickTreeFileName = BuildFileName(m_newicktreefileprefix,regionname,-1, "txt");
+#ifdef LAMARC_QA_TREE_DUMP
+    m_currentArgFileName        = BuildFileName(m_argfileprefix,       regionname,-1, "xml");
+#endif // LAMARC_QA_TREE_DUMP
+
+    m_currentBestLike = -DBL_MAX;
+    m_currentStep = 0;
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d UserParameters::ToXML(unsigned long nspaces) const
+{
+    StringVec1d xmllines;
+    string line = MakeIndent(MakeTag(xmlstr::XML_TAG_FORMAT),nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+
+    string mytag;
+
+    mytag = MakeTag(xmlstr::XML_TAG_CONVERT_OUTPUT);
+    line = MakeIndent(mytag, nspaces) + " "
+        + ToString(registry.GetConvertOutputToEliminateZeroes()) + " "
+        + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    if (GetUseSystemClock())
+    {
+        xmllines.push_back("");
+        line = MakeIndent(xmlstr::XML_COMMENT_SEED_FROM_CLOCK_0,nspaces);
+        xmllines.push_back(line);
+        line = MakeIndent(xmlstr::XML_COMMENT_SEED_FROM_CLOCK_1,nspaces);
+        xmllines.push_back(line);
+        mytag = MakeTag(xmlstr::XML_TAG_SEED_FROM_CLOCK);
+        line = MakeIndent(mytag,nspaces) + ToString(GetRandomSeed()) + MakeCloseTag(mytag);
+        xmllines.push_back(line);
+        xmllines.push_back("");
+    }
+    else
+    {
+        mytag = MakeTag(xmlstr::XML_TAG_SEED);
+        line = MakeIndent(mytag,nspaces) + ToString(GetRandomSeed()) + MakeCloseTag(mytag);
+        xmllines.push_back(line);
+    }
+
+    mytag = MakeTag(xmlstr::XML_TAG_VERBOSITY);
+    line = MakeIndent(mytag,nspaces) + ToString(GetVerbosity()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_PROGRESS_REPORTS);
+    line = MakeIndent(mytag,nspaces) + ToString(GetProgress()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_RESULTS_FILE);
+    line = MakeIndent(mytag,nspaces) + GetResultsFileName() + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_USE_IN_SUMMARY);
+    line = MakeIndent(mytag,nspaces) + ToStringTF(GetReadSumFile()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_IN_SUMMARY_FILE);
+    line = MakeIndent(mytag,nspaces) + GetTreeSumInFileName() + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_USE_OUT_SUMMARY);
+    line = MakeIndent(mytag,nspaces) + ToStringTF(GetWriteSumFile()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_OUT_SUMMARY_FILE);
+    line = MakeIndent(mytag,nspaces) + GetTreeSumOutFileName() + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_USE_CURVEFILES);
+    line = MakeIndent(mytag,nspaces) + ToStringTF(GetWriteCurveFiles()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_CURVEFILE_PREFIX);
+    line = MakeIndent(mytag,nspaces) + GetCurveFilePrefix() + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    //LS DEBUG:  add this
+#if 0
+    mytag = MakeTag(xmlstr::XML_TAG_MAPFILE_PREFIX);
+    line = MakeIndent(mytag,nspaces) + GetMapFilePrefix() + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+#endif
+
+    mytag = MakeTag(xmlstr::XML_TAG_USE_RECLOCFILE);
+    line = MakeIndent(mytag,nspaces) + ToStringTF(GetWriteReclocFiles())
+        + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_RECLOCFILE_PREFIX);
+    line = MakeIndent(mytag,nspaces) + GetReclocFilePrefix() + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    mytag = MakeTag(xmlstr::XML_TAG_USE_TRACEFILE);
+    line = MakeIndent(mytag,nspaces) + ToStringTF(GetWriteTraceFiles())
+        + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_TRACEFILE_PREFIX);
+    line = MakeIndent(mytag,nspaces) + GetTraceFilePrefix() + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    mytag = MakeTag(xmlstr::XML_TAG_USE_NEWICKTREEFILE);
+    line = MakeIndent(mytag,nspaces) + ToStringTF(GetWriteNewickTreeFiles())
+        + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_NEWICKTREEFILE_PREFIX);
+    line = MakeIndent(mytag,nspaces) + GetNewickTreeFilePrefix()
+        + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+#ifdef LAMARC_QA_TREE_DUMP
+    mytag = MakeTag(xmlstr::XML_TAG_USE_ARGFILES);
+    line = MakeIndent(mytag,nspaces) + ToStringTF(GetWriteArgFiles())
+        + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_MANY_ARGFILES);
+    line = MakeIndent(mytag,nspaces) + ToStringTF(GetWriteManyArgs())
+        + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_ARGFILE_PREFIX);
+    line = MakeIndent(mytag,nspaces) + GetArgFilePrefix()
+        + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+#endif // LAMARC_QA_TREE_DUMP
+
+    mytag = MakeTag(xmlstr::XML_TAG_OUT_XML_FILE);
+    line = MakeIndent(mytag,nspaces) + GetXMLOutFileName() + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    mytag = MakeTag(xmlstr::XML_TAG_REPORT_XML_FILE);
+    line = MakeIndent(mytag,nspaces) + GetXMLReportFileName() + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_PROFILE_PREFIX);
+    line = MakeIndent(mytag,nspaces) + GetProfilePrefix() + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    nspaces -= INDENT_DEPTH;
+
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_FORMAT),nspaces);
+    xmllines.push_back(line);
+
+    return xmllines;
+} // ToXML
+
+//------------------------------------------------------------------------------------
+
+void UserParameters::UpdateFileNamesAndSteps(long region, long replicate,
+                                             bool readsumfile)
+{
+    string regionname = registry.GetDataPack().GetRegion(region).GetRegionName();
+    m_currentReclocFileName     = BuildFileName(m_reclocfileprefix,    regionname,replicate, "txt");
+    m_currentTraceFileName      = BuildFileName(m_tracefileprefix,     regionname,replicate, "txt");
+    m_currentNewickTreeFileName = BuildFileName(m_newicktreefileprefix,regionname,replicate, "txt");
+#ifdef LAMARC_QA_TREE_DUMP
+    m_currentArgFileName        = BuildFileName(m_argfileprefix,       regionname,replicate, "xml");
+#endif // LAMARC_QA_TREE_DUMP
+
+    m_currentStep = 0;
+    //If we're in a non-bayesian run, and if we're collecting tracer data, we
+    // need to start things off here.
+    //
+    // Er, this is ugly design.  LS DEBUG
+
+    //Note:  Below are the headers for the tracer file; the contents are
+    // written in CollectionManager::WriteTraceFile(...) in collmanager.cpp
+    if (GetWriteTraceFiles() && !readsumfile)
+    {
+        std::ofstream tracefile;
+        tracefile.open(m_currentTraceFileName.c_str(),std::ios::trunc);
+        tracefile << "Step\tLn(Data Likelihood)";
+        if (registry.GetChainParameters().IsBayesian())
+        {
+            const vector<Force*>& forces = registry.GetForceSummary().GetAllForces();
+            vector<Force*>::const_iterator force;
+            for (force = forces.begin(); force != forces.end(); ++force)
+            {
+                vector<string> paramnames = (*force)->GetAllParamNames();
+                vector<string>::iterator name;
+                for (name = paramnames.begin(); name != paramnames.end(); ++name)
+                {
+                    if (!(*name).empty())
+                    {
+                        tracefile << "\t" << *name;
+                    }
+                }
+            }
+        }
+        tracefile << std::endl;
+        tracefile.close();
+        AddTraceFileName(m_currentTraceFileName);
+    }
+    if (GetWriteReclocFiles() && !readsumfile)
+    {
+        std::ofstream reclocfile;
+        reclocfile.open(m_currentReclocFileName.c_str(),std::ios::trunc);
+        reclocfile.close();
+        AddReclocFileName(m_currentReclocFileName);
+    }
+
+#ifdef LAMARC_QA_TREE_DUMP
+    if (GetWriteArgFiles() && !readsumfile)
+    {
+        std::ofstream argfile;
+        argfile.open(m_currentArgFileName.c_str(),std::ios::trunc);
+        argfile.close();
+        AddArgFileName(m_currentArgFileName);
+    }
+#endif
+}
+
+//------------------------------------------------------------------------------------
+
+void UserParameters::UpdateWriteTraceFile(long region, long replicate)
+{
+    //m_currentTraceFileName should be accurate.  We should not have written
+    // to it during this run of LAMARC, but we might have written to it
+    // during the previous run.  If so, we'll just append.  If not, we'll
+    // create a new file by calling UpdateFileNamesAndSteps.
+    std::ifstream tracefile;
+    tracefile.open(m_currentTraceFileName.c_str(), std::ios::in );
+    if (!tracefile)
+    {
+        UpdateFileNamesAndSteps(region, replicate, false);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+long UserParameters::GetNextStep(long initialOrFinal)
+{
+    m_currentStep += registry.GetChainParameters().GetInterval(initialOrFinal); // EWFIX.CHAINTYPE
+    return m_currentStep;
+}
+
+//------------------------------------------------------------------------------------
+
+void UserParameters::AddCurveFileName (const std::string name)
+{
+    m_curvefilenames.push_back(name);
+}
+
+//------------------------------------------------------------------------------------
+
+void UserParameters::AddMapFileName (const std::string name)
+{
+    m_mapfilenames.push_back(name);
+}
+
+//------------------------------------------------------------------------------------
+
+void UserParameters::AddProfileName (const std::string name)
+{
+    m_profilenames.push_back(name);
+}
+
+//------------------------------------------------------------------------------------
+// RSGNOTE: Called, but result is never used.
+
+void UserParameters::AddReclocFileName (const std::string name)
+{
+    m_reclocfilenames.insert(name);
+}
+
+//------------------------------------------------------------------------------------
+
+void UserParameters::AddTraceFileName (const std::string name)
+{
+    m_tracefilenames.insert(name);
+}
+
+//------------------------------------------------------------------------------------
+
+void UserParameters::AddNewickTreeFileName (const std::string name)
+{
+    m_newicktreefilenames.insert(name);
+}
+
+//------------------------------------------------------------------------------------
+
+#ifdef LAMARC_QA_TREE_DUMP
+void UserParameters::AddArgFileName (const std::string name)
+{
+    m_argfilenames.insert(name);
+}
+#endif // LAMARC_QA_TREE_DUMP
+
+//____________________________________________________________________________________
diff --git a/src/control/userparam.h b/src/control/userparam.h
new file mode 100644
index 0000000..657a87b
--- /dev/null
+++ b/src/control/userparam.h
@@ -0,0 +1,220 @@
+// $Id: userparam.h,v 1.42 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/********************************************************************
+ UserParameters is a collection class used for internal communication
+ throughout Lamarc.
+
+ UserParameters is a grab-bag of information provided by the user that
+ doesn't seem to belong anywhere else, including file names,
+ output format options, and the random number seed.
+
+ Copy semantics are provided for menu roll-back purposes; otherwise
+ there should only be one copy, registered in Registry.
+
+ Written by Jim Sloan, revised by Mary Kuhner
+
+********************************************************************/
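+
+// Illustrative access pattern (drawn from the usage in sumfilehandler.cpp;
+// not a complete example): most callers reach this object through the
+// Registry, e.g.
+//
+//   if (registry.GetUserParameters().GetWriteSumFile())
+//   {
+//       std::string sumfile = registry.GetUserParameters().GetTreeSumOutFileName();
+//       // ... write to that summary file ...
+//   }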
+
+#ifndef USERPARAMETERS_H
+#define USERPARAMETERS_H
+
+#include <string>
+#include <vector>
+#include <set>
+
+#include "local_build.h"
+
+#include "constants.h"
+
+
+class Registry;
+
+class UserParameters
+{
+  private:
+    const std::string m_curvefileprefix;
+    const std::string m_mapfileprefix;
+    const std::string m_reclocfileprefix;
+    const std::string m_tracefileprefix;
+    const std::string m_newicktreefileprefix;
+#ifdef LAMARC_QA_TREE_DUMP
+    const std::string m_argfileprefix;
+#endif // LAMARC_QA_TREE_DUMP
+    std::string m_datafilename;
+    std::string m_profileprefix;
+    std::string m_resultsfilename;
+    std::string m_treesuminfilename;
+    std::string m_treesumoutfilename;
+    std::string m_xmloutfilename;
+    std::string m_xmlreportfilename;
+
+    verbosity_type   m_verbosity;
+    verbosity_type   m_progress;
+    bool   m_plotPost;
+    bool   m_usesystemclock;
+    bool   m_readsumfile;   // if true read in tree summaries from a file
+    bool   m_writesumfile;  // if true write tree summaries to file.
+    // Note:  Both could be 'true'; reading
+    //  and writing to a file.
+    bool   m_writecurvefiles;
+    bool   m_writereclocfiles; // if true write Recloc output to file.
+    // following variable only applies in Bayesian runs
+    bool   m_writetracefiles; // if true write Tracer output to file.
+    bool   m_writenewicktreefiles;
+#ifdef LAMARC_QA_TREE_DUMP
+    bool   m_writeargfiles;
+    bool   m_writemanyargs;
+#endif // LAMARC_QA_TREE_DUMP
+    long   m_randomSeed;
+
+    time_t m_programstarttime;
+
+    //LS DEBUG:  The following member variables are being stored here for now,
+    // but should eventually move to an 'output manager' of some sort.
+    std::vector<std::string> m_curvefilenames;
+    std::vector<std::string> m_mapfilenames;
+    std::vector<std::string> m_profilenames;
+    std::set<std::string> m_reclocfilenames; // RSGNOTE: Never used.
+    std::set<std::string> m_tracefilenames;
+    std::set<std::string> m_newicktreefilenames;
+#ifdef LAMARC_QA_TREE_DUMP
+    std::set<std::string> m_argfilenames;
+#endif // LAMARC_QA_TREE_DUMP
+    std::string m_currentReclocFileName;
+    std::string m_currentTraceFileName;
+    std::string m_currentNewickTreeFileName;
+#ifdef LAMARC_QA_TREE_DUMP
+    std::string m_currentArgFileName;
+#endif // LAMARC_QA_TREE_DUMP
+    double m_currentBestLike;
+    long m_currentStep;
+
+    UserParameters();       // undefined
+
+  protected:
+    std::string BuildFileName(std::string prefix, std::string regname, long repCount, std::string extension) const;
+
+  public:
+    UserParameters(
+        std::string curvefileprefix,
+        std::string mapfileprefix,
+        std::string reclocfileprefix,
+        std::string tracefileprefix,
+        std::string newicktreefileprefix,
+#ifdef LAMARC_QA_TREE_DUMP
+        std::string argfileprefix,
+#endif // LAMARC_QA_TREE_DUMP
+        std::string datafilename,
+        std::string profileprefix,
+        std::string resultsfilename,
+        std::string treesuminfilename,
+        std::string treesumoutfilename,
+        std::string xmloutfilename,
+        std::string xmlreportfilename,
+        verbosity_type   verbosity,
+        verbosity_type   progress,
+        bool   plotPost,
+        bool   usesystemclock,
+        bool   readsumfile,
+        bool   writesumfile,
+        bool   writecurvefiles,
+        bool   writereclocfile,         // RSGNOTE: Never used.
+        bool   writetracefile,
+        bool   writenewicktreefile,     // RSGNOTE: Never used.
+#ifdef LAMARC_QA_TREE_DUMP
+        bool   writeargfiles,
+        bool   writemanyargs,
+#endif // LAMARC_QA_TREE_DUMP
+        long   randomSeed,
+        time_t programstarttime
+        );
+
+    ~UserParameters() {};
+    // accepting default copy constructor and assignment operator
+
+    // Get Functions
+    std::vector < std::string > ToXML(unsigned long nspaces) const;
+
+    std::string GetCurveFilePrefix()     const { return m_curvefileprefix; };
+    std::string GetMapFilePrefix()       const { return m_mapfileprefix; };
+    std::string GetReclocFilePrefix()    const { return m_reclocfileprefix; };
+    std::string GetTraceFilePrefix()     const { return m_tracefileprefix; };
+    std::string GetNewickTreeFilePrefix()    const { return m_newicktreefileprefix;};
+#ifdef LAMARC_QA_TREE_DUMP
+    std::string GetArgFilePrefix()       const { return m_argfileprefix;};
+#endif // LAMARC_QA_TREE_DUMP
+    std::string GetDataFileName()        const { return m_datafilename; };
+    std::string GetProfilePrefix()       const { return m_profileprefix; };
+    std::string GetResultsFileName()     const { return m_resultsfilename; };
+    std::string GetTreeSumInFileName()   const { return m_treesuminfilename; };
+    std::string GetTreeSumOutFileName()  const { return m_treesumoutfilename; };
+    std::string GetXMLOutFileName()      const { return m_xmloutfilename; };
+    std::string GetXMLReportFileName()   const { return m_xmlreportfilename; };
+    std::vector<std::string> GetCurveFileNames() const { return m_curvefilenames; };
+    std::vector<std::string> GetMapFileNames() const { return m_mapfilenames; };
+    std::vector<std::string> GetProfileNames() const { return m_profilenames; };
+    std::set<std::string> GetReclocFileNames() const { return m_reclocfilenames; }; // RSGNOTE: Never used.
+    std::set<std::string> GetTraceFileNames() const { return m_tracefilenames; };
+    std::set<std::string> GetNewickTreeFileNames() const { return m_newicktreefilenames; };
+
+    verbosity_type   GetVerbosity() const { return m_verbosity; };
+    verbosity_type   GetProgress()  const { return m_progress; };
+    bool   GetPlotPost()            const { return m_plotPost; };
+    bool   GetReadSumFile()         const { return m_readsumfile; };
+    bool   GetWriteSumFile()        const { return m_writesumfile; };
+    bool   GetWriteCurveFiles()     const { return m_writecurvefiles; };
+    bool   GetWriteReclocFiles()    const { return m_writereclocfiles; };
+    bool   GetWriteTraceFiles()     const { return m_writetracefiles; };
+    bool   GetWriteNewickTreeFiles()const { return m_writenewicktreefiles; };
+#ifdef LAMARC_QA_TREE_DUMP
+    bool   GetWriteArgFiles()       const { return m_writeargfiles; };
+    bool   GetWriteManyArgs()       const { return m_writemanyargs; };
+#endif // LAMARC_QA_TREE_DUMP
+    bool   GetUseSystemClock()      const { return m_usesystemclock; };
+
+    long   GetRandomSeed()          const { return m_randomSeed; };
+    time_t GetProgramStartTime()    const { return m_programstarttime; };
+
+    // These Set methods are public because lamarc may have to set them
+    // to "false" if an error occurs. This makes them not quite "user"
+    // parameters, but it is the best place for this logic.
+    void   SetWriteSumFile  (const bool a)     { m_writesumfile = a; };
+    void   SetReadSumFile   (const bool a)     { m_readsumfile  = a; };
+
+    //LS DEBUG:  These are the functions that would need to move to an output
+    // manager if we make one:
+    void AddCurveFileName (const std::string name);
+    void AddMapFileName   (const std::string name);
+    void AddProfileName   (const std::string name);
+    void AddReclocFileName(const std::string name);
+    void AddTraceFileName (const std::string name);
+    void AddNewickTreeFileName (const std::string name);
+#ifdef LAMARC_QA_TREE_DUMP
+    void AddArgFileName   (const std::string name);
+#endif // LAMARC_QA_TREE_DUMP
+    void UpdateFileNamesAndSteps(long region, long replicate, bool readsumfile);
+    void UpdateWriteReclocFile(long region, long replicate); // RSGNOTE: Never used.
+    void UpdateWriteTraceFile(long region, long replicate);
+    string GetCurrentReclocFileName() {return m_currentReclocFileName;};
+    string GetCurrentTraceFileName() {return m_currentTraceFileName;};
+    string GetCurrentNewickTreeFileName() {return m_currentNewickTreeFileName;};
+#ifdef LAMARC_QA_TREE_DUMP
+    string GetCurrentArgFileName() {return m_currentArgFileName;};
+#endif // LAMARC_QA_TREE_DUMP
+    double GetCurrentBestLike() {return m_currentBestLike;};
+    void SetCurrentBestLike(double like) {m_currentBestLike = like;};
+    void ClearCurrentBestLike() {m_currentBestLike = -DBL_MAX;};
+    long GetNextStep(long initialOrFinal);  // EWFIX.CHAINTYPE
+};
+
+#endif // USERPARAMETERS_H
+
+//____________________________________________________________________________________
diff --git a/src/control/xmlsum_strings.cpp b/src/control/xmlsum_strings.cpp
new file mode 100644
index 0000000..b5140ba
--- /dev/null
+++ b/src/control/xmlsum_strings.cpp
@@ -0,0 +1,159 @@
+// $Id: xmlsum_strings.cpp,v 1.20 2013/11/07 22:46:06 mkkuhner Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// Strings assigned here are declared in xmlsum_strings.h as
+// public static const std::string members of class xmlsum.
+
+//------------------------------------------------------------------------------------
+// xml tags for sumfile
+//
+//
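+// Illustrative use (mirrors SumFileHandler::WriteRegionSummary in
+// sumfilehandler.cpp, with the force-parameter output omitted; maxlike is a
+// placeholder value):
+//
+//   m_sumout << xmlsum::REGION_SUMMARY_START << endl;
+//   m_sumout << "\t" << xmlsum::MAXLIKE_START << " " << maxlike << " "
+//            << xmlsum::MAXLIKE_END << endl;
+//   m_sumout << xmlsum::REGION_SUMMARY_END << endl;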
+
+#include "xmlsum_strings.h"
+#include <string>
+
+using std::string;
+
+const string xmlsum::COMMENT_START              ="<!--";
+const string xmlsum::COMMENT_END                ="-->";
+
+// chainsum related xml tags
+const string xmlsum::SUMFILE_START              ="<XML-summary-file>";
+const string xmlsum::SUMFILE_END                ="</XML-summary-file>";
+const string xmlsum::CHAINSUM_START             ="<chainsum>";
+const string xmlsum::CHAINSUM_END               ="</chainsum>";
+const string xmlsum::REG_REP_START              ="<reg_rep>";
+const string xmlsum::REG_REP_END                ="</reg_rep>";
+const string xmlsum::END_REGION_START           ="<end-region>";
+const string xmlsum::END_REGION_END             ="</end-region>";
+
+// chainpack related xml tags, includes all chainouts
+const string xmlsum::CHAINPACK_START            ="<chainpack>";
+const string xmlsum::CHAINPACK_END              ="</chainpack>";
+
+// chainout related xml tags, includes estimate tags
+const string xmlsum::ALPHA_END                  ="</alpha>";
+const string xmlsum::ALPHA_START1               ="<alpha";
+const string xmlsum::ALPHA_START2               ="locus=";
+const string xmlsum::ALPHA_START3               =">";
+const string xmlsum::ACCRATE_END                ="</accrate>";
+const string xmlsum::ACCRATE_START              ="<accrate>";
+const string xmlsum::BADTREES_END               ="</badtrees>";
+const string xmlsum::BADTREES_START             ="<badtrees>";
+const string xmlsum::BAYESUNIQUE_END            ="</bayes_unique>";
+const string xmlsum::BAYESUNIQUE_START          ="<bayes_unique>";
+const string xmlsum::CHAINOUT_END               ="</chainout>";
+const string xmlsum::CHAINOUT_START             ="<chainout>";
+const string xmlsum::ENDTIME_END                ="</endtime>";
+const string xmlsum::ENDTIME_START              ="<endtime>";
+const string xmlsum::LLIKEDATA_END              ="</llikedata>";
+const string xmlsum::LLIKEDATA_START            ="<llikedata>";
+const string xmlsum::LLIKEMLE_END               ="</llikemle>";
+const string xmlsum::LLIKEMLE_START             ="<llikemle>";
+const string xmlsum::MAP_END                    ="</map>";
+const string xmlsum::MAP_START                  ="<map>";
+const string xmlsum::NUMBER_END                 ="</number>";
+const string xmlsum::NUMBER_START               ="<number>";
+const string xmlsum::RATES_END                  ="</rates>";
+const string xmlsum::RATES_START                ="<rates>";
+const string xmlsum::STARTTIME_END              ="</starttime>";
+const string xmlsum::STARTTIME_START            ="<starttime>";
+const string xmlsum::STRETCHEDTREES_END         ="</stretchedtrees>";
+const string xmlsum::STRETCHEDTREES_START       ="<stretchedtrees>";
+const string xmlsum::SWAPRATES_END              ="</swaprates>";
+const string xmlsum::SWAPRATES_START            ="<swaprates>";
+const string xmlsum::TEMPERATURES_END           ="</temperatures>";
+const string xmlsum::TEMPERATURES_START         ="<temperatures>";
+const string xmlsum::TINYTREES_END              ="</tinytrees>";
+const string xmlsum::TINYTREES_START            ="<tinytrees>";
+const string xmlsum::ZERODLTREES_END            ="</zerodltrees>";
+const string xmlsum::ZERODLTREES_START          ="<zerodltrees>";
+
+// estimate specific xml tags
+const string xmlsum::EPOCHTIMES_START           ="<epochtimes>";
+const string xmlsum::EPOCHTIMES_END             ="</epochtimes>";
+const string xmlsum::ESTIMATES_START            ="<estimates>";
+const string xmlsum::ESTIMATES_END              ="</estimates>";
+const string xmlsum::THETAS_START               ="<thetas>";
+const string xmlsum::THETAS_END                 ="</thetas>";
+const string xmlsum::MIGRATES_START             ="<migrates>";
+const string xmlsum::MIGRATES_END               ="</migrates>";
+const string xmlsum::DIVMIGRATES_START          ="<divmigrates>";
+const string xmlsum::DIVMIGRATES_END            ="</divmigrates>";
+const string xmlsum::RECRATES_START             ="<recrates>";
+const string xmlsum::RECRATES_END               ="</recrates>";
+const string xmlsum::GROWTHRATES_START          ="<growthrates>";
+const string xmlsum::GROWTHRATES_END            ="</growthrates>";
+const string xmlsum::LOGISTICSELECTION_START = "<logisitic-selection-coefficient>";
+const string xmlsum::LOGISTICSELECTION_END = "</logisitic-selection-coefficient>";
+const string xmlsum::DISEASERATES_START         ="<diseasethrates>";
+const string xmlsum::DISEASERATES_END           ="</diseasethrates>";
+const string xmlsum::GAMMAOVERREGIONS_START     ="<ShapeParameterForGammaOverRegions>";
+const string xmlsum::GAMMAOVERREGIONS_END       ="</ShapeParameterForGammaOverRegions>";
+
+// tree summary xml tags
+const string xmlsum::TREESUM_START              ="<treesum>";
+const string xmlsum::TREESUM_END                ="</treesum>";
+
+const string xmlsum::NCOPY_START                ="<ncopy>";
+const string xmlsum::NCOPY_END                  ="</ncopy>";
+
+const string xmlsum::SHORTFORCE_START           ="<shortforce>";
+const string xmlsum::SHORTFORCE_END             ="</shortforce>";
+const string xmlsum::INTERVALS_START            ="<intervals>";
+const string xmlsum::INTERVALS_END              ="</intervals>";
+
+const string xmlsum::SHORTPOINT_START           ="<shortpoint>";
+const string xmlsum::SHORTPOINT_END             ="</shortpoint>";
+const string xmlsum::SHORTWAIT_START            ="<shortwait>";
+const string xmlsum::SHORTWAIT_END              ="</shortwait>";
+const string xmlsum::SHORTPICK_START            ="<shortpick>";
+const string xmlsum::SHORTPICK_END              ="</shortpick>";
+const string xmlsum::SHORTPICK_FORCE_START      ="<shortpickforce>";
+const string xmlsum::SHORTPICK_FORCE_END        ="</shortpickforce>";
+
+// tree summary full-interval xml tags
+const string xmlsum::FORCE_START                ="<force>";
+const string xmlsum::FORCE_END                  ="</force>";
+const string xmlsum::XPARTLINES_START           ="<xpartlines>";
+const string xmlsum::XPARTLINES_END             ="</xpartlines>";
+const string xmlsum::PARTLINES_START            ="<partlines>";
+const string xmlsum::PARTLINES_END              ="</partlines>";
+const string xmlsum::RECWEIGHT_START            ="<recweight>";
+const string xmlsum::RECWEIGHT_END              ="</recweight>";
+const string xmlsum::OLDSTATUS_START            ="<oldstatus>";
+const string xmlsum::OLDSTATUS_END              ="</oldstatus>";
+const string xmlsum::NEWSTATUS_START            ="<newstatus>";
+const string xmlsum::NEWSTATUS_END              ="</newstatus>";
+const string xmlsum::RECPOINT_START             ="<recpoint>";
+const string xmlsum::RECPOINT_END               ="</recpoint>";
+const string xmlsum::PARTNERPICKS_START         ="<partnerpicks>";
+const string xmlsum::PARTNERPICKS_END           ="</partnerpicks>";
+
+//Parameter summary tags
+const string xmlsum::PARAM_SUMMARY_START        ="<param-summary>";
+const string xmlsum::PARAM_SUMMARY_END          ="</param-summary>";
+
+//Map summary tags
+const string xmlsum::MAP_SUMMARY_START          ="<map-summary>";
+const string xmlsum::MAP_SUMMARY_END            ="</map-summary>";
+const string xmlsum::MAP_SUMMARIES_START        ="<map-summaries>";
+const string xmlsum::MAP_SUMMARIES_END          ="</map-summaries>";
+
+// region and replicate summary xml tags
+
+const string xmlsum::REGION_SUMMARY_START       ="<region-summary>";
+const string xmlsum::REGION_SUMMARY_END         ="</region-summary>";
+const string xmlsum::REPLICATE_SUMMARY_START    ="<replicate-summary>";
+const string xmlsum::REPLICATE_SUMMARY_END      ="</replicate-summary>";
+const string xmlsum::MAXLIKE_START              ="<maxlike>";
+const string xmlsum::MAXLIKE_END                ="</maxlike>";
+
+//____________________________________________________________________________________
diff --git a/src/control/xmlsum_strings.h b/src/control/xmlsum_strings.h
new file mode 100644
index 0000000..baacadc
--- /dev/null
+++ b/src/control/xmlsum_strings.h
@@ -0,0 +1,160 @@
+// $Id: xmlsum_strings.h,v 1.21 2013/11/07 22:46:06 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef XMLSUMSTRINGS_H
+#define XMLSUMSTRINGS_H
+
+#include <string>
+
+//------------------------------------------------------------------------------------
+// xml tags for sumfile
+//
+//
+class xmlsum
+{
+  public:
+    static const std::string COMMENT_START;
+    static const std::string COMMENT_END;
+
+    // chainsum related xml tags
+    static const std::string SUMFILE_START;
+    static const std::string SUMFILE_END;
+    static const std::string CHAINSUM_START;
+    static const std::string CHAINSUM_END;
+    static const std::string REG_REP_START;
+    static const std::string REG_REP_END;
+    static const std::string END_REGION_START;
+    static const std::string END_REGION_END;
+
+    // chainpack related xml tags, includes all chainouts
+    static const std::string CHAINPACK_START;
+    static const std::string CHAINPACK_END;
+
+    // chainout related xml tags, includes estimate tags
+    static const std::string ALPHA_END;
+    static const std::string ALPHA_START1;
+    static const std::string ALPHA_START2;
+    static const std::string ALPHA_START3;
+    static const std::string ACCRATE_END;
+    static const std::string ACCRATE_START;
+    static const std::string BADTREES_END;
+    static const std::string BADTREES_START;
+    static const std::string BAYESUNIQUE_END;
+    static const std::string BAYESUNIQUE_START;
+    static const std::string CHAINOUT_END;
+    static const std::string CHAINOUT_START;
+    static const std::string ENDTIME_END;
+    static const std::string ENDTIME_START;
+    static const std::string LLIKEDATA_END;
+    static const std::string LLIKEDATA_START;
+    static const std::string LLIKEMLE_END;
+    static const std::string LLIKEMLE_START;
+    static const std::string MAP_END;
+    static const std::string MAP_START;
+    static const std::string NUMBER_END;
+    static const std::string NUMBER_START;
+    static const std::string RATES_END;
+    static const std::string RATES_START;
+    static const std::string STARTTIME_END;
+    static const std::string STARTTIME_START;
+    static const std::string STRETCHEDTREES_END;
+    static const std::string STRETCHEDTREES_START;
+    static const std::string SWAPRATES_END;
+    static const std::string SWAPRATES_START;
+    static const std::string TEMPERATURES_END;
+    static const std::string TEMPERATURES_START;
+    static const std::string TINYTREES_END;
+    static const std::string TINYTREES_START;
+    static const std::string ZERODLTREES_END;
+    static const std::string ZERODLTREES_START;
+
+    // estimate-specific xml tags
+    static const std::string EPOCHTIMES_START;
+    static const std::string EPOCHTIMES_END;
+    static const std::string ESTIMATES_START;
+    static const std::string ESTIMATES_END;
+    static const std::string THETAS_START;
+    static const std::string THETAS_END;
+    static const std::string MIGRATES_START;
+    static const std::string MIGRATES_END;
+    static const std::string DIVMIGRATES_START;
+    static const std::string DIVMIGRATES_END;
+    static const std::string RECRATES_START;
+    static const std::string RECRATES_END;
+    static const std::string GROWTHRATES_START;
+    static const std::string GROWTHRATES_END;
+    static const std::string LOGISTICSELECTION_START;
+    static const std::string LOGISTICSELECTION_END;
+    static const std::string DISEASERATES_START;
+    static const std::string DISEASERATES_END;
+    static const std::string GAMMAOVERREGIONS_START;
+    static const std::string GAMMAOVERREGIONS_END;
+
+    // tree summary xml tags
+    static const std::string TREESUM_START;
+    static const std::string TREESUM_END;
+
+    static const std::string NCOPY_START;
+    static const std::string NCOPY_END;
+
+    static const std::string SHORTFORCE_START;
+    static const std::string SHORTFORCE_END;
+    static const std::string INTERVALS_START;
+    static const std::string INTERVALS_END;
+
+    static const std::string SHORTPOINT_START;
+    static const std::string SHORTPOINT_END;
+    static const std::string SHORTWAIT_START;
+    static const std::string SHORTWAIT_END;
+    static const std::string SHORTPICK_START;
+    static const std::string SHORTPICK_END;
+    static const std::string SHORTPICK_FORCE_START;
+    static const std::string SHORTPICK_FORCE_END;
+
+    // tree summary full-interval xml tags
+    static const std::string FORCE_START;
+    static const std::string FORCE_END;
+    static const std::string XPARTLINES_START;
+    static const std::string XPARTLINES_END;
+    static const std::string PARTLINES_START;
+    static const std::string PARTLINES_END;
+    static const std::string RECWEIGHT_START;
+    static const std::string RECWEIGHT_END;
+    static const std::string OLDSTATUS_START;
+    static const std::string OLDSTATUS_END;
+    static const std::string NEWSTATUS_START;
+    static const std::string NEWSTATUS_END;
+    static const std::string RECPOINT_START;
+    static const std::string RECPOINT_END;
+    static const std::string PARTNERPICKS_START;
+    static const std::string PARTNERPICKS_END;
+
+    //Parameter summary tags
+    static const std::string PARAM_SUMMARY_START;
+    static const std::string PARAM_SUMMARY_END;
+
+    //Map summary tags
+    static const std::string MAP_SUMMARY_START;
+    static const std::string MAP_SUMMARY_END;
+    static const std::string MAP_SUMMARIES_START;
+    static const std::string MAP_SUMMARIES_END;
+
+    // region and replicate summary xml tags
+    static const std::string REGION_SUMMARY_START;
+    static const std::string REGION_SUMMARY_END;
+    static const std::string REPLICATE_SUMMARY_START;
+    static const std::string REPLICATE_SUMMARY_END;
+    static const std::string MAXLIKE_START;
+    static const std::string MAXLIKE_END;
+};
+
+#endif // XMLSUMSTRINGS_H
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_cmdfile_err.cpp b/src/convErr/gc_cmdfile_err.cpp
new file mode 100644
index 0000000..54eb819
--- /dev/null
+++ b/src/convErr/gc_cmdfile_err.cpp
@@ -0,0 +1,131 @@
+// $Id: gc_cmdfile_err.cpp,v 1.7 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_cmdfile_err.h"
+#include "gc_strings_cmdfile.h"
+
+//------------------------------------------------------------------------------------
+
+gc_cmdfile_err::gc_cmdfile_err(const wxString & msg) throw ()
+    : gc_ex(msg)
+{
+}
+
+gc_cmdfile_err::~gc_cmdfile_err() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_bad_yes_no::gc_bad_yes_no(const wxString & string) throw()
+    : gc_cmdfile_err(wxString::Format(gcerr_cmdfile::badYesNo,string.c_str()))
+{
+}
+
+gc_bad_yes_no::~gc_bad_yes_no() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_bad_proximity::gc_bad_proximity(const wxString & string) throw()
+    : gc_cmdfile_err(wxString::Format(gcerr_cmdfile::badProximity,string.c_str()))
+{
+}
+
+gc_bad_proximity::~gc_bad_proximity() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_bad_file_format::gc_bad_file_format(const wxString & string) throw()
+    : gc_cmdfile_err(wxString::Format(gcerr_cmdfile::badFileFormat,string.c_str()))
+{
+}
+
+gc_bad_file_format::~gc_bad_file_format() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_bad_interleaving::gc_bad_interleaving(const wxString & string) throw()
+    : gc_cmdfile_err(wxString::Format(gcerr_cmdfile::badInterleaving,string.c_str()))
+{
+}
+
+gc_bad_interleaving::~gc_bad_interleaving() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_bad_general_data_type::gc_bad_general_data_type(const wxString & string) throw()
+    : gc_cmdfile_err(wxString::Format(gcerr_cmdfile::badGeneralDataType,string.c_str()))
+{
+}
+
+gc_bad_general_data_type::~gc_bad_general_data_type() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_bad_specific_data_type::gc_bad_specific_data_type(const wxString & string) throw()
+    : gc_cmdfile_err(wxString::Format(gcerr_cmdfile::badSpecificDataType,string.c_str()))
+{
+}
+
+gc_bad_specific_data_type::~gc_bad_specific_data_type() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_locus_match_byname_not_empty::gc_locus_match_byname_not_empty() throw ()
+    : gc_cmdfile_err(gcerr_cmdfile::locusMatchByNameNotEmpty)
+{
+}
+
+gc_locus_match_byname_not_empty::~gc_locus_match_byname_not_empty() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_locus_match_single_empty::gc_locus_match_single_empty() throw ()
+    : gc_cmdfile_err(gcerr_cmdfile::locusMatchSingleEmpty)
+{
+}
+
+gc_locus_match_single_empty::~gc_locus_match_single_empty() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_locus_match_unknown::gc_locus_match_unknown(const wxString & matchType) throw ()
+    : gc_cmdfile_err(wxString::Format(gcerr_cmdfile::locusMatchUnknown,matchType.c_str()))
+{
+}
+
+gc_locus_match_unknown::~gc_locus_match_unknown() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_pop_match_byname_not_empty::gc_pop_match_byname_not_empty() throw ()
+    : gc_cmdfile_err(gcerr_cmdfile::popMatchByNameNotEmpty)
+{
+}
+
+gc_pop_match_byname_not_empty::~gc_pop_match_byname_not_empty() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_pop_match_single_empty::gc_pop_match_single_empty() throw ()
+    : gc_cmdfile_err(gcerr_cmdfile::popMatchSingleEmpty)
+{
+}
+
+gc_pop_match_single_empty::~gc_pop_match_single_empty() throw() {};
+
+//------------------------------------------------------------------------------------
+
+gc_pop_match_unknown::gc_pop_match_unknown(const wxString & matchType) throw ()
+    : gc_cmdfile_err(wxString::Format(gcerr_cmdfile::popMatchUnknown,matchType.c_str()))
+{
+}
+
+gc_pop_match_unknown::~gc_pop_match_unknown() throw() {};
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_cmdfile_err.h b/src/convErr/gc_cmdfile_err.h
new file mode 100644
index 0000000..06715bc
--- /dev/null
+++ b/src/convErr/gc_cmdfile_err.h
@@ -0,0 +1,113 @@
+// $Id: gc_cmdfile_err.h,v 1.8 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_CMDFILE_ERR
+#define GC_CMDFILE_ERR
+
+#include "gc_errhandling.h"
+
+class gc_cmdfile_err : public gc_ex
+{
+  public:
+    gc_cmdfile_err(const wxString & msg) throw();
+    virtual ~gc_cmdfile_err() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_bad_yes_no : public gc_cmdfile_err
+{
+  public:
+    gc_bad_yes_no(const wxString & token) throw();
+    virtual ~gc_bad_yes_no() throw() ;
+};
+
+class gc_bad_proximity : public gc_cmdfile_err
+{
+  public:
+    gc_bad_proximity(const wxString & token) throw();
+    virtual ~gc_bad_proximity() throw() ;
+};
+
+class gc_bad_file_format : public gc_cmdfile_err
+{
+  public:
+    gc_bad_file_format(const wxString & token) throw();
+    virtual ~gc_bad_file_format() throw() ;
+};
+
+class gc_bad_interleaving : public gc_cmdfile_err
+{
+  public:
+    gc_bad_interleaving(const wxString & token) throw();
+    virtual ~gc_bad_interleaving() throw() ;
+};
+
+class gc_bad_general_data_type : public gc_cmdfile_err
+{
+  public:
+    gc_bad_general_data_type(const wxString & token) throw();
+    virtual ~gc_bad_general_data_type() throw() ;
+};
+
+class gc_bad_specific_data_type : public gc_cmdfile_err
+{
+  public:
+    gc_bad_specific_data_type(const wxString & token) throw();
+    virtual ~gc_bad_specific_data_type() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_locus_match_byname_not_empty : public gc_cmdfile_err
+{
+  public:
+    gc_locus_match_byname_not_empty() throw ();
+    virtual ~gc_locus_match_byname_not_empty() throw() ;
+};
+
+class gc_locus_match_single_empty : public gc_cmdfile_err
+{
+  public:
+    gc_locus_match_single_empty() throw ();
+    virtual ~gc_locus_match_single_empty() throw() ;
+};
+
+class gc_locus_match_unknown : public gc_cmdfile_err
+{
+  public:
+    gc_locus_match_unknown(const wxString & locusMatchType) throw ();
+    virtual ~gc_locus_match_unknown() throw() ;
+};
+
+class gc_pop_match_byname_not_empty : public gc_cmdfile_err
+{
+  public:
+    gc_pop_match_byname_not_empty() throw ();
+    virtual ~gc_pop_match_byname_not_empty() throw() ;
+};
+
+class gc_pop_match_single_empty : public gc_cmdfile_err
+{
+  public:
+    gc_pop_match_single_empty() throw ();
+    virtual ~gc_pop_match_single_empty() throw() ;
+};
+
+class gc_pop_match_unknown : public gc_cmdfile_err
+{
+  public:
+    gc_pop_match_unknown(const wxString & popType) throw ();
+    virtual ~gc_pop_match_unknown() throw() ;
+};
+
+#endif  // GC_CMDFILE_ERR
+
+//____________________________________________________________________________________
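
For orientation, a minimal usage sketch (assumed, not taken from the upstream sources) of how one of the command-file exceptions declared above might be raised by a converter routine. The validateYesNo helper is hypothetical; only gc_bad_yes_no and its base classes come from the diff itself.

#include "gc_cmdfile_err.h"

// Hypothetical helper: map a command-file token to a boolean, or complain.
// gc_bad_yes_no formats its message from gcerr_cmdfile::badYesNo.
bool validateYesNo(const wxString & token)
{
    if (token.CmpNoCase(wxT("yes")) == 0) return true;
    if (token.CmpNoCase(wxT("no")) == 0)  return false;
    throw gc_bad_yes_no(token);
}
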
diff --git a/src/convErr/gc_data_missing_err.cpp b/src/convErr/gc_data_missing_err.cpp
new file mode 100644
index 0000000..b88ed27
--- /dev/null
+++ b/src/convErr/gc_data_missing_err.cpp
@@ -0,0 +1,35 @@
+// $Id: gc_data_missing_err.cpp,v 1.4 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_data_missing_err.h"
+#include "gc_strings_data.h"
+
+gc_data_missing_err::gc_data_missing_err(wxString msg) throw()
+    : gc_ex(msg)
+{
+}
+
+gc_data_missing_err::~gc_data_missing_err() throw() {}
+
+gc_data_missing_pop_locus::gc_data_missing_pop_locus(wxString popName, wxString locusName) throw()
+    : gc_data_missing_err(wxString::Format(gcerr_data::missingPopLocus,popName.c_str(),locusName.c_str()))
+{
+}
+
+gc_data_missing_pop_locus::~gc_data_missing_pop_locus() throw() {}
+
+gc_data_missing_pop_region::gc_data_missing_pop_region(wxString popName, wxString regionName) throw()
+    : gc_data_missing_err(wxString::Format(gcerr_data::missingPopRegion,popName.c_str(),regionName.c_str()))
+{
+}
+
+gc_data_missing_pop_region::~gc_data_missing_pop_region() throw() {}
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_data_missing_err.h b/src/convErr/gc_data_missing_err.h
new file mode 100644
index 0000000..06b9162
--- /dev/null
+++ b/src/convErr/gc_data_missing_err.h
@@ -0,0 +1,43 @@
+// $Id: gc_data_missing_err.h,v 1.5 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_DATA_MISSING_ERR
+#define GC_DATA_MISSING_ERR
+
+#include "gc_errhandling.h"
+
+class gc_data_missing_err : public gc_ex
+{
+  public:
+    gc_data_missing_err(wxString msg) throw();
+    virtual ~gc_data_missing_err() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_data_missing_pop_locus : public gc_data_missing_err
+{
+  public:
+    gc_data_missing_pop_locus(wxString popName, wxString locusName) throw();
+    virtual ~gc_data_missing_pop_locus() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_data_missing_pop_region : public gc_data_missing_err
+{
+  public:
+    gc_data_missing_pop_region(wxString popName, wxString regionName) throw();
+    virtual ~gc_data_missing_pop_region() throw() ;
+};
+
+#endif  // GC_DATA_MISSING_ERR
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_errhandling.cpp b/src/convErr/gc_errhandling.cpp
new file mode 100644
index 0000000..0a1dda6
--- /dev/null
+++ b/src/convErr/gc_errhandling.cpp
@@ -0,0 +1,144 @@
+// $Id: gc_errhandling.cpp,v 1.13 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_errhandling.h"
+#include "gc_strings.h"
+
+//------------------------------------------------------------------------------------
+
+gc_ex::gc_ex(wxString string) throw()
+    :   m_what(string),
+        m_row(0),
+        m_fileName(wxEmptyString)
+{
+}
+
+gc_ex::~gc_ex() throw()
+{
+}
+
+bool
+gc_ex::hasRow() const throw()
+{
+    return m_row > 0;
+}
+
+size_t
+gc_ex::getRow() const throw()
+{
+    return m_row;
+}
+
+void
+gc_ex::setRow(size_t row) throw ()
+{
+    m_row = row;
+}
+
+void
+gc_ex::setRow(int row) throw ()
+{
+    assert(row >= 0);
+    m_row = (size_t)row;
+}
+
+bool
+gc_ex::hasFile() const throw()
+{
+    return !m_fileName.IsEmpty();
+}
+
+wxString
+gc_ex::getFile() const throw()
+{
+    return m_fileName;
+}
+
+void
+gc_ex::setFile(const wxString& fileName) throw ()
+{
+    m_fileName = fileName;
+}
+
+const char *
+gc_ex::what() const throw()
+{
+    return m_what.c_str();
+}
+
+const wxString &
+gc_ex::wxWhat() const throw()
+{
+    return m_what;
+}
+
+//------------------------------------------------------------------------------------
+
+gc_data_error::gc_data_error(const wxString & wh)
+    :
+    gc_ex(wh)
+{
+}
+
+gc_data_error::~gc_data_error() throw()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_implementation_error::gc_implementation_error(const wxString & wh)
+    :
+    gc_ex(wh)
+{
+}
+
+gc_implementation_error::~gc_implementation_error() throw()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gui_error::gui_error(const wxString & wh)
+    :
+    gc_ex(wh)
+{
+}
+
+gui_error::~gui_error() throw()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_abandon_export::gc_abandon_export() throw()
+    :
+    gc_ex(gcerr::abandonExport)
+{
+}
+
+gc_abandon_export::~gc_abandon_export() throw()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_fatal_error::gc_fatal_error()
+    :
+    gc_ex(wxEmptyString)
+{
+}
+
+gc_fatal_error::~gc_fatal_error() throw()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_errhandling.h b/src/convErr/gc_errhandling.h
new file mode 100644
index 0000000..0bc01b5
--- /dev/null
+++ b/src/convErr/gc_errhandling.h
@@ -0,0 +1,82 @@
+// $Id: gc_errhandling.h,v 1.11 2011/03/07 06:08:47 bobgian Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_ERRHANDLING_H
+#define GC_ERRHANDLING_H
+
+#include <stdexcept>
+#include "wx/string.h"
+
+//------------------------------------------------------------------------------------
+
+class gc_ex : public std::exception
+{
+  private:
+    gc_ex();    // undefined
+  protected:
+    wxString    m_what;
+    size_t      m_row;
+    wxString    m_fileName;
+  public:
+    gc_ex(wxString) throw();
+    virtual ~gc_ex() throw();
+
+    bool    hasRow() const throw();
+    size_t  getRow() const throw();
+    void    setRow(size_t row) throw();
+    void    setRow(int row) throw();
+
+    bool        hasFile()   const throw();
+    wxString    getFile()   const throw();
+    void        setFile(const wxString & s) throw();
+
+    virtual const char* what () const throw();
+    virtual const wxString & wxWhat() const throw();
+
+};
+
+class gc_data_error : public gc_ex
+{
+  public:
+    gc_data_error(const wxString & wh);
+    virtual ~gc_data_error() throw();
+};
+
+class gc_implementation_error : public gc_ex
+{
+  public:
+    gc_implementation_error(const wxString & wh);
+    virtual ~gc_implementation_error() throw();
+};
+
+class gui_error : public gc_ex
+{
+  public:
+    gui_error(const wxString & wh);
+    virtual ~gui_error() throw();
+};
+
+class gc_abandon_export : public gc_ex
+{
+  public:
+    gc_abandon_export() throw() ;
+    virtual ~gc_abandon_export() throw();
+};
+
+class gc_fatal_error : public gc_ex
+{
+  public:
+    gc_fatal_error();
+    virtual ~gc_fatal_error() throw();
+};
+
+#endif  // GC_ERRHANDLING_H
+
+//____________________________________________________________________________________
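
As a usage sketch of the base class declared above (an assumption about intended use, not code from the upstream tarball): callers can catch gc_ex by reference and consult the optional row/file annotations when reporting. The reportError helper below is hypothetical.

#include <iostream>
#include "gc_errhandling.h"

// Hypothetical reporting helper layered on the gc_ex accessors above.
void reportError(const gc_ex & e)
{
    wxString msg = e.wxWhat();
    if (e.hasFile()) msg += wxString::Format(wxT(" [file: %s]"), e.getFile().c_str());
    if (e.hasRow())  msg += wxString::Format(wxT(" [row: %d]"), (int)e.getRow());
    std::cerr << (const char *)msg.mb_str() << std::endl;
}
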
diff --git a/src/convErr/gc_individual_err.cpp b/src/convErr/gc_individual_err.cpp
new file mode 100644
index 0000000..2ffeaea
--- /dev/null
+++ b/src/convErr/gc_individual_err.cpp
@@ -0,0 +1,57 @@
+// $Id: gc_individual_err.cpp,v 1.6 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_individual.h"
+#include "gc_individual_err.h"
+#include "gc_strings_individual.h"
+
+gc_individual_err::gc_individual_err(wxString msg) throw ()
+    : gc_ex(msg)
+{
+}
+
+gc_individual_err::~gc_individual_err() throw () {}
+
+gc_phase_locus_repeat::gc_phase_locus_repeat(wxString indName, wxString locusName) throw()
+    : gc_individual_err(wxString::Format(gcerr_ind::phaseLocusRepeat,indName.c_str(),locusName.c_str()))
+{
+}
+
+gc_phase_locus_repeat::~gc_phase_locus_repeat() throw () {}
+
+gc_sample_locus_repeat::gc_sample_locus_repeat(wxString sampleName, wxString locusName) throw()
+    : gc_individual_err(wxString::Format(gcerr_ind::sampleLocusRepeat,sampleName.c_str(),locusName.c_str()))
+{
+}
+
+gc_sample_locus_repeat::~gc_sample_locus_repeat() throw () {}
+
+gc_sample_missing_locus_data::gc_sample_missing_locus_data(wxString sampleName, wxString locusName) throw()
+    : gc_individual_err(wxString::Format(gcerr_ind::sampleMissingLocusData,sampleName.c_str(),locusName.c_str()))
+{
+}
+
+gc_sample_missing_locus_data::~gc_sample_missing_locus_data() throw () {}
+
+gc_ind_missing_phase_for_locus::gc_ind_missing_phase_for_locus(wxString indName, wxString locusName) throw()
+    : gc_individual_err(wxString::Format(gcerr_ind::missingPhaseForLocus,indName.c_str(),locusName.c_str()))
+{
+}
+
+gc_ind_missing_phase_for_locus::~gc_ind_missing_phase_for_locus() throw () {}
+
+gc_ind_wrong_sample_count::gc_ind_wrong_sample_count(const GCIndividual& ind) throw()
+    : gc_individual_err(wxString::Format(gcerr_ind::wrongSampleCount,ind.GetName().c_str()))
+{
+}
+
+gc_ind_wrong_sample_count::~gc_ind_wrong_sample_count() throw () {}
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_individual_err.h b/src/convErr/gc_individual_err.h
new file mode 100644
index 0000000..cd95696
--- /dev/null
+++ b/src/convErr/gc_individual_err.h
@@ -0,0 +1,73 @@
+// $Id: gc_individual_err.h,v 1.7 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_INDIVIDUAL_ERR
+#define GC_INDIVIDUAL_ERR
+
+#include "gc_errhandling.h"
+
+class GCIndividual;
+class GCParseSample;
+
+class gc_individual_err : public gc_ex
+{
+  public:
+    gc_individual_err(wxString msg) throw() ;
+    virtual ~gc_individual_err() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_phase_locus_repeat : public gc_individual_err
+{
+  public:
+    gc_phase_locus_repeat(wxString indName, wxString locusName) throw();
+    virtual ~gc_phase_locus_repeat() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_sample_locus_repeat : public gc_individual_err
+{
+  public:
+    gc_sample_locus_repeat(wxString sampleName, wxString locusName) throw();
+    virtual ~gc_sample_locus_repeat() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_sample_missing_locus_data : public gc_individual_err
+{
+  public:
+    gc_sample_missing_locus_data(wxString sampleName, wxString locusName) throw();
+    virtual ~gc_sample_missing_locus_data() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_ind_missing_phase_for_locus : public gc_individual_err
+{
+  public:
+    gc_ind_missing_phase_for_locus(wxString indName, wxString locusName) throw();
+    virtual ~gc_ind_missing_phase_for_locus() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_ind_wrong_sample_count : public gc_individual_err
+{
+  public:
+    gc_ind_wrong_sample_count(const GCIndividual&) throw();
+    virtual ~gc_ind_wrong_sample_count() throw() ;
+};
+
+#endif  // GC_INDIVIDUAL_ERR
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_infile_err.cpp b/src/convErr/gc_infile_err.cpp
new file mode 100644
index 0000000..0ea7b53
--- /dev/null
+++ b/src/convErr/gc_infile_err.cpp
@@ -0,0 +1,253 @@
+// $Id: gc_infile_err.cpp,v 1.12 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_data.h"
+#include "gc_infile_err.h"
+#include "gc_strings_infile.h"
+
+gc_infile_err::gc_infile_err(wxString msg) throw ()
+    : gc_ex(msg)
+{
+}
+
+gc_infile_err::~gc_infile_err() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_extra_file_data::gc_extra_file_data() throw ()
+    : gc_infile_err(gcerr_infile::extraFileData)
+{
+}
+
+gc_extra_file_data::~gc_extra_file_data() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_illegal_dna_character::gc_illegal_dna_character(char character, size_t position, const wxString & data) throw()
+    : gc_infile_err(wxString::Format(gcerr_infile::illegalDna,character,position,data.c_str()))
+{
+}
+
+gc_illegal_dna_character::~gc_illegal_dna_character() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_illegal_msat::gc_illegal_msat(const wxString & data) throw()
+    : gc_infile_err(wxString::Format(gcerr_infile::illegalMsat,data.c_str()))
+{
+}
+
+gc_illegal_msat::~gc_illegal_msat() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_premature_end_of_file::gc_premature_end_of_file(const wxString & fileName) throw()
+    : gc_infile_err(wxString::Format(gcerr_infile::prematureEndOfFile,fileName.c_str()))
+{
+}
+
+gc_premature_end_of_file::~gc_premature_end_of_file() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_too_few_markers::gc_too_few_markers(wxString oldMsg) throw()
+    : gc_infile_err(wxString::Format(gcerr_infile::tooFewMarkersInSample,oldMsg.c_str()))
+{
+}
+
+gc_too_few_markers::~gc_too_few_markers() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_too_many_markers::gc_too_many_markers(size_t have, size_t want) throw()
+    : gc_infile_err(wxString::Format(gcerr_infile::tooManyMarkersInSample,(int)have,(int)want))
+{
+}
+
+gc_too_many_markers::~gc_too_many_markers() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_token_count_mismatch::gc_token_count_mismatch(const wxString & delimiter,
+                                                 const wxString & thisToken,
+                                                 const wxString & sampleName,
+                                                 size_t receivedCount,
+                                                 size_t expectedCount) throw ()
+    : gc_infile_err(wxString::Format(gcerr_infile::tokenCountMismatch,
+                                     delimiter.c_str(),
+                                     thisToken.c_str(),
+                                     sampleName.c_str(),
+                                     (int)receivedCount,
+                                     (int)expectedCount))
+{
+}
+
+gc_token_count_mismatch::~gc_token_count_mismatch() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_migrate_bad_pop_count::gc_migrate_bad_pop_count(const wxString& token) throw()
+    : gc_infile_err(wxString::Format(gcerr_migrate::badPopCount,token.c_str()))
+{
+}
+
+gc_migrate_bad_pop_count::~gc_migrate_bad_pop_count() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_migrate_bad_locus_count::gc_migrate_bad_locus_count(const wxString& token) throw()
+    : gc_infile_err(wxString::Format(gcerr_migrate::badLocusCount,token.c_str()))
+{
+}
+
+gc_migrate_bad_locus_count::~gc_migrate_bad_locus_count() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_migrate_locus_length_not_positive::gc_migrate_locus_length_not_positive(const wxString & token) throw()
+    : gc_infile_err(wxString::Format(gcerr_migrate::locusLengthNotPositive,token.c_str()))
+{
+}
+
+gc_migrate_locus_length_not_positive::~gc_migrate_locus_length_not_positive() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_migrate_bad_sequence_count::gc_migrate_bad_sequence_count(const wxString & token) throw()
+    : gc_infile_err(wxString::Format(gcerr_migrate::badSequenceCount,token.c_str()))
+{
+}
+
+gc_migrate_bad_sequence_count::~gc_migrate_bad_sequence_count() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_migrate_missing_sequence_count::gc_migrate_missing_sequence_count(const wxString & token) throw()
+    : gc_infile_err(wxString::Format(gcerr_migrate::missingSequenceCount,token.c_str()))
+{
+}
+
+gc_migrate_missing_sequence_count::~gc_migrate_missing_sequence_count() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_migrate_too_few_sequence_lengths::gc_migrate_too_few_sequence_lengths(size_t count, const wxString & line) throw()
+    : gc_infile_err(wxString::Format(gcerr_migrate::tooFewSequenceLengths,(int)count,line.c_str()))
+{
+}
+
+gc_migrate_too_few_sequence_lengths::~gc_migrate_too_few_sequence_lengths() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_migrate_bad_delimiter::gc_migrate_bad_delimiter(const wxString & token) throw()
+    : gc_infile_err(wxString::Format(gcerr_migrate::badDelimiter,token.c_str()))
+{
+}
+
+gc_migrate_bad_delimiter::~gc_migrate_bad_delimiter() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_migrate_missing_msat_delimiter::gc_migrate_missing_msat_delimiter(const wxString & fileName) throw()
+    : gc_infile_err(wxString::Format(gcerr_migrate::missingMsatDelimiter,fileName.c_str()))
+{
+}
+
+gc_migrate_missing_msat_delimiter::~gc_migrate_missing_msat_delimiter() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_migrate_parse_err::gc_migrate_parse_err(size_t lineNum, wxString fileName, const char * msg) throw ()
+    : gc_infile_err(wxString::Format(gcerr_migrate::parseErr,(int)lineNum,fileName.c_str(),msg))
+{
+}
+
+gc_migrate_parse_err::~gc_migrate_parse_err() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_phylip_first_token::gc_phylip_first_token(const wxString & token) throw()
+    : gc_infile_err(wxString::Format(gcerr_phylip::badFirstToken,token.c_str()))
+{
+}
+
+gc_phylip_first_token::~gc_phylip_first_token() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_phylip_second_token::gc_phylip_second_token(const wxString & token) throw()
+    : gc_infile_err(wxString::Format(gcerr_phylip::badSecondToken,token.c_str()))
+{
+}
+
+gc_phylip_second_token::~gc_phylip_second_token() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_parse_data_type_spec_mismatch::gc_parse_data_type_spec_mismatch(gcSpecificDataType stype,gcGeneralDataType dtype) throw ()
+    : gc_infile_err(wxString::Format(gcerr_infile::parseDataTypeSpecMismatch,ToWxString(stype).c_str(),ToWxString(dtype).c_str()))
+{
+}
+
+gc_parse_data_type_spec_mismatch::~gc_parse_data_type_spec_mismatch() throw ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gc_parse_missing_err::gc_parse_missing_err(wxString fileName) throw ()
+    : gc_infile_err(wxString::Format(gcerr_infile::parseMissingErr,fileName.c_str()))
+{
+}
+
+gc_parse_missing_err::~gc_parse_missing_err() throw ()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_infile_err.h b/src/convErr/gc_infile_err.h
new file mode 100644
index 0000000..943f5e6
--- /dev/null
+++ b/src/convErr/gc_infile_err.h
@@ -0,0 +1,170 @@
+// $Id: gc_infile_err.h,v 1.13 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_INFILE_ERR
+#define GC_INFILE_ERR
+
+#include "gc_errhandling.h"
+#include "gc_types.h"
+
+class gc_infile_err : public gc_ex
+{
+  public:
+    gc_infile_err(wxString msg) throw();
+    virtual ~gc_infile_err() throw() ;
+};
+
+class gc_extra_file_data : public gc_infile_err
+{
+  public:
+    gc_extra_file_data() throw();
+    virtual ~gc_extra_file_data() throw() ;
+};
+
+class gc_illegal_dna_character : public gc_infile_err
+{
+  public:
+    gc_illegal_dna_character(char character, size_t position, const wxString & wh) throw();
+    virtual ~gc_illegal_dna_character() throw() ;
+};
+
+class gc_illegal_msat : public gc_infile_err
+{
+  public:
+    gc_illegal_msat(const wxString& msat) throw();
+    virtual ~gc_illegal_msat() throw() ;
+};
+
+class gc_premature_end_of_file : public gc_infile_err
+{
+  public:
+    gc_premature_end_of_file(const wxString & fileName) throw();
+    virtual ~gc_premature_end_of_file() throw() ;
+};
+
+class gc_token_count_mismatch : public gc_infile_err
+{
+  public:
+    gc_token_count_mismatch(const wxString & delimiter,
+                            const wxString & thisToken,
+                            const wxString & sampleName,
+                            size_t receivedCount,
+                            size_t expectedCount) throw ();
+    virtual ~gc_token_count_mismatch() throw() ;
+};
+
+class gc_too_few_markers : public gc_infile_err
+{
+  public:
+    gc_too_few_markers(wxString oldMsg) throw ();
+    virtual ~gc_too_few_markers() throw() ;
+};
+
+class gc_too_many_markers : public gc_infile_err
+{
+  public:
+    gc_too_many_markers(size_t haveCount, size_t wantCount) throw ();
+    virtual ~gc_too_many_markers() throw() ;
+};
+
+class gc_migrate_bad_pop_count : public gc_infile_err
+{
+  public:
+    gc_migrate_bad_pop_count(const wxString &) throw();
+    virtual ~gc_migrate_bad_pop_count() throw() ;
+};
+
+class gc_migrate_bad_locus_count : public gc_infile_err
+{
+  public:
+    gc_migrate_bad_locus_count(const wxString &) throw();
+    virtual ~gc_migrate_bad_locus_count() throw() ;
+};
+
+class gc_migrate_locus_length_not_positive : public gc_infile_err
+{
+  public:
+    gc_migrate_locus_length_not_positive(const wxString& token) throw();
+    virtual ~gc_migrate_locus_length_not_positive() throw() ;
+};
+
+class gc_migrate_bad_sequence_count : public gc_infile_err
+{
+  public:
+    gc_migrate_bad_sequence_count(const wxString& token) throw();
+    virtual ~gc_migrate_bad_sequence_count() throw() ;
+};
+
+class gc_migrate_missing_sequence_count : public gc_infile_err
+{
+  public:
+    gc_migrate_missing_sequence_count(const wxString& token) throw();
+    virtual ~gc_migrate_missing_sequence_count() throw() ;
+};
+
+class gc_migrate_too_few_sequence_lengths : public gc_infile_err
+{
+  public:
+    gc_migrate_too_few_sequence_lengths(size_t count, const wxString& line) throw();
+    virtual ~gc_migrate_too_few_sequence_lengths() throw() ;
+};
+
+class gc_migrate_bad_delimiter : public gc_infile_err
+{
+  public:
+    gc_migrate_bad_delimiter(const wxString& token) throw();
+    virtual ~gc_migrate_bad_delimiter() throw() ;
+};
+
+class gc_migrate_missing_msat_delimiter : public gc_infile_err
+{
+  public:
+    gc_migrate_missing_msat_delimiter(const wxString& fileName) throw();
+    virtual ~gc_migrate_missing_msat_delimiter() throw();
+};
+
+class gc_migrate_parse_err : public gc_infile_err
+{
+  public:
+    gc_migrate_parse_err(size_t lineNum, wxString fileName, const char * msg) throw();
+    virtual ~gc_migrate_parse_err() throw();
+};
+
+class gc_phylip_first_token : public gc_infile_err
+{
+  public:
+    gc_phylip_first_token(const wxString& token) throw();
+    virtual ~gc_phylip_first_token() throw() ;
+};
+
+class gc_phylip_second_token : public gc_infile_err
+{
+  public:
+    gc_phylip_second_token(const wxString& token) throw();
+    virtual ~gc_phylip_second_token() throw() ;
+};
+
+class gc_parse_data_type_spec_mismatch : public gc_infile_err
+{
+  public:
+    gc_parse_data_type_spec_mismatch(gcSpecificDataType,gcGeneralDataType) throw ();
+    virtual ~gc_parse_data_type_spec_mismatch() throw () ;
+};
+
+class gc_parse_missing_err : public gc_infile_err
+{
+  public:
+    gc_parse_missing_err(wxString fileName) throw ();
+    virtual ~gc_parse_missing_err() throw () ;
+};
+
+#endif  // GC_INFILE_ERR
+
+//____________________________________________________________________________________
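
A minimal sketch of how a parser might use one of the input-file exceptions above, tagging it with its origin before throwing; the checkSampleTokens helper and its parameter names are hypothetical, while gc_token_count_mismatch, setRow, and setFile are the declarations shown in the diff.

#include "gc_infile_err.h"

// Hypothetical parsing check: complain when a delimited sample row does not
// carry the expected number of tokens, annotating the error with its origin.
void checkSampleTokens(const wxString & delimiter, const wxString & token,
                       const wxString & sampleName, size_t received,
                       size_t expected, size_t lineNumber, const wxString & fileName)
{
    if (received != expected)
    {
        gc_token_count_mismatch err(delimiter, token, sampleName, received, expected);
        err.setRow(lineNumber);   // gc_ex carries an optional row...
        err.setFile(fileName);    // ...and file annotation for later reporting
        throw err;
    }
}
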
diff --git a/src/convErr/gc_locus_err.cpp b/src/convErr/gc_locus_err.cpp
new file mode 100644
index 0000000..e75ce8e
--- /dev/null
+++ b/src/convErr/gc_locus_err.cpp
@@ -0,0 +1,152 @@
+// $Id: gc_locus_err.cpp,v 1.16 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_data.h"
+#include "gc_locus_err.h"
+#include "gc_strings_locus.h"
+
+gc_locus_err::gc_locus_err(wxString msg) throw()
+    : gc_data_error(msg)
+{
+}
+
+gc_locus_err::~gc_locus_err() throw() {}
+
+gc_missing_locus::gc_missing_locus(const wxString & locusName) throw()
+    : gc_locus_err(wxString::Format(gcerr_locus::missing,locusName.c_str()))
+{
+}
+
+gc_missing_locus::~gc_missing_locus() throw() {}
+
+gc_set_locations_err::gc_set_locations_err(const wxString & locusName,
+                                           const wxString & newLocations,
+                                           size_t oldCount,
+                                           size_t newCount) throw()
+    : gc_locus_err(wxString::Format(gcerr_locus::setLocs,locusName.c_str(),newLocations.c_str(),(long)oldCount,(long)newCount))
+{
+}
+
+gc_set_locations_err::~gc_set_locations_err() throw() {}
+
+gc_locus_length_mismatch::gc_locus_length_mismatch(wxString name1, wxString name2, size_t pLength, size_t length) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::lengthMismatch,name1.c_str(),name2.c_str(),(int)pLength,(int)length))
+{
+}
+
+gc_locus_length_mismatch::~gc_locus_length_mismatch() throw() {}
+
+gc_locus_map_position_mismatch::gc_locus_map_position_mismatch(wxString name1, wxString name2, long mapPos1, long mapPos2) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::mapPositionMismatch,name1.c_str(),name2.c_str(),mapPos1,mapPos2))
+{
+}
+
+gc_locus_map_position_mismatch::~gc_locus_map_position_mismatch() throw() {}
+
+gc_locus_offset_mismatch::gc_locus_offset_mismatch(wxString name1, wxString name2, long offset1, long offset2) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::offsetMismatch,name1.c_str(),name2.c_str(),offset1,offset2))
+{
+}
+
+gc_locus_offset_mismatch::~gc_locus_offset_mismatch() throw() {}
+
+gc_locus_site_count_mismatch::gc_locus_site_count_mismatch(wxString name1, wxString name2, size_t sites1, size_t sites2) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::siteCountMismatch,(int)sites1,(int)sites2,name1.c_str(),name2.c_str()))
+{
+}
+
+gc_locus_site_count_mismatch::~gc_locus_site_count_mismatch() throw() {}
+
+gc_locus_types_mismatch::gc_locus_types_mismatch(wxString name1, wxString name2, wxString pType, wxString type) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::typeMismatch,name1.c_str(),name2.c_str(),pType.c_str(),type.c_str()))
+{
+}
+
+gc_locus_types_mismatch::~gc_locus_types_mismatch() throw() {}
+
+gc_locus_user_data_type_mismatch::gc_locus_user_data_type_mismatch(wxString name1, wxString name2, gcSpecificDataType t1, gcSpecificDataType t2) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::userDataTypeMismatch,name1.c_str(),name2.c_str(),ToWxString(t1).c_str(),ToWxString(t2).c_str()))
+{
+}
+
+gc_locus_user_data_type_mismatch::~gc_locus_user_data_type_mismatch() throw() {}
+
+gc_locus_user_linked_mismatch::gc_locus_user_linked_mismatch(wxString name1, wxString name2, bool l1, bool l2) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::typeMismatch,name1.c_str(),name2.c_str(),ToWxStringLinked(l1).c_str(),ToWxStringLinked(l2).c_str()))
+{
+}
+
+gc_locus_user_linked_mismatch::~gc_locus_user_linked_mismatch() throw() {}
+
+gc_locus_overlap::gc_locus_overlap(wxString name1, long start1, long end1, wxString name2, long start2, long end2) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::overlap,name1.c_str(),start1,end1,name2.c_str(),start2,end2))
+{
+}
+
+gc_locus_overlap::~gc_locus_overlap() throw() {}
+
+gc_num_markers_zero::gc_num_markers_zero() throw ()
+    : gc_locus_err(gcerr_locus::numMarkersZero)
+{
+}
+
+gc_num_markers_zero::~gc_num_markers_zero() throw() {}
+
+gc_locus_without_length::gc_locus_without_length(const wxString & locusName) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::lengthMissing,locusName.c_str()))
+{
+}
+
+gc_locus_without_length::~gc_locus_without_length() throw() {}
+
+gc_unlinked_nuc::gc_unlinked_nuc() throw ()
+    : gc_locus_err(gcerr_locus::unlinkedNuc)
+{
+}
+
+gc_unlinked_nuc::~gc_unlinked_nuc() throw() {}
+
+//------------------------------------------------------------------------------------
+
+gc_locations_out_of_order::gc_locations_out_of_order(const wxString& locusName, long prevLoc, long newLoc) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::locationsOutOfOrder,locusName.c_str(),newLoc,prevLoc))
+{
+}
+
+gc_locations_out_of_order::~gc_locations_out_of_order() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_location_too_small::gc_location_too_small(long loc, long smallest, const wxString& locusName) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::locationTooSmall,loc,locusName.c_str(),smallest))
+{
+}
+
+gc_location_too_small::~gc_location_too_small() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_location_too_large::gc_location_too_large(long loc, long largest, const wxString& locusName) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::locationTooLarge,loc,locusName.c_str(),largest))
+{
+}
+
+gc_location_too_large::~gc_location_too_large() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_wrong_location_count::gc_wrong_location_count(long numLoc, long numSites, const wxString& locusName) throw ()
+    : gc_locus_err(wxString::Format(gcerr_locus::wrongLocationCount,numLoc,locusName.c_str(),numSites))
+{
+}
+
+gc_wrong_location_count::~gc_wrong_location_count() throw () {};
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_locus_err.h b/src/convErr/gc_locus_err.h
new file mode 100644
index 0000000..3ebab4c
--- /dev/null
+++ b/src/convErr/gc_locus_err.h
@@ -0,0 +1,147 @@
+// $Id: gc_locus_err.h,v 1.15 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_LOCUS_ERR_H
+#define GC_LOCUS_ERR_H
+
+#include "gc_errhandling.h"
+#include "gc_types.h"
+
+class gc_locus_err : public gc_data_error
+{
+  public:
+    gc_locus_err(wxString msg) throw();
+    virtual ~gc_locus_err() throw () ;
+};
+
+class gc_locus_site_count_mismatch : public gc_locus_err
+{
+  public:
+    gc_locus_site_count_mismatch(wxString,wxString,size_t,size_t) throw ();
+    virtual ~gc_locus_site_count_mismatch() throw () ;
+};
+
+class gc_locus_offset_mismatch : public gc_locus_err
+{
+  public:
+    gc_locus_offset_mismatch(wxString,wxString,long,long) throw ();
+    virtual ~gc_locus_offset_mismatch() throw () ;
+};
+
+class gc_locus_map_position_mismatch : public gc_locus_err
+{
+  public:
+    gc_locus_map_position_mismatch(wxString,wxString,long,long) throw ();
+    virtual ~gc_locus_map_position_mismatch() throw () ;
+};
+
+class gc_locus_user_linked_mismatch : public gc_locus_err
+{
+  public:
+    gc_locus_user_linked_mismatch(wxString,wxString,bool,bool) throw ();
+    virtual ~gc_locus_user_linked_mismatch() throw () ;
+};
+
+class gc_missing_locus : public gc_locus_err
+{
+    // setting number of locations that differs from number of sites
+  public:
+    gc_missing_locus(const wxString& locusName) throw();
+    virtual ~gc_missing_locus() throw() ;
+};
+
+class gc_set_locations_err : public gc_locus_err
+{
+    // setting number of locations that differs from number of sites
+  public:
+    gc_set_locations_err(const wxString& name, const wxString& newlocs, size_t oldSize, size_t newSize) throw();
+    virtual ~gc_set_locations_err() throw() ;
+};
+
+class gc_locus_length_mismatch : public gc_locus_err
+{
+  public:
+    gc_locus_length_mismatch(wxString name1, wxString name2, size_t pLength, size_t length) throw();
+    virtual ~gc_locus_length_mismatch() throw() ;
+};
+
+class gc_locus_user_data_type_mismatch : public gc_locus_err
+{
+  public:
+    gc_locus_user_data_type_mismatch(wxString name1, wxString name2, gcSpecificDataType, gcSpecificDataType) throw();
+    virtual ~gc_locus_user_data_type_mismatch() throw() ;
+};
+
+class gc_locus_types_mismatch : public gc_locus_err
+{
+  public:
+    gc_locus_types_mismatch(wxString name1, wxString name2, wxString types1, wxString types2) throw();
+    virtual ~gc_locus_types_mismatch() throw() ;
+};
+
+class gc_locus_overlap : public gc_locus_err
+{
+  public:
+    gc_locus_overlap(wxString,long,long,wxString,long,long) throw ();
+    virtual ~gc_locus_overlap() throw () ;
+};
+
+class gc_num_markers_zero : public gc_locus_err
+{
+  public:
+    gc_num_markers_zero() throw();
+    virtual ~gc_num_markers_zero() throw() ;
+};
+
+class gc_locus_without_length : public gc_locus_err
+{
+  public:
+    gc_locus_without_length(const wxString& locusName) throw();
+    virtual ~gc_locus_without_length() throw() ;
+};
+
+class gc_unlinked_nuc : public gc_locus_err
+{
+  public:
+    gc_unlinked_nuc() throw();
+    virtual ~gc_unlinked_nuc() throw() ;
+};
+
+class gc_locations_out_of_order : public gc_locus_err
+{
+  public:
+    gc_locations_out_of_order(const wxString& locusName, long prevLoc, long newLoc) throw();
+    virtual ~gc_locations_out_of_order() throw();
+};
+
+class gc_location_too_small : public gc_locus_err
+{
+  public:
+    gc_location_too_small(long loc, long smallest, const wxString& locusName) throw();
+    virtual ~gc_location_too_small() throw();
+};
+
+class gc_location_too_large : public gc_locus_err
+{
+  public:
+    gc_location_too_large(long loc, long largest, const wxString& locusName) throw();
+    virtual ~gc_location_too_large() throw();
+};
+
+class gc_wrong_location_count : public gc_locus_err
+{
+  public:
+    gc_wrong_location_count(long numLoc, long numSite, const wxString& locusName) throw();
+    virtual ~gc_wrong_location_count() throw();
+};
+
+#endif  // GC_LOCUS_ERR_H
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_map_err.cpp b/src/convErr/gc_map_err.cpp
new file mode 100644
index 0000000..d8af44f
--- /dev/null
+++ b/src/convErr/gc_map_err.cpp
@@ -0,0 +1,42 @@
+// $Id: gc_map_err.cpp,v 1.5 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_map_err.h"
+#include "gc_strings_map.h"
+
+gc_map_err::gc_map_err(wxString msg) throw()
+    :   gc_ex(msg)
+{
+}
+
+gc_map_err::~gc_map_err() throw() {};
+
+gc_map_file_missing::gc_map_file_missing(wxString fileName) throw()
+    : gc_map_err(wxString::Format(gcerr_map::fileMissing,fileName.c_str()))
+{
+}
+
+gc_map_file_missing::~gc_map_file_missing() throw() {}
+
+gc_map_file_read_err::gc_map_file_read_err(wxString fileName) throw()
+    : gc_map_err(wxString::Format(gcerr_map::fileReadErr,fileName.c_str()))
+{
+}
+
+gc_map_file_read_err::~gc_map_file_read_err() throw() {}
+
+gc_map_file_empty::gc_map_file_empty(wxString fileName) throw()
+    : gc_map_err(wxString::Format(gcerr_map::fileEmpty,fileName.c_str()))
+{
+}
+
+gc_map_file_empty::~gc_map_file_empty() throw() {}
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_map_err.h b/src/convErr/gc_map_err.h
new file mode 100644
index 0000000..23121b0
--- /dev/null
+++ b/src/convErr/gc_map_err.h
@@ -0,0 +1,46 @@
+// $Id: gc_map_err.h,v 1.6 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_MAP_ERR
+#define GC_MAP_ERR
+
+#include "gc_errhandling.h"
+
+class gc_map_err : public gc_ex
+{
+  public:
+    gc_map_err(wxString msg) throw();
+    virtual ~gc_map_err() throw();
+};
+
+class gc_map_file_missing : public gc_map_err
+{
+  public:
+    gc_map_file_missing(wxString fileName) throw();
+    virtual ~gc_map_file_missing() throw() ;
+};
+
+class gc_map_file_read_err : public gc_map_err
+{
+  public:
+    gc_map_file_read_err(wxString fileName) throw();
+    virtual ~gc_map_file_read_err() throw() ;
+};
+
+class gc_map_file_empty : public gc_map_err
+{
+  public:
+    gc_map_file_empty(wxString fileName) throw();
+    virtual ~gc_map_file_empty() throw() ;
+};
+
+#endif  // GC_MAP_ERR
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_phase_err.cpp b/src/convErr/gc_phase_err.cpp
new file mode 100644
index 0000000..0d912ed
--- /dev/null
+++ b/src/convErr/gc_phase_err.cpp
@@ -0,0 +1,154 @@
+// $Id: gc_phase_err.cpp,v 1.11 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_phase_err.h"
+#include "gc_phase_info.h"
+#include "gc_strings_phase.h"
+
+//------------------------------------------------------------------------------------
+
+gc_phase_err::gc_phase_err(wxString msg) throw ()
+    :   gc_data_error(msg)
+{
+}
+
+gc_phase_err::~gc_phase_err() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_no_individual_for_sample::gc_no_individual_for_sample(wxString sampleName) throw()
+    : gc_phase_err(wxString::Format(gcerr_phase::noIndividualForSample,sampleName.c_str()))
+{
+}
+
+gc_no_individual_for_sample::~gc_no_individual_for_sample() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_no_sample_for_individual::gc_no_sample_for_individual(wxString individualName) throw()
+    : gc_phase_err(wxString::Format(gcerr_phase::noSampleForIndividual,individualName.c_str()))
+{
+}
+
+gc_no_sample_for_individual::~gc_no_sample_for_individual() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_both_individual_and_sample::gc_both_individual_and_sample(const wxString & name, const gcPhaseRecord& rec1,const gcPhaseRecord& rec2) throw()
+    : gc_phase_err(wxString::Format(gcerr_phase::bothIndividualAndSample,name.c_str()))
+      // EWFIX.P4.BUG.564 -- use all arguments
+{
+}
+
+gc_both_individual_and_sample::~gc_both_individual_and_sample() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_adjacent_phase_resolution_for_multisample_input::gc_adjacent_phase_resolution_for_multisample_input(wxString fname) throw()
+    : gc_phase_err(wxString::Format(gcerr_phase::adjacentPhaseForMultiSample,fname.c_str()))
+{
+}
+
+gc_adjacent_phase_resolution_for_multisample_input::~gc_adjacent_phase_resolution_for_multisample_input() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_bad_ind_match_adjacency_value::gc_bad_ind_match_adjacency_value(const wxString& numSamplesText) throw ()
+    : gc_phase_err(wxString::Format(gcerr_phase::badIndMatchAdjacencyValue,numSamplesText.c_str()))
+{
+}
+
+gc_bad_ind_match_adjacency_value::~gc_bad_ind_match_adjacency_value() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_bad_ind_match::gc_bad_ind_match(const wxString& matchString) throw()
+    : gc_phase_err(wxString::Format(gcerr_phase::badIndMatchType,matchString.c_str()))
+{
+}
+
+gc_bad_ind_match::~gc_bad_ind_match() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_phase_mismatch::gc_phase_mismatch(const gcPhaseRecord& rec1,const gcPhaseRecord& rec2) throw()
+    : gc_phase_err("")
+{
+    // EWFIX.BUG.722 -- needs lots better information here, including sample names
+    m_what=wxString::Format(gcerr_phase::mergeMismatch,rec1.GetDescriptiveName().c_str(),rec2.GetDescriptiveName().c_str());
+}
+
+gc_phase_mismatch::~gc_phase_mismatch() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_individual_sample_adj_mismatch::gc_individual_sample_adj_mismatch(size_t lineNum, wxString fileName, size_t numSamples, size_t numAdj) throw()
+    : gc_phase_err(wxString::Format(gcerr_phase::unevenAdjDivisor,(int)numSamples,(int)numAdj))
+      // EWFIX.P4.BUG.564 -- use all arguments
+{
+}
+
+gc_individual_sample_adj_mismatch::~gc_individual_sample_adj_mismatch() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_phase_matching_confusion::gc_phase_matching_confusion(const wxString& regionName, const wxString& label1, const wxString& label2) throw()
+    : gc_phase_err(wxString::Format(gcerr_phase::matchingConfusion,regionName.c_str(),label1.c_str(),label2.c_str()))
+{
+}
+
+gc_phase_matching_confusion::~gc_phase_matching_confusion() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_phase_marker_not_legal::gc_phase_marker_not_legal(wxString posString) throw ()
+    : gc_phase_err(wxString::Format(gcerr_phase::markerNotLegal,posString.c_str()))
+{
+}
+
+gc_phase_marker_not_legal::~gc_phase_marker_not_legal() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_phenotype_name_repeat_for_individual::gc_phenotype_name_repeat_for_individual(wxString pheno, wxString ind) throw ()
+    : gc_phase_err(wxString::Format(gcerr_phase::individualPhenotypeNameRepeat,pheno.c_str(),ind.c_str()))
+{
+}
+
+gc_phenotype_name_repeat_for_individual::~gc_phenotype_name_repeat_for_individual() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_phase_too_small::gc_phase_too_small(long phase, wxString indName,long smallest, wxString locName) throw ()
+    : gc_phase_err(wxString::Format(gcerr_phase::tooSmall,phase,indName.c_str(),locName.c_str(),smallest))
+{
+}
+
+gc_phase_too_small::~gc_phase_too_small() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_phase_too_large::gc_phase_too_large(long phase, wxString indName,long largest, wxString locName) throw ()
+    : gc_phase_err(wxString::Format(gcerr_phase::tooLarge,phase,indName.c_str(),locName.c_str(),largest))
+{
+}
+
+gc_phase_too_large::~gc_phase_too_large() throw () {};
+
+//------------------------------------------------------------------------------------
+
+gc_phase_not_location::gc_phase_not_location(long phase, wxString locName) throw ()
+    : gc_phase_err(wxString::Format(gcerr_phase::notLocation,phase,locName.c_str()))
+{
+}
+
+gc_phase_not_location::~gc_phase_not_location() throw () {};
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_phase_err.h b/src/convErr/gc_phase_err.h
new file mode 100644
index 0000000..217910a
--- /dev/null
+++ b/src/convErr/gc_phase_err.h
@@ -0,0 +1,143 @@
+// $Id: gc_phase_err.h,v 1.11 2011/03/08 19:21:59 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PHASE_ERR_H
+#define GC_PHASE_ERR_H
+
+#include "gc_errhandling.h"
+
+class gcPhaseRecord;
+
+class gc_phase_err : public gc_data_error
+{
+  public:
+    gc_phase_err(wxString msg) throw();
+    virtual ~gc_phase_err() throw();
+};
+
+class gc_no_individual_for_sample : public gc_phase_err
+{
+  private:
+    gc_no_individual_for_sample();      // undefined
+  public:
+    gc_no_individual_for_sample(wxString sampleName) throw();
+    virtual ~gc_no_individual_for_sample() throw();
+};
+
+class gc_no_sample_for_individual : public gc_phase_err
+{
+  private:
+    gc_no_sample_for_individual();      // undefined
+  public:
+    gc_no_sample_for_individual(wxString individualName) throw();
+    virtual ~gc_no_sample_for_individual() throw() ;
+};
+
+class gc_both_individual_and_sample : public gc_phase_err
+{
+  private:
+    gc_both_individual_and_sample();      // undefined
+  public:
+    gc_both_individual_and_sample(const wxString &, const gcPhaseRecord&,const gcPhaseRecord&) throw();
+    virtual ~gc_both_individual_and_sample() throw() ;
+};
+
+class gc_adjacent_phase_resolution_for_multisample_input : public gc_phase_err
+{
+  private:
+    gc_adjacent_phase_resolution_for_multisample_input();  // undefined
+  public:
+    gc_adjacent_phase_resolution_for_multisample_input(wxString fname) throw();
+    virtual ~gc_adjacent_phase_resolution_for_multisample_input() throw() ;
+};
+
+class gc_bad_ind_match_adjacency_value : public gc_phase_err
+{
+  private:
+    gc_bad_ind_match_adjacency_value();  // undefined
+  public:
+    gc_bad_ind_match_adjacency_value(const wxString& numSamplesText) throw();
+    virtual ~gc_bad_ind_match_adjacency_value() throw() ;
+};
+
+class gc_bad_ind_match : public gc_phase_err
+{
+  private:
+    gc_bad_ind_match();  // undefined
+  public:
+    gc_bad_ind_match(const wxString&) throw();
+    virtual ~gc_bad_ind_match() throw() ;
+};
+
+class gc_phase_mismatch : public gc_phase_err
+{
+  private:
+    gc_phase_mismatch();  // undefined
+  public:
+    gc_phase_mismatch(const gcPhaseRecord&, const gcPhaseRecord&) throw();
+    virtual ~gc_phase_mismatch() throw() ;
+};
+
+class gc_individual_sample_adj_mismatch : public gc_phase_err
+{
+  private:
+    gc_individual_sample_adj_mismatch();  // undefined
+  public:
+    gc_individual_sample_adj_mismatch(size_t lineNum, wxString fileName, size_t numSamples, size_t numAdj) throw();
+    virtual ~gc_individual_sample_adj_mismatch() throw() ;
+};
+
+class gc_phase_matching_confusion : public gc_phase_err
+{
+  private:
+    gc_phase_matching_confusion();      // undefined
+  public:
+    gc_phase_matching_confusion(const wxString& regionName, const wxString& label1, const wxString& label2) throw();
+    virtual ~gc_phase_matching_confusion() throw() ;
+};
+
+class gc_phase_marker_not_legal : public gc_phase_err
+{
+  public:
+    gc_phase_marker_not_legal(wxString posString) throw();
+    virtual ~gc_phase_marker_not_legal() throw() ;
+};
+
+class gc_phenotype_name_repeat_for_individual : public gc_phase_err
+{
+  public:
+    gc_phenotype_name_repeat_for_individual(wxString pheno, wxString ind) throw();
+    virtual ~gc_phenotype_name_repeat_for_individual() throw() ;
+};
+
+class gc_phase_too_small : public gc_phase_err
+{
+  public:
+    gc_phase_too_small(long phase, wxString indName, long offset, wxString locusName) throw();
+    virtual ~gc_phase_too_small() throw() ;
+};
+
+class gc_phase_too_large : public gc_phase_err
+{
+  public:
+    gc_phase_too_large(long phase, wxString indName, long maxVal, wxString locusName) throw();
+    virtual ~gc_phase_too_large() throw() ;
+};
+
+class gc_phase_not_location : public gc_phase_err
+{
+  public:
+    gc_phase_not_location(long phase, wxString locusName) throw();
+    virtual ~gc_phase_not_location() throw() ;
+};
+
+#endif  // GC_PHASE_ERR_H
+
+//____________________________________________________________________________________
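
For orientation: every class above funnels into gc_phase_err and, through it, gc_data_error (presumably rooted in the converter's gc_ex, which gc_datastore.cpp later in this commit catches as const gc_ex&). A minimal sketch of the throwing side follows; it is not part of the patch, the pairing-check helper and its arguments are invented for illustration, and only the exception classes come from this header:

    #include "gc_phase_err.h"
    #include "wx/string.h"

    // Throw the narrowest phase error available; callers may catch
    // gc_phase_err (or gc_data_error) to handle the whole family at once.
    static void RequirePairing(const wxString& sampleName, bool haveIndividual,
                               const wxString& individualName, bool haveSample)
    {
        if (!haveIndividual)
        {
            throw gc_no_individual_for_sample(sampleName);
        }
        if (!haveSample)
        {
            throw gc_no_sample_for_individual(individualName);
        }
    }
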
diff --git a/src/convErr/gc_structures_err.cpp b/src/convErr/gc_structures_err.cpp
new file mode 100644
index 0000000..5ab000e
--- /dev/null
+++ b/src/convErr/gc_structures_err.cpp
@@ -0,0 +1,258 @@
+// $Id: gc_structures_err.cpp,v 1.12 2011/12/01 22:32:42 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_structures_err.h"
+#include "gc_strings.h" // EWFIX.P4 refactor out
+#include "gc_strings_structures.h"
+
+gc_structures_err::gc_structures_err(const wxString & wh) throw()
+    :   gc_data_error(wh)
+{
+}
+
+gc_structures_err::~gc_structures_err() throw() {};
+
+//------------------------------------------------------------------------------------
+
+duplicate_file_base_name_error::duplicate_file_base_name_error(const wxString & wh) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::duplicateFileBaseName,wh.c_str()))
+{
+}
+
+duplicate_file_base_name_error::~duplicate_file_base_name_error() throw() {};
+
+duplicate_file_error::duplicate_file_error(const wxString & wh) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::duplicateFileName,wh.c_str()))
+{
+}
+
+duplicate_file_error::~duplicate_file_error() throw() {};
+
+duplicate_name_error::duplicate_name_error(const wxString& name, const wxString & where) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::duplicateName,name.c_str(),where.c_str()))
+{
+}
+
+duplicate_name_error::~duplicate_name_error() throw() {};
+
+empty_name_error::empty_name_error(const wxString& where) throw()
+    : gc_structures_err(wxString::Format(gcerr::emptyName,where.c_str()))
+{
+}
+
+empty_name_error::~empty_name_error() throw() {};
+
+incompatible_pops::incompatible_pops(const wxString& wh) throw()
+    : gc_structures_err(wh)
+{
+}
+
+incompatible_pops::~incompatible_pops() throw() {};
+
+missing_file_error::missing_file_error(const wxString & wh) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::missingFile,wh.c_str()))
+{
+}
+
+missing_file_error::~missing_file_error() throw() {};
+
+missing_name_error::missing_name_error(const wxString& name, const wxString & where) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::missingName,name.c_str(),where.c_str()))
+{
+}
+
+missing_migration::missing_migration(const wxString & migrationName) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::missingMigration,migrationName.c_str()))
+{
+}
+
+missing_migration::missing_migration(const wxString & fromName, const wxString & toName) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::migrationNotDefined, fromName.c_str(), toName.c_str()))
+{
+}
+
+missing_migration::~missing_migration() throw() {};
+
+missing_migration_id::missing_migration_id(const wxString & idString) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::missingMigrationId,idString.c_str()))
+{
+}
+
+missing_migration_id::~missing_migration_id() throw() {};
+
+missing_name_error::~missing_name_error() throw() {};
+
+unparsable_file_error::unparsable_file_error(const wxString & wh) throw()
+    : gc_structures_err((wxString::Format(gcerr_structures::unparsableFile,wh.c_str())))
+{
+}
+
+unparsable_file_error::~unparsable_file_error() throw() {};
+
+gc_missing_population::gc_missing_population(const wxString & popName) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::missingPopulation,popName.c_str()))
+{
+}
+
+gc_missing_population::~gc_missing_population() throw() {};
+
+missing_panel::missing_panel(const wxString & panelName) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::missingPanel,panelName.c_str()))
+{
+}
+
+missing_panel::missing_panel(const wxString & regionName, const wxString & popName) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::panelNotDefined,regionName.c_str(), popName.c_str()))
+{
+}
+
+missing_panel::~missing_panel() throw() {};
+
+missing_panel_id::missing_panel_id(const wxString & idString) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::missingPanelId,idString.c_str()))
+{
+}
+
+missing_panel_id::~missing_panel_id() throw() {};
+
+panel_size_clash::panel_size_clash(const wxString & popName, const wxString & region1Name, const wxString & region2Name) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::panelSizeClash, popName.c_str(), region1Name.c_str(), region2Name.c_str()))
+{
+}
+
+panel_size_clash::~panel_size_clash() throw() {};
+
+panel_blessed_error::panel_blessed_error(const wxString & regionName, const wxString & popName) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::panelBlessedError,regionName.c_str(), popName.c_str()))
+{
+}
+
+panel_blessed_error::~panel_blessed_error() throw() {};
+
+missing_region::missing_region(const wxString & regName) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::missingRegion,regName.c_str()))
+{
+}
+
+missing_parent::missing_parent(const wxString & parentName) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::missingParent,parentName.c_str()))
+{
+}
+
+missing_parent::~missing_parent() throw() {};
+
+missing_parent_id::missing_parent_id(const wxString & idString) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::missingParentId,idString.c_str()))
+{
+}
+
+missing_parent_id::~missing_parent_id() throw() {};
+
+missing_region::~missing_region() throw() {};
+
+missing_trait::missing_trait(const wxString & traitName) throw()
+    : gc_structures_err(wxString::Format(gcerr_structures::missingTrait,traitName.c_str()))
+{
+}
+
+missing_trait::~missing_trait() throw() {};
+
+effective_pop_size_clash::effective_pop_size_clash(double size1, double size2) throw()
+    :   gc_structures_err(wxString::Format(gcerr_structures::regionEffPopSizeClash,size1,size2))
+{
+}
+
+effective_pop_size_clash::~effective_pop_size_clash() throw() {};
+
+gc_name_repeat_allele::gc_name_repeat_allele(wxString & name, int row1, int row2) throw()
+    :   gc_structures_err(wxString::Format(gcerr_structures::nameRepeatAllele,name.c_str(),row1))
+{
+    setRow(row2);
+}
+
+gc_name_repeat_allele::~gc_name_repeat_allele() throw() {};
+
+gc_name_repeat_locus::gc_name_repeat_locus(wxString & name, int row1, int row2) throw()
+    :   gc_structures_err(wxString::Format(gcerr_structures::nameRepeatLocus,name.c_str(),row1))
+{
+    setRow(row2);
+}
+
+gc_name_repeat_locus::~gc_name_repeat_locus() throw() {};
+
+gc_name_repeat_pop::gc_name_repeat_pop(wxString & name, int row1, int row2) throw()
+    :   gc_structures_err(wxString::Format(gcerr_structures::nameRepeatPop,name.c_str(),row1))
+{
+    setRow(row2);
+}
+
+gc_name_repeat_pop::~gc_name_repeat_pop() throw() {};
+
+gc_name_repeat_region::gc_name_repeat_region(wxString & name, int row1, int row2) throw()
+    :   gc_structures_err(wxString::Format(gcerr_structures::nameRepeatRegion,name.c_str(),row1))
+{
+    setRow(row2);
+}
+
+gc_name_repeat_region::~gc_name_repeat_region() throw() {};
+
+gc_name_repeat_trait::gc_name_repeat_trait(wxString & name, int row1, int row2) throw()
+    :   gc_structures_err(wxString::Format(gcerr_structures::nameRepeatTrait,name.c_str(),row1))
+{
+    setRow(row2);
+}
+
+gc_name_repeat_trait::~gc_name_repeat_trait() throw() {};
+
+gc_allele_trait_mismatch::gc_allele_trait_mismatch(wxString name, wxString newTraitName, wxString oldTraitName) throw()
+    :   gc_structures_err(wxString::Format(gcerr_structures::mismatchAlleleTrait,name.c_str(),newTraitName.c_str(),oldTraitName.c_str()))
+{
+}
+
+gc_allele_trait_mismatch::~gc_allele_trait_mismatch() throw() {};
+
+gc_locus_region_mismatch::gc_locus_region_mismatch(wxString name, wxString newRegionName, wxString oldRegionName) throw()
+    :   gc_structures_err(wxString::Format(gcerr_structures::mismatchLocusRegion,name.c_str(),newRegionName.c_str(),oldRegionName.c_str()))
+{
+}
+
+gc_locus_region_mismatch::~gc_locus_region_mismatch() throw() {};
+
+
+gc_wrong_divergence_error::gc_wrong_divergence_error() throw()
+    :   gc_structures_err(gcerr::wrongDivergenceCount)
+{
+}
+
+gc_wrong_divergence_error::~gc_wrong_divergence_error() throw() {};
+
+
+gc_divergence_bad_id_error::gc_divergence_bad_id_error(wxString parent, wxString child) throw()
+    :   gc_structures_err(wxString::Format(gcerr::badDivergenceId,parent.c_str(), child.c_str()))
+{
+}
+
+gc_divergence_bad_id_error::~gc_divergence_bad_id_error() throw() {};
+
+gc_rate_too_small_error::gc_rate_too_small_error() throw()
+    :   gc_structures_err(gcerr::migrationRateTooSmall)
+{
+}
+
+gc_rate_too_small_error::~gc_rate_too_small_error() throw() {};
+
+gc_bad_name_error::gc_bad_name_error(wxString name) throw()
+    :   gc_structures_err(wxString::Format(gcerr::badName, name.c_str()))
+{
+}
+
+gc_bad_name_error::~gc_bad_name_error() throw() {};
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_structures_err.h b/src/convErr/gc_structures_err.h
new file mode 100644
index 0000000..a79ba36
--- /dev/null
+++ b/src/convErr/gc_structures_err.h
@@ -0,0 +1,244 @@
+// $Id: gc_structures_err.h,v 1.11 2011/12/01 22:32:42 jmcgill Exp $
+
+/*
+  Copyright 2002 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRUCTURES_ERR_H
+#define GC_STRUCTURES_ERR_H
+
+#include "gc_errhandling.h"
+
+class gc_structures_err : public gc_data_error
+{
+  public:
+    gc_structures_err(const wxString & wh) throw();
+    virtual ~gc_structures_err() throw() ;
+};
+
+class duplicate_file_base_name_error : public gc_structures_err
+{
+  public:
+    duplicate_file_base_name_error(const wxString & wh) throw();
+    virtual ~duplicate_file_base_name_error() throw() ;
+};
+
+class duplicate_file_error : public gc_structures_err
+{
+  public:
+    duplicate_file_error(const wxString & wh) throw();
+    virtual ~duplicate_file_error() throw() ;
+};
+
+class unparsable_file_error : public gc_structures_err
+{
+  public:
+    unparsable_file_error(const wxString & wh) throw();
+    virtual ~unparsable_file_error() throw() ;
+};
+
+class gc_missing_population : public gc_structures_err
+{
+  public:
+    gc_missing_population(const wxString & popName) throw() ;
+    virtual ~gc_missing_population() throw() ;
+};
+
+class missing_panel : public gc_structures_err
+{
+  public:
+    missing_panel(const wxString & panelName) throw() ;
+    missing_panel(const wxString & regionName, const wxString & popName) throw() ;
+    virtual ~missing_panel() throw() ;
+};
+
+class missing_panel_id : public gc_structures_err
+{
+  public:
+    missing_panel_id(const wxString & idString) throw() ;
+    virtual ~missing_panel_id() throw() ;
+};
+
+class missing_parent : public gc_structures_err
+{
+  public:
+    missing_parent(const wxString & parentName) throw() ;
+    virtual ~missing_parent() throw() ;
+};
+
+class missing_parent_id : public gc_structures_err
+{
+  public:
+    missing_parent_id(const wxString & idString) throw() ;
+    virtual ~missing_parent_id() throw() ;
+};
+
+class missing_migration : public gc_structures_err
+{
+  public:
+    missing_migration(const wxString & migrationName) throw() ;
+    missing_migration(const wxString & fromName, const wxString & toName) throw() ;
+    virtual ~missing_migration() throw() ;
+};
+
+class missing_migration_id : public gc_structures_err
+{
+  public:
+    missing_migration_id(const wxString & idString) throw() ;
+    virtual ~missing_migration_id() throw() ;
+};
+
+class panel_size_clash : public gc_structures_err
+{
+  public:
+    panel_size_clash(const wxString & popName, const wxString & region1Name, const wxString & region2Name) throw() ;
+    virtual ~panel_size_clash() throw() ;
+};
+
+class panel_blessed_error : public gc_structures_err
+{
+  public:
+    panel_blessed_error(const wxString & regionName, const wxString & popName) throw() ;
+    virtual ~panel_blessed_error() throw() ;
+};
+
+class missing_region : public gc_structures_err
+{
+  public:
+    missing_region(const wxString& wh) throw() ;
+    virtual ~missing_region() throw() ;
+};
+
+class missing_trait : public gc_structures_err
+{
+  public:
+    missing_trait(const wxString& wh) throw();
+    virtual ~missing_trait() throw() ;
+};
+
+class duplicate_name_error : public gc_structures_err
+{
+  public:
+    duplicate_name_error(const wxString & name, const wxString & why) throw();
+    virtual ~duplicate_name_error() throw() ;
+};
+
+class empty_name_error : public gc_structures_err
+{
+  public:
+    empty_name_error(const wxString& why) throw();
+    virtual ~empty_name_error() throw() ;
+};
+
+class incompatible_pops : public gc_structures_err
+{
+  public:
+    incompatible_pops(const wxString & wh) throw();
+    virtual ~incompatible_pops() throw() ;
+};
+
+class missing_file_error : public gc_structures_err
+{
+  public:
+    missing_file_error(const wxString & filename) throw();
+    virtual ~missing_file_error() throw() ;
+};
+
+class missing_name_error : public gc_structures_err
+{
+  public:
+    missing_name_error(const wxString& name, const wxString & why) throw();
+    virtual ~missing_name_error() throw() ;
+};
+
+class effective_pop_size_clash : public gc_structures_err
+{
+  public:
+    effective_pop_size_clash(double size1, double size2) throw();
+    virtual ~effective_pop_size_clash() throw() ;
+};
+
+class gc_name_repeat_allele : public gc_structures_err
+{
+  public:
+    gc_name_repeat_allele(wxString & name, int oldRow, int newRow) throw();
+    virtual ~gc_name_repeat_allele() throw ();
+};
+
+class gc_name_repeat_locus : public gc_structures_err
+{
+  public:
+    gc_name_repeat_locus(wxString & name, int oldRow, int newRow) throw();
+    virtual ~gc_name_repeat_locus() throw ();
+};
+
+class gc_name_repeat_pop : public gc_structures_err
+{
+  public:
+    gc_name_repeat_pop(wxString & name, int oldRow, int newRow) throw();
+    virtual ~gc_name_repeat_pop() throw ();
+};
+
+class gc_name_repeat_region : public gc_structures_err
+{
+  public:
+    gc_name_repeat_region(wxString & name, int oldRow, int newRow) throw();
+    virtual ~gc_name_repeat_region() throw ();
+};
+
+class gc_name_repeat_trait : public gc_structures_err
+{
+  public:
+    gc_name_repeat_trait(wxString & name, int oldRow, int newRow) throw();
+    virtual ~gc_name_repeat_trait() throw ();
+};
+
+class gc_allele_trait_mismatch : public gc_structures_err
+{
+  public:
+    gc_allele_trait_mismatch(wxString alleleName, wxString newTraitName, wxString oldTraitName) throw();
+    virtual ~gc_allele_trait_mismatch() throw ();
+};
+
+class gc_locus_region_mismatch : public gc_structures_err
+{
+  public:
+    gc_locus_region_mismatch(wxString locusName, wxString newRegionName, wxString oldRegionName) throw();
+    virtual ~gc_locus_region_mismatch() throw ();
+};
+
+class gc_wrong_divergence_error : public gc_structures_err
+{
+  public:
+    gc_wrong_divergence_error() throw();
+    virtual ~gc_wrong_divergence_error() throw();
+};
+
+class gc_divergence_bad_id_error : public gc_structures_err
+{
+  public:
+    gc_divergence_bad_id_error(wxString parent, wxString child) throw();
+    virtual ~gc_divergence_bad_id_error() throw();
+};
+
+class gc_rate_too_small_error : public gc_structures_err
+{
+  public:
+    gc_rate_too_small_error() throw();
+    virtual ~gc_rate_too_small_error() throw();
+};
+
+class gc_bad_name_error : public gc_structures_err
+{
+  public:
+    gc_bad_name_error(wxString name) throw();
+    virtual ~gc_bad_name_error() throw();
+};
+
+#endif  // GC_STRUCTURES_ERR_H
+
+//____________________________________________________________________________________
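
The implementation file above repeats a single pattern for every subclass: the constructor formats a message from a gcerr_structures string constant, hands it to the gc_structures_err base, and (for the gc_name_repeat_* errors) records a location with setRow(). Adding another error to the family would look roughly like the sketch below; missing_widget_error and gcerr_structures::missingWidget are hypothetical names, and the shape is copied from gc_name_repeat_allele:

    class missing_widget_error : public gc_structures_err
    {
      public:
        // gcerr_structures::missingWidget stands in for a "%s"-style format string
        missing_widget_error(const wxString & widgetName, int row) throw()
            : gc_structures_err(wxString::Format(gcerr_structures::missingWidget,
                                                 widgetName.c_str()))
        {
            setRow(row);    // row bookkeeping inherited via gc_ex, as in gc_name_repeat_*
        }
        virtual ~missing_widget_error() throw() {}
    };
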
diff --git a/src/convErr/gc_trait_err.cpp b/src/convErr/gc_trait_err.cpp
new file mode 100644
index 0000000..4eab2f6
--- /dev/null
+++ b/src/convErr/gc_trait_err.cpp
@@ -0,0 +1,90 @@
+// $Id: gc_trait_err.cpp,v 1.9 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_trait_err.h"
+#include "gc_strings_trait.h"
+
+gc_trait_err::gc_trait_err(wxString msg) throw()
+    : gc_ex(msg)
+{
+}
+gc_trait_err::~gc_trait_err() throw() {}
+
+gc_haplotype_probability_negative::gc_haplotype_probability_negative(double prob) throw()
+    : gc_trait_err(wxString::Format(gcerr_trait::hapProbabilityNegative,prob))
+{
+}
+gc_haplotype_probability_negative::~gc_haplotype_probability_negative() throw() {}
+
+gc_trait_allele_name_reuse::gc_trait_allele_name_reuse(wxString alleleName) throw()
+    : gc_trait_err(wxString::Format(gcerr_trait::alleleNameReuse,alleleName.c_str()))
+{
+}
+gc_trait_allele_name_reuse::~gc_trait_allele_name_reuse() throw() {}
+
+gc_trait_phenotype_name_reuse::gc_trait_phenotype_name_reuse(wxString phenotypeName, wxString traitName) throw()
+    : gc_trait_err(wxString::Format(gcerr_trait::phenotypeNameReuse,phenotypeName.c_str(),traitName.c_str()))
+{
+}
+gc_trait_phenotype_name_reuse::~gc_trait_phenotype_name_reuse() throw() {}
+
+gc_missing_phenotype::gc_missing_phenotype(wxString phenotypeName) throw()
+    : gc_trait_err(wxString::Format(gcerr_trait::phenotypeMissing,phenotypeName.c_str()))
+{
+}
+gc_missing_phenotype::~gc_missing_phenotype() throw() {}
+
+gc_missing_allele::gc_missing_allele(wxString alleleName) throw()
+    : gc_trait_err(wxString::Format(gcerr_trait::alleleMissing,alleleName.c_str()))
+{
+}
+gc_missing_allele::~gc_missing_allele() throw() {}
+
+#if 0
+
+gc_allele_trait_mismatch::gc_allele_trait_mismatch(const gcTraitAllele & allele,
+                                                   const gcTraitInfo & trait,
+                                                   const gcPhenotype & pheno,
+                                                   size_t lineNo) throw ()
+    : gc_trait_err(wxString::Format(gcerr_trait::alleleTraitMismatch,
+                                    allele.GetName().c_str(),
+                                    pheno.GetName().c_str(),
+                                    trait.GetName().c_str()))
+{
+    setRow(lineNo);
+}
+
+gc_allele_trait_mismatch::~gc_allele_trait_mismatch() throw() {}
+
+#endif
+
+gc_pheno_trait_mismatch::gc_pheno_trait_mismatch(const gcTraitInfo & outer,
+                                                 const gcTraitInfo & inner,
+                                                 const gcPhenotype & pheno,
+                                                 size_t lineNo) throw ()
+    : gc_trait_err(wxString::Format(gcerr_trait::phenoTraitMismatch,
+                                    pheno.GetName().c_str(),
+                                    inner.GetName().c_str(),
+                                    outer.GetName().c_str()))
+{
+    setRow(lineNo);
+}
+
+gc_pheno_trait_mismatch::~gc_pheno_trait_mismatch() throw() {}
+
+gc_trait_allele_name_spaces::gc_trait_allele_name_spaces(wxString alleleName) throw()
+    : gc_trait_err(wxString::Format(gcerr_trait::alleleNameSpaces,
+                                    alleleName.c_str()))
+{
+}
+
+gc_trait_allele_name_spaces::~gc_trait_allele_name_spaces() throw() {}
+
+//____________________________________________________________________________________
diff --git a/src/convErr/gc_trait_err.h b/src/convErr/gc_trait_err.h
new file mode 100644
index 0000000..408cd4d
--- /dev/null
+++ b/src/convErr/gc_trait_err.h
@@ -0,0 +1,91 @@
+// $Id: gc_trait_err.h,v 1.9 2011/06/22 18:22:22 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_TRAIT_ERR
+#define GC_TRAIT_ERR
+
+#include "gc_errhandling.h"
+#include "gc_phenotype.h"
+#include "gc_trait.h"
+#include "gc_trait_allele.h"
+
+class gc_trait_err : public gc_ex
+{
+  public:
+    gc_trait_err(wxString msg) throw();
+    virtual ~gc_trait_err() throw();
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_haplotype_probability_negative : public gc_trait_err
+{
+  public:
+    gc_haplotype_probability_negative(double probability) throw();
+    virtual ~gc_haplotype_probability_negative() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_trait_allele_name_reuse : public gc_trait_err
+{
+  public:
+    gc_trait_allele_name_reuse(wxString alleleName) throw();
+    virtual ~gc_trait_allele_name_reuse() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_trait_phenotype_name_reuse : public gc_trait_err
+{
+  public:
+    gc_trait_phenotype_name_reuse(wxString phenotypeName, wxString traitName) throw();
+    virtual ~gc_trait_phenotype_name_reuse() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_missing_phenotype : public gc_trait_err
+{
+  public:
+    gc_missing_phenotype(wxString phenotypeName) throw();
+    virtual ~gc_missing_phenotype() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_missing_allele : public gc_trait_err
+{
+  public:
+    gc_missing_allele(wxString alleleName) throw();
+    virtual ~gc_missing_allele() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_trait_allele_name_spaces : public gc_trait_err
+{
+  public:
+    gc_trait_allele_name_spaces(wxString alleleName) throw();
+    virtual ~gc_trait_allele_name_spaces() throw() ;
+};
+
+//------------------------------------------------------------------------------------
+
+class gc_pheno_trait_mismatch : public gc_trait_err
+{
+  public:
+    gc_pheno_trait_mismatch(const gcTraitInfo& outer, const gcTraitInfo& inner, const gcPhenotype &, size_t lineNo) throw();
+    virtual ~gc_pheno_trait_mismatch() throw() ;
+};
+
+#endif  // GC_TRAIT_ERR
+
+//____________________________________________________________________________________
diff --git a/src/convModel/cmdfileschema.cpp b/src/convModel/cmdfileschema.cpp
new file mode 100644
index 0000000..b5929c4
--- /dev/null
+++ b/src/convModel/cmdfileschema.cpp
@@ -0,0 +1,134 @@
+// $Id: cmdfileschema.cpp,v 1.23 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <iostream>
+
+#include "cmdfileschema.h"
+#include "cnv_strings.h"
+
+CmdFileSchema::CmdFileSchema()
+{
+    const bool required = true;
+    const bool optional = false;
+    const bool onlyone = true;
+    const bool many = false;
+
+    AddTag(cnvstr::TAG_CONVERTER_CMD);
+    AddAttribute(optional,cnvstr::TAG_CONVERTER_CMD,cnvstr::ATTR_VERSION);
+    AddSubtag(optional, onlyone,cnvstr::TAG_CONVERTER_CMD,cnvstr::TAG_TRAITS);
+    AddSubtag(optional, onlyone,cnvstr::TAG_CONVERTER_CMD,cnvstr::TAG_REGIONS);
+    AddSubtag(optional, onlyone,cnvstr::TAG_CONVERTER_CMD,cnvstr::TAG_POPULATIONS);
+    AddSubtag(optional, onlyone,cnvstr::TAG_CONVERTER_CMD,cnvstr::TAG_PANELS);
+    AddSubtag(optional, onlyone,cnvstr::TAG_CONVERTER_CMD,cnvstr::TAG_INDIVIDUALS);
+    AddSubtag(optional, onlyone,cnvstr::TAG_CONVERTER_CMD,cnvstr::TAG_INFILES);
+    AddSubtag(optional, onlyone,cnvstr::TAG_CONVERTER_CMD,cnvstr::TAG_OUTFILE);
+    AddSubtag(optional, onlyone,cnvstr::TAG_CONVERTER_CMD,cnvstr::TAG_ADDCOMMENT);
+    AddSubtag(optional, onlyone,cnvstr::TAG_CONVERTER_CMD,cnvstr::TAG_DIVERGENCES);
+
+    AddSubtag(optional, many,   cnvstr::TAG_TRAITS,cnvstr::TAG_TRAIT_INFO);
+    AddSubtag(required, onlyone,cnvstr::TAG_TRAIT_INFO,cnvstr::TAG_NAME);
+    AddSubtag(required, many,   cnvstr::TAG_TRAIT_INFO,cnvstr::TAG_ALLELE);
+    AddSubtag(optional, many,   cnvstr::TAG_TRAITS,cnvstr::TAG_PHENOTYPE);
+    AddSubtag(required, onlyone,cnvstr::TAG_PHENOTYPE,cnvstr::TAG_NAME);
+    AddSubtag(required, many   ,cnvstr::TAG_PHENOTYPE,cnvstr::TAG_GENO_RESOLUTIONS);
+    AddSubtag(required, onlyone,cnvstr::TAG_GENO_RESOLUTIONS,cnvstr::TAG_TRAIT_NAME);
+    AddSubtag(required, many,   cnvstr::TAG_GENO_RESOLUTIONS,cnvstr::TAG_HAPLOTYPES);
+    AddSubtag(required, onlyone,cnvstr::TAG_HAPLOTYPES,cnvstr::TAG_ALLELES);
+    AddSubtag(required, onlyone,cnvstr::TAG_HAPLOTYPES,cnvstr::TAG_PENETRANCE);
+
+    AddSubtag(required, many,cnvstr::TAG_REGIONS,cnvstr::TAG_REGION);
+
+    AddSubtag(required, onlyone,cnvstr::TAG_REGION,cnvstr::TAG_NAME);
+    AddSubtag(optional, onlyone,cnvstr::TAG_REGION,cnvstr::TAG_EFFECTIVE_POPSIZE);
+    AddSubtag(required, onlyone,cnvstr::TAG_REGION,cnvstr::TAG_SEGMENTS);
+
+    AddSubtag(optional, many,   cnvstr::TAG_REGION,cnvstr::TAG_TRAIT_LOCATION);
+    AddSubtag(required, onlyone,cnvstr::TAG_TRAIT_LOCATION,cnvstr::TAG_TRAIT_NAME);
+
+    AddSubtag(required, many,cnvstr::TAG_SEGMENTS,cnvstr::TAG_SEGMENT);
+
+    AddAttribute(required,cnvstr::TAG_SEGMENT,cnvstr::ATTR_DATATYPE);
+    AddAttribute(optional,cnvstr::TAG_SEGMENT,cnvstr::ATTR_PROXIMITY);
+    AddSubtag(required, onlyone,cnvstr::TAG_SEGMENT,cnvstr::TAG_NAME);
+    AddSubtag(required, onlyone,cnvstr::TAG_SEGMENT,cnvstr::TAG_MARKERS);
+    AddSubtag(optional, onlyone,cnvstr::TAG_SEGMENT,cnvstr::TAG_MAP_POSITION);
+
+    //For SNP data:
+    AddSubtag(optional, onlyone,cnvstr::TAG_SEGMENT,cnvstr::TAG_SCANNED_LENGTH);
+    AddSubtag(optional, onlyone,cnvstr::TAG_SEGMENT,cnvstr::TAG_SCANNED_DATA_POSITIONS);
+    AddSubtag(optional, onlyone,cnvstr::TAG_SEGMENT,cnvstr::TAG_FIRST_POSITION_SCANNED);
+    AddSubtag(optional, onlyone,cnvstr::TAG_SEGMENT,cnvstr::TAG_UNRESOLVED_MARKERS);
+
+    AddSubtag(optional, many,cnvstr::TAG_PANELS,cnvstr::TAG_PANEL);
+    AddSubtag(required, onlyone,cnvstr::TAG_PANEL,cnvstr::TAG_PANEL_REGION);
+    AddSubtag(required, onlyone,cnvstr::TAG_PANEL,cnvstr::TAG_PANEL_POP);
+    AddSubtag(optional, onlyone,cnvstr::TAG_PANEL,cnvstr::TAG_PANEL_NAME);
+    AddSubtag(required, onlyone,cnvstr::TAG_PANEL,cnvstr::TAG_PANEL_SIZE);
+
+    AddSubtag(required, many,cnvstr::TAG_POPULATIONS,cnvstr::TAG_POPULATION);
+
+    AddSubtag(optional, many    ,cnvstr::TAG_INDIVIDUALS,cnvstr::TAG_INDIVIDUAL);
+    AddSubtag(required, onlyone ,cnvstr::TAG_INDIVIDUAL,cnvstr::TAG_NAME);
+    AddSubtag(required, many    ,cnvstr::TAG_INDIVIDUAL,cnvstr::TAG_SAMPLE);
+    AddSubtag(required, onlyone ,cnvstr::TAG_SAMPLE,cnvstr::TAG_NAME);
+    AddSubtag(optional, many    ,cnvstr::TAG_INDIVIDUAL,cnvstr::TAG_PHASE);
+    AddSubtag(required, onlyone ,cnvstr::TAG_PHASE,cnvstr::TAG_SEGMENT_NAME);
+    AddSubtag(required, onlyone ,cnvstr::TAG_PHASE,cnvstr::TAG_UNRESOLVED_MARKERS);
+    AddSubtag(optional, many    ,cnvstr::TAG_INDIVIDUAL,cnvstr::TAG_HAS_PHENOTYPE);
+    AddSubtag(optional, many    ,cnvstr::TAG_INDIVIDUAL,cnvstr::TAG_GENO_RESOLUTIONS);
+
+    AddSubtag(required, many,cnvstr::TAG_INFILES,cnvstr::TAG_INFILE);
+    AddAttribute(required,cnvstr::TAG_INFILE,cnvstr::ATTR_DATATYPE);
+    AddAttribute(required,cnvstr::TAG_INFILE,cnvstr::ATTR_FORMAT);
+    AddAttribute(required,cnvstr::TAG_INFILE,cnvstr::ATTR_SEQUENCEALIGNMENT);
+    AddSubtag(required, onlyone,cnvstr::TAG_INFILE,cnvstr::TAG_NAME);
+    AddSubtag(required, onlyone,cnvstr::TAG_INFILE,cnvstr::TAG_SEGMENTS_MATCHING);
+    AddSubtag(required, onlyone,cnvstr::TAG_INFILE,cnvstr::TAG_POP_MATCHING);
+    AddSubtag(optional, onlyone,cnvstr::TAG_INFILE,cnvstr::TAG_INDIVIDUALS_FROM_SAMPLES);
+    AddAttribute(required,cnvstr::TAG_INDIVIDUALS_FROM_SAMPLES,cnvstr::ATTR_TYPE);
+
+    AddAttribute(required,cnvstr::TAG_SEGMENTS_MATCHING,cnvstr::ATTR_TYPE);
+    AddAttribute(required,cnvstr::TAG_POP_MATCHING,cnvstr::ATTR_TYPE);
+
+    AddSubtag(optional, many,cnvstr::TAG_SEGMENTS_MATCHING,cnvstr::TAG_SEGMENT_NAME);
+    AddSubtag(optional, many,cnvstr::TAG_POP_MATCHING,cnvstr::TAG_POP_NAME);
+
+    AddSubtag(optional, many,cnvstr::TAG_DIVERGENCES,cnvstr::TAG_DIVERGENCE);
+    AddSubtag(optional, onlyone,cnvstr::TAG_DIVERGENCE,cnvstr::TAG_DIV_ANCESTOR);
+    AddSubtag(required, onlyone,cnvstr::TAG_DIVERGENCE,cnvstr::TAG_DIV_CHILD1);
+    AddSubtag(required, onlyone,cnvstr::TAG_DIVERGENCE,cnvstr::TAG_DIV_CHILD2);
+}
+
+CmdFileSchema::~CmdFileSchema()
+{
+}
+
+void
+CmdFileSchema::AddAttribute(bool required, wxString tagName, wxString attrName)
+{
+    //ParseTreeSchema::AddAttribute(required,tagName.c_str(),attrName.c_str());
+    ParseTreeSchema::AddAttribute(required, (const char *)tagName.mb_str(), (const char *)attrName.mb_str());// JRM hack
+}
+
+void
+CmdFileSchema::AddTag(wxString tag)
+{
+    //ParseTreeSchema::AddTag(tag.c_str());
+    ParseTreeSchema::AddTag((const char *)tag.mb_str());// JRM hack
+}
+
+void
+CmdFileSchema::AddSubtag(bool required, bool onlyOne, wxString parentTag, wxString childTag)
+{
+    //ParseTreeSchema::AddSubtag(required,onlyOne,parentTag.c_str(),childTag.c_str());
+    ParseTreeSchema::AddSubtag(required,onlyOne, (const char *)parentTag.mb_str(), (const char *)childTag.mb_str());// JRM hack
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/cmdfileschema.h b/src/convModel/cmdfileschema.h
new file mode 100644
index 0000000..586289e
--- /dev/null
+++ b/src/convModel/cmdfileschema.h
@@ -0,0 +1,40 @@
+// $Id: cmdfileschema.h,v 1.4 2011/03/07 06:08:47 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef CMDFILESCHEMA_H
+#define CMDFILESCHEMA_H
+
+#include "parsetreeschema.h"
+#include "wx/string.h"
+
+class CmdFileSchema : public ParseTreeSchema
+// specialization of ParseTreeSchema applied to the command
+// files used by the batch version of the converter
+{
+  private:
+
+  protected:
+    // these methods allow us to pass wxStrings in as
+    // arguments instead of std::string
+    void AddTag(wxString tag);
+    void AddAttribute(bool required,
+                      wxString tagName,
+                      wxString attrName);
+    void AddSubtag(bool required, bool onlyOne,
+                   wxString parentTag,
+                   wxString childTag);
+  public:
+    CmdFileSchema();            // modify this method to update schema
+    virtual ~CmdFileSchema();
+};
+
+#endif // CMDFILESCHEMA_H
+
+//____________________________________________________________________________________
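
For reference, the schema declared above is consumed elsewhere in this commit (see the currently #if 0'd GCDataStore::AddMapFileAsXml and AddHapFile in gc_datastore.cpp) roughly as follows; the snippet is condensed from that code, with mapFileName standing in for whatever path the caller supplies:

    CmdFileSchema schema;                   // builds the full tag/attribute schema
    FrontEndWarnings warnings;
    XmlParser parser(schema, warnings);     // validates parsed input against the schema

    parser.ParseFileData(mapFileName.c_str());
    TiXmlElement * topElem = parser.GetRootElement();
    // ... walk topElem, then collect warnings.GetAndClearWarnings() ...
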
diff --git a/src/convModel/gc_creation_info.cpp b/src/convModel/gc_creation_info.cpp
new file mode 100644
index 0000000..7da031a
--- /dev/null
+++ b/src/convModel/gc_creation_info.cpp
@@ -0,0 +1,129 @@
+// $Id: gc_creation_info.cpp,v 1.3 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "wx/string.h"
+#include "gc_creation_info.h"
+#include "gc_default.h"
+#include "gc_strings_creation.h"
+
+//------------------------------------------------------------------------------------
+
+gcCreationInfo::gcCreationInfo()
+    :   m_creationType(created_UNKNOWN),
+        m_hasLineNumber(false),
+        m_lineNumber(gcdefault::badIndex),
+        m_hasFileName(false),
+        m_fileName(wxEmptyString)
+{
+}
+
+gcCreationInfo::~gcCreationInfo()
+{
+}
+
+void
+gcCreationInfo::SetLineNumber(size_t lineNumber)
+{
+    m_hasLineNumber = true;
+    m_lineNumber = lineNumber;
+}
+
+void
+gcCreationInfo::SetFileName(wxString fileName)
+{
+    m_hasFileName = true;
+    m_fileName = fileName;
+}
+
+bool
+gcCreationInfo::HasLineNumber() const
+{
+    return m_hasLineNumber;
+}
+
+size_t
+gcCreationInfo::GetLineNumber() const
+{
+    assert(HasLineNumber());
+    return m_lineNumber;
+}
+
+bool
+gcCreationInfo::HasFileName() const
+{
+    return m_hasFileName;
+}
+
+wxString
+gcCreationInfo::GetFileName() const
+{
+    assert(HasFileName());
+    return m_fileName;
+}
+
+wxString
+gcCreationInfo::GetDescriptiveName() const
+{
+    switch(m_creationType)
+    {
+        case created_UNKNOWN:
+            return wxEmptyString;
+            break;
+        case created_CMDFILE:
+            assert(HasLineNumber());
+            assert(HasFileName());
+            return wxString::Format(gcstr_creation::cmdfile,(long)m_lineNumber,m_fileName.c_str());
+            break;
+        case created_DATAFILE:
+            assert(HasLineNumber());
+            assert(HasFileName());
+            return wxString::Format(gcstr_creation::datafile,(long)m_lineNumber,m_fileName.c_str());
+            break;
+        case created_GUI:
+            return gcstr_creation::gui;
+            break;
+        default:
+            assert(false);
+            return wxEmptyString;
+            break;
+    }
+}
+
+gcCreationInfo
+gcCreationInfo::MakeGuiCreationInfo()
+{
+    gcCreationInfo info;
+    info.m_creationType = created_GUI;
+    return info;
+}
+
+gcCreationInfo
+gcCreationInfo::MakeDataFileCreationInfo(size_t lineNumber, wxString fileName)
+{
+    gcCreationInfo info;
+    info.m_creationType = created_DATAFILE;
+    info.SetLineNumber(lineNumber);
+    info.SetFileName(fileName);
+    return info;
+}
+
+gcCreationInfo
+gcCreationInfo::MakeCmdFileCreationInfo(size_t lineNumber, wxString fileName)
+{
+    gcCreationInfo info;
+    info.m_creationType = created_CMDFILE;
+    info.SetLineNumber(lineNumber);
+    info.SetFileName(fileName);
+    return info;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_creation_info.h b/src/convModel/gc_creation_info.h
new file mode 100644
index 0000000..3eef6ed
--- /dev/null
+++ b/src/convModel/gc_creation_info.h
@@ -0,0 +1,61 @@
+// $Id: gc_creation_info.h,v 1.4 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_CREATION_INFO_H
+#define GC_CREATION_INFO_H
+
+#include "wx/string.h"
+
+enum gcCreationType
+{
+    created_UNKNOWN,
+    created_CMDFILE,
+    created_DATAFILE,
+    created_GUI
+};
+
+class gcCreationInfo
+{
+  private:
+
+  protected:
+    gcCreationType      m_creationType;
+
+    bool                m_hasLineNumber;
+    size_t              m_lineNumber;
+
+    // EWFIX.P3.BUG.766 -- consider referring to the actual file object
+    bool                m_hasFileName;
+    wxString            m_fileName;
+
+    void    SetLineNumber(size_t lineNumber);
+    void    SetFileName(wxString fileName);
+
+  public:
+    gcCreationInfo();
+    ~gcCreationInfo();
+
+    bool        HasLineNumber() const;
+    size_t      GetLineNumber() const;
+
+    bool        HasFileName() const;
+    wxString    GetFileName() const;
+
+    wxString    GetDescriptiveName() const;
+
+    static gcCreationInfo MakeGuiCreationInfo();
+    static gcCreationInfo MakeDataFileCreationInfo(size_t lineNumber,wxString fileName);
+    static gcCreationInfo MakeCmdFileCreationInfo(size_t lineNumber,wxString fileName);
+
+};
+
+#endif  // GC_CREATION_INFO_H
+
+//____________________________________________________________________________________
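
The class above is only meant to be built through its three static factories. A short usage sketch follows; the file name and line number are made-up example values, and the logging call simply mirrors the "%s"/c_str() style used in GCDataStore::DebugDump:

    #include "gc_creation_info.h"
    #include "wx/log.h"

    static void LogProvenanceExample()
    {
        // made-up example values: line 42 of a file named "example_input.mig"
        gcCreationInfo info =
            gcCreationInfo::MakeDataFileCreationInfo(42, wxT("example_input.mig"));

        if (info.HasLineNumber() && info.HasFileName())
        {
            // produces the gcstr_creation::datafile text for this line/file pair
            wxLogVerbose("%s", info.GetDescriptiveName().c_str());
        }
    }
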
diff --git a/src/convModel/gc_datastore.cpp b/src/convModel/gc_datastore.cpp
new file mode 100644
index 0000000..db2ddcd
--- /dev/null
+++ b/src/convModel/gc_datastore.cpp
@@ -0,0 +1,661 @@
+// $Id: gc_datastore.cpp,v 1.104 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "cmdfileschema.h"
+#include "cnv_strings.h"
+#include "errhandling.h"
+#include "front_end_warnings.h"     // EWFIX -- try to get rid of this
+#include "gc_data.h"
+#include "gc_datastore.h"
+#include "gc_errhandling.h"
+#include "gc_data.h"
+#include "gc_default.h"
+#include "gc_file.h"
+#include "gc_file_util.h"
+#include "gc_infile_err.h"
+#include "gc_loci_match.h"
+#include "gc_locus.h"
+#include "gc_map_err.h"
+#include "gc_migrate.h"
+#include "gc_parse.h"
+#include "gc_parse_block.h"
+#include "gc_parse_locus.h"
+#include "gc_parse_sample.h"
+#include "gc_parse_pop.h"
+#include "gc_parser.h"
+#include "gc_phylip.h"
+#include "gc_population.h"
+#include "gc_pop_match.h"
+#include "gc_region.h"
+#include "gc_strings.h"
+#include "gc_strings_infile.h"
+#include "gc_strings_map.h"
+#include "gc_strings_parse.h"
+#include "gc_strings_phase.h"
+#include "gc_types.h"
+#include "parsetreewalker.h"
+#include "tixml_util.h"
+#include "xml.h"
+#include "xml_strings.h"
+
+#include "Converter_DataSourceException.h"
+
+#include "wx/file.h"
+#include "wx/filename.h"
+#include "wx/log.h"
+#include "wx/tokenzr.h"
+#include "wx/txtstrm.h"
+#include "wx/wfstream.h"
+#include "wx/utils.h"
+
+//------------------------------------------------------------------------------------
+
+GCDataStore::GCDataStore()
+    :
+    m_commandFileCurrentlyBeingParsed(wxEmptyString),
+    m_commentString(wxEmptyString),
+    m_outfileName(gcstr::exportFileDefault),
+    m_structures(this)
+{
+}
+
+GCDataStore::~GCDataStore()
+// DON'T do a thorough delete here -- do it in NukeContents()
+{
+}
+
+void
+GCDataStore::NukeContents()
+{
+    for(dataFileSet::iterator i=m_dataFiles.begin(); i!=m_dataFiles.end();i++)
+    {
+        delete *i;
+    }
+    // EWFIX.LEAK -- do we need to free memory in the hap file stuff?
+}
+
+void
+GCDataStore::GCFatal(wxString msg) const
+{
+    GCError(msg);
+    gc_fatal_error e;
+    throw e;
+}
+
+void
+GCDataStore::GCFatalBatchWarnGUI(wxString msg) const
+{
+    GCFatal(msg);
+}
+
+void
+GCDataStore::GCFatalUnlessDebug(wxString msg) const
+{
+#ifdef NDEBUG
+    GCFatal(msg);
+#else
+    GCError(msg);
+#endif
+}
+
+void
+GCDataStore::GCError(wxString msg) const
+{
+    wxLogError(msg);
+}
+
+void
+GCDataStore::GCInfo(wxString msg) const
+{
+    wxLogInfo(msg);
+}
+
+void
+GCDataStore::GCWarning(wxString msg) const
+{
+    wxLogWarning(msg);
+}
+
+void
+GCDataStore::GettingBusy(const wxString& msg) const
+{
+    wxLogMessage(msg);
+}
+
+void
+GCDataStore::LessBusy(const wxString& msg) const
+{
+    wxLogMessage(msg);
+}
+
+void
+GCDataStore::fileRejectingError(wxString msg,size_t lineNo) const
+{
+    gc_infile_err e(msg);
+    e.setRow(lineNo);
+    throw e;
+}
+
+void
+GCDataStore::batchFileRejectGuiLog(wxString msg, size_t lineNo) const
+{
+    fileRejectingError(msg,lineNo);
+}
+
+void
+GCDataStore::warnLog(wxString msg) const
+{
+    wxLogMessage(msg);
+}
+
+void
+GCDataStore::warnLog(wxString msg, size_t lineNo) const
+{
+    wxLogMessage(gcstr::nearRow,(int)lineNo,msg.c_str());
+}
+
+bool
+GCDataStore::guiQuestionBatchLog(wxString msg, wxString stopButton, wxString continueButton) const
+{
+    warnLog(msg);
+    return true;    // tells caller to continue on (false -> throw)
+}
+
+GCParse *
+GCDataStore::OneParse(GCFile &              file,
+                      GCFileFormat        format,
+                      gcGeneralDataType   dataType,
+                      GCInterleaving      interleaving)
+{
+    if(format == format_PHYLIP)
+    {
+        GCPhylipParser phy(*this);
+        GCParse * parse = phy.Parse(file,dataType,interleaving);
+        return parse;
+    }
+
+    if(format == format_MIGRATE)
+    {
+        GCMigrateParser mig(*this);
+        GCParse * parse = mig.Parse(file,dataType,interleaving);
+        return parse;
+    }
+    return NULL;
+
+}
+
+GCParseVec
+GCDataStore::GoodPhylipParses(GCFile & file)
+{
+    GCParseVec goodParses;
+
+    /////////// parse phylip sequential
+    try
+    {
+        GCPhylipParser phy(*this);
+        GCParse * phyParseC = phy.Parse(file,gcdata::nucDataTypes(),interleaving_SEQUENTIAL);
+        goodParses.push_back(phyParseC);
+    }
+    catch(const gc_ex& e)
+        // do nothing, we're just avoiding putting parse into return vec
+    {
+        wxLogVerbose(wxString::Format(gcerr_infile::unableToParseBecause,
+                                      file.GetName().c_str(),
+                                      ToWxString(format_PHYLIP).c_str(),
+                                      ToWxString(gcdata::nucDataTypes()).c_str(),
+                                      ToWxString(interleaving_SEQUENTIAL).c_str(),
+                                      e.what()));
+    }
+
+    /////////// parse phylip interleaved
+    try
+    {
+        GCPhylipParser phy(*this);
+        GCParse * phyParseI = phy.Parse(file,gcdata::nucDataTypes(),interleaving_INTERLEAVED);
+        goodParses.push_back(phyParseI);
+    }
+    catch(const gc_ex& e)
+        // do nothing, we're just avoiding putting parse into return vec
+    {
+        wxLogVerbose(wxString::Format(gcerr_infile::unableToParseBecause,
+                                      file.GetName().c_str(),
+                                      ToWxString(format_PHYLIP).c_str(),
+                                      ToWxString(gcdata::nucDataTypes()).c_str(),
+                                      ToWxString(interleaving_INTERLEAVED).c_str(),
+                                      e.what()));
+    }
+
+    goodParses.MungeParses();
+    return goodParses;
+}
+
+GCParseVec
+GCDataStore::GoodMigrateParses(GCFile & file, gcGeneralDataType dataType)
+{
+    GCParseVec goodParses;
+
+    /////////// parse migrate sequential
+    try
+    {
+        GCMigrateParser mig(*this);
+        GCParse * migParseC = mig.Parse(file,dataType,interleaving_SEQUENTIAL);
+        goodParses.push_back(migParseC);
+    }
+    catch(const gc_ex& e)
+        // do nothing, we're just avoiding putting parse into return vec
+    {
+        wxLogVerbose(wxString::Format(gcerr_infile::unableToParseBecause,
+                                      file.GetName().c_str(),
+                                      ToWxString(format_MIGRATE).c_str(),
+                                      ToWxString(dataType).c_str(),
+                                      ToWxString(interleaving_SEQUENTIAL).c_str(),
+                                      e.what()));
+    }
+
+    /////////// parse migrate interleaved
+    try
+    {
+        GCMigrateParser mig(*this);
+        GCParse * migParseI = mig.Parse(file,dataType,interleaving_INTERLEAVED);
+        goodParses.push_back(migParseI);
+    }
+    catch(const gc_ex& e)
+        // do nothing, we're just avoiding putting parse into return vec
+    {
+        wxLogVerbose(wxString::Format(gcerr_infile::unableToParseBecause,
+                                      file.GetName().c_str(),
+                                      ToWxString(format_MIGRATE).c_str(),
+                                      ToWxString(dataType).c_str(),
+                                      ToWxString(interleaving_INTERLEAVED).c_str(),
+                                      e.what()));
+    }
+
+    goodParses.MungeParses();
+    return goodParses;
+}
+
+GCParseVec *
+GCDataStore::AllParsesForFile(GCFile & file)
+{
+    GettingBusy(wxString::Format(gcstr_parse::parsingStarting,file.GetShortName().c_str()));
+    GCParseVec * parseVecP = new GCParseVec();
+
+    GCParseVec phyParses = GoodPhylipParses(file);
+    parseVecP->insert(parseVecP->end(),phyParses.begin(),phyParses.end());
+
+    GCParseVec migParsesKallele = GoodMigrateParses(file,gcdata::allelicDataTypes());
+    parseVecP->insert(parseVecP->end(),migParsesKallele.begin(),migParsesKallele.end());
+
+    GCParseVec migParsesNuc = GoodMigrateParses(file,gcdata::nucDataTypes());
+    parseVecP->insert(parseVecP->end(),migParsesNuc.begin(),migParsesNuc.end());
+
+    LessBusy(wxString::Format(gcstr_parse::parsingDone,file.GetShortName().c_str()));
+
+    return parseVecP;
+}
+
+void
+GCDataStore::SetLamarcCommentString(wxString commentString)
+{
+    m_commentString = commentString;
+}
+
+wxString
+GCDataStore::GetOutfileName() const
+{
+    return m_outfileName;
+}
+
+void
+GCDataStore::SetOutfileName(wxString outfileName)
+{
+    m_outfileName = outfileName;
+}
+
+const GCStructures &
+GCDataStore::GetStructures() const
+{
+    return m_structures;
+}
+
+GCStructures &
+GCDataStore::GetStructures()
+{
+    return m_structures;
+}
+
+void
+GCDataStore::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%s************************",prefix.c_str());    // EWDUMPOK
+    wxLogDebug("%sGC DataStore: DEBUG DUMP",prefix.c_str());    // EWDUMPOK
+
+    // data file contents
+    wxLogDebug("%sData Files",(prefix+gcstr::indent).c_str());  // EWDUMPOK
+    dataFileSet::iterator fileIter;
+    for(fileIter=m_dataFiles.begin(); fileIter != m_dataFiles.end(); fileIter++)
+    {
+        GCFile *  file = (*fileIter);
+        file->DebugDump(prefix+gcstr::indent+gcstr::indent);
+    }
+
+    m_structures.DebugDump(prefix+gcstr::indent);
+
+    wxLogDebug("%sPhase Info",(prefix+gcstr::indent).c_str());  // EWDUMPOK
+
+    gcPhaseInfo * info = BuildPhaseInfo(false);
+    // false argument allows us to build the object even if
+    // there are inconsistencies. This is appropriate since
+    // we're doing a debug dump for information.
+
+    info->DebugDump(prefix+gcstr::indent+gcstr::indent);
+    delete info;
+
+    GCQuantum::ReportMax();
+}
+
+gcPhaseInfo *
+GCDataStore::BuildPhaseInfo(bool carpIfBroken) const
+{
+    gcPhaseInfo * phaseInfo = new gcPhaseInfo();
+
+    phaseInfo->AddRecords(m_phaseInfo);
+
+    for(dataFileSet::const_iterator i = m_dataFiles.begin(); i != m_dataFiles.end(); i++)
+    {
+        // get data file
+        const GCFile & fileRef = **i;
+
+        // get data file parse
+        if(GetStructures().HasParse(fileRef))
+        {
+            const GCParse & parse = GetStructures().GetParse(fileRef);
+
+            if(GetStructures().HasHapFileAdjacent(fileRef.GetId()))
+            {
+                size_t adjCount = GetStructures().GetHapFileAdjacent(fileRef.GetId());
+                gcPhaseInfo * adjRecs = parse.GetPhaseRecordsForAdjacency(adjCount);
+                phaseInfo->AddRecords(*adjRecs);
+                delete adjRecs;
+            }
+            else
+            {
+                // EWFIX.P4 -- can we cut down on the creation and deletion?
+                gcPhaseInfo * defaultRecs = parse.GetDefaultPhaseRecords();
+                phaseInfo->AddRecords(*defaultRecs);
+                delete defaultRecs;
+            }
+        }
+        else
+        {
+            if(carpIfBroken)
+            {
+                gc_parse_missing_err e(fileRef.GetName());
+                throw e;
+            }
+            else
+            {
+                // EWFIX -- this code is intended to be triggered during
+                // a debug dump. If we wanted, we could consider augmenting
+                // gcPhaseInfo to indicate the missing parse in its
+                // DebugDump routine.
+            }
+        }
+    }
+
+    return phaseInfo;
+}
+
+#if 0
+
+void
+GCDataStore::AddMapFile(wxString mapFileName)
+{
+    try
+    {
+        AddMapFileAsXml(mapFileName);
+    }
+    catch (const tixml_error& f)
+    {
+        wxLogVerbose(gcstr_map::notXmlMapFileTryOldFmt,
+                     wxFileName(mapFileName).GetFullName().c_str(),
+                     f.what());
+        AddMapFileAsOldFormat(mapFileName);
+    }
+}
+
+void
+GCDataStore::AddMapFileAsXml(wxString mapFileName)
+{
+    CmdFileSchema schema;
+    FrontEndWarnings warnings;
+    XmlParser parser(schema,warnings);
+
+    parser.ParseFileData(mapFileName.c_str());
+    TiXmlElement * topElem = parser.GetRootElement();
+
+    const char * value = topElem->Value();
+    std::string topTag(value);
+    bool matches = CaselessStrCmp(cnvstr::TAG_REGIONS,topTag);
+    if(!matches)
+    {
+        gc_map_err e((wxString::Format(gcerr_map::ERR_BAD_TOP_TAG,cnvstr::TAG_REGIONS.c_str(),topTag.c_str())).c_str());
+        throw e;
+    }
+
+    cmdParseRegions       (topElem,true); // true value allows using existing regions/loci
+
+    std::vector<std::string> warningStrings = warnings.GetAndClearWarnings();
+    assert(warningStrings.empty());
+}
+
+void
+GCDataStore::AddMapFileAsOldFormat(wxString mapFileName)
+{
+    wxString defaultRegionName = wxFileName(mapFileName).GetFullName();
+    ////////////////////////////////////////////////////
+    // open the file stream
+    if(! ::wxFileExists(mapFileName))
+    {
+        throw gc_map_file_missing(mapFileName);
+    }
+    wxFileInputStream fileStream(mapFileName);
+    if(!fileStream.Ok())
+    {
+        throw gc_map_file_read_err(mapFileName);
+    }
+    // treat it as text
+    wxTextInputStream textStream(fileStream);
+
+    ////////////////////////////////////////////////////
+    bool atLeastOneLine = false;
+    try
+    {
+        while(true)
+        {
+            wxString line = ReadLineSafely(&fileStream,&textStream);
+            wxStringTokenizer tokens(line);
+            wxStringTokenizer tokenizer(line);
+            if(tokenizer.HasMoreTokens())
+                // if not, a blank line, so we skip it
+            {
+                atLeastOneLine = true;
+                wxString locusName = tokenizer.GetNextToken();
+                wxString locations = tokenizer.GetString();
+                if(m_structures.HasLocus(locusName))
+                {
+                    gcLocus & locus = m_structures.GetLocus(locusName);
+                    locus.SetLocations(locations);
+                }
+                else
+                {
+                    gcRegion & region = m_structures.FetchOrMakeRegion(defaultRegionName);
+                    gcLocus & locus = m_structures.MakeLocus(region,locusName,true);
+                    locus.SetLocations(locations);
+                }
+            }
+        }
+    }
+    catch(const gc_eof& e)
+    {
+        if(!atLeastOneLine)
+        {
+            throw gc_map_file_empty(mapFileName);
+        }
+    }
+}
+
+#endif
+
+gcTraitInfo &
+GCDataStore::AddNewTrait(wxString name)
+{
+    return m_structures.MakeTrait(name);
+}
+
+void
+GCDataStore::Rename(GCQuantum & object, wxString newName)
+{
+    m_structures.Rename(object,newName);
+}
+
+#if 0
+
+void
+GCDataStore::AddHapFile(wxString hapFileName)
+{
+    CmdFileSchema schema;
+    FrontEndWarnings warnings;
+    XmlParser parser(schema,warnings);
+
+    parser.ParseFileData(hapFileName.c_str());
+    TiXmlElement * topElem = parser.GetRootElement();
+
+    const char * value = topElem->Value();
+    std::string topTag(value);
+    bool matches = CaselessStrCmp(cnvstr::TAG_INDIVIDUALS,topTag);
+    if(!matches)
+    {
+        gc_phase_err e((wxString::Format(gcerr_phase::badTopTag,cnvstr::TAG_INDIVIDUALS.c_str(),topTag.c_str())).c_str());
+        throw e;
+    }
+
+    cmdParseIndividuals       (topElem,hapFileName);
+
+    std::vector<std::string> warningStrings = warnings.GetAndClearWarnings();
+    assert(warningStrings.empty());
+}
+
+#endif
+
+void
+GCDataStore::DiagnosePhaseInfoProblems(const gcPhaseInfo& phaseInfo) const
+{
+    constObjVector pops     = m_structures.GetConstDisplayablePops();
+    constObjVector regions  = m_structures.GetConstDisplayableRegions();
+
+    for(constObjVector::const_iterator pIter = pops.begin(); pIter != pops.end(); pIter++)
+    {
+        for(constObjVector::const_iterator rIter = regions.begin(); rIter != regions.end(); rIter++)
+        {
+            // EWFIX.P4.BUG.564 -- make fancier to keep more info
+            std::set<wxString> fullSpecAsSample;
+            std::set<wxString> fullSpecAsIndividual;
+            std::set<wxString> partSpecAsSample;
+            std::set<wxString> partSpecAsIndividual;
+            std::set<wxString> simpleLabelNoSpec;
+
+            constObjVector loci = m_structures.GetConstDisplayableLinkedLociInMapOrderFor((*rIter)->GetId());
+            for(constObjVector::const_iterator lIter=loci.begin(); lIter != loci.end(); lIter++)
+            {
+                gcIdSet blockIds = m_structures.GetBlockIds((*pIter)->GetId(),(*lIter)->GetId());
+                for(gcIdSet::const_iterator b = blockIds.begin(); b != blockIds.end(); b++)
+                {
+                    const GCParseBlock * blockP = GetParseBlock(*b);
+                    const GCParseSamples & samples = blockP->GetSamples();
+                    for(size_t s = 0; s < samples.size(); s++)
+                    {
+                        const GCParseSample & sample = *(samples[s]);
+
+                        wxString name = sample.GetLabel();
+
+                        if(phaseInfo.HasIndividualRecord(name))
+                        {
+                            assert(!phaseInfo.HasSampleRecord(name));
+                            const gcPhaseRecord & rec = phaseInfo.GetIndividualRecord(name);
+                            if(rec.HasSamples())
+                            {
+                                fullSpecAsIndividual.insert(name);
+                            }
+                            else
+                            {
+                                partSpecAsIndividual.insert(name);
+                            }
+                        }
+
+                        if(phaseInfo.HasSampleRecord(name))
+                        {
+                            assert(!phaseInfo.HasIndividualRecord(name));
+                            const gcPhaseRecord & rec = phaseInfo.GetSampleRecord(name);
+                            if(rec.HasIndividual())
+                            {
+                                fullSpecAsSample.insert(name);
+                            }
+                            else
+                            {
+                                partSpecAsSample.insert(name);
+                            }
+                        }
+
+                        if(!phaseInfo.HasSampleRecord(name) && !phaseInfo.HasIndividualRecord(name))
+                        {
+                            simpleLabelNoSpec.insert(name);
+                        }
+                    }
+                }
+            }
+
+            if(!partSpecAsSample.empty() &&  !partSpecAsIndividual.empty())
+                // this only works if all
+            {
+                wxString repSample = *partSpecAsSample.begin();
+                wxString repIndividual = *partSpecAsIndividual.begin();
+                wxString regionName = (*rIter)->GetName();
+                throw gc_phase_matching_confusion(regionName,repSample,repIndividual);
+            }
+            if(!partSpecAsSample.empty() &&  !simpleLabelNoSpec.empty())
+                // this only works if all
+            {
+                wxString repSample = *partSpecAsSample.begin();
+                wxString repSimple = *simpleLabelNoSpec.begin();
+                wxString regionName = (*rIter)->GetName();
+                throw gc_phase_matching_confusion(regionName,repSample,repSimple);
+            }
+            if(!partSpecAsIndividual.empty() &&  !simpleLabelNoSpec.empty())
+                // this only works if all
+            {
+                wxString repIndividual = *partSpecAsIndividual.begin();
+                wxString repSimple = *simpleLabelNoSpec.begin();
+                wxString regionName = (*rIter)->GetName();
+                throw gc_phase_matching_confusion(regionName,repIndividual,repSimple);
+            }
+        }
+    }
+}
+
+bool
+GCDataStore::PhaseInfoHasAnyZeroes() const
+{
+    return m_phaseInfo.HasAnyZeroes();
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_datastore.h b/src/convModel/gc_datastore.h
new file mode 100644
index 0000000..487a01c
--- /dev/null
+++ b/src/convModel/gc_datastore.h
@@ -0,0 +1,257 @@
+// $Id: gc_datastore.h,v 1.99 2012/06/30 01:32:39 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_DATASTORE_H
+#define GC_DATASTORE_H
+
+#include "gc_file.h"
+#include "gc_locus_err.h"
+#include "gc_structures.h"
+#include "gc_types.h"
+#include "wx/string.h"
+
+class gcExportable;
+class gcNameResolvedInfo;
+class GCGenotypeResolution;
+class GCIndividual;
+class gcRegion;
+class GCLocusMatcher;
+class GCParseSample;
+class gcPhenotype;
+class GCPopMatcher;
+class gcLocus;
+class gcTraitInfo;
+class GCSequentialData;
+class gcUnphasedMarkers;
+class TiXmlDocument;
+class TiXmlElement;
+
+class GCDataStore
+// Everything we need to know to run the converter should be stored here.
+// (Or possibly only the state needed for undo/redo, with everything else
+// kept in another structure?)
+{
+  private:
+    dataFileSet                     m_dataFiles;
+    gcPhaseInfo                     m_phaseInfo;
+    wxString                        m_commandFileCurrentlyBeingParsed; // EWFIX ugh!
+    wxString                        m_commentString;
+    wxString                        m_outfileName;
+    GCStructures                    m_structures;
+
+  protected:
+    /////////////////////////////////////////////////
+    // Parse munging
+    /////////////////////////////////////////////////
+    GCParse *       OneParse(GCFile &,GCFileFormat,gcGeneralDataType,GCInterleaving);
+    GCParseVec *    AllParsesForFile(GCFile &);
+    void            MungeParses(GCParseVec &);
+    GCParseVec      GoodPhylipParses(GCFile & file);
+    GCParseVec      GoodMigrateParses(GCFile & file, gcGeneralDataType);
+    /////////////////////////////////////////////////
+
+    /////////////////////////////////////////////////
+    // for building TiXml DOM tree
+    /////////////////////////////////////////////////
+    TiXmlElement *  MakeBlockElem(const gcLocus & locusRef, bool requireMapPosition) const;
+    TiXmlElement *  MakeBlockElemWithMapPosition(const gcLocus & locusRef ) const;
+    TiXmlElement *  MakeBlockElemWithoutMapPosition(const gcLocus & locusRef ) const;
+    void AddDataBlockElem(TiXmlElement * sampleElem, const GCSequentialData &, const gcLocus & locusRef ) const;
+    void AddDataBlockElem(TiXmlElement * sampleElem, const GCSequentialData &, const gcLocus & locusRef, size_t siteIndex ) const;
+    void AddDefaultPhaseElem(TiXmlElement * individualElem) const;
+    void AddPhaseElem(TiXmlElement *, const GCIndividual &, const gcLocus & ) const;
+    void AddPopulationElemForRegion(TiXmlElement *, const gcNameResolvedInfo &, const gcRegion &,const gcPopulation&) const;
+    TiXmlElement *  MakePopulationElemForUnlinkedLocus(const gcNameResolvedInfo &, const gcPopulation&, const gcRegion &, const gcLocus &, size_t indexInLocus) const;
+    void AddRegionElem(TiXmlElement * dataElem, const gcExportable &, const gcRegion &) const;
+    void AddEffectivePopSizeElem(TiXmlElement * regionElem, const gcRegion &) const;
+    void AddSampleElem(TiXmlElement * individualElem, const GCIndividual &, const gcRegion&, size_t hapIndex) const;
+    void AddSampleElem(TiXmlElement * individualElem, const GCIndividual &, const gcRegion&, const gcLocus &, size_t siteIndex, size_t hapIndex) const;
+    void AddSpacingElem(TiXmlElement * regionElem, const gcRegion&) const;
+    TiXmlElement *  MakeSpacingElemMulti(const constObjVector & loci) const;
+    // TiXmlElement *  MakeSpacingElemSingle(const gcLocus &) const;
+    void AddTraitsElem(TiXmlElement * regionElem, const GCTraitInfoSet &) const;
+    void AddGenoResolutionElem(TiXmlElement * individualElem, const gcPhenotype &) const;
+    void AddIndividualElem(TiXmlElement * popElem, const GCIndividual&, const gcRegion &) const;
+    void AddIndividualElem(TiXmlElement * popElem, const GCIndividual&, const gcRegion &, const gcLocus&, size_t siteIndex) const;
+    void AddPanelElem(TiXmlElement * popElem, const gcPanel&) const;
+    TiXmlElement *  MakeUnlinkedSite(const gcExportable &, const gcRegion &, const gcLocus &, size_t index) const;
+    std::vector<TiXmlElement*>  MakeUnlinkedRegionElems(const gcExportable &, const gcRegion&) const;
+    void AddDivergenceElem(TiXmlElement * divergenceElem) const;
+    void AddDivergenceMigrationElem(TiXmlElement * divergenceElem) const;
+    void AddMigrationElem(TiXmlElement * migrationElem);
+
+    wxString        MakeUnlinkedName(wxString regionName, size_t siteIndex) const;
+
+    gcUnphasedMarkers * CheckPhaseMarkers(const gcUnphasedMarkers *,
+                                          wxString individualName,
+                                          const gcLocus & locusRef,
+                                          bool anyZeroes) const;
+
+    /////////////////////////////////////////////////
+    // for parsing cmd file
+    /////////////////////////////////////////////////
+    void                    cmdParseBlock(TiXmlElement *, gcRegion&, size_t numBlocksInRegion);
+    void                    cmdParseComment(TiXmlElement *);
+    void                    cmdParseDivergence(TiXmlElement *);
+    void                    cmdParseDivergences(TiXmlElement *);
+    gcPhaseRecord *         cmdParseIndividual(TiXmlElement *, wxString fileName);
+    void                    cmdParseIndividuals(TiXmlElement *, wxString fileName);
+    void                    cmdParseInfile(TiXmlElement *);
+    void                    cmdParseInfiles(TiXmlElement *);
+    void                    cmdParseOutfile(TiXmlElement *);
+    gcPhenotype &           cmdParsePhenotype(TiXmlElement *);
+    void                    cmdParsePanel(TiXmlElement *);
+    void                    cmdParsePanels(TiXmlElement *);
+    void                    cmdParsePopulation(std::map<wxString,int> &,TiXmlElement *);
+    void                    cmdParsePopulations(TiXmlElement *);
+    void                    cmdParseRegion(TiXmlElement *);
+    void                    cmdParseRegions(TiXmlElement *);
+    void                    cmdParseSpacing(TiXmlElement *, gcRegion&);
+    void                    cmdParseTrait(std::map<wxString,int> &,TiXmlElement *);
+    void                    cmdParseTraitLocation(TiXmlElement *, gcRegion&);
+    void                    cmdParseTraits(TiXmlElement *);
+    GCLocusMatcher          makeLocusMatcher(TiXmlElement *);
+    GCPopMatcher            makePopMatcher(TiXmlElement *);
+    gcPhenotype &           cmdParseGenoResolution(TiXmlElement *);
+
+  public:
+    GCDataStore();
+    virtual ~GCDataStore();
+
+    virtual void NukeContents();
+
+    //////////////////////////////////////////////////////////////
+    // error and warning messages
+    //////////////////////////////////////////////////////////////
+    virtual void        GCFatal             (wxString msg) const;
+    virtual void        GCFatalBatchWarnGUI (wxString msg) const;
+    virtual void        GCFatalUnlessDebug  (wxString msg) const;
+    virtual void        GCError             (wxString msg) const;
+    virtual void        GCWarning           (wxString msg) const;
+    virtual void        GCInfo              (wxString msg) const;
+
+    virtual void    fileRejectingError      (wxString msg,size_t lineNo) const;
+    virtual void    batchFileRejectGuiLog   (wxString msg,size_t lineNo) const;
+    virtual void    warnLog                 (wxString msg) const;
+    virtual void    warnLog                 (wxString msg,size_t lineNo) const;
+
+    virtual bool    guiQuestionBatchLog     (wxString msg,wxString stopButton, wxString continueButton) const;
+
+    //////////////////////////////////////////////////////////////
+    // generic data manipulation
+    //////////////////////////////////////////////////////////////
+    void    Rename(GCQuantum& object, wxString name);
+
+    //////////////////////////////////////////////////////////////
+    // data file methods
+    //////////////////////////////////////////////////////////////
+    GCFile &                    AddDataFile(wxString fullPathFileName);
+    GCFile &                    AddDataFile(wxString fullPathFileName,GCFileFormat,gcGeneralDataType,GCInterleaving);
+    void                        AssignParseLocus(const GCParseLocus &, gcLocus &);
+    bool                        CanAssignParseLocus(const GCParseLocus &, const gcLocus &) const;
+    void                        AssignPop(const GCParsePop &, gcPopulation &);
+    size_t                      GetDataFileCount() const;
+    const GCParseBlock *        GetParseBlock(size_t blockId) const;
+    constBlockVector            GetBlocks(size_t popId, size_t locusId) const;
+    constBlockVector            GetBlocksForLocus(size_t locusId) const;
+    GCFile &                    GetDataFile(size_t fileId);
+    const GCFile &              GetDataFile(size_t fileId) const;
+    const dataFileSet &         GetDataFiles() const;
+
+    bool                        FileInducesHaps(size_t fileId) const;
+
+#if 0
+    void                        AddHapFile(wxString fullPathFileName);
+#endif
+
+    locVector                   GetLociFor(const GCParse &, const GCLocusMatcher &);
+    popVector                   GetPopsFor(const GCParse &, const GCPopMatcher &);
+    size_t                      GetSelectedDataFileCount() const;
+    bool                        HasNoDataFiles() const;
+    bool                        HasUnparsedFiles() const;
+    void                        RemoveDataFile(GCFile & fileRef);
+    void                        RemoveFiles(bool selectedOnly);
+    void                        SelectAllFiles();
+    void                        SetSelected(const GCFile &,bool selected);
+    void                        SetSelected(const gcLocus &,bool selected);
+    void                        SetParseChoice( const GCParse &,
+                                                const GCPopMatcher &,
+                                                const GCLocusMatcher &);
+    void                        UnsetParseChoice(const GCParse &);
+    void                        UnselectAllFiles();
+
+#if 0
+    void                AddMapFile(wxString fullPathFileName);
+    void                AddMapFileAsXml(wxString fullPathFileName);
+    void                AddMapFileAsOldFormat(wxString fullPathFileName);
+#endif
+
+    const GCParse&            GetParse(size_t parseId) const ;
+    const GCParse&            GetParse(const GCFile&) const ;
+    const GCParse&            GetParse(const GCFile&, size_t index) const ;
+    bool                GetSelected(const GCFile&) const;
+    bool                HasParse(const GCFile&) const;
+
+    gcGeneralDataType   GetLegalLocusTypes(size_t locusId) const;
+
+    //////////////////////////////////////////////////////////////
+    // trait methods
+    //////////////////////////////////////////////////////////////
+
+    gcTraitInfo &       AddNewTrait(wxString name);
+
+    //////////////////////////////////////////////////////////////
+    // other methods
+    //////////////////////////////////////////////////////////////
+
+    void            DebugDump(wxString prefix=wxEmptyString) const;
+
+    gcExportable        BuildExportableData() const;
+    void                ThrowLocusWithoutDataType(wxString locusName, wxString regionName) const;
+    bool                FillExportInfo(gcNameResolvedInfo &, gcPhaseInfo &) const;
+    bool                AddLocusData(GCIndividual&,const gcLocus&,GCParseSample&) const;
+    bool                AddLocusData(GCIndividual&,const gcLocus&,GCParseSample&, const wxArrayString & sampleNames) const;
+    GCIndividual &      makeOrFetchInd(wxString name,std::map<wxString,GCIndividual*> &, gcNameResolvedInfo&) const;
+    TiXmlDocument *     ExportFile();
+    void                WriteExportedData(TiXmlDocument *);
+    int                 ProcessCmdFile(wxString fileName);
+
+    gcPhaseInfo *       BuildPhaseInfo(bool carpIfBroken=true) const;
+    void                DiagnosePhaseInfoProblems(const gcPhaseInfo&) const;
+    bool                PhaseInfoHasAnyZeroes() const;
+
+    void SetLamarcCommentString (wxString commentString);
+    wxString    GetOutfileName() const ;
+    void SetOutfileName     (wxString outFileName);
+
+    bool    NeedsDataFor(const gcPopulation & , const gcLocus &) const;
+
+    const GCStructures & GetStructures() const ;
+    GCStructures & GetStructures()       ;
+
+    TiXmlElement *      CmdExportGenoReso(const gcPhenotype &) const;
+    TiXmlElement *      CmdExportIndividuals() const;
+    TiXmlElement *      CmdExportInfile(const GCFile &) const;
+    TiXmlElement *      CmdExportLocus(const gcLocus &) const;
+    TiXmlElement *      CmdExportPhenotype(const gcPhenotype &) const;
+    TiXmlElement *      CmdExportPop(const gcPopulation &) const;
+    TiXmlElement *      CmdExportRegion(const gcRegion &) const;
+    TiXmlElement *      CmdExportSegment(const gcLocus &) const;
+    TiXmlElement *      CmdExportTrait(const gcTraitInfo &) const;
+    TiXmlDocument *     ExportBatch() const;
+    void                WriteBatchFile(TiXmlDocument*,wxString fname);
+
+    virtual void    GettingBusy(const wxString&) const;
+    virtual void    LessBusy(const wxString&) const;
+};
+
+#endif  // GC_DATASTORE_H
+
+//____________________________________________________________________________________
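+// Illustrative sketch only (not part of the upstream sources): roughly how the public
+// GCDataStore interface declared above can be driven from batch code.  "MyDataStore"
+// and the file names are hypothetical; MyDataStore stands in for a concrete subclass
+// that overrides the virtual reporting hooks (GCFatal, GCWarning, ...).
+//
+//     MyDataStore store;
+//     store.AddDataFile(wxT("chrom1.mig"));           // register an input data file
+//     store.ProcessCmdFile(wxT("chrom1_cmd.xml"));    // optional converter command file
+//     TiXmlDocument * doc = store.ExportFile();       // build the LAMARC input XML
+//     store.WriteExportedData(doc);                   // write it out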
diff --git a/src/convModel/gc_datastore_export.cpp b/src/convModel/gc_datastore_export.cpp
new file mode 100644
index 0000000..38d598f
--- /dev/null
+++ b/src/convModel/gc_datastore_export.cpp
@@ -0,0 +1,1684 @@
+// $Id: gc_datastore_export.cpp,v 1.80 2013/11/07 22:46:06 mkkuhner Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_data.h"
+#include "gc_data_missing_err.h"
+#include "gc_datastore.h"
+#include "gc_default.h"
+#include "gc_errhandling.h"
+#include "gc_exportable.h"
+#include "gc_genotype_resolution.h"
+#include "gc_individual.h"
+#include "gc_individual_err.h"
+#include "gc_parse_block.h"
+#include "gc_parse_sample.h"
+#include "gc_phase.h"
+#include "gc_population.h"
+#include "gc_locus.h"
+#include "gc_locus_err.h"
+#include "gc_region.h"
+#include "gc_sequential_data.h"
+#include "gc_strings.h"
+#include "gc_strings_locus.h"
+#include "gc_structure_maps.h"
+#include "gc_structures.h"
+#include "gc_trait.h"
+#include "gc_types.h"
+#include "tinyxml.h"
+#include "xml_strings.h"
+#include "wx/log.h"
+#include "wx/string.h"
+
+//------------------------------------------------------------------------------------
+
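+// CheckPhaseMarkers: validates that the unphased-marker positions reported for an
+// individual fall inside the extent of the given locus (throwing gc_phase_too_small /
+// gc_phase_too_large otherwise) and checks them against the locus locations, then
+// returns a newly allocated copy of the markers, with negative positions shifted up
+// when position zero is not in use.  The caller owns the returned copy.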
+gcUnphasedMarkers *
+GCDataStore::CheckPhaseMarkers( const gcUnphasedMarkers * phaseInfo,
+                                wxString            indName,
+                                const gcLocus &     locusRef,
+                                bool                anyZeroes) const
+{
+    assert(locusRef.HasLength());
+
+    if(phaseInfo->NumMarkers() > 0)
+    {
+        long smallest = anyZeroes ? 0 : 1;
+        if(locusRef.HasOffset()) smallest = locusRef.GetOffset();
+
+        long largest = smallest + locusRef.GetLength() - 1;
+        if(smallest < 0 && largest >= 0 && (!anyZeroes)) largest++;
+
+        if(phaseInfo->Smallest() < smallest)
+        {
+            throw gc_phase_too_small(   phaseInfo->Smallest(),
+                                        indName,
+                                        smallest,
+                                        locusRef.GetName());
+        }
+
+        if(phaseInfo->Largest() > largest)
+        {
+            throw gc_phase_too_large(   phaseInfo->Largest(),
+                                        indName,
+                                        largest,
+                                        locusRef.GetName());
+        }
+    }
+
+    phaseInfo->CheckAgainstLocations(locusRef);
+
+    gcUnphasedMarkers * newMarkers = new gcUnphasedMarkers(*phaseInfo);
+    if(!anyZeroes)
+    {
+        newMarkers->ShiftNegsUp();
+    }
+    return newMarkers;
+}
+
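+// AddDefaultPhaseElem: emits an empty phase element with type "unknown" under the
+// given individual element, used when no unphased-marker information is available.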
+void
+GCDataStore::AddDefaultPhaseElem(TiXmlElement * individualElement) const
+{
+    TiXmlElement * phaseElement = new TiXmlElement( xmlstr::XML_TAG_PHASE.c_str());
+    individualElement->LinkEndChild(phaseElement);
+    phaseElement->SetAttribute( xmlstr::XML_ATTRTYPE_TYPE.c_str(),
+                                xmlstr::XML_ATTRVALUE_UNKNOWN.c_str());
+    TiXmlText * phaseText = new TiXmlText("");
+    phaseElement->LinkEndChild(phaseText);
+}
+
+void
+GCDataStore::AddPhaseElem(TiXmlElement * individualElem, const GCIndividual & indi, const gcLocus & locusRef) const
+// EWFIX.P3.BUG.537 -- FIXED ?? need to do this at the locus level as well ??
+// EWFIX.P4 -- add a locus name attribute in a later release ??
+{
+    const gcUnphasedMarkers * phaseInfo = indi.GetUnphased(locusRef);
+    if(phaseInfo == NULL || phaseInfo->NumMarkers() == 0)
+    {
+        if (locusRef.HasUnphasedMarkers())
+        {
+            phaseInfo = locusRef.GetUnphasedMarkers();
+        }
+    }
+
+    if(phaseInfo == NULL || phaseInfo->NumMarkers() == 0)
+    {
+        AddDefaultPhaseElem(individualElem);
+    }
+    else
+    {
+        // can throw
+        bool anyZeroes = GetStructures().AnyZeroes();   // EWFIX.P4 -- wasteful
+        gcUnphasedMarkers * newPhaseInfo = CheckPhaseMarkers(phaseInfo,
+                                                             indi.GetName(),
+                                                             locusRef,
+                                                             anyZeroes);
+
+        TiXmlElement * phaseElement = new TiXmlElement( xmlstr::XML_TAG_PHASE.c_str());
+        individualElem->LinkEndChild(phaseElement);
+
+        phaseElement->SetAttribute( xmlstr::XML_ATTRTYPE_TYPE.c_str(),
+                                    xmlstr::XML_ATTRVALUE_UNKNOWN.c_str());
+
+        TiXmlText * phaseText = new TiXmlText(newPhaseInfo->AsString().c_str());
+        delete newPhaseInfo;
+        phaseElement->LinkEndChild(phaseText);
+    }
+}
+
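+// AddDataBlockElem (single-site variant): appends a datablock element of the locus's
+// data type holding only the data at siteIndex for this sample; throws if the locus
+// has no data type set.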
+void
+GCDataStore::AddDataBlockElem(TiXmlElement * sampleElement, const GCSequentialData & seqData, const gcLocus & locusRef, size_t siteIndex) const
+{
+    gcSpecificDataType dataType = locusRef.GetDataType();
+    if(dataType == sdatatype_NONE_SET)
+    {
+        wxString msg = wxString::Format(gcerr::missingDataTypeForLocus,locusRef.GetName().c_str());
+        gui_error g(msg.c_str());
+        throw g;
+    }
+
+    wxString dataTypeString = ToWxString(dataType);
+    wxString dataString = " "+seqData.GetData(siteIndex)+" ";
+
+    TiXmlElement * dataBlockElement = new TiXmlElement( xmlstr::XML_TAG_DATABLOCK.c_str());
+    sampleElement->LinkEndChild(dataBlockElement);
+    dataBlockElement->SetAttribute( xmlstr::XML_ATTRTYPE_TYPE.c_str(),
+                                    dataTypeString.c_str());
+
+    TiXmlText * data = new TiXmlText(dataString);
+    dataBlockElement->LinkEndChild(data);
+
+}
+
+void
+GCDataStore::AddDataBlockElem(TiXmlElement * sampleElem, const GCSequentialData & seqData, const gcLocus & locusRef ) const
+{
+    gcSpecificDataType dataType = locusRef.GetDataType();
+    if(dataType == sdatatype_NONE_SET)
+    {
+        wxString msg = wxString::Format(gcerr::missingDataTypeForLocus,locusRef.GetName().c_str());
+        gui_error g(msg.c_str());
+        throw g;
+    }
+
+    TiXmlElement * dataBlockElement = new TiXmlElement( xmlstr::XML_TAG_DATABLOCK.c_str());
+    sampleElem->LinkEndChild(dataBlockElement);
+
+    wxString dataTypeString = ToWxString(dataType);
+    dataBlockElement->SetAttribute( xmlstr::XML_ATTRTYPE_TYPE.c_str(),
+                                    dataTypeString.c_str());
+
+    wxString dataString = wxString::Format(" %s ",seqData.GetData().c_str());
+    TiXmlText * data = new TiXmlText(dataString);
+    dataBlockElement->LinkEndChild(data);
+
+}
+
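+// AddSampleElem (unlinked-site variant): writes a named sample element for one
+// haplotype of the individual and attaches a single-site datablock for the given
+// unlinked locus.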
+void
+GCDataStore::AddSampleElem(TiXmlElement * individualElem, const GCIndividual & individual, const gcRegion & regionRef, const gcLocus & locusRef, size_t siteIndex, size_t hapIndex) const
+{
+
+    const gcSample * sample = individual.GetSample(hapIndex);
+    assert(!locusRef.GetLinked());
+    const GCSequentialData & data = sample->GetData(&locusRef);
+
+    TiXmlElement * sampleElement = new TiXmlElement( xmlstr::XML_TAG_SAMPLE.c_str());
+    individualElem->LinkEndChild(sampleElement);
+    sampleElement->SetAttribute(xmlstr::XML_ATTRTYPE_NAME.c_str(),
+                                sample->GetLabel().c_str());
+
+    AddDataBlockElem(sampleElement,data,locusRef,siteIndex);
+}
+
+void
+GCDataStore::AddSampleElem(TiXmlElement * individualElem, const GCIndividual & individual, const gcRegion & regionRef, size_t hapIndex) const
+{
+    const gcSample * sample = individual.GetSample(hapIndex);
+
+    TiXmlElement * sampleElement = new TiXmlElement( xmlstr::XML_TAG_SAMPLE.c_str());
+    individualElem->LinkEndChild(sampleElement);
+    sampleElement->SetAttribute(xmlstr::XML_ATTRTYPE_NAME.c_str(),
+                                sample->GetLabel().c_str());
+
+    constObjVector loci = m_structures.GetConstDisplayableLinkedLociInMapOrderFor(regionRef.GetId());
+    for(constObjVector::const_iterator i = loci.begin(); i != loci.end(); i++)
+    {
+        const gcLocus * locP = dynamic_cast<const gcLocus*>(*i);
+        assert(locP != NULL);
+
+        const gcLocus & locusRef = *locP;
+        if(locusRef.GetLinked())
+        {
+            try
+            {
+                const GCSequentialData & data = sample->GetData(locP);
+                AddDataBlockElem(sampleElement,data,locusRef);
+            }
+            catch (const gc_data_error& e)
+                // for gc_sample_missing_locus_data
+            {
+                // EWFIX.P3 -- bad formatting because string e.what()
+                // contains quote characters
+                wxString msg = wxString::Format(gcerr::inIndividual,
+                                                individual.GetName().c_str(),
+                                                e.what());
+                throw gc_data_error (msg.c_str());
+            }
+        }
+    }
+}
+
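+// AddGenoResolutionElem: writes a genotype-resolutions element naming the trait and,
+// for each haplotype probability of the phenotype, a haplotype entry with its
+// penetrance and the space-separated allele names.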
+void
+GCDataStore::AddGenoResolutionElem(TiXmlElement * individualElem, const gcPhenotype & pheno) const
+{
+    size_t traitId = pheno.GetTraitId();
+    const gcTraitInfo & trait = GetStructures().GetTrait(traitId);
+    wxString traitName = trait.GetName();
+
+    TiXmlElement * genoResoElement = new TiXmlElement( xmlstr::XML_TAG_GENOTYPE_RESOLUTIONS.c_str());
+    individualElem->LinkEndChild(genoResoElement);
+
+    TiXmlElement * traitNameElement = new TiXmlElement( xmlstr::XML_TAG_TRAIT_NAME.c_str());
+    genoResoElement->LinkEndChild(traitNameElement);
+
+    TiXmlText * traitNameTextElement = new TiXmlText(traitName.c_str());
+    traitNameElement->LinkEndChild(traitNameTextElement);
+
+    const std::vector<gcHapProbability> & haps = pheno.GetHapProbabilities();
+    std::vector<gcHapProbability>::const_iterator i;
+    for(i = haps.begin(); i != haps.end(); i++)
+    {
+        const gcHapProbability & hapProb = *i;
+        double relProb = hapProb.GetPenetrance();
+        const gcIdVec & alleleIds = hapProb.GetAlleleIds();
+
+        TiXmlElement * hapElement = new TiXmlElement( xmlstr::XML_TAG_HAPLOTYPES.c_str());
+        genoResoElement->LinkEndChild(hapElement);
+
+        TiXmlElement * relProbElem = new TiXmlElement( xmlstr::XML_TAG_PENETRANCE.c_str());
+        hapElement->LinkEndChild(relProbElem);
+
+        TiXmlText * probText = new TiXmlText(wxString::Format("%f",relProb).c_str());
+        relProbElem->LinkEndChild(probText);
+
+        TiXmlElement * allelesElement = new TiXmlElement( xmlstr::XML_TAG_ALLELES.c_str());
+        hapElement->LinkEndChild(allelesElement);
+
+        wxString alleles = wxEmptyString;
+        for(size_t j=0; j < alleleIds.size(); j++)
+        {
+            if(j == 0)
+            {
+                alleles += " ";
+            }
+            const gcTraitAllele & traitAllele = GetStructures().GetAllele(alleleIds[j]);
+            alleles += traitAllele.GetName();
+            alleles += " ";
+        }
+
+        TiXmlText * alleleText = new TiXmlText(alleles.c_str());
+        allelesElement->LinkEndChild(alleleText);
+    }
+}
+
+void
+GCDataStore::AddIndividualElem(TiXmlElement * popElem, const GCIndividual & individual, const gcRegion & regionRef, const gcLocus & locusRef, size_t siteIndex) const
+{
+    wxString indName = individual.GetName();
+
+    TiXmlElement * individualElement = new TiXmlElement( xmlstr::XML_TAG_INDIVIDUAL.c_str());
+    popElem->LinkEndChild(individualElement);
+    individualElement->SetAttribute(    xmlstr::XML_ATTRTYPE_NAME.c_str(),
+                                        indName.c_str());
+
+    // NOTE : we don't put phase info in because this is a single unlinked site
+    for(size_t hapIndex=0; hapIndex < individual.GetNumSamples(); hapIndex++)
+    {
+        AddSampleElem(individualElement,individual,regionRef,locusRef,siteIndex,hapIndex);
+    }
+}
+
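+// AddIndividualElem (full-region variant): writes an individual element with its
+// genotype-resolution entries, phase elements for each linked locus (only when the
+// individual has more than one sample), and one sample element per haplotype.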
+void
+GCDataStore::AddIndividualElem(TiXmlElement * popElem, const GCIndividual & individual, const gcRegion & regionRef) const
+{
+    wxString indName = individual.GetName();
+
+    TiXmlElement * individualElement = new TiXmlElement( xmlstr::XML_TAG_INDIVIDUAL.c_str());
+    popElem->LinkEndChild(individualElement);
+    individualElement->SetAttribute(    xmlstr::XML_ATTRTYPE_NAME.c_str(),
+                                        indName.c_str());
+
+    const gcIdSet & phenoIds = individual.GetPhenotypeIds();
+    for(gcIdSet::const_iterator i=phenoIds.begin(); i!=phenoIds.end(); i++)
+    {
+        const gcPhenotype & pheno = GetStructures().GetPhenotype(*i);
+        AddGenoResolutionElem(individualElement,pheno);
+    }
+
+    if(individual.GetNumSamples() > 1)
+        // phase elements are only needed when the individual has more than one sample
+    {
+        gcIdVec locIds = GetStructures().GetLocusIdsForRegionByMapPosition(regionRef.GetId());
+
+        for(gcIdVec::const_iterator i = locIds.begin(); i != locIds.end(); i++)
+        {
+            size_t locusId = (*i);
+            const gcLocus & locusRef = m_structures.GetLocus(locusId);
+            if(locusRef.GetLinked())
+            {
+                AddPhaseElem(individualElement,individual,locusRef);
+            }
+        }
+
+    }
+
+    for(size_t hapIndex=0; hapIndex < individual.GetNumSamples(); hapIndex++)
+    {
+        AddSampleElem(individualElement,individual,regionRef,hapIndex);
+    }
+}
+
+void
+GCDataStore::AddPanelElem(TiXmlElement * popElem, const gcPanel & panel) const
+{
+    TiXmlElement * panelElement = new TiXmlElement( xmlstr::XML_TAG_PANEL.c_str());
+    popElem->LinkEndChild(panelElement);
+    panelElement->SetAttribute(xmlstr::XML_ATTRTYPE_NAME.c_str(), (panel.GetName()).c_str());
+
+    TiXmlElement * sizeElement = new TiXmlElement( xmlstr::XML_TAG_PANELSIZE.c_str());
+    panelElement->LinkEndChild(sizeElement);
+    wxString sizeString = wxString::Format(" %ld ",panel.GetNumPanels());
+    TiXmlText * sizeText = new TiXmlText(sizeString);
+    sizeElement->LinkEndChild(sizeText);
+}
+
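+// MakeBlockElem: builds and returns a block element (not yet attached to a parent)
+// for the locus, with an optional offset, a map position (required when
+// requireMapPosition is true, otherwise defaulting to 1), a length when informative,
+// and explicit marker locations when the locus has them.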
+TiXmlElement *
+GCDataStore::MakeBlockElem(const gcLocus & locusRef, bool requireMapPosition) const
+{
+    TiXmlElement * blockElement  = new TiXmlElement( xmlstr::XML_TAG_BLOCK.c_str());
+    blockElement->SetAttribute( xmlstr::XML_ATTRTYPE_NAME.c_str(),
+                                locusRef.GetName().c_str());
+
+    if(locusRef.HasOffset())
+    {
+        wxString offsetString = wxString::Format(" %ld ",locusRef.GetOffset());
+
+        TiXmlElement * offsetElement  = new TiXmlElement( xmlstr::XML_TAG_OFFSET.c_str());
+        TiXmlText * offsetText = new TiXmlText(offsetString);
+        offsetElement->LinkEndChild(offsetText);
+        blockElement->LinkEndChild(offsetElement);
+    }
+
+    long mapPosition = 1;
+    if(requireMapPosition && !locusRef.HasMapPosition())
+    {
+        wxString msg = wxString::Format(gcerr::locusWithoutMapPosition,
+                                        locusRef.GetName().c_str());
+        gui_error g(msg);
+        throw g;
+    }
+    if(locusRef.HasMapPosition())
+    {
+        mapPosition = locusRef.GetMapPosition();
+    }
+    wxString mapPosString = wxString::Format(" %ld ",mapPosition);
+    TiXmlText * mapPosText = new TiXmlText(mapPosString);
+    TiXmlElement * mapPositionElement  = new TiXmlElement( xmlstr::XML_TAG_MAP_POSITION.c_str());
+    mapPositionElement->LinkEndChild(mapPosText);
+    blockElement->LinkEndChild(mapPositionElement);
+
+    TiXmlElement * lengthElement  = new TiXmlElement( xmlstr::XML_TAG_LENGTH.c_str());
+    if(locusRef.HasTotalLength())
+    {
+        if( ! (locusRef.GetDataType() == sdatatype_DNA && locusRef.GetLength() == locusRef.GetNumMarkers()))
+        {
+            wxString lengthString = wxString::Format(" %ld ",(long)locusRef.GetLength());
+            TiXmlText * lengthText = new TiXmlText(lengthString);
+            lengthElement->LinkEndChild(lengthText);
+            blockElement->LinkEndChild(lengthElement);
+        }
+    }
+    else
+    {
+        if(locusRef.GetDataType() != sdatatype_DNA && locusRef.GetNumMarkers() > 1)
+        {
+            delete lengthElement;
+            wxString msg = wxString::Format(gcerr::missingLengthForLocus,locusRef.GetName().c_str());
+            gui_error g(msg.c_str());
+            throw g;
+        }
+    }
+
+    if(locusRef.HasLocations())
+    {
+        TiXmlElement * locationsElement  = new TiXmlElement( xmlstr::XML_TAG_LOCATIONS.c_str());
+        wxString locationsString = locusRef.GetLocationsAsString();
+        TiXmlText * locationsText = new TiXmlText(locationsString);
+        locationsElement->LinkEndChild(locationsText);
+        blockElement->LinkEndChild(locationsElement);
+    }
+
+    return blockElement;
+}
+
+TiXmlElement *
+GCDataStore::MakeBlockElemWithMapPosition(const gcLocus & locusRef) const
+{
+    return MakeBlockElem(locusRef,true);
+}
+
+TiXmlElement *
+GCDataStore::MakeBlockElemWithoutMapPosition(const gcLocus & locusRef) const
+{
+    return MakeBlockElem(locusRef,false);
+}
+
+void
+GCDataStore::AddEffectivePopSizeElem(TiXmlElement * regionElem, const gcRegion & regionRef) const
+{
+    if(regionRef.HasEffectivePopulationSize())
+    {
+        double psize = regionRef.GetEffectivePopulationSize();
+        TiXmlElement * effPopSizeElem = new TiXmlElement( xmlstr::XML_TAG_EFFECTIVE_POPSIZE.c_str());
+        regionElem->LinkEndChild(effPopSizeElem);
+        TiXmlText * popSizeText = new TiXmlText(wxString::Format("%f",psize).c_str());
+        effPopSizeElem->LinkEndChild(popSizeText);
+    }
+}
+
+#if 0
+void
+GCDataStore::AddSpacingElemSingle(TiXmlElement * regionElem, const gcLocus & locusRef) const
+{
+    TiXmlElement * spacingElement = new TiXmlElement( xmlstr::XML_TAG_SPACING.c_str());
+    regionElem->LinkEndChild(spacingElement);
+    TiXmlElement * blockElement = MakeBlockElemWithoutMapPosition(locusRef);
+    spacingElement->LinkEndChild(blockElement);
+}
+#endif
+
+#if 0 // EWFIX.REMOVE
+TiXmlElement *
+GCDataStore::MakeSpacingElemMulti(const constObjVector & loci, const gcRegion & regionRef) const
+{
+    // check that loci don't overlap
+    GetStructures().VerifyLocusSeparations(regionRef);
+
+    TiXmlElement * spacingElement = new TiXmlElement( xmlstr::XML_TAG_SPACING.c_str());
+
+    long nextAllowable = gcdefault::badMapPosition;
+    wxString lastLocusName;
+
+    for(constObjVector::const_iterator iter = loci.begin(); iter != loci.end(); iter++)
+    {
+        const gcLocus * locP = dynamic_cast<const gcLocus*>(*iter);
+        if(locP == NULL)
+        {
+            wxString msg = wxString::Format(gcerr::corruptedDisplayableLociInMapOrder);
+            gc_implementation_error g(msg.c_str());
+            throw g;
+        }
+
+        if(!locP->HasMapPosition())
+        {
+            wxString msg = wxString::Format(gcerr::locusWithoutMapPosition,
+                                            locP->GetName().c_str());
+            gui_error g(msg);
+            throw g;
+        }
+        if(!locP->HasTotalLength() && ! (locP->GetDataType() == sdatatype_DNA))
+        {
+            wxString msg = wxString::Format(gcerr::locusWithoutLength,
+                                            locP->GetName().c_str());
+            gui_error g(msg);
+            throw g;
+        }
+        long mapPosition = locP->GetMapPosition();
+        size_t length = locP->GetLength();
+
+        if(iter != loci.begin())
+        {
+            if(mapPosition < nextAllowable)
+            {
+                wxString msg = wxString::Format(gcerr::locusOverlap,
+                                                lastLocusName.c_str(),
+                                                nextAllowable-1,
+                                                locP->GetName().c_str(),
+                                                mapPosition);
+                gui_error g(msg);
+                throw g;
+            }
+        }
+
+        nextAllowable = mapPosition + length;
+        lastLocusName = locP->GetName();
+
+        TiXmlElement * blockElement = MakeBlockElemWithMapPosition(*locP);
+        spacingElement->LinkEndChild(blockElement);
+    }
+
+    return spacingElement;
+}
+#endif
+
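+// AddSpacingElem: writes the region's spacing element with one block per linked
+// locus, in map order.  Map positions and offsets are adjusted when position zero is
+// not in use, adjacent loci are checked for overlap, and marker locations are
+// validated against each locus's extent.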
+void
+GCDataStore::AddSpacingElem(TiXmlElement * regionElem, const gcRegion & regionRef) const
+{
+    constObjVector loci = GetStructures().GetConstDisplayableLinkedLociInMapOrderFor(regionRef.GetId());
+
+    bool needMapInfo = (loci.size() > 1);
+
+    bool anyZeroes = GetStructures().AnyZeroes();   // EWFIX.P4 -- wasteful
+
+    TiXmlElement * spacingElement = new TiXmlElement( xmlstr::XML_TAG_SPACING.c_str());
+    regionElem->LinkEndChild(spacingElement);
+
+    bool havePrevious = false;
+    long lastExtent = 0;
+    size_t lastLocusId = 0;
+
+    for(constObjVector::const_iterator i=loci.begin(); i != loci.end(); i++)
+    {
+        const gcLocus * locusP = dynamic_cast<const gcLocus *>(*i);
+        assert(locusP != NULL);
+
+        assert(locusP->GetLinked());
+
+        TiXmlElement * blockElement  = new TiXmlElement( xmlstr::XML_TAG_BLOCK.c_str());
+        blockElement->SetAttribute( xmlstr::XML_ATTRTYPE_NAME.c_str(),
+                                    locusP->GetName().c_str());
+
+        long start = gcdefault::badMapPosition;
+        //////////////////////////////////////////////////////
+        // map position
+        if(! locusP->HasMapPosition())
+        {
+            if(needMapInfo)
+            {
+                // EWFIX.P3 -- should use specific error
+                throw gc_locus_err(wxString::Format(gcerr::locusWithoutMapPosition,
+                                                    locusP->GetName().c_str()));
+            }
+        }
+        else
+        {
+            start = locusP->GetMapPosition();
+            if(!anyZeroes && (start < 0))
+            {
+                start++;
+            }
+            TiXmlElement * mapPositionElement  = new TiXmlElement( xmlstr::XML_TAG_MAP_POSITION.c_str());
+            wxString mapPosString = wxString::Format(" %ld ",start);
+            TiXmlText * mapPosText = new TiXmlText(mapPosString);
+            mapPositionElement->LinkEndChild(mapPosText);
+            blockElement->LinkEndChild(mapPositionElement);
+        }
+
+        //////////////////////////////////////////////////////
+        // length
+        if(locusP->HasTotalLength())
+        {
+            TiXmlElement * lengthElement  = new TiXmlElement( xmlstr::XML_TAG_LENGTH.c_str());
+            wxString lengthString = wxString::Format(" %ld ",(long)locusP->GetTotalLength());
+            TiXmlText * lengthText = new TiXmlText(lengthString);
+            lengthElement->LinkEndChild(lengthText);
+            blockElement->LinkEndChild(lengthElement);
+        }
+
+        if(havePrevious)
+        {
+            if(start <= lastExtent)
+            {
+                long end = locusP->GetMapPosition()+locusP->GetLength()-1;
+                long lastStart = GetStructures().GetLocus(lastLocusId).GetMapPosition();
+                throw gc_locus_overlap(locusP->GetName(),start,end,
+                                       GetStructures().GetLocus(lastLocusId).GetName(),lastStart,lastExtent);
+            }
+        }
+
+        if(needMapInfo)
+        {
+            havePrevious = true;
+            size_t length = locusP->GetLength();
+            long stop = start + length;
+            stop -= 1;    // extent is inclusive: a locus of length L starting at 'start' ends at start+L-1
+            lastExtent = stop;
+            lastLocusId = locusP->GetId();
+        }
+
+        //////////////////////////////////////////////////////
+        // offset
+        long offset = 1;
+        if(locusP->HasOffset())
+        {
+            offset = locusP->GetOffset();
+
+            if(!anyZeroes && (offset < 1))
+            {
+                offset++;
+            }
+            start += offset;
+            TiXmlElement * offsetElement  = new TiXmlElement( xmlstr::XML_TAG_OFFSET.c_str());
+            wxString offsetString = wxString::Format(" %ld ",offset);
+            TiXmlText * offsetText = new TiXmlText(offsetString);
+            offsetElement->LinkEndChild(offsetText);
+            blockElement->LinkEndChild(offsetElement);
+        }
+        else
+        {
+            if(locusP->GetDataType() == sdatatype_SNP && locusP->HasLocations())
+            {
+                // EWFIX.P3 -- should use specific error
+                throw gc_locus_err(wxString::Format(gcerr_locus::offsetMissingSnpLocations,locusP->GetName().c_str()));
+            }
+
+            if(needMapInfo)
+            {
+                bool lengthIsMarkers = false;
+                if(locusP->HasNumMarkers())
+                {
+                    if(locusP->HasLength())
+                    {
+                        lengthIsMarkers = (locusP->GetNumMarkers() == locusP->GetLength());
+                    }
+                    else
+                    {
+                        lengthIsMarkers = (locusP->GetNumMarkers() == 1);
+                    }
+                }
+                if(!lengthIsMarkers)
+                {
+                    throw gc_locus_err(wxString::Format(gcerr_locus::offsetMissingMultiSegment,locusP->GetName().c_str()));
+                }
+            }
+
+        }
+
+        //////////////////////////////////////////////////////
+        // locations
+        if(locusP->HasLocations())
+        {
+            std::vector<long> locations = locusP->GetLocations();
+            wxString locationString = " ";
+
+            if(locusP->HasNumMarkers())
+            {
+                size_t numLocs = locations.size();
+                size_t numMarkers = locusP->GetNumMarkers();
+                if(numLocs != numMarkers)
+                {
+                    throw gc_wrong_location_count(  (long)numLocs,
+                                                    (long)numMarkers,
+                                                    locusP->GetName());
+                }
+            }
+
+            for(size_t i = 0; i < locations.size(); i++)
+            {
+                long loc = locations[i];
+
+                // EWFIX.P3 -- mouse cut and paste from similar
+                // calculations in phase item
+                long smallest = anyZeroes ? 0 : 1;
+                if(locusP->HasOffset()) smallest = locusP->GetOffset();
+
+                long largest = smallest + locusP->GetLength() - 1;
+                if(smallest < 0 && largest >= 0 && (!anyZeroes)) largest++;
+
+                if(loc < smallest)
+                {
+                    throw gc_location_too_small(loc,
+                                                smallest,
+                                                locusP->GetName());
+                }
+
+                if(loc > largest)
+                {
+                    throw gc_location_too_large(loc,
+                                                largest,
+                                                locusP->GetName());
+                }
+
+                if(!anyZeroes && loc < 1)
+                {
+                    loc++;
+                }
+                locationString += wxString::Format("%ld ",loc);
+            }
+
+            TiXmlElement * locationElement  = new TiXmlElement( xmlstr::XML_TAG_LOCATIONS.c_str());
+            TiXmlText * locationText = new TiXmlText(locationString);
+            locationElement->LinkEndChild(locationText);
+            blockElement->LinkEndChild(locationElement);
+        }
+        else
+            // locations are needed when num markers > 1:
+            //   dna          -- required if length > markers
+            //   snp          -- required for recombination
+            //   msat/kallele -- required for recombination
+        {
+            if(locusP->GetNumMarkers() > 1)
+            {
+                if(locusP->GetDataType() == sdatatype_DNA)
+                {
+                    if(locusP->HasTotalLength())
+                    {
+                        if(locusP->GetTotalLength() > locusP->GetNumMarkers())
+                        {
+                            throw gc_locus_err(wxString::Format(gcerr_locus::dnaBigLengthNeedsLocations,locusP->GetName().c_str()));
+                        }
+                    }
+                }
+                else
+                {
+                    bool continueOn = guiQuestionBatchLog(wxString::Format(gcstr::locationsForRecom,locusP->GetName().c_str()),gcstr::abandonExport,gcstr::continueExport);
+                    if(!continueOn)
+                    {
+                        throw gc_abandon_export();
+                    }
+                }
+            }
+        }
+
+        spacingElement->LinkEndChild(blockElement);
+    }
+}
+
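+// AddTraitsElem: when the region carries traits, writes a traits element containing
+// one named trait entry per trait in the set.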
+void
+GCDataStore::AddTraitsElem(TiXmlElement * regionElem, const GCTraitInfoSet & traits) const
+{
+    if(!traits.empty())
+    {
+        TiXmlElement * traitsElement = new TiXmlElement( xmlstr::XML_TAG_TRAITS.c_str());
+        regionElem->LinkEndChild(traitsElement);
+
+        for(GCTraitInfoSet::const_iterator i=traits.begin(); i!=traits.end(); i++)
+        {
+            const gcTraitInfo & traitInfo = m_structures.GetTrait(*i);
+            TiXmlText * name = new TiXmlText(traitInfo.GetName());
+
+            TiXmlElement * nameElement = new TiXmlElement( xmlstr::XML_TAG_NAME.c_str());
+            nameElement->LinkEndChild(name);
+
+            TiXmlElement * traitElement = new TiXmlElement( xmlstr::XML_TAG_TRAIT.c_str());
+            traitElement->LinkEndChild(nameElement);
+
+            traitsElement->LinkEndChild(traitElement);
+        }
+    }
+}
+
+TiXmlElement *
+GCDataStore::MakePopulationElemForUnlinkedLocus(const gcNameResolvedInfo & info, const gcPopulation & popRef, const gcRegion & regionRef, const gcLocus & locusRef, size_t siteIndex) const
+{
+    TiXmlElement * popElement = new TiXmlElement( xmlstr::XML_TAG_POPULATION.c_str());
+    popElement->SetAttribute(   xmlstr::XML_ATTRTYPE_NAME.c_str(),
+                                popRef.GetName().c_str());
+
+    std::vector<const GCIndividual*> individuals = info.GetIndividuals();
+    for(size_t individualIndex=0; individualIndex < individuals.size(); individualIndex++)
+    {
+        assert(individuals[individualIndex] != NULL);
+        const GCIndividual & individual = *(individuals[individualIndex]);
+        AddIndividualElem(popElement,individual,regionRef,locusRef,siteIndex);
+    }
+
+    return popElement;
+}
+
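+// AddPopulationElemForRegion: writes a named population element containing an
+// individual entry for every resolved individual in this region, plus a panel element
+// when panel correction is active and a blessed panel with a positive count exists
+// for this region/population pair.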
+void
+GCDataStore::AddPopulationElemForRegion(TiXmlElement * regionElem, const gcNameResolvedInfo & info, const gcRegion & regionRef, const gcPopulation & popRef) const
+{
+    TiXmlElement * popElement = new TiXmlElement( xmlstr::XML_TAG_POPULATION.c_str());
+    regionElem->LinkEndChild(popElement);
+    popElement->SetAttribute(   xmlstr::XML_ATTRTYPE_NAME.c_str(),
+                                popRef.GetName().c_str());
+
+    std::vector<const GCIndividual*> individuals = info.GetIndividuals();
+    for(size_t index=0; index < individuals.size(); index++)
+    {
+        assert(individuals[index] != NULL);
+        const GCIndividual & individual = *(individuals[index]);
+        AddIndividualElem(popElement,individual,info.GetRegionRef());
+    }
+
+    if (m_structures.GetPanelsState())
+    {
+        if (m_structures.HasPanel(regionRef.GetId(), popRef.GetId()))
+        {
+            const gcPanel &panelRef =  m_structures.GetPanel(regionRef.GetId(), popRef.GetId());
+            if (panelRef.GetBlessed() && (panelRef.GetNumPanels() > 0))
+            {
+                // only emit panels with a count greater than zero
+                AddPanelElem(popElement, panelRef);
+            }
+        }
+    }
+}
+
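+// AddRegionElem: writes a named region element with its effective population size,
+// spacing, and traits, followed by one population element per displayable population.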
+void
+GCDataStore::AddRegionElem(TiXmlElement * data, const gcExportable & expo, const gcRegion & regionRef) const
+{
+    // create the named region element
+    TiXmlElement * regionElement = new TiXmlElement( xmlstr::XML_TAG_REGION.c_str());
+    data->LinkEndChild(regionElement);
+    regionElement->SetAttribute(    xmlstr::XML_ATTRTYPE_NAME.c_str(),
+                                    regionRef.GetName().c_str());
+
+    AddEffectivePopSizeElem(regionElement,regionRef);
+    AddSpacingElem(regionElement,regionRef);
+    AddTraitsElem(regionElement,regionRef.GetTraitInfoSet());
+
+#if 0
+    TiXmlElement * effPopSizeElem = MakeEffectivePopSizeElem(regionRef);
+    if(effPopSizeElem != NULL)
+    {
+        regionElement->LinkEndChild(effPopSizeElem);
+    }
+
+    TiXmlElement * spacingElement = MakeSpacingElem(regionRef);
+    regionElement->LinkEndChild(spacingElement);
+
+    const GCTraitInfoSet & traits = regionRef.GetTraitInfoSet();
+    if(!traits.empty())
+    {
+        TiXmlElement * traitsElement = MakeTraitsElem(traits);
+        regionElement->LinkEndChild(traitsElement);
+    }
+#endif
+
+    // write out info for each population
+    constObjVector pops = m_structures.GetConstDisplayablePops();
+    for(constObjVector::const_iterator iter = pops.begin(); iter != pops.end(); iter++)
+    {
+        const gcPopulation * popP = dynamic_cast<const gcPopulation*>(*iter);
+        assert(popP != NULL);
+        const gcNameResolvedInfo & info = expo.GetInfo(*popP,regionRef);
+        AddPopulationElemForRegion(regionElement,info,regionRef,*popP);
+    }
+}
+
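+// AddDivergenceElem: writes the divergence force element with a fixed linear prior
+// (bounds 0.0 .. 0.01), a USER method and evenly stepped start values for each parent,
+// and a population tree listing one epoch boundary (child pair plus ancestor) per
+// parent.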
+void
+GCDataStore::AddDivergenceElem(TiXmlElement * forces) const
+{
+    wxLogVerbose("In AddDivergenceElem");  // JMDBG
+    // create divergence element
+    TiXmlElement * divergence = new TiXmlElement( xmlstr::XML_TAG_DIVERGENCE.c_str());
+    forces->LinkEndChild(divergence);
+
+    // boilerplate
+    TiXmlElement * prior = new TiXmlElement( xmlstr::XML_TAG_PRIOR.c_str());
+    prior->SetAttribute(xmlstr::XML_ATTRTYPE_TYPE.c_str(), "linear");
+    divergence->LinkEndChild(prior);
+
+    TiXmlElement * paramindex = new TiXmlElement( xmlstr::XML_TAG_PARAMINDEX.c_str());
+    wxString dataString = " default ";
+    TiXmlText * data = new TiXmlText(dataString);
+    paramindex->LinkEndChild(data);
+    prior->LinkEndChild(paramindex);
+
+    TiXmlElement * lower = new TiXmlElement( xmlstr::XML_TAG_PRIORLOWERBOUND.c_str());
+    dataString = " 0.0 ";
+    data = new TiXmlText(dataString);
+    lower->LinkEndChild(data);
+    prior->LinkEndChild(lower);
+
+    TiXmlElement * upper = new TiXmlElement( xmlstr::XML_TAG_PRIORUPPERBOUND.c_str());
+    dataString = " 0.01 ";
+    data = new TiXmlText(dataString);
+    upper->LinkEndChild(data);
+    prior->LinkEndChild(upper);
+    wxLogVerbose("    boilerplate out");  // JMDBG
+
+    // variable values
+    gcDisplayOrder parids = m_structures.GetParentIds();
+
+    TiXmlElement * method = new TiXmlElement( xmlstr::XML_TAG_METHOD.c_str());
+    dataString = " ";
+    for(gcDisplayOrder::const_iterator iter = parids.begin(); iter != parids.end(); iter++)
+    {
+        dataString += " USER ";
+    }
+    data = new TiXmlText(dataString);
+    method->LinkEndChild(data);
+    divergence->LinkEndChild(method);
+
+    TiXmlElement * startval = new TiXmlElement( xmlstr::XML_TAG_START_VALUES.c_str());
+    float sval = 0;
+    dataString = " ";
+    for(gcDisplayOrder::const_iterator iter = parids.begin(); iter != parids.end(); iter++)
+    {
+        sval += 0.002;
+        dataString += wxString::Format(_T(" %f "),sval);
+    }
+    data = new TiXmlText(dataString);
+    startval->LinkEndChild(data);
+    divergence->LinkEndChild(startval);
+    wxLogVerbose("    start values out");  // JMDBG
+
+    TiXmlElement * poptree = new TiXmlElement( xmlstr::XML_TAG_POPTREE.c_str());
+    divergence->LinkEndChild(poptree);
+
+    for(gcDisplayOrder::const_iterator iter = parids.begin(); iter != parids.end(); iter++)
+    {
+        TiXmlElement * epochb = new TiXmlElement( xmlstr::XML_TAG_EPOCH_BOUNDARY.c_str());
+
+        TiXmlElement * newpop = new TiXmlElement( xmlstr::XML_TAG_NEWPOP.c_str());
+        const gcParent & parent = m_structures.GetParent(*iter);
+        wxLogVerbose("    parentid: %i child1id: %i child2id: %i",(int)*iter, (int)parent.GetChild1Id(), (int)parent.GetChild2Id());  // JMDBG
+
+        dataString = " ";
+        if (m_structures.IsPop(parent.GetChild1Id()))
+        {
+            dataString += m_structures.GetPop(parent.GetChild1Id()).GetName().c_str();
+        }
+        else
+        {
+            dataString += m_structures.GetParent(parent.GetChild1Id()).GetName().c_str();
+        }
+
+        dataString += " ";
+        if (m_structures.IsPop(parent.GetChild2Id()))
+        {
+            dataString += m_structures.GetPop(parent.GetChild2Id()).GetName().c_str();
+        }
+        else
+        {
+            dataString += m_structures.GetParent(parent.GetChild2Id()).GetName().c_str();
+        }
+        dataString += " ";
+
+        data = new TiXmlText(dataString);
+        newpop->LinkEndChild(data);
+        epochb->LinkEndChild(newpop);
+
+        TiXmlElement * ancestor = new TiXmlElement( xmlstr::XML_TAG_ANCESTOR.c_str());
+        dataString = " ";
+        dataString += parent.GetName().c_str();
+        dataString += " ";
+        data = new TiXmlText(dataString);
+        ancestor->LinkEndChild(data);
+        epochb->LinkEndChild(ancestor);
+
+        poptree->LinkEndChild(epochb);
+    }
+    wxLogVerbose("    done");  // JMDBG
+}
+
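+// AddMigrationElem: emits either a divergence-migration or a plain migration force
+// element (creating a default migration matrix first in the batch-converter case),
+// then fills in row-by-row start values, methods, profiles, and constraints for every
+// population/parent pair, using placeholder "not allowed" entries where no migration
+// is defined.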
+void
+GCDataStore::AddMigrationElem(TiXmlElement * forces)
+{
+    wxLogVerbose("In AddMigrationElem");  // JMDBG
+    //wxLogMessage("In AddMigrationElem");  // JMDBG
+    //cout << "In AddMigrationElem" << endl;
+
+    // create migration element
+    TiXmlElement * mig;
+    if (m_structures.GetDivMigMatrixDefined())
+    {
+        wxLogVerbose("Divergence Migration Matrix being output");  // JMDBG
+        //cout << "Divergence Migration Matrix being output" << endl;
+        mig = new TiXmlElement( xmlstr::XML_TAG_DIVMIG.c_str());
+    }
+    else if (m_structures.GetMigMatrixDefined())
+    {
+        wxLogVerbose("Migration Matrix being output");  // JMDBG
+        //cout << "Migration Matrix being output" << endl;
+        mig = new TiXmlElement( xmlstr::XML_TAG_MIGRATION.c_str());
+    }
+    else
+    {
+        // neither a migration nor a divergence-migration matrix exists (batch converter case)
+        // so create the default migration matrix
+        wxLogVerbose("No Migration Matrix exists so creating it");  // JMDBG
+        //cout << "No Migration Matrix exists so creating it" << endl;
+
+        if ( m_structures.GetDivergenceState())
+        {
+            mig = new TiXmlElement( xmlstr::XML_TAG_DIVMIG.c_str());
+            //cout << "Divergence Migration Matrix being output" << endl;
+        }
+        else
+        {
+            mig = new TiXmlElement( xmlstr::XML_TAG_MIGRATION.c_str());
+            //cout << "Migration Matrix being output" << endl;
+        }
+        m_structures.MakeMigrationMatrix();
+#if 0
+        gcDisplayOrder popids = m_structures.GetDisplayablePopIds();
+
+        for(gcDisplayOrder::const_iterator iter = popids.begin(); iter != popids.end(); iter++)
+        {
+            for(gcDisplayOrder::const_iterator jter = popids.begin(); jter != popids.end(); jter++)
+            {
+                if (*jter!=*iter)
+                {
+                    gcMigration Mig1 = m_structures.MakeMigration(true, *iter, *jter );
+                    gcMigration Mig2 = m_structures.MakeMigration(true, *jter, *iter );
+                }
+            }
+        }
+#endif
+    }
+    forces->LinkEndChild(mig);
+
+    wxString startString = " ";
+    wxString methodString = " ";
+    wxString profilesString = " ";
+    wxString constraintsString = " ";
+
+    constObjVector popsToDisplay =  m_structures.GetConstDisplayablePops();
+    if (popsToDisplay.size() > 0)
+    {
+
+        // build full migration matrix data in a row by row manner
+        const gcDisplayOrder popids = m_structures.GetDisplayablePopIds();
+        const gcDisplayOrder parids = m_structures.GetParentIds();
+
+        // populations first
+        for(gcDisplayOrder::const_iterator iter = popids.begin(); iter != popids.end(); iter++)
+        {
+            for(gcDisplayOrder::const_iterator jter = popids.begin(); jter != popids.end(); jter++)
+            {
+                if(m_structures.HasMigration(*iter, *jter))
+                {
+                    // population / population pairs
+                    const gcMigration mig = m_structures.GetMigration(*iter, *jter);
+                    startString += mig.GetStartValueString();
+                    methodString += mig.GetMethodString();
+                    profilesString += mig.GetProfileAsString();
+                    constraintsString += mig.GetConstraintString();
+                }
+                else
+                {
+                    // doesn't exist so not allowed
+                    startString += "0";
+                    methodString += gcstr_mig::migmethodUser;
+                    profilesString += gcstr_mig::migprofileNone ;
+                    constraintsString += gcstr_mig::migconstraintInvalid;
+                }
+                startString += " ";
+                methodString += " ";
+                profilesString += " ";
+                constraintsString += " ";
+            }
+
+            if (parids.size() > 0)
+            {
+                // population / parent pairs
+                for(gcDisplayOrder::const_iterator kter = parids.begin(); kter != parids.end(); kter++)
+                {
+                    if(m_structures.HasMigration(*iter, *kter))
+                    {
+                        // migration exists, get values
+                        const gcMigration mig = m_structures.GetMigration(*iter, *kter);
+                        startString += mig.GetStartValueString();
+                        methodString += mig.GetMethodString();
+                        profilesString += mig.GetProfileAsString();
+                        constraintsString += mig.GetConstraintString();
+                    }
+                    else
+                    {
+                        // doesn't exist so not allowed
+                        startString += "0";
+                        methodString += gcstr_mig::migmethodUser;
+                        profilesString += gcstr_mig::migprofileNone ;
+                        constraintsString += gcstr_mig::migconstraintInvalid;
+                    }
+                    startString += " ";
+                    methodString += " ";
+                    profilesString += " ";
+                    constraintsString += " ";
+                }
+            }
+
+            // spacers
+        }
+
+        // now parents
+        if (parids.size() > 0)
+        {
+            for(gcDisplayOrder::const_iterator iter = parids.begin(); iter != parids.end(); iter++)
+            {
+                for(gcDisplayOrder::const_iterator jter = popids.begin(); jter != popids.end(); jter++)
+                {
+                    if(m_structures.HasMigration(*iter, *jter))
+                    {
+                        // parent / population pairs
+                        const gcMigration mig = m_structures.GetMigration(*iter, *jter);
+                        startString += mig.GetStartValueString();
+                        methodString += mig.GetMethodString();
+                        profilesString += mig.GetProfileAsString();
+                        constraintsString += mig.GetConstraintString();
+                    }
+                    else
+                    {
+                        // doesn't exist so not allowed
+                        startString += "0";
+                        methodString += gcstr_mig::migmethodUser;
+                        profilesString += gcstr_mig::migprofileNone;
+                        constraintsString += gcstr_mig::migconstraintInvalid;
+                    }
+                    startString += " ";
+                    methodString += " ";
+                    profilesString += " ";
+                    constraintsString += " ";
+                }
+                // parent / parent pairs
+                for(gcDisplayOrder::const_iterator kter = parids.begin(); kter != parids.end(); kter++)
+                {
+                    if(m_structures.HasMigration(*iter, *kter))
+                    {
+                        // migration exists, get values
+                        const gcMigration mig = m_structures.GetMigration(*iter, *kter);
+                        startString += mig.GetStartValueString();
+                        methodString += mig.GetMethodString();
+                        profilesString += mig.GetProfileAsString();
+                        constraintsString += mig.GetConstraintString();
+                    }
+                    else
+                    {
+                        // doesn't exist so not allowed
+                        startString += "0";
+                        methodString += gcstr_mig::migmethodUser;
+                        profilesString += gcstr_mig::migprofileNone;
+                        constraintsString += gcstr_mig::migconstraintInvalid;
+                    }
+                    startString += " ";
+                    methodString += " ";
+                    profilesString += " ";
+                    constraintsString += " ";
+                }
+            }
+        }
+    }
+
+    TiXmlElement * stvals = new TiXmlElement( xmlstr::XML_TAG_START_VALUES.c_str());
+    TiXmlText * data = new TiXmlText(startString);
+    stvals->LinkEndChild(data);
+    mig->LinkEndChild(stvals);
+
+    TiXmlElement * method = new TiXmlElement( xmlstr::XML_TAG_METHOD.c_str());
+    data = new TiXmlText(methodString);
+    method->LinkEndChild(data);
+    mig->LinkEndChild(method);
+
+    TiXmlElement * maxevts = new TiXmlElement( xmlstr::XML_TAG_MAX_EVENTS.c_str());
+    wxString dataString = " 10000 ";
+    data = new TiXmlText(dataString);
+    maxevts->LinkEndChild(data);
+    mig->LinkEndChild(maxevts);
+
+    TiXmlElement * profiles = new TiXmlElement( xmlstr::XML_TAG_PROFILES.c_str());
+    data = new TiXmlText(profilesString);
+    profiles->LinkEndChild(data);
+    mig->LinkEndChild(profiles);
+
+    TiXmlElement * constraints = new TiXmlElement( xmlstr::XML_TAG_CONSTRAINTS.c_str());
+    data = new TiXmlText(constraintsString);
+    constraints->LinkEndChild(data);
+    mig->LinkEndChild(constraints);
+
+    TiXmlElement * prior = new TiXmlElement( xmlstr::XML_TAG_PRIOR.c_str());
+    prior->SetAttribute(xmlstr::XML_ATTRTYPE_TYPE.c_str(), "linear");
+    mig->LinkEndChild(prior);
+
+    TiXmlElement * paramindex = new TiXmlElement( xmlstr::XML_TAG_PARAMINDEX.c_str());
+    dataString = " default ";
+    data = new TiXmlText(dataString);
+    paramindex->LinkEndChild(data);
+    prior->LinkEndChild(paramindex);
+
+    TiXmlElement * lower = new TiXmlElement( xmlstr::XML_TAG_PRIORLOWERBOUND.c_str());
+    dataString = " 0.0 ";
+    data = new TiXmlText(dataString);
+    lower->LinkEndChild(data);
+    prior->LinkEndChild(lower);
+
+    TiXmlElement * upper = new TiXmlElement( xmlstr::XML_TAG_PRIORUPPERBOUND.c_str());
+    dataString = " 100.0 ";
+    data = new TiXmlText(dataString);
+    upper->LinkEndChild(data);
+    prior->LinkEndChild(upper);
+    wxLogVerbose("    xml out");  // JMDBG
+}
+
+wxString
+GCDataStore::MakeUnlinkedName(wxString regionName, size_t index) const
+{
+    return wxString::Format(gcstr::regionNameUnlinked,regionName.c_str(),(int)index);
+}
+
+TiXmlElement *
+GCDataStore::MakeUnlinkedSite(const gcExportable & expo, const gcRegion & regionRef, const gcLocus & locus, size_t index) const
+{
+    TiXmlElement * regionElement = new TiXmlElement( xmlstr::XML_TAG_REGION.c_str());
+    regionElement->SetAttribute(    xmlstr::XML_ATTRTYPE_NAME.c_str(),
+                                    MakeUnlinkedName(regionRef.GetName(),index).c_str());
+
+    // EWFIX.P3 -- need to fix spacing info ??
+
+    constObjVector pops = m_structures.GetConstDisplayablePops();
+    for(constObjVector::const_iterator iter=pops.begin(); iter != pops.end(); iter++)
+    {
+        const gcPopulation * popP = dynamic_cast<const gcPopulation*>(*iter);
+        assert(popP != NULL);
+
+        const gcNameResolvedInfo & info = expo.GetInfo(*popP,regionRef);
+        TiXmlElement * popElement =
+            MakePopulationElemForUnlinkedLocus(info,*popP,regionRef,locus,index);
+        if(popElement != NULL)
+        {
+            regionElement->LinkEndChild(popElement);
+        }
+    }
+    return regionElement;
+}
+
+std::vector<TiXmlElement*>
+GCDataStore::MakeUnlinkedRegionElems(const gcExportable & expo, const gcRegion & regionRef) const
+{
+    std::vector<TiXmlElement*> elements;
+
+    // Access loci in order of creation -- a sound choice for the batch
+    // converter and for loci within a single input file, but less certain to
+    // be what the user intends if loci have been rearranged in the GUI.
+    gcIdVec locIds = GetStructures().GetLocusIdsForRegionByCreation(regionRef.GetId());
+
+    for(gcIdVec::const_iterator i = locIds.begin(); i != locIds.end(); i++)
+    {
+        size_t locusId = *i;
+        const gcLocus & locus = m_structures.GetLocus(locusId);
+        if(!locus.GetLinked())
+        {
+            for(size_t index=0; index < locus.GetNumMarkers(); index++)
+            {
+                TiXmlElement * unlinkedElem = MakeUnlinkedSite(expo,regionRef,locus,index);
+                elements.push_back(unlinkedElem);
+            }
+        }
+    }
+
+    return elements;
+}
+
+
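+// Builds the complete converter output document.  The overall shape of the
+// generated XML is roughly the following (a sketch only -- the actual tag
+// names are the xmlstr:: constants used below):
+//
+//   <lamarc version="...">
+//     <format> ...coordinate-conversion flag... </format>
+//     <forces> migration [and divergence] </forces>   (only with >1 population)
+//     <data> one region element per linked region, plus one region element
+//            per unlinked site </data>
+//   </lamarc>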
+TiXmlDocument *
+GCDataStore::ExportFile()
+{
+    TiXmlDocument * docP = new TiXmlDocument();
+    try
+    {
+        TiXmlDeclaration * decl = new TiXmlDeclaration( "1.0", "", "" );
+        docP->LinkEndChild( decl );
+
+        if(!(m_commentString.IsEmpty()))
+        {
+            TiXmlComment * comment = new TiXmlComment();
+            comment->SetValue(m_commentString.c_str());
+            docP->LinkEndChild(comment);
+        }
+
+        TiXmlElement * lamarc = new TiXmlElement( xmlstr::XML_TAG_LAMARC.c_str() );
+        docP->LinkEndChild( lamarc );
+        lamarc->SetAttribute( xmlstr::XML_ATTRTYPE_VERSION.c_str(),VERSION);
+
+        TiXmlElement * format = new TiXmlElement( xmlstr::XML_TAG_FORMAT.c_str() );
+        lamarc->LinkEndChild( format );
+        TiXmlElement * coordMunge = new TiXmlElement( xmlstr::XML_TAG_CONVERT_OUTPUT.c_str() );
+        format->LinkEndChild( coordMunge );
+        bool anyZeroes = GetStructures().AnyZeroes();   // EWFIX.P4 -- wasteful
+        TiXmlText * doConvertText = new TiXmlText(anyZeroes ? "false" : "true");
+        coordMunge->LinkEndChild(doConvertText);
+
+        // add migration only when there is more than one displayable
+        // population (previously keyed on m_structures.GetPopCount())
+        if (m_structures.GetConstDisplayablePops().size() > 1)
+        {
+            TiXmlElement * forces = new TiXmlElement( xmlstr::XML_TAG_FORCES.c_str() );
+            lamarc->LinkEndChild( forces );
+            AddMigrationElem(forces);
+
+            if (m_structures.GetDivergenceState())
+            {
+                AddDivergenceElem(forces);
+            }
+        }
+
+        TiXmlElement * data = new TiXmlElement( xmlstr::XML_TAG_DATA.c_str() );
+        lamarc->LinkEndChild( data );
+
+        const gcExportable exportable = BuildExportableData();
+
+        // do all regions that contain linked data
+        constObjVector regions = m_structures.GetConstDisplayableRegions();
+        for(constObjVector::const_iterator gIter = regions.begin();
+            gIter != regions.end(); gIter++)
+        {
+            const gcRegion * regionP = dynamic_cast<const gcRegion*>(*gIter);
+            assert(regionP != NULL);
+            if(m_structures.RegionHasAnyLinkedLoci(regionP->GetId()))
+            {
+                AddRegionElem(data,exportable,*regionP);
+            }
+        }
+
+        // do all regions that contain unlinked data
+        for(constObjVector::const_iterator gIter = regions.begin();
+            gIter != regions.end(); gIter++)
+        {
+            const gcRegion * regionP = dynamic_cast<const gcRegion*>(*gIter);
+            assert(regionP != NULL);
+            if(m_structures.RegionHasAnyUnLinkedLoci(regionP->GetId()))
+            {
+                std::vector<TiXmlElement *> regionElems = MakeUnlinkedRegionElems(exportable,*regionP);
+                for(std::vector<TiXmlElement*>::iterator i=regionElems.begin(); i != regionElems.end(); i++)
+                {
+                    data->LinkEndChild( *i );
+                }
+            }
+        }
+    }
+    catch(const gc_ex & e)
+    {
+        delete docP;
+        throw;
+    }
+
+    return docP;
+}
+
+void
+GCDataStore::WriteExportedData(TiXmlDocument * docP)
+{
+    docP->SaveFile( m_outfileName.c_str() );
+}
+
+void
+GCDataStore::ThrowLocusWithoutDataType(wxString locusName, wxString regionName) const
+{
+    wxString msg = wxString::Format(gcerr::locusWithoutDataType,
+                                    locusName.c_str(),
+                                    regionName.c_str());
+    gui_error g(msg);
+    throw g;
+}
+
+GCIndividual &
+GCDataStore::makeOrFetchInd(    wxString indName,
+                                std::map<wxString,GCIndividual*> & nameDict,
+                                gcNameResolvedInfo & info) const
+{
+    if(nameDict.find(indName) == nameDict.end())
+    {
+        GCIndividual * ind = new GCIndividual(indName,info.GetRegionRef());
+        info.AddIndividual(ind);
+        nameDict[indName] = ind;
+    }
+    std::map<wxString,GCIndividual*>::iterator iter = nameDict.find(indName);
+    assert(iter != nameDict.end());
+
+    GCIndividual & ind = *((*iter).second);
+    return ind;
+}
+
+bool
+GCDataStore::AddLocusData(GCIndividual & ind,
+                          const gcLocus & locus,
+                          GCParseSample & sample) const
+{
+    size_t samples = sample.GetSequencesPerLabel();
+    bool foundAnything = false;
+    wxString sampleName = sample.GetLabel();
+    for(size_t i=0; i < samples; i++)
+    {
+        if(samples > 1 || (sampleName == ind.GetName()))
+        {
+            // EWFIX.P4.BUG.564  -- need to preserve information about how we made
+            // this name so we can do better error reporting
+            sampleName = wxString::Format("%s_%d",sample.GetLabel().c_str(),(int)i);
+        }
+        ind.AddSample(sampleName,locus,&(sample.GetData(i)));
+        foundAnything = true;
+    }
+    return foundAnything;
+}
+
+bool
+GCDataStore::AddLocusData(GCIndividual & ind,
+                          const gcLocus & locus,
+                          GCParseSample & sample,
+                          const wxArrayString & sampleNames) const
+{
+    size_t numNames = sampleNames.Count();
+    size_t numSamples = sample.GetSequencesPerLabel();
+    bool foundAnything = false;
+    if((numNames != 0) && (numNames != numSamples))
+    {
+        // EWFIX.P4.BUG.564 -- use more data to make a better message
+        throw gc_ind_wrong_sample_count(ind);
+    }
+    for(size_t i=0; i < numSamples; i++)
+    {
+        wxString sampleName = "";
+        if(numNames != 0) sampleName = sampleNames[i];
+        ind.AddSample(sampleName,locus,&(sample.GetData(i)));
+        foundAnything = true;
+    }
+    return foundAnything;
+}
+
+bool
+GCDataStore::FillExportInfo(gcNameResolvedInfo & info, gcPhaseInfo & phaseInfo) const
+{
+    bool regionFoundAnything = false;
+    std::map<wxString,GCIndividual*> nameDict;
+
+    size_t popId    = info.GetPopRef().GetId();
+    size_t regionId = info.GetRegionRef().GetId();
+
+    // get list of loci for region
+    constObjVector loci = m_structures.GetConstDisplayableLociInMapOrderFor(regionId);
+
+    for(constObjVector::const_iterator lIter = loci.begin();
+        lIter != loci.end(); lIter++)
+    {
+        const gcLocus * locusP = dynamic_cast<const gcLocus*>(*lIter);
+        assert(locusP != NULL);
+
+        wxString locusName = locusP->GetName();
+
+        gcSpecificDataType dtype = locusP->GetDataType();
+        if(dtype == sdatatype_NONE_SET)
+        {
+            ThrowLocusWithoutDataType(locusP->GetName(),info.GetRegionRef().GetName());
+        }
+
+        size_t locusId = locusP->GetId();
+        constBlockVector blocks = GetBlocks(popId,locusId);
+        for(constBlockVector::const_iterator bIter = blocks.begin();
+            bIter != blocks.end(); bIter++)
+        {
+            assert(*bIter != NULL);
+            bool locusFoundAnything = false;
+
+            const GCParseBlock & block = **bIter;
+
+            const GCParseSamples & samples = block.GetSamples();
+
+            // EWFIX.P3 --  what I really need to do here is
+            // to have an individual name and a sample name for each
+            // member data item within each sample
+            for(size_t i=0; i < samples.size(); i++)
+            {
+                GCParseSample & sample = *(samples[i]);
+
+                const wxString & thisName = sample.GetLabel();
+                bool isInd = phaseInfo.HasIndividualRecord(thisName);
+                bool isSam = phaseInfo.HasSampleRecord(thisName);
+
+                if( (isInd) && (isSam) )
+                {
+                    const gcPhaseRecord & rec1 = phaseInfo.GetIndividualRecord(thisName);
+                    const gcPhaseRecord & rec2 = phaseInfo.GetSampleRecord(thisName);
+                    throw gc_both_individual_and_sample(thisName,rec1,rec2);
+                }
+
+                if (isInd)
+                {
+                    GCIndividual & ind = makeOrFetchInd(thisName,nameDict,info);
+                    const gcPhaseRecord & rec = phaseInfo.GetIndividualRecord(thisName);
+
+                    // get phenotype names
+                    // find those that apply to this region info.GetRegionRef();
+                    // get geno resolutions for them
+                    const gcIdSet & phenoIds = rec.GetPhenotypeIds();
+                    const GCTraitInfoSet & traits = info.GetRegionRef().GetTraitInfoSet();
+                    for(gcIdSet::const_iterator i=phenoIds.begin(); i != phenoIds.end(); i++)
+                    {
+                        size_t phenotypeId = *i;
+                        for(GCTraitInfoSet::const_iterator j=traits.begin(); j != traits.end(); j++)
+                        {
+                            const gcTraitInfo & traitRef = GetStructures().GetTrait(*j);
+                            const gcPhenotype & phenoRef = GetStructures().GetPhenotype(phenotypeId);
+                            if(traitRef.HasPhenotype(phenoRef))
+                            {
+                                ind.AddPhenotype(phenoRef);
+                            }
+                        }
+                    }
+
+                    if(rec.HasUnphased(locusName))
+                    {
+                        ind.AddPhase(*locusP,rec.GetUnphased(locusName));
+                    }
+                    if(rec.HasSamples())
+                    {
+                        const wxArrayString & sampleNames = rec.GetSamples();
+                        locusFoundAnything = AddLocusData(ind,*locusP,sample,sampleNames) || locusFoundAnything;
+                    }
+                    else
+                    {
+                        locusFoundAnything = AddLocusData(ind,*locusP,sample) || locusFoundAnything;
+                    }
+                }
+
+                if (isSam)
+                {
+                    const gcPhaseRecord & rec = phaseInfo.GetSampleRecord(thisName);
+                    wxString indName;
+                    if(rec.HasIndividual())
+                    {
+                        indName = rec.GetIndividual();
+                    }
+                    else
+                    {
+                        indName = gcdefault::createdIndividualPrefix;
+                        assert(rec.HasSamples());
+                        const wxArrayString & samples = rec.GetSamples();
+                        for(size_t i = 0; i < samples.Count(); i++)
+                        {
+                            indName = wxString::Format("%s:%s",
+                                                       indName.c_str(),
+                                                       samples[i].c_str());
+                        }
+                    }
+                    GCIndividual & ind = makeOrFetchInd(indName,nameDict,info);
+                    // EWFIX.P3 -- refactor -- appears above
+                    // get phenotype names
+                    // find those that apply to this region info.GetRegionRef();
+                    // get geno resolutions for them
+                    const gcIdSet & phenoIds = rec.GetPhenotypeIds();
+                    const GCTraitInfoSet & traits = info.GetRegionRef().GetTraitInfoSet();
+                    for(gcIdSet::const_iterator i=phenoIds.begin(); i != phenoIds.end(); i++)
+                    {
+                        size_t phenotypeId = *i;
+                        for(GCTraitInfoSet::const_iterator j=traits.begin(); j != traits.end(); j++)
+                        {
+                            const gcTraitInfo & traitRef = GetStructures().GetTrait(*j);
+                            const gcPhenotype & phenoRef = GetStructures().GetPhenotype(phenotypeId);
+                            if(traitRef.HasPhenotype(phenoRef))
+                            {
+                                ind.AddPhenotype(phenoRef);
+                            }
+                        }
+                    }
+                    locusFoundAnything = AddLocusData(ind,*locusP,sample) || locusFoundAnything;
+                    if(rec.HasUnphased(locusName))
+                    {
+                        ind.AddPhase(*locusP,rec.GetUnphased(locusName));
+                    }
+                }
+
+                if(!isInd && !isSam)
+                    // doesn't have any phase information
+                {
+                    GCIndividual & ind = makeOrFetchInd(thisName,nameDict,info);
+                    locusFoundAnything = AddLocusData(ind,*locusP,sample) || locusFoundAnything;
+                }
+            }
+
+#if 0
+            // At the moment, this cannot happen without
+            // causing another error (either missing_pop_region or
+            // missing_sample).
+            if(!locusFoundAnything)
+                // EWFIX.P3.BUG.511 -- eventually this restriction should
+                // be relaxed, but at the moment we require all pop,locus
+                // pairs have data
+            {
+                throw gc_data_missing_pop_locus(info.GetPopRef().GetName(),locusP->GetName());
+            }
+#endif
+            regionFoundAnything = regionFoundAnything || locusFoundAnything;
+        }
+    }
+    if(!regionFoundAnything)
+        // EWFIX.P3.BUG.511 -- eventually this restriction should
+        // be relaxed, but at the moment we require all pop,locus
+        // pairs have data
+    {
+        throw gc_data_missing_pop_region(info.GetPopRef().GetName(),info.GetRegionRef().GetName());
+    }
+    return regionFoundAnything;
+}
+
+gcExportable
+GCDataStore::BuildExportableData() const
+{
+    bool foundAnything = false;
+    gcExportable exportable;
+
+    constObjVector regions  = m_structures.GetConstDisplayableRegions();
+    constObjVector pops     = m_structures.GetConstDisplayablePops();
+
+    gcPhaseInfo * phaseInfo = BuildPhaseInfo();
+    assert(phaseInfo != NULL);
+
+    try
+    {
+        DiagnosePhaseInfoProblems(*phaseInfo);
+
+        for(constObjVector::const_iterator gIter = regions.begin();
+            gIter != regions.end(); gIter++)
+        {
+            const gcRegion * regionP = dynamic_cast<const gcRegion*>(*gIter);
+            assert(regionP != NULL);
+            bool regionFoundAnything = false;
+            for(constObjVector::const_iterator iter = pops.begin(); iter != pops.end(); iter++)
+            {
+                const gcPopulation * popP = dynamic_cast<const gcPopulation*>(*iter);
+                assert(popP != NULL);
+
+                gcNameResolvedInfo * info = new gcNameResolvedInfo(*popP,*regionP);
+                try
+                {
+                    bool localFoundAnything = FillExportInfo(*info,*phaseInfo);
+                    foundAnything |= localFoundAnything;
+                    regionFoundAnything |= localFoundAnything;
+                    gcPopRegionPair infoPair(popP,regionP);
+                    assert(exportable.find(infoPair) == exportable.end());
+                    exportable[infoPair] = info;
+                }
+                catch(const gc_ex & e)
+                {
+                    delete info;
+                    throw;
+                }
+            }
+            if(!regionFoundAnything)
+            {
+                throw gui_error(wxString::Format(gcerr::regionNoData,regionP->GetName().c_str()));
+            }
+        }
+    }
+    catch(const gc_ex & e)
+    {
+        delete phaseInfo;
+        throw;
+    }
+
+    delete phaseInfo;
+
+    if(!foundAnything)
+    {
+        gui_error g(gcerr::noDataFound);
+        throw g;
+    }
+
+    // EWFIX.P3 -- we should gather info about ghost populations,
+    // and other unexpected data gaps
+
+    return exportable;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_datastore_files.cpp b/src/convModel/gc_datastore_files.cpp
new file mode 100644
index 0000000..bdfc7a5
--- /dev/null
+++ b/src/convModel/gc_datastore_files.cpp
@@ -0,0 +1,616 @@
+// $Id: gc_datastore_files.cpp,v 1.36 2011/06/22 18:22:22 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_creation_info.h"
+#include "gc_data.h"
+#include "gc_datastore.h"
+#include "gc_file.h"
+#include "gc_infile_err.h"
+#include "gc_loci_match.h"
+#include "gc_locus_err.h"
+#include "gc_parse_block.h"
+#include "gc_parse_locus.h"
+#include "gc_parse_sample.h"
+#include "gc_pop_match.h"
+#include "gc_strings.h"
+#include "gc_structures_err.h"
+
+#include "wx/file.h"
+#include "wx/filename.h"
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+GCFile &
+GCDataStore::AddDataFile(wxString           fullPathFileName,
+                         GCFileFormat    format,
+                         gcGeneralDataType   dataType,
+                         GCInterleaving  interleaving)
+// version to use when reading from batch command file
+{
+    // test for readability
+    if(! ::wxFileExists(fullPathFileName))
+    {
+        throw missing_file_error(fullPathFileName.c_str());
+    }
+
+    GCFile * file = new GCFile(*this,fullPathFileName);
+    GCParseVec * parseVecP = new GCParseVec();
+
+    try
+    {
+        GCParse * parse = OneParse(*file,format,dataType,interleaving);
+        parseVecP->push_back(parse);
+    }
+    catch(const gc_ex& e)
+    {
+        parseVecP->NukeContents();
+        delete parseVecP;
+        delete file;
+        throw;
+    }
+
+    file->SetParses(parseVecP);
+    m_structures.AddFile(*file);
+
+    m_dataFiles.insert(file);
+    wxLogVerbose(gcverbose::addedFile,fullPathFileName.c_str());
+    return *file;
+}
+
+GCFile &
+GCDataStore::AddDataFile(wxString fullPathFileName)
+{
+    // test for readability
+    if(! ::wxFileExists(fullPathFileName))
+    {
+        throw missing_file_error (fullPathFileName.c_str());
+    }
+
+    GCFile * file = new GCFile(*this,fullPathFileName);
+
+    GCParseVec * parseVecP = AllParsesForFile(*file);
+    assert(parseVecP != NULL);
+    if (parseVecP->size() == 0)
+    {
+        delete parseVecP;
+        delete file;
+        throw unparsable_file_error(fullPathFileName.c_str());
+    }
+
+    file->SetParses(parseVecP);
+    m_structures.AddFile(*file);
+    m_dataFiles.insert(file);
+    wxLogVerbose(gcverbose::addedFile,fullPathFileName.c_str());
+
+    try
+    {
+        if(file->GetParseCount() == 1)
+        {
+            GCPopMatcher p(popmatch_DEFAULT);
+            GCLocusMatcher l(locmatch_DEFAULT);
+            SetParseChoice(file->GetParse(0),p,l);
+        }
+    }
+    catch (const duplicate_file_error& e)
+    {
+        GCWarning(e.what());
+    }
+    catch (const missing_file_error& e)
+    {
+        GCWarning(e.what());
+    }
+    catch (const unparsable_file_error& e)
+    {
+        GCWarning(e.what());
+    }
+
+    return *file;
+}
+
+const GCParseBlock *
+GCDataStore::GetParseBlock(size_t blockId) const
+{
+    for(dataFileSet::const_iterator iter=m_dataFiles.begin();
+        iter != m_dataFiles.end(); iter++)
+    {
+        const GCFile & fileRef = **iter;
+        for(size_t index=0; index < fileRef.GetParseCount(); index++)
+        {
+            const GCParse & parseRef = fileRef.GetParse(index);
+            constBlockVector blocks = parseRef.GetBlocks();
+            for(constBlockVector::const_iterator biter = blocks.begin();
+                biter != blocks.end(); biter++)
+            {
+                const GCParseBlock * blockP = *biter;
+                if(blockId == blockP->GetId())
+                {
+                    return blockP;
+                }
+            }
+        }
+
+    }
+    assert(false);
+    return NULL;
+}
+
+constBlockVector
+GCDataStore::GetBlocks(size_t popId, size_t locusId) const
+{
+    gcIdSet ids = m_structures.GetBlockIds(popId,locusId);
+    constBlockVector retVal;
+
+    for(dataFileSet::const_iterator iter=m_dataFiles.begin();
+        iter != m_dataFiles.end(); iter++)
+    {
+        const GCFile & fileRef = **iter;
+        for(size_t index=0; index < fileRef.GetParseCount(); index++)
+        {
+            const GCParse & parseRef = fileRef.GetParse(index);
+            constBlockVector blocks = parseRef.GetBlocks();
+            for(constBlockVector::const_iterator biter = blocks.begin();
+                biter != blocks.end(); biter++)
+            {
+                const GCParseBlock * blockP = *biter;
+                size_t blockId = blockP->GetId();
+                if(ids.find(blockId) != ids.end())
+                {
+                    retVal.push_back(blockP);
+                }
+            }
+        }
+
+    }
+    return retVal;
+}
+
+constBlockVector
+GCDataStore::GetBlocksForLocus(size_t locusId) const
+{
+    gcIdSet ids = m_structures.GetBlocksForLocus(locusId);
+    constBlockVector retVal;
+
+    for(dataFileSet::const_iterator iter=m_dataFiles.begin();
+        iter != m_dataFiles.end(); iter++)
+    {
+        const GCFile & fileRef = **iter;
+        for(size_t index=0; index < fileRef.GetParseCount(); index++)
+        {
+            const GCParse & parseRef = fileRef.GetParse(index);
+            constBlockVector blocks = parseRef.GetBlocks();
+            for(constBlockVector::const_iterator biter = blocks.begin();
+                biter != blocks.end(); biter++)
+            {
+                const GCParseBlock * blockP = *biter;
+                size_t blockId = blockP->GetId();
+                if(ids.find(blockId) != ids.end())
+                {
+                    retVal.push_back(blockP);
+                }
+            }
+        }
+
+    }
+    return retVal;
+}
+size_t
+GCDataStore::GetDataFileCount() const
+{
+    return m_dataFiles.size();
+}
+
+GCFile &
+GCDataStore::GetDataFile(size_t fileIndex)
+{
+    for(dataFileSet::iterator i = m_dataFiles.begin(); i != m_dataFiles.end(); i++)
+    {
+        GCFile & dataFile = **i;
+        if(dataFile.GetId() == fileIndex)
+        {
+            return dataFile;
+        }
+    }
+    wxString msg = wxString::Format(gcerr::missingFileId,(int)fileIndex);
+    throw gc_implementation_error (msg.c_str());
+}
+
+const GCFile &
+GCDataStore::GetDataFile(size_t fileIndex) const
+{
+    for(dataFileSet::const_iterator i = m_dataFiles.begin(); i != m_dataFiles.end(); i++)
+    {
+        const GCFile & dataFile = **i;
+        if(dataFile.GetId() == fileIndex)
+        {
+            return dataFile;
+        }
+    }
+    wxString msg = wxString::Format(gcerr::missingFileId,(int)fileIndex);
+    throw gc_implementation_error (msg.c_str());
+}
+
+const GCParse &
+GCDataStore::GetParse(size_t parseIndex) const
+{
+    for(dataFileSet::const_iterator i = m_dataFiles.begin(); i != m_dataFiles.end(); i++)
+    {
+        const GCFile & dataFile = **i;
+        size_t numParses = dataFile.GetParseCount();
+        for(size_t p=0; p < numParses; p++)
+        {
+            const GCParse & parseRef = dataFile.GetParse(p);
+            if(parseRef.GetId() == parseIndex)
+            {
+                return parseRef;
+            }
+        }
+    }
+    wxString msg = wxString::Format(gcerr::missingParseId,(int)parseIndex);
+    throw gc_implementation_error(msg.c_str());
+}
+
+const dataFileSet &
+GCDataStore::GetDataFiles() const
+{
+    return m_dataFiles;
+}
+
+size_t
+GCDataStore::GetSelectedDataFileCount() const
+{
+    return m_structures.SelectedFileCount();
+}
+
+bool
+GCDataStore::HasNoDataFiles() const
+{
+    return m_dataFiles.empty();
+}
+
+bool
+GCDataStore::HasUnparsedFiles() const
+{
+    return m_structures.HasUnparsedFiles();
+}
+
+void
+GCDataStore::RemoveDataFile(GCFile & fileRef)
+{
+    dataFileSet::iterator matching = m_dataFiles.find(&fileRef);
+    if(matching != m_dataFiles.end())
+    {
+        wxLogVerbose(gcverbose::removedFile,fileRef.GetName().c_str());
+        gcIdSet blocks = fileRef.IdsOfAllBlocks();
+        m_structures.RemoveBlocks(blocks);
+        m_dataFiles.erase(matching);
+        delete &fileRef;
+        // EWFIX.P3 -- need an ignore method?? causes UNDO/REDO problem
+    }
+}
+
+void
+GCDataStore::RemoveFiles(bool selectedFilesOnly)
+{
+    for(dataFileSet::iterator iter=m_dataFiles.begin();
+        iter != m_dataFiles.end(); iter++)
+    {
+        GCFile & fileRef = **iter;
+        size_t fileId = fileRef.GetId();
+        if(!selectedFilesOnly || m_structures.GetFileSelection(fileId))
+        {
+            m_structures.RemoveFile(fileId);
+            RemoveDataFile(fileRef);
+        }
+
+    }
+}
+
+void
+GCDataStore::SelectAllFiles()
+{
+    m_structures.AllFileSelectionsTo(true);
+}
+
+void
+GCDataStore::UnselectAllFiles()
+{
+    m_structures.AllFileSelectionsTo(false);
+}
+
+bool
+GCDataStore::GetSelected(const GCFile & fileRef) const
+{
+    return m_structures.GetFileSelection(fileRef.GetId());
+}
+
+void
+GCDataStore::SetSelected(const GCFile & fileRef, bool selected)
+{
+    m_structures.SetFileSelection(fileRef.GetId(),selected);
+}
+
+locVector
+GCDataStore::GetLociFor(const GCParse & parseRef, const GCLocusMatcher & locMatch)
+{
+    locVector loci;
+
+    for(size_t i=0; i < parseRef.GetLociCount(); i++)
+    {
+        GCLocusSpec spec = locMatch.GetLocSpec(i,parseRef);
+        try
+        {
+            gcLocus & loc = m_structures.GetLocus(spec.GetLocusName());
+            loci.push_back(&loc);
+        }
+        catch(const gc_missing_locus& e)
+        {
+            loc_match locmatchType = locMatch.GetLocMatchType();
+            if(locmatchType == locmatch_DEFAULT || locmatchType == locmatch_LINKED)
+                // EWFIX.BUG674 -- take out single region
+            {
+                size_t lineNumber = parseRef.GetParseLocus(i).GetLineNumber();
+                wxString fileName = parseRef.GetFileRef().GetName();
+                gcCreationInfo creationInfo = gcCreationInfo::MakeDataFileCreationInfo(lineNumber,fileName);
+                try
+                {
+                    gcRegion & region = m_structures.GetRegion(spec.GetRegionName());
+                    gcLocus & loc = m_structures.MakeLocus(region,spec.GetLocusName(),spec.GetBlessedLocus(),creationInfo);
+                    gcGeneralDataType dType = parseRef.GetDataType();
+                    if(dType.size()== 1)
+                    {
+                        loc.SetDataType(*dType.begin());
+                    }
+                    loci.push_back(&loc);
+                }
+                catch(const missing_region& r)
+                {
+                    gcRegion & region = m_structures.MakeRegion(spec.GetRegionName(),spec.GetBlessedRegion());
+                    gcLocus & loc = m_structures.MakeLocus(region,spec.GetLocusName(),spec.GetBlessedLocus(),creationInfo);
+                    gcGeneralDataType dType = parseRef.GetDataType();
+                    if(dType.size() == 1)
+                    {
+                        loc.SetDataType(*dType.begin());
+                    }
+                    loci.push_back(&loc);
+                }
+            }
+            else
+            {
+                throw;
+            }
+        }
+    }
+
+    return loci;
+}
+
+popVector
+GCDataStore::GetPopsFor(const GCParse & parseRef, const GCPopMatcher & popMatch)
+{
+    popVector pops;
+
+    for(size_t i=0; i < parseRef.GetPopCount(); i++)
+    {
+        GCPopSpec spec = popMatch.GetPopSpec(i,parseRef);
+        try
+        {
+            gcPopulation & pop = m_structures.GetPop(spec.GetName());
+            pops.push_back(&pop);
+        }
+        catch(const gc_missing_population& e)
+        {
+            if(popMatch.GetPopMatchType() == popmatch_DEFAULT)
+            {
+                gcPopulation & pop = m_structures.MakePop(spec.GetName(),spec.GetBlessed());
+                pops.push_back(&pop);
+            }
+            else
+            {
+                throw;
+            }
+        }
+    }
+
+    return pops;
+}
+
+bool
+GCDataStore::CanAssignParseLocus(const GCParseLocus & pLocus, const gcLocus & locus) const
+{
+    // EWFIX.P3 -- refactor with AssignParseLocus
+    gcSpecificDataType  locusType   = locus.GetDataType();
+    gcGeneralDataType   parseType   = pLocus.GetDataType();
+
+    if(locusType != sdatatype_NONE_SET)
+    {
+        if(parseType.find(locusType) == parseType.end())
+        {
+            return false;
+        }
+    }
+    if(locus.HasNumMarkers())
+    {
+        if(locus.GetNumMarkers() != pLocus.GetNumMarkers())
+        {
+            return false;
+        }
+    }
+    return true;
+}
+
+void
+GCDataStore::AssignParseLocus(const GCParseLocus & pLocus, gcLocus & locus)
+{
+    gcSpecificDataType  locusType   = locus.GetDataType();
+    gcGeneralDataType   parseType   = pLocus.GetDataType();
+
+    if(locusType != sdatatype_NONE_SET)
+    {
+        if(parseType.find(locusType) == parseType.end())
+        {
+            throw gc_locus_types_mismatch(pLocus.GetName(),locus.GetName(),ToWxString(parseType),ToWxString(locusType));
+        }
+    }
+    else
+    {
+        if(parseType.size() == 1)
+        {
+            locus.SetDataType(*(parseType.begin()));
+        }
+    }
+
+    // number of markers
+    if(locus.HasNumMarkers())
+    {
+        if(locus.GetNumMarkers() != pLocus.GetNumMarkers())
+        {
+            throw gc_locus_site_count_mismatch(pLocus.GetLongName(),locus.GetLongName(),pLocus.GetNumMarkers(),locus.GetNumMarkers());
+        }
+    }
+    else
+    {
+        locus.SetNumMarkers(pLocus.GetNumMarkers());
+    }
+}
+
+void
+GCDataStore::AssignPop(const GCParsePop & pPop, gcPopulation & pop)
+{
+    // EWFIX.P5 LATER -- right now all pops are mergeable
+}
+
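+// Records the chosen parse of a file: after clearing the assignments of any
+// previously chosen parse, it matches parsed populations and loci to
+// (possibly newly created) gcPopulation / gcLocus objects, assigns every
+// parse block to its (population, locus) pair, and remembers which matchers
+// were used so the choice can be reproduced.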
+void
+GCDataStore::SetParseChoice(const GCParse &         parseRef,
+                            const GCPopMatcher &    popMatch,
+                            const GCLocusMatcher &  locMatch)
+{
+    // remove assignments made by any previously chosen parse of this file;
+    // UnsetParseChoice() both clears the block assignments and unsets the
+    // parse in m_structures
+    const GCFile & fileRef = parseRef.GetFileRef();
+    if(m_structures.HasParse(fileRef))
+    {
+        const GCParse & oldParse = m_structures.GetParse(fileRef);
+        UnsetParseChoice(oldParse);
+    }
+
+    // check we can assign to these pops
+    popVector pops = GetPopsFor(parseRef,popMatch);
+    assert (pops.size() == parseRef.GetPopCount());
+    for(size_t i=0; i < pops.size(); i++)
+    {
+        const GCParsePop & parsePop = parseRef.GetParsePop(i);
+        gcPopulation & pop = *(pops[i]);
+        AssignPop(parsePop,pop);
+    }
+
+    locVector locs = GetLociFor(parseRef,locMatch);
+    assert (locs.size() == parseRef.GetLociCount());
+
+    for(size_t lIndex = 0; lIndex < locs.size(); lIndex++)
+    {
+        const GCParseLocus & parseLocus = parseRef.GetParseLocus(lIndex);
+        gcLocus & locus = *(locs[lIndex]);
+        AssignParseLocus(parseLocus,locus);
+
+        for(size_t pIndex = 0; pIndex < pops.size(); pIndex++)
+        {
+            const GCParseBlock & block = parseRef.GetBlock(pIndex,lIndex);
+            size_t blockId = block.GetId();
+            m_structures.AssignBlock(blockId,pops[pIndex]->GetId(),locs[lIndex]->GetId());
+        }
+    }
+
+    // if we make it this far, update the parse in structures
+    m_structures.SetParse(parseRef);
+
+    // and document the pop and locus matchers in file info
+    GetStructures().SetPopMatcher(fileRef,popMatch);
+    GetStructures().SetLocusMatcher(fileRef,locMatch);
+
+}
+
+void
+GCDataStore::UnsetParseChoice(const GCParse & parseRef)
+{
+    //
+    constBlockVector blocks = parseRef.GetBlocks();
+    for(constBlockVector::iterator i=blocks.begin(); i != blocks.end(); i++)
+    {
+        const GCParseBlock & block = *(*i);
+        m_structures.RemoveBlockAssignment(block.GetId());
+    }
+    m_structures.UnsetParse(parseRef);
+}
+
+const GCParse &
+GCDataStore::GetParse(const GCFile & file) const
+{
+    return m_structures.GetParse(file);
+}
+
+const GCParse &
+GCDataStore::GetParse(const GCFile & file, size_t index) const
+{
+    return file.GetParse(index);
+}
+
+bool
+GCDataStore::HasParse(const GCFile & file) const
+{
+    return m_structures.HasParse(file);
+}
+
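+// The legal data types for a locus are the intersection of the general data
+// types of every parse whose blocks are assigned to it.  For example, if one
+// assigned parse allows {DNA, SNP} and another allows only {SNP}, the locus
+// may only be SNP.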
+gcGeneralDataType
+GCDataStore::GetLegalLocusTypes(size_t locusId) const
+{
+    gcGeneralDataType allowedTypes = gcdata::allDataTypes();
+    constBlockVector blocks = GetBlocksForLocus(locusId);
+
+    for(constBlockVector::const_iterator i = blocks.begin(); i != blocks.end(); i++)
+    {
+        const GCParseBlock & pb = **i;
+        const GCParse & parse = pb.GetParse();
+        gcGeneralDataType thisType = parse.GetDataType();
+        allowedTypes.Intersect(thisType);
+    }
+    return allowedTypes;
+}
+
+bool
+GCDataStore::FileInducesHaps(size_t fileId) const
+{
+    const GCFile & fileRef = GetDataFile(fileId);
+
+    for(size_t index=0; index < fileRef.GetParseCount(); index++)
+    {
+        const GCParse & parseRef = fileRef.GetParse(index);
+        constBlockVector blocks = parseRef.GetBlocks();
+        for(constBlockVector::const_iterator biter = blocks.begin();
+            biter != blocks.end(); biter++)
+        {
+            const GCParseBlock & pb = **biter;
+            const GCParseSamples & samples = pb.GetSamples();
+            for(size_t j = 0; j < samples.size(); j++)
+            {
+                const GCParseSample & s = *(samples[j]);
+                if(s.GetSequencesPerLabel() > 1)
+                {
+                    return true;
+                }
+            }
+        }
+    }
+    return false;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_datastore_readcmd.cpp b/src/convModel/gc_datastore_readcmd.cpp
new file mode 100644
index 0000000..94026d3
--- /dev/null
+++ b/src/convModel/gc_datastore_readcmd.cpp
@@ -0,0 +1,1114 @@
+// $Id: gc_datastore_readcmd.cpp,v 1.67 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "errhandling.h"        // for unrecognized_tag_error // EWFIX.P4 -- factor out xml errors
+#include "gc_cmdfile_err.h"
+#include "gc_data.h"
+#include "gc_datastore.h"
+#include "gc_errhandling.h"
+#include "gc_genotype_resolution.h"
+#include "gc_region.h"
+#include "gc_loci_match.h"
+#include "gc_locus_err.h"
+#include "gc_migrate.h"
+#include "gc_phase.h"
+#include "gc_phase_err.h"
+#include "gc_phase_info.h"
+#include "gc_phylip.h"
+#include "gc_pop_match.h"
+#include "gc_population.h"
+#include "gc_locus.h"
+#include "gc_strings.h"
+#include "gc_strings_cmdfile.h"
+#include "gc_structures_err.h"
+#include "gc_trait.h"
+#include "gc_trait_err.h"
+#include "gc_types.h"
+#include "cmdfileschema.h"
+#include "front_end_warnings.h"
+#include "tinyxml.h"
+#include "tixml_util.h"
+#include "xml.h"
+#include "cnv_strings.h"
+#include "wx/log.h"
+#include "wx/string.h"
+#include "wx/tokenzr.h"
+
+//------------------------------------------------------------------------------------
+
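+// Parses one segment ("block") description from a converter command file.
+// In rough outline (element and attribute names are the cnvstr:: constants
+// referenced below) a block supplies: a required name, data type, and marker
+// count; an optional total length, first scanned position, marker locations,
+// map position, and list of unresolved (unphased) markers; and an optional
+// proximity attribute marking the segment linked or unlinked.  Additional
+// requirements are enforced depending on data type and region contents.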
+void
+GCDataStore::cmdParseBlock(TiXmlElement * blockElem, gcRegion & regionRef, size_t numBlocksInRegion)
+{
+    try
+    {
+        ////////////////////////////////////////////////////////////////////
+        // name -- required by all
+        wxString name;
+        TiXmlElement * nameElem = tiwx_requiredChild(blockElem,cnvstr::TAG_NAME);
+        name = tiwx_nodeText(nameElem);
+        assert(! name.IsEmpty());
+        size_t lineNumber = blockElem->Row();
+        gcCreationInfo creationInfo = gcCreationInfo::MakeCmdFileCreationInfo(lineNumber,m_commandFileCurrentlyBeingParsed);
+        gcLocus & locusRef = GetStructures().FetchOrMakeLocus(regionRef,name,creationInfo);
+
+        ////////////////////////////////////////////////////////////////////
+        // data type -- required by all
+        wxString dataTypeString = tiwx_attributeValue(blockElem,cnvstr::ATTR_DATATYPE);
+        gcSpecificDataType dataType = ProduceSpecificDataTypeOrBarf(dataTypeString,false);
+        assert (dataType != sdatatype_NONE_SET);
+        locusRef.SetDataType(dataType);
+
+        ////////////////////////////////////////////////////////////////////
+        // markers -- required by all
+        TiXmlElement * markersElem = tiwx_requiredChild(blockElem,cnvstr::TAG_MARKERS);
+
+        try
+        {
+            size_t numMarkers = tiwx_size_t_from_text(markersElem);
+            locusRef.SetNumMarkers(numMarkers);
+        }
+        catch (const incorrect_xml& x)
+        {
+            fileRejectingError(x.what(),markersElem->Row());
+        }
+        catch (const gc_locus_err& z)
+        {
+            fileRejectingError(z.what(),markersElem->Row());
+        }
+
+        ////////////////////////////////////////////////////////////////////
+        // length -- required for SNP data
+        TiXmlElement * lengthElem = tiwx_optionalChild(blockElem,cnvstr::TAG_SCANNED_LENGTH);
+        assert(locusRef.HasNumMarkers());
+        if(lengthElem == NULL)
+        {
+            if(locusRef.GetNumMarkers() > 1)
+            {
+                if(locusRef.GetDataType() == sdatatype_SNP)
+                {
+                    batchFileRejectGuiLog(cnvstr::ERR_DATA_LENGTH_REQUIRED,blockElem->Row());
+                }
+                if(locusRef.GetDataType() == sdatatype_KALLELE ||
+                   locusRef.GetDataType() == sdatatype_MICROSAT )
+                {
+                    warnLog(wxString::Format(cnvstr::WARN_NO_LENGTH,name.c_str()),blockElem->Row());
+                }
+            }
+        }
+        else
+        {
+            try
+            {
+                size_t length = tiwx_size_t_from_text(lengthElem);
+                locusRef.SetTotalLength(length);
+            }
+            catch (const incorrect_xml& x)
+            {
+                fileRejectingError(x.what(),lengthElem->Row());
+            }
+            catch (const gc_locus_err& z)
+            {
+                fileRejectingError(z.what(),lengthElem->Row());
+            }
+        }
+
+        ////////////////////////////////////////////////////////////////////
+        // offset -- required  when ?? // EWFIX
+        TiXmlElement * offsetElem = tiwx_optionalChild(blockElem,cnvstr::TAG_FIRST_POSITION_SCANNED);
+        if(offsetElem == NULL)
+        {
+            if (locusRef.HasLocations())
+            {
+                batchFileRejectGuiLog(cnvstr::ERR_OFFSET_REQUIRED,blockElem->Row());
+            }
+        }
+        else
+        {
+            try
+            {
+                long offset = tiwx_long_from_text(offsetElem);
+                locusRef.SetOffset(offset);
+            }
+            catch (const incorrect_xml& x)
+            {
+                fileRejectingError(x.what(),offsetElem->Row());
+            }
+            catch (const gc_locus_err& z)
+            {
+                fileRejectingError(z.what(),offsetElem->Row());
+            }
+        }
+
+        ////////////////////////////////////////////////////////////////////
+        // locations -- optional, but required for recombination on non-DNA data
+        TiXmlElement *locationsElem = tiwx_optionalChild(blockElem,cnvstr::TAG_SCANNED_DATA_POSITIONS);
+        assert(locusRef.HasNumMarkers());
+        if(locationsElem == NULL)
+        {
+            if(locusRef.GetNumMarkers() > 1)
+                // dna -- required if length > markers
+                // snp -- required for recom
+                // msat/kallele -- required for recom
+            {
+                if ( locusRef.HasTotalLength() && locusRef.GetDataType() != sdatatype_SNP)
+                {
+                    batchFileRejectGuiLog(cnvstr::ERR_LOCATIONS_REQUIRED_WITH_LENGTH,blockElem->Row());
+                }
+                if (locusRef.GetDataType() != sdatatype_DNA && (locusRef.HasNumMarkers() && (locusRef.GetNumMarkers() > 1)))
+                {
+                    warnLog(wxString::Format(cnvstr::WARN_NO_LOCATIONS,name.c_str()),blockElem->Row());
+                }
+            }
+        }
+        else
+        {
+            try
+            {
+                wxString locations = tiwx_nodeText(locationsElem);
+                if(! locusRef.HasTotalLength())
+                {
+                    batchFileRejectGuiLog(cnvstr::ERR_LENGTH_REQUIRED_WITH_LOCATIONS,blockElem->Row());
+                }
+                locusRef.SetLocations(locations);
+
+                if(!locusRef.HasOffset())
+                {
+                    batchFileRejectGuiLog(cnvstr::ERR_LOCATIONS_REQUIRE_OFFSET,locationsElem->Row());
+                }
+            }
+            catch (const incorrect_xml& x)
+            {
+                fileRejectingError(x.what(),locationsElem->Row());
+            }
+            catch (const gc_locus_err& z)
+            {
+                fileRejectingError(z.what(),locationsElem->Row());
+            }
+        }
+
+        ////////////////////////////////////////////////////////////////////
+        // proximity -- linked/unlinked
+        // optional for microsat or kallele; must be linked for nucleotides
+        wxString proximityString = tiwx_attributeValue(blockElem,cnvstr::ATTR_PROXIMITY);
+        if (!proximityString.empty())
+        {
+            bool linked = ProduceBoolFromProximityOrBarf(proximityString);
+            if (linked==false && (locusRef.GetDataType() == sdatatype_DNA || locusRef.GetDataType()==sdatatype_SNP))
+            {
+                fileRejectingError(wxString::Format(cnvstr::ERR_NO_NUCLEOTIDES_UNLINKED,
+                                                    (ToWxString(locusRef.GetDataType())).c_str(),
+                                                    proximityString.c_str()),
+                                   blockElem->Row());
+            }
+            if (linked == false && numBlocksInRegion > 1)
+            {
+                warnLog(wxString::Format(gcstr::moveLocus,locusRef.GetName().c_str(),regionRef.GetName().c_str()));
+                gcRegion & newRegion = GetStructures().MakeRegion();
+                GetStructures().AssignLocus(locusRef,newRegion);
+            }
+            locusRef.SetLinkedUserValue(linked);
+        }
+        else
+            // set appropriate defaults
+        {
+            if(locusRef.GetDataType() == sdatatype_KALLELE || locusRef.GetDataType() == sdatatype_MICROSAT)
+                // kallele and msat data depend on number of segments in region
+            {
+                if(numBlocksInRegion > 1)
+                {
+                    //EWFIX.P3
+                    locusRef.SetLinkedUserValue(true);
+                }
+                else
+                {
+                    //EWFIX.P3 locusRef.SetLinked(false);
+                }
+            }
+            else
+                // others are linked
+            {
+                //EWFIX.P3 //locusRef.SetLinked(true);
+            }
+        }
+
+        ////////////////////////////////////////////////////////////////////
+        // mapPosition
+        TiXmlElement * mapPositionElem = tiwx_optionalChild(blockElem,cnvstr::TAG_MAP_POSITION);
+        if(mapPositionElem == NULL)
+        {
+            if(numBlocksInRegion > 1 && locusRef.GetLinked())
+            {
+                batchFileRejectGuiLog(cnvstr::ERR_MAP_POSITION_REQUIRED,blockElem->Row());
+            }
+        }
+        else
+        {
+            try
+            {
+                long mapPosition = tiwx_long_from_text(mapPositionElem);
+                locusRef.SetMapPosition(mapPosition);
+            }
+            catch (const incorrect_xml& x)
+            {
+                fileRejectingError(x.what(),mapPositionElem->Row());
+            }
+            catch (const gc_locus_err& z)
+            {
+                fileRejectingError(z.what(),mapPositionElem->Row());
+            }
+        }
+
+        ////////////////////////////////////////////////////////////////////
+        TiXmlElement * phaseElem = tiwx_optionalChild(blockElem,cnvstr::TAG_UNRESOLVED_MARKERS);
+        if(phaseElem != NULL)
+        {
+            try
+            {
+                wxString markerString = tiwx_nodeText(phaseElem);
+                gcUnphasedMarkers markers;
+                markers.ReadFromString(markerString);
+                locusRef.SetUnphasedMarkers(markers);
+            }
+            catch (const incorrect_xml& x)
+            {
+                fileRejectingError(x.what(),phaseElem->Row());
+            }
+            catch (const gc_locus_err& z)
+            {
+                fileRejectingError(z.what(),phaseElem->Row());
+            }
+        }
+
+    }
+    catch (const incorrect_xml& x)
+    {
+        fileRejectingError(x.what(),blockElem->Row());
+    }
+    catch (const gc_cmdfile_err& y)
+    {
+        fileRejectingError(y.what(),blockElem->Row());
+    }
+    catch (const gc_locus_err& z)
+    {
+        fileRejectingError(z.what(),blockElem->Row());
+    }
+}
+
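+// Parses one input-data-file entry from a converter command file: the file
+// format, data type, and interleaving attributes, the file name, and the
+// population- and segment-matching rules, then registers the file and applies
+// the chosen parse.  An optional individuals-from-samples element can request
+// grouping of adjacent samples into individuals.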
+void
+GCDataStore::cmdParseInfile(TiXmlElement * fileElem)
+{
+    try
+    {
+        // format -- required by all
+        wxString formatString = tiwx_attributeValue(fileElem,cnvstr::ATTR_FORMAT);
+        GCFileFormat format = ProduceGCFileFormatOrBarf(formatString,false);
+
+        // data type -- required by all
+        wxString dataTypeString = tiwx_attributeValue(fileElem,cnvstr::ATTR_DATATYPE);
+        gcGeneralDataType dataType;
+        gcSpecificDataType sDataType = ProduceSpecificDataTypeOrBarf(dataTypeString,false);
+        dataType.insert(sDataType);
+
+        // interleaving -- optional; left as interleaving_NONE_SET when absent
+        wxString interleavingString = tiwx_attributeValue(fileElem,cnvstr::ATTR_SEQUENCEALIGNMENT);
+        GCInterleaving interleaving = interleaving_NONE_SET;
+        if(!interleavingString.IsEmpty())
+        {
+            interleaving = ProduceGCInterleavingOrBarf(interleavingString,false);
+        }
+
+        // name
+        TiXmlElement * nameElem = tiwx_requiredChild(fileElem,cnvstr::TAG_NAME);
+        wxString fileName = tiwx_nodeText(nameElem);
+
+        // info about population matching
+        TiXmlElement * popElem = tiwx_requiredChild(fileElem,cnvstr::TAG_POP_MATCHING);
+        GCPopMatcher  popMatch = makePopMatcher(popElem);
+
+        // info about loci matching
+        TiXmlElement * lociElem = tiwx_requiredChild(fileElem,cnvstr::TAG_SEGMENTS_MATCHING);
+        GCLocusMatcher  lociMatch = makeLocusMatcher(lociElem);
+
+        GCFile & file = AddDataFile(fileName,format,dataType,interleaving);
+
+        // this may throw
+        const GCParse & parse = file.GetParse(format,dataType,interleaving);
+        SetParseChoice(parse,popMatch,lociMatch);
+
+        // EWFIX.P4.BUG.535 -- no individual matching tag
+        TiXmlElement * indMatchElem = tiwx_optionalChild(fileElem,cnvstr::TAG_INDIVIDUALS_FROM_SAMPLES);
+        if(indMatchElem != NULL)
+        {
+            wxString matchType = tiwx_attributeValue(indMatchElem,cnvstr::ATTR_TYPE);
+            wxString numSamplesText = tiwx_nodeText(indMatchElem);
+            try
+            {
+                if(CaselessStrCmp(cnvstr::ATTR_VAL_BYADJACENCY,matchType))
+                {
+                    long numSamples;
+                    bool haveSamples = numSamplesText.ToLong(&numSamples);
+                    if(!haveSamples || numSamples <= 1)
+                    {
+                        throw gc_bad_ind_match_adjacency_value(numSamplesText);
+                    }
+                    else
+                    {
+                        GetStructures().SetHapFileAdjacent(file.GetId(),numSamples);
+                    }
+                }
+                else
+                {
+                    throw gc_bad_ind_match(matchType);
+                }
+            }
+            catch (gc_ex& e)
+            {
+                if(!e.hasRow())
+                {
+                    e.setRow(indMatchElem->Row());
+                }
+                throw;
+            }
+        }
+    }
+    catch(gc_cmdfile_err& g)
+    {
+        if(!g.hasRow())
+        {
+            g.setRow((size_t)fileElem->Row());
+        }
+        throw;
+    }
+}
+
+void
+GCDataStore::cmdParseInfiles(TiXmlElement * filesElem)
+{
+    if(filesElem != NULL)
+    {
+        std::vector<TiXmlElement*> files = tiwx_optionalChildren(filesElem,cnvstr::TAG_INFILE);
+        for(std::vector<TiXmlElement*>::iterator i = files.begin(); i != files.end(); i++)
+        {
+            cmdParseInfile(*i);
+        }
+    }
+}
+
+void
+GCDataStore::cmdParsePopulation(std::map<wxString,int> & popNameToRow, TiXmlElement * populationElem)
+{
+    wxString popName = tiwx_nodeText(populationElem);
+
+    std::map<wxString,int>::iterator iter = popNameToRow.find(popName);
+    if(iter == popNameToRow.end())
+    {
+        popNameToRow[popName] = populationElem->Row();
+    }
+    else
+    {
+        throw gc_name_repeat_pop(popName,(*iter).second,populationElem->Row());
+    }
+
+    GetStructures().FetchOrMakePop(popName);
+}
+
+void
+GCDataStore::cmdParsePopulations(TiXmlElement * populationsElem)
+{
+    if(populationsElem != NULL)
+    {
+        std::vector<TiXmlElement*> populations = tiwx_requiredChildren(populationsElem,cnvstr::TAG_POPULATION);
+        std::map<wxString,int> popNameToRow;
+        for(std::vector<TiXmlElement*>::iterator i = populations.begin(); i != populations.end(); i++)
+        {
+            cmdParsePopulation(popNameToRow,*i);
+        }
+    }
+}
+
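+// Parses one panel: the required region and population names, an optional
+// panel name (defaulting to "panel:<region name>:<population name>"), and the
+// panel size.  Creates the panel and switches panel handling on.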
+void
+GCDataStore::cmdParsePanel(TiXmlElement * panelElem)
+{
+    if(panelElem != NULL)
+    {
+        TiXmlElement* panel_region = tiwx_requiredChild(panelElem,cnvstr::TAG_PANEL_REGION);
+        wxString regionName = tiwx_nodeText(panel_region);
+        size_t regionId = GetStructures().GetRegion(regionName).GetId();
+
+        TiXmlElement* panel_pop = tiwx_requiredChild(panelElem,cnvstr::TAG_PANEL_POP);
+        wxString popName = tiwx_nodeText(panel_pop);
+        size_t popId = GetStructures().GetPop(popName).GetId();
+
+        wxString panelName;
+        TiXmlElement* panel_name = tiwx_optionalChild(panelElem,cnvstr::TAG_PANEL_NAME);
+        if (panel_name != NULL)
+        {
+            panelName = tiwx_nodeText(panel_name);
+        }
+        else
+        {
+            panelName = "panel:";
+            panelName += regionName;
+            panelName += ":";
+            panelName += popName;
+        }
+
+        TiXmlElement* panel_size = tiwx_requiredChild(panelElem,cnvstr::TAG_PANEL_SIZE);
+        long panelSize = tiwx_long_from_text(panel_size);
+
+        gcPanel & panel = GetStructures().MakePanel(panelName, true, regionId, popId);
+        panel.SetNumPanels(panelSize);
+
+        GetStructures().SetPanelsState(true);
+    }
+}
+
+void
+GCDataStore::cmdParsePanels(TiXmlElement * panelsElem)
+{
+    if(panelsElem != NULL)
+    {
+        std::vector<TiXmlElement*> panels = tiwx_requiredChildren(panelsElem,cnvstr::TAG_PANEL);
+        for(std::vector<TiXmlElement*>::iterator i = panels.begin(); i != panels.end(); i++)
+        {
+            cmdParsePanel(*i);
+        }
+    }
+}
+
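+// Builds a full phase record for one individual: the required name and sample
+// names, plus any unresolved-marker (phase), phenotype-reference, and inline
+// genotype-resolution children.  The caller owns the returned record.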
+gcPhaseRecord *
+GCDataStore::cmdParseIndividual(TiXmlElement * individualElem, wxString fileName)
+{
+    TiXmlElement * nameElem = tiwx_requiredChild(individualElem,cnvstr::TAG_NAME);
+    wxString indName = tiwx_nodeText(nameElem);
+
+    wxArrayString sampleNames;
+    std::vector<TiXmlElement*> samples = tiwx_requiredChildren(individualElem,cnvstr::TAG_SAMPLE);
+    for(std::vector<TiXmlElement*>::iterator i = samples.begin(); i != samples.end(); i++)
+    {
+        TiXmlElement * sampleNameElem = tiwx_requiredChild((*i),cnvstr::TAG_NAME);
+        wxString sampleName = tiwx_nodeText(sampleNameElem);
+        sampleNames.Add(sampleName);
+    }
+
+    gcPhaseRecord * rec = gcPhaseRecord::MakeFullPhaseRecord(fileName,individualElem->Row(),indName,sampleNames);
+
+    std::vector<TiXmlElement*> phaseInfo = tiwx_optionalChildren(individualElem,cnvstr::TAG_PHASE);
+    for(std::vector<TiXmlElement*>::iterator i = phaseInfo.begin(); i != phaseInfo.end(); i++)
+    {
+        TiXmlElement * locusNameElem = tiwx_requiredChild((*i),cnvstr::TAG_SEGMENT_NAME);
+        wxString locusName = tiwx_nodeText(locusNameElem);
+
+        TiXmlElement * unphasedElem = tiwx_requiredChild((*i),cnvstr::TAG_UNRESOLVED_MARKERS);
+        wxString unphasedString = tiwx_nodeText(unphasedElem);
+
+        gcUnphasedMarkers markers;
+        markers.ReadFromString(unphasedString);
+
+        rec->AddUnphased(locusName,markers);
+    }
+
+    std::vector<TiXmlElement*> phenoInfo = tiwx_optionalChildren(individualElem,cnvstr::TAG_HAS_PHENOTYPE);
+    for(std::vector<TiXmlElement*>::iterator i = phenoInfo.begin(); i != phenoInfo.end(); i++)
+    {
+        TiXmlElement * phenoNameElem = (*i);
+        wxString phenoName = tiwx_nodeText(phenoNameElem);
+        const gcPhenotype & pheno = GetStructures().GetPhenotype(phenoName);
+        rec->AddPhenotypeId(pheno.GetId());
+    }
+
+    std::vector<TiXmlElement*> genoInfo = tiwx_optionalChildren(individualElem,cnvstr::TAG_GENO_RESOLUTIONS);
+    for(std::vector<TiXmlElement*>::iterator i = genoInfo.begin(); i != genoInfo.end(); i++)
+    {
+        gcPhenotype & pheno = cmdParseGenoResolution(*i);
+        rec->AddPhenotypeId(pheno.GetId());
+    }
+
+    return rec;
+}
+
+void
+GCDataStore::cmdParseIndividuals(TiXmlElement * individualsElem,wxString fileName)
+{
+    // EWFIX.P3.BUG.524 -- need to find a way to catch an error and undo the
+    // addition of all individuals if we're reading a separate phase file
+    if(individualsElem != NULL)
+    {
+        std::vector<TiXmlElement*> individuals = tiwx_optionalChildren(individualsElem,cnvstr::TAG_INDIVIDUAL);
+        for(std::vector<TiXmlElement*>::iterator i = individuals.begin(); i != individuals.end(); i++)
+        {
+            gcPhaseRecord * recP = cmdParseIndividual(*i,fileName);
+            m_phaseInfo.AddRecord(*recP);
+            delete recP;
+        }
+    }
+}
+
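+// Parses a named phenotype: the genotype resolutions are handled by
+// cmdParseGenoResolution and the resulting phenotype is renamed to the
+// supplied name.  Row information is attached to any escaping gc_ex.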
+gcPhenotype &
+GCDataStore::cmdParsePhenotype(TiXmlElement * phenoElem)
+{
+    try
+    {
+        TiXmlElement * nameElem = tiwx_requiredChild(phenoElem,cnvstr::TAG_NAME);
+        wxString nameString = tiwx_nodeText(nameElem);
+        TiXmlElement * genoElem = tiwx_requiredChild(phenoElem,cnvstr::TAG_GENO_RESOLUTIONS);
+
+        gcPhenotype & pheno = cmdParseGenoResolution(genoElem);
+        GetStructures().Rename(pheno,nameString);
+        return pheno;
+    }
+    catch (const incorrect_xml& x)
+    {
+        fileRejectingError(x.what(),phenoElem->Row());
+    }
+    catch(gc_ex & e)
+    {
+        if(!e.hasRow())
+        {
+            e.setRow(phenoElem->Row());
+        }
+        throw;
+    }
+    assert(false);
+    throw implementation_error("Reached end of GCDataStore::cmdParsePhenotype without returning a value.");
+}
+
+void
+GCDataStore::cmdParseTraitLocation(TiXmlElement * traitElem, gcRegion & regionRef)
+{
+    try
+    {
+        TiXmlElement * nameElem = tiwx_requiredChild(traitElem,cnvstr::TAG_TRAIT_NAME);
+        wxString traitName = tiwx_nodeText(nameElem);
+        gcTraitInfo & trait = GetStructures().GetTrait(traitName);
+        GetStructures().AssignTrait(trait,regionRef);
+    }
+    catch(gc_ex & e)
+    {
+        if(!e.hasRow())
+        {
+            e.setRow(traitElem->Row());
+        }
+        throw;
+    }
+}
+
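+// Parses one trait definition, rejecting duplicate trait and allele names by
+// remembering the row where each name first appeared.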
+void
+GCDataStore::cmdParseTrait(std::map<wxString,int> & traitNameToRow, TiXmlElement * traitElem)
+{
+    TiXmlElement * nameElem = tiwx_requiredChild(traitElem,cnvstr::TAG_NAME);
+    wxString traitName = tiwx_nodeText(nameElem);
+
+    std::map<wxString,int>::iterator iter = traitNameToRow.find(traitName);
+    if(iter == traitNameToRow.end())
+    {
+        traitNameToRow[traitName] = nameElem->Row();
+    }
+    else
+    {
+        throw gc_name_repeat_trait(traitName,(*iter).second,nameElem->Row());
+    }
+
+    std::vector<TiXmlElement*> alleles = tiwx_optionalChildren(traitElem,cnvstr::TAG_ALLELE);
+    gcTraitInfo & traitInfo = GetStructures().FetchOrMakeTrait(traitName);
+    std::map<wxString,int> alleleInfo;
+
+    for(std::vector<TiXmlElement*>::iterator i = alleles.begin(); i != alleles.end(); i++)
+    {
+        wxString alleleName = tiwx_nodeText(*i);
+        std::map<wxString,int>::iterator alleleIter = alleleInfo.find(alleleName);
+        if(alleleIter == alleleInfo.end())
+        {
+            alleleInfo[alleleName]=(*i)->Row();
+        }
+        else
+        {
+            throw gc_name_repeat_allele(alleleName,(*alleleIter).second,(*i)->Row());
+        }
+        gcTraitAllele & allele = GetStructures().FetchOrMakeAllele(traitInfo,alleleName);
+        GetStructures().AssignAllele(allele,traitInfo);
+    }
+
+}
+
+void
+GCDataStore::cmdParseTraits(TiXmlElement * traitsElem)
+{
+    if(traitsElem != NULL)
+    {
+        std::vector<TiXmlElement*> traits = tiwx_optionalChildren(traitsElem,cnvstr::TAG_TRAIT_INFO);
+        std::map<wxString,int> traitNameToRow;
+        for(size_t i = 0; i < traits.size(); i++)
+        {
+            cmdParseTrait(traitNameToRow,traits[i]);
+        }
+
+        std::vector<TiXmlElement*> phenotypes = tiwx_optionalChildren(traitsElem,cnvstr::TAG_PHENOTYPE);
+        for(std::vector<TiXmlElement*>::iterator i = phenotypes.begin(); i != phenotypes.end(); i++)
+        {
+            gcPhenotype & pheno = cmdParsePhenotype(*i);
+            pheno.SetHasExplicitName();
+        }
+    }
+}
+
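+// Parses one region: its name, an optional effective population size, the
+// required segments (spacing) block, and any trait locations assigned to it.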
+void
+GCDataStore::cmdParseRegion(TiXmlElement * regionElem)
+{
+    // name
+    TiXmlElement * nameElem = tiwx_requiredChild(regionElem,cnvstr::TAG_NAME);
+    wxString regionName = tiwx_nodeText(nameElem);
+    gcRegion & regionRef = GetStructures().FetchOrMakeRegion(regionName);
+
+    // effective popsize
+    TiXmlElement * popSizeElem = tiwx_optionalChild(regionElem,cnvstr::TAG_EFFECTIVE_POPSIZE);
+    if (popSizeElem != NULL)
+    {
+        double effectivePopSize = tiwx_double_from_text(popSizeElem);
+        regionRef.SetEffectivePopulationSize(effectivePopSize);
+    }
+
+    // do spacing
+    TiXmlElement * spacingElement = tiwx_requiredChild(regionElem,cnvstr::TAG_SEGMENTS);
+    cmdParseSpacing(spacingElement,regionRef);
+
+    // traits assigned here
+    std::vector<TiXmlElement*> traits = tiwx_optionalChildren(regionElem,cnvstr::TAG_TRAIT_LOCATION);
+    for(std::vector<TiXmlElement*>::iterator i = traits.begin(); i != traits.end(); i++)
+    {
+        cmdParseTraitLocation(*i,regionRef);
+    }
+
+}
+
+void
+GCDataStore::cmdParseRegions(TiXmlElement * regionsElem)
+{
+    if(regionsElem != NULL)
+    {
+        std::vector<TiXmlElement*> regions = tiwx_requiredChildren(regionsElem,cnvstr::TAG_REGION);
+        for(std::vector<TiXmlElement*>::iterator i = regions.begin(); i != regions.end(); i++)
+        {
+            cmdParseRegion(*i);
+        }
+    }
+}
+
+void
+GCDataStore::cmdParseSpacing(TiXmlElement * spacingElem, gcRegion & regionRef)
+{
+    std::vector<TiXmlElement*> blocks = tiwx_optionalChildren(spacingElem,cnvstr::TAG_SEGMENT);
+    for(std::vector<TiXmlElement*>::iterator i = blocks.begin(); i != blocks.end(); i++)
+    {
+        cmdParseBlock(*i,regionRef,blocks.size());
+    }
+}
+
+void
+GCDataStore::cmdParseOutfile(TiXmlElement * outfileElem)
+{
+    if(outfileElem != NULL)
+    {
+        wxString outfileName = tiwx_nodeText(outfileElem);
+        SetOutfileName(outfileName);
+    }
+}
+
+void
+GCDataStore::cmdParseComment(TiXmlElement * commentElem)
+{
+    if(commentElem != NULL)
+    {
+        wxString commentString = tiwx_nodeText(commentElem);
+        SetLamarcCommentString(commentString);
+    }
+}
+
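+// Translates the segments-matching type attribute (single, by-list, linked,
+// unlinked, or default) into a GCLocusMatcher, checking that every named
+// segment actually exists.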
+GCLocusMatcher
+GCDataStore::makeLocusMatcher(TiXmlElement * lociMatchElem)
+{
+    // read type attribute
+    wxString lociTypeString = tiwx_attributeValue(lociMatchElem,cnvstr::ATTR_TYPE);
+    try
+    {
+        if(CaselessStrCmp(cnvstr::ATTR_VAL_SINGLE,lociTypeString))
+        {
+            wxString locusName = tiwx_nodeText(lociMatchElem);
+            if(locusName.IsEmpty())
+            {
+                throw gc_locus_match_single_empty();
+            }
+            if(!GetStructures().HasLocus(locusName))
+            {
+                throw gc_missing_locus(locusName);
+            }
+            GCLocusMatcher matcher(locmatch_SINGLE,locusName);
+            return matcher;
+        }
+        if(CaselessStrCmp(cnvstr::ATTR_VAL_BYLIST,lociTypeString))
+        {
+            wxArrayString nameArray;
+            std::vector<TiXmlElement*> names = tiwx_optionalChildren(lociMatchElem,cnvstr::TAG_SEGMENT_NAME);
+            for(std::vector<TiXmlElement*>::iterator i = names.begin(); i != names.end(); i++)
+            {
+                TiXmlElement * locusNameElem = *i;
+                wxString locusName = tiwx_nodeText(locusNameElem);
+                if(!GetStructures().HasLocus(locusName))
+                {
+                    throw gc_missing_locus(locusName);
+                }
+                nameArray.Add(locusName);
+            }
+            GCLocusMatcher matcher(locmatch_VECTOR,nameArray);
+            return matcher;
+        }
+        if(CaselessStrCmp(cnvstr::ATTR_VAL_LINKED,lociTypeString))
+        {
+            GCLocusMatcher matcher(locmatch_LINKED);
+            return matcher;
+        }
+        if(CaselessStrCmp(cnvstr::ATTR_VAL_UNLINKED,lociTypeString))
+        {
+            // this is currently the default
+            GCLocusMatcher matcher;
+            return matcher;
+        }
+        if(CaselessStrCmp(cnvstr::ATTR_VAL_DEFAULT,lociTypeString))
+        {
+            GCLocusMatcher matcher;
+            return matcher;
+        }
+
+        throw gc_locus_match_unknown(lociTypeString);
+    }
+    catch (gc_ex& e)
+    {
+        if(!e.hasRow())
+        {
+            e.setRow(lociMatchElem->Row());
+        }
+        throw;
+    }
+
+    assert(false);
+    return GCLocusMatcher(locmatch_DEFAULT);
+}
+
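+// Translates the population-matching type attribute (single, by-list,
+// by-name, or default) into a GCPopMatcher, checking that every named
+// population actually exists.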
+GCPopMatcher
+GCDataStore::makePopMatcher(TiXmlElement * popMatchElem)
+{
+    // read type attribute
+    wxString popTypeString = tiwx_attributeValue(popMatchElem,cnvstr::ATTR_TYPE);
+
+    try
+    {
+        if(CaselessStrCmp(cnvstr::ATTR_VAL_SINGLE,popTypeString))
+        {
+            wxString popName = tiwx_nodeText(popMatchElem);
+            if(popName.IsEmpty())
+            {
+                throw gc_pop_match_single_empty();
+            }
+            if(!GetStructures().HasPop(popName))
+            {
+                throw gc_missing_population(popName);
+            }
+            GCPopMatcher popM(popmatch_SINGLE,popName);
+            return popM;
+        }
+
+        if(CaselessStrCmp(cnvstr::ATTR_VAL_BYLIST,popTypeString))
+        {
+            wxArrayString nameArray;
+            std::vector<TiXmlElement*> names = tiwx_optionalChildren(popMatchElem,cnvstr::TAG_POP_NAME);
+            for(std::vector<TiXmlElement*>::iterator i = names.begin(); i != names.end(); i++)
+            {
+                TiXmlElement * popNameElem = *i;
+                wxString popName = tiwx_nodeText(popNameElem);
+                if(!GetStructures().HasPop(popName))
+                {
+                    throw gc_missing_population(popName);
+                }
+                nameArray.Add(popName);
+            }
+            GCPopMatcher matcher(popmatch_VECTOR,nameArray);
+            return matcher;
+        }
+
+        if(CaselessStrCmp(cnvstr::ATTR_VAL_BYNAME,popTypeString))
+        {
+            wxString popName = tiwx_nodeText(popMatchElem);
+            if(!popName.IsEmpty())
+            {
+                throw gc_pop_match_byname_not_empty();
+            }
+            GCPopMatcher matcher(popmatch_NAME);
+            return matcher;
+        }
+
+        if(CaselessStrCmp(cnvstr::ATTR_VAL_DEFAULT,popTypeString))
+        {
+            GCPopMatcher matcher;
+            return matcher;
+        }
+
+        throw gc_pop_match_unknown(popTypeString);
+    }
+    catch (gc_ex& e)
+    {
+        if(!e.hasRow())
+        {
+            e.setRow(popMatchElem->Row());
+        }
+        throw;
+    }
+
+    assert(false);
+    GCPopMatcher matcher(popmatch_DEFAULT);
+    return matcher;
+}
+
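+// Parses one genotype-resolution block: looks up the named trait, creates a
+// phenotype for it, and adds one haplotype probability (a penetrance plus a
+// whitespace-separated list of existing alleles) per haplotypes child.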
+gcPhenotype &
+GCDataStore::cmdParseGenoResolution(TiXmlElement * genoElem)
+{
+    try
+    {
+        TiXmlElement * traitElem = tiwx_requiredChild(genoElem,cnvstr::TAG_TRAIT_NAME);
+        wxString traitName = tiwx_nodeText(traitElem);
+
+        gcTraitInfo & trait = GetStructures().GetTrait(traitName);
+        gcPhenotype & pheno = GetStructures().MakePhenotype();
+        GetStructures().AssignPhenotype(pheno,trait);
+
+        std::vector<TiXmlElement*> reso = tiwx_optionalChildren(genoElem,cnvstr::TAG_HAPLOTYPES);
+        for(std::vector<TiXmlElement*>::iterator i = reso.begin(); i != reso.end(); i++)
+        {
+            gcHapProbability hapProb;
+
+            TiXmlElement * hapElem = *i;
+
+            TiXmlElement * probElem = tiwx_requiredChild(hapElem,cnvstr::TAG_PENETRANCE);
+            double penetrance = tiwx_double_from_text(probElem);
+            hapProb.SetPenetrance(penetrance);
+
+            TiXmlElement * allelesElem = tiwx_requiredChild(hapElem,cnvstr::TAG_ALLELES);
+            wxString allelesString = tiwx_nodeText(allelesElem);
+            wxStringTokenizer tokenizer(allelesString);
+            while(tokenizer.HasMoreTokens())
+            {
+                wxString alleleName = tokenizer.GetNextToken();
+                const gcTraitAllele & allele = GetStructures().GetAllele(trait,alleleName);
+
+#if 0
+                assert(allele.HasTraitId());
+                if(allele.GetTraitId() != trait.GetId())
+                {
+                    throw gc_allele_trait_mismatch(allele,trait,pheno,allelesElem->Row());
+                }
+#endif
+                hapProb.AddAlleleId(allele.GetId());
+            }
+            pheno.AddHapProbability(hapProb);
+        }
+
+        return pheno;
+    }
+    catch (const incorrect_xml& x)
+    {
+        fileRejectingError(x.what(),genoElem->Row());
+    }
+    catch(gc_ex & e)
+    {
+        if(!e.hasRow())
+        {
+            e.setRow(genoElem->Row());
+        }
+        throw;
+    }
+    assert(false);
+    throw implementation_error("Reached end of GCDataStore::cmdParseGenoResolution without returning.");
+}
+
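+// Parses one divergence: each child may be either an existing population or
+// an already-created parent, and the ancestor name defaults to
+// "parent:<child1>:<child2>" when none is given.  The new parent is linked to
+// both children and divergence handling is switched on.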
+void
+GCDataStore::cmdParseDivergence(TiXmlElement * dvgElem)
+{
+    if(dvgElem != NULL)
+    {
+        TiXmlElement* child1 = tiwx_requiredChild(dvgElem,cnvstr::TAG_DIV_CHILD1);
+        wxString child1Name = tiwx_nodeText(child1);
+        size_t child1Id;
+        if (GetStructures().IsPop(child1Name))
+        {
+            child1Id = GetStructures().GetPop(child1Name).GetId();
+        }
+        else
+        {
+            child1Id = GetStructures().GetParent(child1Name).GetId();
+        }
+
+        TiXmlElement* child2 = tiwx_requiredChild(dvgElem,cnvstr::TAG_DIV_CHILD2);
+        wxString child2Name = tiwx_nodeText(child2);
+        size_t child2Id;
+        if (GetStructures().IsPop(child2Name))
+        {
+            child2Id = GetStructures().GetPop(child2Name).GetId();
+        }
+        else
+        {
+            child2Id = GetStructures().GetParent(child2Name).GetId();
+        }
+
+        wxString ancestorName;
+        TiXmlElement* div_ancestor = tiwx_optionalChild(dvgElem,cnvstr::TAG_DIV_ANCESTOR);
+        if (div_ancestor != NULL)
+        {
+            ancestorName = tiwx_nodeText(div_ancestor);
+        }
+        else
+        {
+            ancestorName = "parent:";
+            ancestorName += child1Name;
+            ancestorName += ":";
+            ancestorName += child2Name;
+
+        }
+        gcParent & parent = GetStructures().MakeParent(ancestorName);
+        parent.SetChild1Id(child1Id);
+        parent.SetChild2Id(child2Id);
+        parent.SetBlessed(true);
+
+        if (GetStructures().IsPop(child1Name))
+        {
+            GetStructures().GetPop(child1Name).SetParentId(parent.GetId());
+        }
+        else
+        {
+            GetStructures().GetParent(child1Name).SetParentId(parent.GetId());
+        }
+
+        if (GetStructures().IsPop(child2Name))
+        {
+            GetStructures().GetPop(child2Name).SetParentId(parent.GetId());
+        }
+        else
+        {
+            GetStructures().GetParent(child2Name).SetParentId(parent.GetId());
+        }
+
+        GetStructures().SetDivergenceState(true);
+    }
+}
+
+void
+GCDataStore::cmdParseDivergences(TiXmlElement * panelsElem)
+{
+    if(panelsElem != NULL)
+    {
+        std::vector<TiXmlElement*> panels = tiwx_requiredChildren(panelsElem,cnvstr::TAG_DIVERGENCE);
+        for(std::vector<TiXmlElement*>::iterator i = panels.begin(); i != panels.end(); i++)
+        {
+            cmdParseDivergence(*i);
+        }
+    }
+}
+
+
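+// Top-level driver for reading a converter command file: the file is parsed
+// against CmdFileSchema, the root tag is checked, and the cmdParse* helpers
+// above are called in order (traits, regions, populations, panels,
+// individuals, infiles, outfile, comment, divergences).  Accumulated
+// front-end warnings are reported once at the end; errors are reported via
+// GCFatalBatchWarnGUI.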
+int
+GCDataStore::ProcessCmdFile(wxString fileName)
+{
+    try
+    {
+        m_commandFileCurrentlyBeingParsed = fileName;
+
+        CmdFileSchema schema;
+        FrontEndWarnings warnings;
+        XmlParser parser(schema,warnings);
+
+        //parser.ParseFileData(fileName.c_str());
+        parser.ParseFileData((const char *)fileName.mb_str());// JRM hack
+        TiXmlElement * topElem = parser.GetRootElement();
+
+        const char * value = topElem->Value();
+        std::string topTag(value);
+        bool matches = CaselessStrCmp(cnvstr::TAG_CONVERTER_CMD,topTag);
+        if(!matches)
+        {
+            gc_data_error e((wxString::Format(cnvstr::ERR_BAD_TOP_TAG,topTag.c_str())).c_str());
+            throw e;
+        }
+
+        cmdParseTraits        (tiwx_optionalChild(topElem,cnvstr::TAG_TRAITS));
+        cmdParseRegions       (tiwx_optionalChild(topElem,cnvstr::TAG_REGIONS));
+        cmdParsePopulations   (tiwx_optionalChild(topElem,cnvstr::TAG_POPULATIONS));
+        cmdParsePanels        (tiwx_optionalChild(topElem,cnvstr::TAG_PANELS));
+        cmdParseIndividuals   (tiwx_optionalChild(topElem,cnvstr::TAG_INDIVIDUALS),fileName);
+        cmdParseInfiles       (tiwx_optionalChild(topElem,cnvstr::TAG_INFILES));
+        cmdParseOutfile       (tiwx_optionalChild(topElem,cnvstr::TAG_OUTFILE));
+        cmdParseComment       (tiwx_optionalChild(topElem,cnvstr::TAG_ADDCOMMENT));
+        cmdParseDivergences   (tiwx_optionalChild(topElem,cnvstr::TAG_DIVERGENCES));
+
+        std::vector<std::string> warningStrings = warnings.GetAndClearWarnings();
+        if(!warningStrings.empty())
+        {
+            wxString combinedMsg = "";
+            for(size_t i=0; i < warningStrings.size(); i++)
+            {
+                combinedMsg += wxString((warningStrings[i]).c_str());
+            }
+            GCWarning(combinedMsg);
+        }
+
+    }
+    catch(const unrecognized_tag_error& e)
+    {
+        m_commandFileCurrentlyBeingParsed = wxEmptyString;
+        wxString msg = wxString::Format(cnvstr::ERR_UNRECOGNIZED_TAG,
+                                        e.what(),
+                                        e.where());
+        GCFatalBatchWarnGUI(msg);
+    }
+    catch(const data_error& e)
+        // This is here to catch errors that come from the XML processing.
+        // EWFIX.P4 -- make xml errors into their own error type instead of
+        // just the data_error type.
+    {
+        m_commandFileCurrentlyBeingParsed = wxEmptyString;
+        GCFatalBatchWarnGUI(e.what());
+    }
+    catch(const gc_ex& e)
+    {
+        m_commandFileCurrentlyBeingParsed = wxEmptyString;
+        wxString msg = wxString::Format(gcerr_cmdfile::inCmdFile,fileName.c_str());
+        if(e.hasRow())
+        {
+            msg += wxString::Format(gcerr_cmdfile::atRow,(int)e.getRow());
+        }
+        if(e.hasFile())
+        {
+            msg += wxString::Format(gcerr_cmdfile::inFile,e.getFile().c_str());
+        }
+        msg += wxString::Format(gcerr_cmdfile::messageIs,e.what());
+        GCFatalBatchWarnGUI(msg);
+        return 1;
+    }
+    catch(const std::exception& e)
+    {
+        m_commandFileCurrentlyBeingParsed = wxEmptyString;
+        GCFatalBatchWarnGUI(wxString::Format(gcerr::uncaughtException,e.what()));
+        return 1;
+    }
+    m_commandFileCurrentlyBeingParsed = wxEmptyString;
+    return 0;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_datastore_writebatch.cpp b/src/convModel/gc_datastore_writebatch.cpp
new file mode 100644
index 0000000..e7afe04
--- /dev/null
+++ b/src/convModel/gc_datastore_writebatch.cpp
@@ -0,0 +1,539 @@
+// $Id: gc_datastore_writebatch.cpp,v 1.10 2011/06/22 18:22:22 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "cnv_strings.h"
+#include "gc_data.h"
+#include "gc_datastore.h"
+#include "gc_parse_block.h"
+#include "gc_phase_info.h"
+#include "gc_strings.h"
+#include "tinyxml.h"
+#include "wx/datetime.h"
+
+//------------------------------------------------------------------------------------
+
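+// Writes the individuals section: one entry per phase record, containing the
+// individual's name, references to explicitly named phenotypes or inline
+// genotype resolutions, and the list of sample names.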
+TiXmlElement *
+GCDataStore::CmdExportIndividuals() const
+{
+    const stringToRecord & indRecords = m_phaseInfo.GetIndividualRecords();
+    if(indRecords.empty())
+    {
+        return NULL;
+    }
+    TiXmlElement * indsElem = new TiXmlElement(cnvstr::TAG_INDIVIDUALS.c_str());
+
+    for(stringToRecord::const_iterator i=indRecords.begin(); i != indRecords.end(); i++)
+    {
+        const wxString & iName = (*i).first;
+        const gcPhaseRecord & rec = (*i).second;
+
+        TiXmlElement * iElem = new TiXmlElement(cnvstr::TAG_INDIVIDUAL.c_str());
+        indsElem->LinkEndChild(iElem);
+
+        TiXmlElement * name = new TiXmlElement(cnvstr::TAG_NAME.c_str());
+        iElem->LinkEndChild(name);
+        TiXmlText * iNameText = new TiXmlText(iName.c_str());
+        name->LinkEndChild(iNameText);
+
+        const gcIdSet & phenoIds = rec.GetPhenotypeIds();
+        for(gcIdSet::const_iterator pIter = phenoIds.begin(); pIter != phenoIds.end(); pIter++)
+        {
+            const gcPhenotype & pheno = GetStructures().GetPhenotype(*pIter);
+            if(pheno.HasExplicitName())
+            {
+                TiXmlElement * phenoRefE = new TiXmlElement(cnvstr::TAG_HAS_PHENOTYPE.c_str());
+                iElem->LinkEndChild(phenoRefE);
+                TiXmlText * phenoNameText = new TiXmlText(pheno.GetName().c_str());
+                phenoRefE->LinkEndChild(phenoNameText);
+            }
+            else
+            {
+                iElem->LinkEndChild(CmdExportGenoReso(pheno));
+            }
+        }
+
+        const wxArrayString & sampleNames = rec.GetSamples();
+        for(size_t index=0; index < sampleNames.Count(); index++)
+        {
+            const wxString & sampleName = sampleNames[index];
+            TiXmlElement * sampleE = new TiXmlElement(cnvstr::TAG_SAMPLE.c_str());
+            iElem->LinkEndChild(sampleE);
+            TiXmlElement * nameE = new TiXmlElement(cnvstr::TAG_NAME.c_str());
+            sampleE->LinkEndChild(nameE);
+            TiXmlText * nameText = new TiXmlText(sampleName);
+            nameE->LinkEndChild(nameText);
+        }
+    }
+
+    return indsElem;
+}
+
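+// Writes one infile element: the format, data type, and interleaving
+// attributes (interleaving_MOOT is exported as sequential), the file name,
+// the segment- and population-matching choices, and any
+// individuals-from-samples adjacency setting.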
+TiXmlElement *
+GCDataStore::CmdExportInfile(const GCFile& fileRef) const
+{
+    TiXmlElement * fileE = new TiXmlElement(cnvstr::TAG_INFILE.c_str());
+
+    // file attributes
+    GCFileFormat fform = fileRef.GetFormat();
+    fileE->SetAttribute(cnvstr::ATTR_FORMAT.c_str(),ToWxString(fform).c_str());
+
+    gcGeneralDataType dtype;
+
+    if(GetStructures().HasParse(fileRef))
+    {
+        const GCParse & parseRef = GetStructures().GetParse(fileRef);
+        constBlockVector blocks = parseRef.GetBlocks();
+        if(!blocks.empty())
+        {
+            const GCParseBlock * blockP = blocks[0];
+            assert(blockP != NULL);
+            size_t locusId = GetStructures().GetLocusForBlock(blockP->GetId());
+            const gcLocus & locusRef = GetStructures().GetLocus(locusId);
+            dtype = locusRef.GetDataType();
+        }
+        else
+        {
+            dtype = parseRef.GetDataType();
+        }
+    }
+
+    if(dtype.size() != 1)
+    {
+        // no single data type came out of the parse, so fall back to the
+        // file's own general data type
+        dtype = fileRef.GetGeneralDataType();
+    }
+
+    if(dtype.size() != 1)
+        // put comment in to tell user what to do
+    {
+        TiXmlComment * comment = new TiXmlComment();
+        comment->SetValue(gcstr::instructionsMultipleDataTypes.c_str());
+        fileE->LinkEndChild(comment);
+
+    }
+    fileE->SetAttribute(cnvstr::ATTR_DATATYPE.c_str(),ToWxString(dtype).c_str());
+
+    GCInterleaving inter = fileRef.GetInterleaving();
+    if(inter == interleaving_MOOT)
+        // sequences fit on one line, so assume
+        // it's sequential
+    {
+        inter = interleaving_SEQUENTIAL;
+    }
+    fileE->SetAttribute(cnvstr::ATTR_SEQUENCEALIGNMENT.c_str(),ToWxString(inter).c_str());
+
+    // name
+    TiXmlElement * name = new TiXmlElement(cnvstr::TAG_NAME.c_str());
+    TiXmlText * fname = new TiXmlText(fileRef.GetName().c_str());
+    name->LinkEndChild(fname);
+    fileE->LinkEndChild(name);
+
+    const GCLocusMatcher & locMatch = GetStructures().GetLocusMatcher(fileRef);
+    loc_match locMatchType = locMatch.GetLocMatchType();
+
+    TiXmlElement * lMatch = new TiXmlElement(cnvstr::TAG_SEGMENTS_MATCHING.c_str());
+    fileE->LinkEndChild(lMatch);
+    lMatch->SetAttribute(cnvstr::ATTR_TYPE.c_str(),ToWxString(locMatchType).c_str());
+
+    if(locMatchType != locmatch_DEFAULT)
+    {
+        if(locMatchType == locmatch_SINGLE)
+        {
+            const wxArrayString & locNames = locMatch.GetLociNames();
+            TiXmlText * lname = new TiXmlText(locNames[0].c_str());
+            lMatch->LinkEndChild(lname);
+        }
+
+        if(locMatchType == locmatch_VECTOR)
+        {
+            const wxArrayString & locNames = locMatch.GetLociNames();
+            for(size_t i=0; i < locNames.Count(); i++)
+            {
+                TiXmlElement * locName = new TiXmlElement(cnvstr::TAG_SEGMENT_NAME.c_str());
+                lMatch->LinkEndChild(locName);
+                TiXmlText * lname = new TiXmlText(locNames[i].c_str());
+                locName->LinkEndChild(lname);
+            }
+        }
+    }
+
+    const GCPopMatcher & popMatch = GetStructures().GetPopMatcher(fileRef);
+    pop_match popMatchType = popMatch.GetPopMatchType();
+
+    TiXmlElement * pMatch = new TiXmlElement(cnvstr::TAG_POP_MATCHING.c_str());
+    fileE->LinkEndChild(pMatch);
+    pMatch->SetAttribute(cnvstr::ATTR_TYPE.c_str(),ToWxString(popMatchType).c_str());
+
+    if(popMatchType != popmatch_DEFAULT)
+    {
+
+        if(popMatchType == popmatch_SINGLE)
+        {
+            const wxArrayString & popNames = popMatch.GetPopNames();
+            TiXmlText * pname = new TiXmlText(popNames[0].c_str());
+            pMatch->LinkEndChild(pname);
+        }
+
+        if(popMatchType == popmatch_VECTOR)
+        {
+            const wxArrayString & popNames = popMatch.GetPopNames();
+            for(size_t i=0; i < popNames.Count(); i++)
+            {
+                TiXmlElement * popName = new TiXmlElement(cnvstr::TAG_POP_NAME.c_str());
+                pMatch->LinkEndChild(popName);
+                TiXmlText * pname = new TiXmlText(popNames[i].c_str());
+                popName->LinkEndChild(pname);
+            }
+        }
+    }
+
+    if(GetStructures().HasHapFileAdjacent(fileRef.GetId()))
+    {
+        size_t numAdj = GetStructures().GetHapFileAdjacent(fileRef.GetId());
+        TiXmlElement * adj = new TiXmlElement(cnvstr::TAG_INDIVIDUALS_FROM_SAMPLES.c_str());
+        adj->SetAttribute(cnvstr::ATTR_TYPE.c_str(),cnvstr::ATTR_VAL_BYADJACENCY.c_str());
+        fileE->LinkEndChild(adj);
+        TiXmlText * adjVal = new TiXmlText(wxString::Format("%d",(int)numAdj).c_str());
+        adj->LinkEndChild(adjVal);
+    }
+
+    return fileE;
+}
+
+TiXmlElement *
+GCDataStore::CmdExportLocus(const gcLocus& locusRef) const
+{
+    TiXmlElement * locE = new TiXmlElement(cnvstr::TAG_SEGMENT.c_str());
+    TiXmlText * name = new TiXmlText(locusRef.GetName().c_str());
+    locE->LinkEndChild(name);
+    return locE;
+}
+
+TiXmlElement *
+GCDataStore::CmdExportPhenotype(const gcPhenotype & pheno) const
+{
+    TiXmlElement * phenoE = new TiXmlElement(cnvstr::TAG_PHENOTYPE.c_str());
+
+    // name
+    TiXmlElement * name = new TiXmlElement(cnvstr::TAG_NAME.c_str());
+    phenoE->LinkEndChild(name);
+    TiXmlText * nameText = new TiXmlText(pheno.GetName());
+    name->LinkEndChild(nameText);
+
+    // geno reso
+    phenoE->LinkEndChild(CmdExportGenoReso(pheno));
+    return phenoE;
+}
+
+TiXmlElement *
+GCDataStore::CmdExportGenoReso(const gcPhenotype & pheno) const
+{
+    // geno reso
+    TiXmlElement * genoE = new TiXmlElement(cnvstr::TAG_GENO_RESOLUTIONS.c_str());
+
+    assert(pheno.HasTraitId());
+    const gcTraitInfo & trait = GetStructures().GetTrait(pheno.GetTraitId());
+    TiXmlElement * tName = new TiXmlElement(cnvstr::TAG_TRAIT_NAME.c_str());
+    genoE->LinkEndChild(tName);
+    TiXmlText * tNameText = new TiXmlText(trait.GetName());
+    tName->LinkEndChild(tNameText);
+
+    const std::vector<gcHapProbability> & hapProbs = pheno.GetHapProbabilities();
+    assert(!hapProbs.empty());
+    for(size_t i=0; i < hapProbs.size(); i++)
+    {
+        const gcHapProbability & hprob = hapProbs[i];
+        assert(hprob.HasPenetrance());
+
+        TiXmlElement * hapE = new TiXmlElement(cnvstr::TAG_HAPLOTYPES.c_str());
+        genoE->LinkEndChild(hapE);
+
+        TiXmlElement * pen = new TiXmlElement(cnvstr::TAG_PENETRANCE.c_str());
+        hapE->LinkEndChild(pen);
+        TiXmlText * penText = new TiXmlText(wxString::Format("%f",hprob.GetPenetrance()));
+        pen->LinkEndChild(penText);
+
+        wxString alleleString = " ";
+        const gcIdVec & alleleIds = hprob.GetAlleleIds();
+        for(gcIdVec::const_iterator iter = alleleIds.begin(); iter != alleleIds.end(); iter++)
+        {
+            alleleString += GetStructures().GetAllele(*iter).GetName();
+            alleleString += " ";
+        }
+        TiXmlElement * alleles = new TiXmlElement(cnvstr::TAG_ALLELES.c_str());
+        hapE->LinkEndChild(alleles);
+        TiXmlText * allelesText = new TiXmlText(alleleString);
+        alleles->LinkEndChild(allelesText);
+    }
+
+    return genoE;
+}
+
+TiXmlElement *
+GCDataStore::CmdExportPop(const gcPopulation & popRef) const
+{
+    TiXmlElement * popE = new TiXmlElement(cnvstr::TAG_POPULATION.c_str());
+    TiXmlText * name = new TiXmlText(popRef.GetName().c_str());
+    popE->LinkEndChild(name);
+    return popE;
+}
+
+TiXmlElement *
+GCDataStore::CmdExportTrait(const gcTraitInfo & traitRef) const
+{
+    TiXmlElement * traitE = new TiXmlElement(cnvstr::TAG_TRAIT_INFO.c_str());
+
+    // one name
+    TiXmlElement * traitName = new TiXmlElement(cnvstr::TAG_NAME.c_str());
+    traitE->LinkEndChild(traitName);
+    TiXmlText * nameText = new TiXmlText(traitRef.GetName().c_str());
+    traitName->LinkEndChild(nameText);
+
+    // many alleles
+    const gcIdSet & alleleIds = traitRef.GetAlleleIds();
+    for(gcIdSet::const_iterator i = alleleIds.begin(); i != alleleIds.end(); i++)
+    {
+        const gcTraitAllele & alleleRef = GetStructures().GetAllele(*i);
+        TiXmlElement * alleleElem = new TiXmlElement(cnvstr::TAG_ALLELE.c_str());
+        traitE->LinkEndChild(alleleElem);
+        TiXmlText * alleleText = new TiXmlText(alleleRef.GetName().c_str());
+        alleleElem->LinkEndChild(alleleText);
+    }
+
+    return traitE;
+}
+
+TiXmlElement *
+GCDataStore::CmdExportRegion(const gcRegion & regRef) const
+{
+    TiXmlElement * regE = new TiXmlElement(cnvstr::TAG_REGION.c_str());
+
+    // name
+    TiXmlElement * name = new TiXmlElement(cnvstr::TAG_NAME.c_str());
+    TiXmlText * rname = new TiXmlText(regRef.GetName().c_str());
+    name->LinkEndChild(rname);
+    regE->LinkEndChild(name);
+
+    // effective pop size
+    if(regRef.HasEffectivePopulationSize())
+    {
+        TiXmlElement * effPop = new TiXmlElement(cnvstr::TAG_EFFECTIVE_POPSIZE.c_str());
+        TiXmlText * esize = new TiXmlText(wxString::Format("%f",regRef.GetEffectivePopulationSize()).c_str());
+        effPop->LinkEndChild(esize);
+        regE->LinkEndChild(effPop);
+    }
+
+    // spacing -- EWFIX -- may drop down to next level
+    TiXmlElement * spacing = new TiXmlElement(cnvstr::TAG_SEGMENTS.c_str());
+    regE->LinkEndChild(spacing);
+    gcIdVec segIds = GetStructures().GetLocusIdsForRegionByMapPosition(regRef.GetId());
+    for(gcIdVec::iterator i=segIds.begin(); i != segIds.end(); i++)
+    {
+        size_t locusId = *i;
+        const gcLocus & locusRef = GetStructures().GetLocus(locusId);
+        spacing->LinkEndChild(CmdExportSegment(locusRef));
+    }
+
+    // trait location
+    const GCTraitInfoSet & traits = regRef.GetTraitInfoSet();
+    for(GCTraitInfoSet::const_iterator i=traits.begin(); i != traits.end(); i++)
+    {
+        const gcTraitInfo & traitInfo = GetStructures().GetTrait(*i);
+        TiXmlElement * traitElem = new TiXmlElement(cnvstr::TAG_TRAIT_LOCATION.c_str());
+        regE->LinkEndChild(traitElem);
+        TiXmlElement * traitName = new TiXmlElement(cnvstr::TAG_TRAIT_NAME.c_str());
+        traitElem->LinkEndChild(traitName);
+        TiXmlText * traitNameText = new TiXmlText(traitInfo.GetName().c_str());
+        traitName->LinkEndChild(traitNameText);
+    }
+
+    return regE;
+}
+
+TiXmlElement *
+GCDataStore::CmdExportSegment(const gcLocus& locusRef) const
+{
+    TiXmlElement * locusE = new TiXmlElement(cnvstr::TAG_SEGMENT.c_str());
+
+    locusE->SetAttribute(cnvstr::ATTR_DATATYPE.c_str(),locusRef.GetDataTypeString().c_str());
+    if(locusRef.HasLinkedUserValue())
+    {
+        locusE->SetAttribute(cnvstr::ATTR_PROXIMITY.c_str(),locusRef.GetLinkedUserValueString().c_str());
+    }
+
+    TiXmlElement * nameE = new TiXmlElement(cnvstr::TAG_NAME.c_str());
+    TiXmlText * nameText = new TiXmlText(locusRef.GetName().c_str());
+    nameE->LinkEndChild(nameText);
+    locusE->LinkEndChild(nameE);
+
+    if(locusRef.HasNumMarkers())
+    {
+        TiXmlElement * markersE = new TiXmlElement(cnvstr::TAG_MARKERS.c_str());
+        TiXmlText * markersText = new TiXmlText(locusRef.GetNumMarkersString().c_str());
+        markersE->LinkEndChild(markersText);
+        locusE->LinkEndChild(markersE);
+    }
+
+    if(locusRef.HasOffset())
+    {
+        TiXmlElement * offsetE = new TiXmlElement(cnvstr::TAG_FIRST_POSITION_SCANNED.c_str());
+        TiXmlText * offsetText = new TiXmlText(locusRef.GetOffsetString().c_str());
+        offsetE->LinkEndChild(offsetText);
+        locusE->LinkEndChild(offsetE);
+    }
+
+    if(locusRef.HasMapPosition())
+    {
+        TiXmlElement * mapE = new TiXmlElement(cnvstr::TAG_MAP_POSITION.c_str());
+        TiXmlText * mapText = new TiXmlText(locusRef.GetMapPositionString().c_str());
+        mapE->LinkEndChild(mapText);
+        locusE->LinkEndChild(mapE);
+    }
+
+    if(locusRef.HasTotalLength())
+    {
+        TiXmlElement * lengthE = new TiXmlElement(cnvstr::TAG_SCANNED_LENGTH.c_str());
+        TiXmlText * lengthText = new TiXmlText(locusRef.GetTotalLengthString().c_str());
+        lengthE->LinkEndChild(lengthText);
+        locusE->LinkEndChild(lengthE);
+    }
+
+    if(locusRef.HasLocations())
+    {
+        TiXmlElement * locationsE = new TiXmlElement(cnvstr::TAG_SCANNED_DATA_POSITIONS.c_str());
+        TiXmlText * locationsText = new TiXmlText(locusRef.GetLocationsAsString().c_str());
+        locationsE->LinkEndChild(locationsText);
+        locusE->LinkEndChild(locationsE);
+    }
+
+    if(locusRef.HasUnphasedMarkers())
+    {
+        TiXmlElement * unphasedE = new TiXmlElement(cnvstr::TAG_UNRESOLVED_MARKERS.c_str());
+        TiXmlText * unphasedText = new TiXmlText(locusRef.GetUnphasedMarkersAsString().c_str());
+        unphasedE->LinkEndChild(unphasedText);
+        locusE->LinkEndChild(unphasedE);
+    }
+
+    return locusE;
+}
+
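+// Assembles the complete converter command document: an XML declaration and a
+// timestamp comment, then (when present) traits and named phenotypes,
+// regions, populations, individuals, input files, the output file name, and
+// the user comment.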
+TiXmlDocument *
+GCDataStore::ExportBatch() const
+{
+    TiXmlDocument * docP = new TiXmlDocument();
+    TiXmlDeclaration * decl = new TiXmlDeclaration( "1.0", "", "" );
+    docP->LinkEndChild( decl );
+
+    TiXmlComment * comment = new TiXmlComment();
+    wxDateTime now = wxDateTime::Now();
+    comment->SetValue(wxString::Format(gcstr::batchOutComment,now.Format().c_str()).c_str());
+    docP->LinkEndChild(comment);
+
+    TiXmlElement * top = new TiXmlElement( cnvstr::TAG_CONVERTER_CMD.c_str() );
+    docP->LinkEndChild( top );
+    top->SetAttribute(cnvstr::ATTR_VERSION.c_str(),VERSION);
+
+    constObjVector traits = GetStructures().GetConstTraits();
+    if(!traits.empty())
+    {
+        TiXmlElement * traitsElem = new TiXmlElement( cnvstr::TAG_TRAITS.c_str() );
+        top->LinkEndChild( traitsElem );
+
+        for(constObjVector::const_iterator iter = traits.begin(); iter != traits.end(); iter++)
+        {
+            const gcTraitInfo * traitP = dynamic_cast<const gcTraitInfo*>(*iter);
+            assert (traitP != NULL);
+            traitsElem->LinkEndChild(CmdExportTrait(*traitP));
+        }
+
+        const gcPhenoMap & phenos = GetStructures().GetPhenotypeMap();
+        for(gcPhenoMap::const_iterator i = phenos.begin(); i != phenos.end(); i++)
+        {
+            const gcPhenotype & phenoRef = (*i).second;
+            if(phenoRef.HasExplicitName())
+            {
+                traitsElem->LinkEndChild(CmdExportPhenotype(phenoRef));
+            }
+        }
+
+    }
+
+    constObjVector regs = GetStructures().GetConstDisplayableRegions();
+    if(!regs.empty())
+    {
+        TiXmlElement * regsElem = new TiXmlElement( cnvstr::TAG_REGIONS.c_str() );
+        top->LinkEndChild( regsElem );
+        for(constObjVector::const_iterator iter = regs.begin(); iter != regs.end(); iter++)
+        {
+            const gcRegion * regP = dynamic_cast<const gcRegion*>(*iter);
+            assert (regP != NULL);
+            regsElem->LinkEndChild(CmdExportRegion(*regP));
+        }
+    }
+
+    constObjVector pops = GetStructures().GetConstDisplayablePops();
+    if(!pops.empty())
+    {
+        TiXmlElement * popsElem = new TiXmlElement( cnvstr::TAG_POPULATIONS.c_str() );
+        top->LinkEndChild( popsElem );
+        for(constObjVector::const_iterator iter = pops.begin(); iter != pops.end(); iter++)
+        {
+            const gcPopulation * popP = dynamic_cast<const gcPopulation*>(*iter);
+            assert (popP != NULL);
+            popsElem->LinkEndChild(CmdExportPop(*popP));
+        }
+    }
+
+    TiXmlElement * individualsElem = CmdExportIndividuals();
+    if(individualsElem != NULL)
+    {
+        top->LinkEndChild( individualsElem );
+    }
+
+    const dataFileSet & files = GetDataFiles();
+    if(!files.empty())
+    {
+        TiXmlElement * infElem = new TiXmlElement( cnvstr::TAG_INFILES.c_str() );
+        top->LinkEndChild( infElem );
+        for(dataFileSet::const_iterator iter= files.begin(); iter != files.end(); iter++)
+        {
+            const GCFile & fileRef = *(*iter);
+            infElem->LinkEndChild(CmdExportInfile(fileRef));
+        }
+    }
+
+    if(!(m_outfileName.IsEmpty()))
+    {
+        TiXmlElement * outf = new TiXmlElement( cnvstr::TAG_OUTFILE.c_str() );
+        top->LinkEndChild( outf );
+        TiXmlText * outName = new TiXmlText(m_outfileName.c_str());
+        outf->LinkEndChild(outName);
+    }
+
+    if(!(m_commentString.IsEmpty()))
+    {
+        TiXmlElement * commentElem = new TiXmlElement( cnvstr::TAG_ADDCOMMENT.c_str() );
+        top->LinkEndChild( commentElem );
+        TiXmlText * commentText = new TiXmlText(m_commentString.c_str());
+        commentElem->LinkEndChild(commentText);
+    }
+
+    return docP;
+
+}
+
+void
+GCDataStore::WriteBatchFile(TiXmlDocument * docP, wxString fileName)
+{
+    docP->SaveFile( fileName.c_str());
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_default.cpp b/src/convModel/gc_default.cpp
new file mode 100644
index 0000000..bd88746
--- /dev/null
+++ b/src/convModel/gc_default.cpp
@@ -0,0 +1,30 @@
+// $Id: gc_default.cpp,v 1.12 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_default.h"
+#include "wx/string.h"
+
+const long       gcdefault::badDisplayIndex = -999;
+const size_t     gcdefault::badIndex        = 999;
+const size_t     gcdefault::badLength       = 9999;
+const long       gcdefault::badLocation     = -9999;
+const long       gcdefault::badMapPosition  = -9999;
+const wxString   gcdefault::createdIndividualPrefix = "createdIndividual";
+const wxString   gcdefault::delimiter       = ".";
+const wxString   gcdefault::emptyBlock      = "<empty>";
+const wxString   gcdefault::locusName       = "<unnamed segment>";
+const size_t     gcdefault::migrateSequenceNameLength   = 10;
+const double     gcdefault::penetrance      = 0.0;
+const size_t     gcdefault::numSites        = 0;
+const wxString   gcdefault::popName         = "<unnamed population>";
+const wxString   gcdefault::regionName      = "<unnamed region>";
+const wxString   gcdefault::unnamedObject   = "<unnamed object>";
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_default.h b/src/convModel/gc_default.h
new file mode 100644
index 0000000..5d6fde2
--- /dev/null
+++ b/src/convModel/gc_default.h
@@ -0,0 +1,39 @@
+// $Id: gc_default.h,v 1.12 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_DEFAULT_H
+#define GC_DEFAULT_H
+
+#include "gc_types.h"
+#include "wx/string.h"
+
+class gcdefault
+{
+  public:
+    static const long       badDisplayIndex;
+    static const size_t     badIndex;
+    static const size_t     badLength;
+    static const long       badLocation;
+    static const long       badMapPosition;
+    static const wxString   createdIndividualPrefix;
+    static const wxString   delimiter;
+    static const wxString   emptyBlock;
+    static const wxString   locusName;
+    static const double     penetrance;
+    static const size_t     migrateSequenceNameLength;
+    static const size_t     numSites;
+    static const wxString   popName;
+    static const wxString   regionName;
+    static const wxString   unnamedObject;
+};
+
+#endif  // GC_DEFAULT_H
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_dictionary.cpp b/src/convModel/gc_dictionary.cpp
new file mode 100644
index 0000000..84aae4c
--- /dev/null
+++ b/src/convModel/gc_dictionary.cpp
@@ -0,0 +1,107 @@
+// $Id: gc_dictionary.cpp,v 1.3 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_dictionary.h"
+#include "gc_strings.h" // EWFIX.P4 -- move to own file ?
+#include "gc_structures_err.h"
+
+//------------------------------------------------------------------------------------
+
+gcDictionary::gcDictionary()
+{
+}
+
+gcDictionary::~gcDictionary()
+{
+}
+
+bool
+gcDictionary::HasName(const wxString & name) const
+{
+    return (! (find(name) == end()) );
+}
+
+void
+gcDictionary::ReserveName(const wxString & name)
+{
+    if(!name.IsEmpty())
+    {
+        if(HasName(name))
+        {
+            duplicate_name_error e(name,infoString());
+            throw e;
+        }
+        insert(name);
+    }
+    else
+    {
+        empty_name_error e(infoString());
+        throw e;
+    }
+}
+
+void
+gcDictionary::FreeName(const wxString & name)
+{
+    if(!name.IsEmpty())
+    {
+        iterator nameIter = find(name);
+        if(nameIter == end())
+        {
+            missing_name_error e(name,infoString());
+            throw e;
+        }
+        erase(nameIter);
+    }
+    else
+    {
+        empty_name_error e(infoString());
+        throw e;
+    }
+}
+
+wxString
+gcDictionary::ReserveOrMakeName(wxString name,
+                                wxString prefixToUseIfNameEmpty)
+{
+    if(!name.IsEmpty())
+    {
+        ReserveName(name);
+        return name;
+    }
+    wxString prefix = prefixToUseIfNameEmpty;
+    if(prefix.IsEmpty())
+    {
+        prefix = gcstr::object;
+    }
+    for(int i=0; true; i++)
+    {
+        wxString nameCandidate = wxString::Format(gcstr::nameCandidate,prefix.c_str(),i);
+        try
+        {
+            ReserveName(nameCandidate);
+            return nameCandidate;
+        }
+        catch(duplicate_name_error& e)
+        {
+            // do nothing -- we just keep trying
+        }
+        catch(empty_name_error& f)
+        {
+            // do nothing -- we just keep trying
+        }
+    }
+    assert(false);
+    return gcstr::badName;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_dictionary.h b/src/convModel/gc_dictionary.h
new file mode 100644
index 0000000..1873f66
--- /dev/null
+++ b/src/convModel/gc_dictionary.h
@@ -0,0 +1,38 @@
+// $Id: gc_dictionary.h,v 1.4 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_DICTIONARY_H
+#define GC_DICTIONARY_H
+
+#include "wx/string.h"
+#include <set>
+
+class gcDictionary : private std::set<wxString>
+{
+  private:
+
+  protected:
+    // tells you what types of things are in this dictionary
+    virtual const wxString &    infoString() const = 0;
+
+  public:
+    gcDictionary();
+    virtual ~gcDictionary();
+
+    bool        HasName     (const wxString & name) const;
+    void        FreeName    (const wxString & name);
+    void        ReserveName (const wxString & name);
+    wxString    ReserveOrMakeName(  wxString useIfNotEmpty,
+                                    wxString prefixIfCreatingName=wxEmptyString);
+};
+
+#endif  // GC_DICTIONARY_H
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_exportable.cpp b/src/convModel/gc_exportable.cpp
new file mode 100644
index 0000000..b214fa6
--- /dev/null
+++ b/src/convModel/gc_exportable.cpp
@@ -0,0 +1,133 @@
+// $Id: gc_exportable.cpp,v 1.7 2011/06/22 18:22:22 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_errhandling.h"
+#include "gc_exportable.h"
+#include "gc_individual.h"
+#include "gc_population.h"
+#include "gc_region.h"
+#include "gc_strings.h"
+#include "wx/log.h"
+#include "wx/string.h"
+
+gcPopRegionPair::gcPopRegionPair(const gcPopulation * p, const gcRegion * r)
+    :
+    std::pair<const gcPopulation *,const gcRegion *>(p,r)
+{
+}
+
+gcPopRegionPair::~gcPopRegionPair()
+{
+}
+
+void
+gcPopRegionPair::DebugDump(wxString prefix) const
+{
+    wxString pname = (*first).GetName();
+    wxString rname = (*second).GetName();
+    wxLogDebug("%spop/region:(%s,%s)",prefix.c_str(),pname.c_str(),rname.c_str());  // EWDUMPOK
+}
+
+//------------------------------------------------------------------------------------
+
+gcNameResolvedInfo::gcNameResolvedInfo(const gcPopulation & pop, const gcRegion & reg)
+    :
+    m_populationRef(pop),
+    m_regionRef(reg)
+{
+}
+
+gcNameResolvedInfo::~gcNameResolvedInfo()
+{
+    for(std::vector<GCIndividual*>::iterator i = m_individuals.begin();
+        i != m_individuals.end(); i++)
+    {
+        delete *i;
+    }
+}
+
+void
+gcNameResolvedInfo::AddIndividual(GCIndividual * ind)
+{
+    m_individuals.push_back(ind);
+}
+
+const gcPopulation &
+gcNameResolvedInfo::GetPopRef() const
+{
+    return m_populationRef;
+}
+
+const gcRegion &
+gcNameResolvedInfo::GetRegionRef() const
+{
+    return m_regionRef;
+}
+
+std::vector<const GCIndividual*>
+gcNameResolvedInfo::GetIndividuals() const
+{
+    std::vector<const GCIndividual*> inds;
+    for(size_t index=0; index < m_individuals.size(); index++)
+    {
+        inds.push_back(m_individuals[index]);
+    }
+    return inds;
+}
+
+void
+gcNameResolvedInfo::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%svector of %d individuals",prefix.c_str(),(int)m_individuals.size());  // EWDUMPOK
+}
+
+//------------------------------------------------------------------------------------
+
+gcExportable::gcExportable()
+{
+}
+
+gcExportable::~gcExportable()
+{
+    for(iterator i=begin(); i != end(); i++)
+    {
+        delete (*i).second;
+    }
+}
+
+const gcNameResolvedInfo &
+gcExportable::GetInfo(const gcPopulation & pop, const gcRegion & region) const
+{
+    gcPopRegionPair pair(&pop,&region);
+    const_iterator i = find(pair);
+    if(i == end())
+    {
+        wxString popName = pop.GetName();
+        wxString regionName = region.GetName();
+        gui_error g(wxString::Format(gcerr::nameResolutionPairMissing,popName.c_str(),regionName.c_str()));
+        throw g;
+    }
+    return *((*i).second);
+}
+
+void
+gcExportable::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sgcExportable:",prefix.c_str());   // EWDUMPOK
+    for(const_iterator i=begin(); i!=end(); i++)
+    {
+        gcPopRegionPair pair = (*i).first;
+        const gcNameResolvedInfo * info = (*i).second;
+        pair.DebugDump(wxString::Format("%s    ",prefix.c_str()));
+        info->DebugDump(wxString::Format("%s    ",prefix.c_str()));
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_exportable.h b/src/convModel/gc_exportable.h
new file mode 100644
index 0000000..02d6b37
--- /dev/null
+++ b/src/convModel/gc_exportable.h
@@ -0,0 +1,58 @@
+// $Id: gc_exportable.h,v 1.7 2011/06/22 18:22:22 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_EXPORTABLE_H
+#define GC_EXPORTABLE_H
+
+#include <map>
+#include <vector>
+
+class GCIndividual;
+class gcPopulation;
+class gcRegion;
+
+class gcPopRegionPair : public std::pair<const gcPopulation *, const gcRegion *>
+{
+  public:
+    gcPopRegionPair(const gcPopulation *, const gcRegion *);
+    virtual ~gcPopRegionPair();
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcNameResolvedInfo
+{
+  private:
+    const gcPopulation &        m_populationRef;
+    const gcRegion &            m_regionRef;
+    std::vector<GCIndividual*>  m_individuals;
+
+  public:
+    gcNameResolvedInfo(const gcPopulation &, const gcRegion &);
+    virtual ~gcNameResolvedInfo();
+
+    void                                AddIndividual(GCIndividual*);
+    const gcPopulation &                GetPopRef() const;
+    const gcRegion &                    GetRegionRef() const;
+    std::vector<const GCIndividual*>    GetIndividuals() const;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcExportable : public std::map<gcPopRegionPair,gcNameResolvedInfo *>
+{
+  public:
+    gcExportable();
+    ~gcExportable();
+    const gcNameResolvedInfo &  GetInfo(const gcPopulation &, const gcRegion &) const;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+#endif  // GC_EXPORTABLE_H
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_file.cpp b/src/convModel/gc_file.cpp
new file mode 100644
index 0000000..7d6fa36
--- /dev/null
+++ b/src/convModel/gc_file.cpp
@@ -0,0 +1,272 @@
+// $Id: gc_file.cpp,v 1.38 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "Converter_LamarcDS.h"
+#include "gc_data.h"
+#include "gc_datastore.h"
+#include "gc_errhandling.h"
+#include "gc_file.h"
+#include "gc_strings.h"
+#include "gc_strings_structures.h"
+#include "gc_structures_err.h"
+#include "wx/filename.h"
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+std::map<wxString,wxString> GCFile::s_fileNameMap;
+
+//------------------------------------------------------------------------------------
+
+GCFile::GCFile( GCDataStore &       dataStoreRef,
+                wxString            name)
+    :
+    m_dataStore(dataStoreRef),
+    m_fullPathFileName(name),
+    m_parses(NULL)
+{
+    std::map<wxString,wxString>::const_iterator iter = s_fileNameMap.find(GetShortName());
+    if(iter != s_fileNameMap.end())
+    {
+        wxString oldFileName = (*iter).second;
+        if(oldFileName == GetName())
+        {
+            throw duplicate_file_error(oldFileName);
+        }
+        else
+        {
+            throw duplicate_file_base_name_error(oldFileName);
+        }
+    }
+    else
+    {
+        s_fileNameMap[GetShortName()] = GetName();
+    }
+}
+
+GCFile::~GCFile()
+{
+    std::map<wxString,wxString>::iterator iter = s_fileNameMap.find(GetShortName());
+    if(iter != s_fileNameMap.end())
+    {
+        s_fileNameMap.erase(iter);
+    }
+
+    if(m_parses != NULL)
+    {
+        m_parses->NukeContents();
+        delete(m_parses);
+    }
+}
+
+void
+GCFile::SetParses(GCParseVec * parses)
+{
+    m_parses = parses;
+}
+
+wxString
+GCFile::GetName() const
+{
+    wxFileName fname(m_fullPathFileName);
+    return fname.GetFullPath();
+}
+
+wxString
+GCFile::GetShortName() const
+{
+    wxFileName fname(m_fullPathFileName);
+    return fname.GetFullName(); // FullName == file name + extension
+}
+
+const GCParse &
+GCFile::GetParse(size_t choice) const
+{
+    assert(m_parses != NULL);
+    assert(m_parses->size() > choice);
+    const GCParse * parseP = (*m_parses)[choice];
+    assert(parseP != NULL);
+    return *parseP;
+}
+
+size_t
+GCFile::GetParseCount() const
+{
+    if(m_parses == NULL) return 0;
+    return m_parses->size();
+}
+
+void
+GCFile::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sfile (id %d) %s", // EWDUMPOK
+               prefix.c_str(),
+               (int)GetId(),
+               GetName().c_str());
+    if(m_parses != NULL)
+    {
+        for(GCParseVec::const_iterator i = m_parses->begin(); i != m_parses->end(); i++)
+        {
+            const GCParse & parse = **i;
+            parse.DebugDump(prefix+gcstr::indent);
+        }
+    }
+}
+
+const GCParse &
+GCFile::GetParse(   GCFileFormat        format,
+                    gcGeneralDataType   dataType,
+                    GCInterleaving      interleaving) const
+{
+    if(m_parses != NULL)
+    {
+        for(size_t index = 0; index < m_parses->size(); index++)
+        {
+            GCParse & parse = *((*m_parses)[index]);
+            if( parse.GetFormat() == format )
+            {
+                if( parse.GetInterleaving() == interleaving_MOOT ||
+                    parse.GetInterleaving() == interleaving)
+                {
+                    if( parse.GetDataType().CompatibleWith(dataType) )
+                    {
+                        return parse;
+                    }
+                }
+            }
+        }
+    }
+    wxString fileFmtStr     = ToWxString(format);
+    wxString dataTypeStr    = ToWxString(dataType);
+    wxString interleaveStr  = ToWxString(interleaving);
+    wxString msg = wxString::Format(gcerr::noSuchParse,
+                                    GetName().c_str(),
+                                    fileFmtStr.c_str(),
+                                    dataTypeStr.c_str(),
+                                    interleaveStr.c_str());
+    gc_data_error e(msg.c_str());
+    throw e;
+}
+
+bool
+GCFileCompare::operator()(const GCFile* p1, const GCFile* p2)
+{
+    const wxString name1 = p1->GetName();
+    const wxString name2 = p2->GetName();
+    assert( (p1->GetId() != p2->GetId()) || (name1==name2));
+    return (name1 < name2);
+}
+
+gcIdSet
+GCFile::IdsOfAllBlocks() const
+{
+    gcIdSet retSet;
+    for(size_t i=0; i < m_parses->size(); i++)
+    {
+        const GCParse & parse = *((*m_parses)[i]);
+        gcIdSet pSet = parse.IdsOfAllBlocks();
+        retSet.insert(pSet.begin(),pSet.end());
+    }
+    return retSet;
+}
+
+gcIdSet
+GCFile::IdsOfAllParses() const
+{
+    gcIdSet retSet;
+    for(size_t i=0; i < m_parses->size(); i++)
+    {
+        const GCParse & parse = *((*m_parses)[i]);
+        retSet.insert(parse.GetId());
+    }
+    return retSet;
+}
+
+gcGeneralDataType
+GCFile::GetGeneralDataType() const
+{
+    gcGeneralDataType myType;
+
+    for(size_t i=0; i < GetParseCount(); i++)
+    {
+        const GCParse & parse = GetParse(i);
+        myType.Union(parse.GetDataType());
+    }
+    return myType;
+
+}
+
+wxString
+GCFile::GetDataTypeString() const
+{
+    gcGeneralDataType myType = GetGeneralDataType();
+    return ToWxString(myType).c_str();
+}
+
+GCFileFormat
+GCFile::GetFormat() const
+{
+    GCFileFormat myType = format_NONE_SET;
+    for(size_t i=0; i < GetParseCount(); i++)
+    {
+        const GCParse & parse = GetParse(i);
+        if (i == 0)
+        {
+            myType = parse.GetFormat();
+        }
+        else
+        {
+            if(myType != parse.GetFormat())
+            {
+                return format_NONE_SET;
+            }
+        }
+    }
+    return myType;
+}
+
+wxString
+GCFile::GetFormatString() const
+{
+    GCFileFormat myType = GetFormat();
+    return ToWxString(myType).c_str();
+}
+
+GCInterleaving
+GCFile::GetInterleaving() const
+{
+    GCInterleaving myType = interleaving_NONE_SET;
+    for(size_t i=0; i < GetParseCount(); i++)
+    {
+        const GCParse & parse = GetParse(i);
+        if (i == 0)
+        {
+            myType = parse.GetInterleaving();
+        }
+        else
+        {
+            if(myType != parse.GetInterleaving())
+            {
+                return interleaving_NONE_SET;
+            }
+        }
+    }
+    return myType;
+
+}
+
+wxString
+GCFile::GetInterleavingString() const
+{
+    GCInterleaving myType = GetInterleaving();
+    return ToWxString(myType).c_str();
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_file.h b/src/convModel/gc_file.h
new file mode 100644
index 0000000..5bbd196
--- /dev/null
+++ b/src/convModel/gc_file.h
@@ -0,0 +1,77 @@
+// $Id: gc_file.h,v 1.36 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_FILE_H
+#define GC_FILE_H
+
+#include "gc_parse.h"
+#include "gc_quantum.h"
+#include "gc_structure_maps.h"
+#include "gc_types.h"
+#include "wx/string.h"
+
+class GCDataStore;
+class GCFileLines;
+
+class GCFile : public GCQuantum
+{
+    friend class GCDataStore;   // EWFIX.P3 -- think about making a
+                                // separate object that has access
+                                // to the SetParses method -- it
+                                // should only be necessary at GCFile
+                                // creation time
+  private:
+    GCDataStore &       m_dataStore;
+    wxString            m_fullPathFileName;
+    GCParseVec *        m_parses;   // we own this
+
+    GCFile();       // undefined
+    GCFile( GCDataStore & dataStoreRef, wxString name);
+
+  protected:
+    void SetParses(GCParseVec * parses);
+    static std::map<wxString,wxString> s_fileNameMap;
+
+  public:
+    virtual ~GCFile();
+
+    wxString            GetDataTypeString()     const;
+    wxString            GetFormatString()       const;
+    wxString            GetInterleavingString() const;
+    wxString            GetName()               const;
+    bool                GetNeedsSettings()      const;
+    wxString            GetShortName()          const;
+    const GCParse &     GetParse(size_t choice) const;
+    const GCParse &     GetParse(GCFileFormat,gcGeneralDataType,GCInterleaving) const;
+    size_t              GetParseCount()         const;
+
+    void DebugDump(wxString prefix=wxEmptyString) const;
+
+    wxArrayString   PossibleSettings() const;
+
+    gcIdSet         IdsOfAllParses() const;
+    gcIdSet         IdsOfAllBlocks() const;
+
+    GCFileFormat        GetFormat()             const;
+    gcGeneralDataType   GetGeneralDataType()    const;
+    GCInterleaving      GetInterleaving()       const;
+};
+
+struct GCFileCompare
+{
+    bool operator()(const GCFile*,const GCFile*);
+};
+
+typedef std::set<GCFile*,GCFileCompare>         dataFileSet;
+typedef std::set<const GCFile*,GCFileCompare>   constDataFileSet;
+
+#endif  // GC_FILE_H
+
+//____________________________________________________________________________________
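dataFileSet and constDataFileSet above order GCFile pointers with GCFileCompare, i.e. by file name rather than by pointer value, so iteration order is stable and independent of allocation order. A small standalone sketch of the same comparator-over-pointers idiom, with placeholder names (File, ByName) rather than the converter's classes:

    #include <iostream>
    #include <set>
    #include <string>

    struct File { std::string name; };

    // Strict weak ordering over pointers, comparing the pointees' names,
    // mirroring what GCFileCompare::operator() does for GCFile*.
    struct ByName
    {
        bool operator()(const File* a, const File* b) const
        {
            return a->name < b->name;
        }
    };

    int main()
    {
        File f1 = {"chrom2.mig"};
        File f2 = {"chrom1.mig"};

        std::set<const File*, ByName> files;
        files.insert(&f1);
        files.insert(&f2);

        // Iteration visits files alphabetically, independent of insertion order.
        for (std::set<const File*, ByName>::const_iterator i = files.begin();
             i != files.end(); ++i)
        {
            std::cout << (*i)->name << "\n";
        }
        return 0;
    }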
diff --git a/src/convModel/gc_file_info.cpp b/src/convModel/gc_file_info.cpp
new file mode 100644
index 0000000..3adeea0
--- /dev/null
+++ b/src/convModel/gc_file_info.cpp
@@ -0,0 +1,215 @@
+// $Id: gc_file_info.cpp,v 1.5 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_default.h"
+#include "gc_file_info.h"
+#include "gc_strings.h"
+#include "wx/log.h"
+#include "wx/string.h"
+
+//------------------------------------------------------------------------------------
+
+gcFileInfo::gcFileInfo()
+    :
+    m_selected(false),
+    m_hasParse(false),
+    m_parseId(gcdefault::badIndex),
+    m_hasAdjacentHapAssignment(false),
+    m_adjacentHapAssignment(gcdefault::badIndex),
+    m_hasGenoFile(false),
+    m_genoFileId(gcdefault::badIndex)
+{
+}
+
+gcFileInfo::~gcFileInfo()
+{
+}
+
+bool
+gcFileInfo::GetSelected() const
+{
+    return m_selected;
+}
+
+void
+gcFileInfo::SetSelected(bool selected)
+{
+    m_selected = selected;
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcFileInfo::HasParse() const
+{
+    return m_hasParse;
+}
+
+size_t
+gcFileInfo::GetParse() const
+{
+    assert(HasParse());
+
+    return m_parseId;
+}
+
+void
+gcFileInfo::SetParse(size_t parseId)
+{
+    m_hasParse = true;
+    m_parseId = parseId;
+}
+
+void
+gcFileInfo::UnsetParse()
+{
+    m_hasParse = false;
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcFileInfo::HasAdjacentHapAssignment() const
+{
+    return m_hasAdjacentHapAssignment;
+}
+
+size_t
+gcFileInfo::GetAdjacentHapAssignment() const
+{
+    assert(HasAdjacentHapAssignment());
+    return  m_adjacentHapAssignment;
+}
+
+void
+gcFileInfo::SetAdjacentHapAssignment(size_t numAdj)
+{
+    m_hasAdjacentHapAssignment = true;
+    m_adjacentHapAssignment = numAdj;
+}
+
+void
+gcFileInfo::UnsetAdjacentHapAssignment()
+{
+    m_hasAdjacentHapAssignment = false;
+    m_adjacentHapAssignment = gcdefault::badIndex;
+}
+
+//------------------------------------------------------------------------------------
+
+void
+gcFileInfo::DebugDump(wxString prefix) const
+{
+    if(m_selected)
+    {
+        wxLogDebug("%sselected",prefix.c_str());   // EWDUMPOK
+    }
+    else
+    {
+        wxLogDebug("%sun selected",prefix.c_str());// EWDUMPOK
+    }
+
+    if(m_hasParse)
+    {
+        wxLogDebug("%sparse id %d",                 // EWDUMPOK
+                   prefix.c_str(),
+                   (int)m_parseId);
+    }
+    else
+    {
+        wxLogDebug("%sno parse",                   // EWDUMPOK
+                   prefix.c_str());
+    }
+
+    if(HasAdjacentHapAssignment())
+    {
+        wxLogDebug("%sadjacent haps: %d",                   // EWDUMPOK
+                   prefix.c_str(),
+                   (int)GetAdjacentHapAssignment());
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcFileInfo::HasGenoFile() const
+{
+    return m_hasGenoFile;
+}
+
+void
+gcFileInfo::UnsetGenoFile()
+{
+    m_hasGenoFile = false;
+}
+
+//------------------------------------------------------------------------------------
+
+const GCPopMatcher &
+gcFileInfo::GetPopMatcher() const
+{
+    return m_popMatch;
+}
+
+void
+gcFileInfo::SetPopMatcher(const GCPopMatcher & p)
+{
+    m_popMatch = p;
+}
+
+//------------------------------------------------------------------------------------
+
+const GCLocusMatcher &
+gcFileInfo::GetLocMatcher() const
+{
+    return m_locMatch;
+}
+
+void
+gcFileInfo::SetLocMatcher(const GCLocusMatcher & p)
+{
+    m_locMatch = p;
+}
+
+//------------------------------------------------------------------------------------
+
+gcFileMap::gcFileMap()
+{
+}
+
+gcFileMap::~gcFileMap()
+{
+}
+
+void
+gcFileMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sfile map:",prefix.c_str()); // EWDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        wxLogDebug("%s%sfor id %d",             // EWDUMPOK
+                   prefix.c_str(),
+                   gcstr::indent.c_str(),
+                   (int)((*i).first));
+        const gcFileInfo & info = (*i).second;
+        info.DebugDump(prefix+gcstr::indent+gcstr::indent);
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_file_info.h b/src/convModel/gc_file_info.h
new file mode 100644
index 0000000..ba25bf4
--- /dev/null
+++ b/src/convModel/gc_file_info.h
@@ -0,0 +1,77 @@
+// $Id: gc_file_info.h,v 1.4 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_FILE_INFO_H
+#define GC_FILE_INFO_H
+
+#include <list>
+#include <map>
+#include <set>
+
+#include "gc_locus.h"
+#include "gc_population.h"
+#include "gc_quantum.h"
+#include "gc_region.h"
+#include "gc_trait.h"
+
+#include "gc_loci_match.h"
+#include "gc_pop_match.h"
+
+#include "wx/string.h"
+
+class gcFileInfo
+{
+  private:
+    bool            m_selected;
+    bool            m_hasParse;
+    size_t          m_parseId;
+    GCPopMatcher    m_popMatch;
+    GCLocusMatcher  m_locMatch;
+    bool            m_hasAdjacentHapAssignment;
+    size_t          m_adjacentHapAssignment;
+    bool            m_hasGenoFile;
+    size_t          m_genoFileId;
+
+  public:
+    gcFileInfo();
+    virtual ~gcFileInfo();
+    bool    GetSelected() const;
+    bool    HasParse() const;
+    size_t  GetParse() const;
+    void    SetSelected(bool selected);
+    void    SetParse(size_t parseId);
+    void    UnsetParse();
+    void    DebugDump(wxString prefix=wxEmptyString) const;
+
+    bool    HasAdjacentHapAssignment()  const;
+    size_t  GetAdjacentHapAssignment()  const;
+    void    SetAdjacentHapAssignment(size_t numAdj);
+    void    UnsetAdjacentHapAssignment();
+
+    bool    HasGenoFile() const;
+    void    UnsetGenoFile();
+
+    const GCPopMatcher &    GetPopMatcher() const;
+    const GCLocusMatcher &  GetLocMatcher() const;
+    void    SetPopMatcher(const GCPopMatcher & p);
+    void    SetLocMatcher(const GCLocusMatcher & l);
+};
+
+class gcFileMap : public std::map<size_t,gcFileInfo>
+{
+  public:
+    gcFileMap();
+    virtual ~gcFileMap();
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+#endif  // GC_FILE_INFO_H
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_id_set.cpp b/src/convModel/gc_id_set.cpp
new file mode 100644
index 0000000..1a584d1
--- /dev/null
+++ b/src/convModel/gc_id_set.cpp
@@ -0,0 +1,25 @@
+// $Id: gc_id_set.cpp,v 1.3 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_id_set.h"
+
+wxString
+GCIdSet::AsString() const
+{
+    wxString listOfIds;
+    for(iterator i=begin(); i != end(); i++)
+    {
+        size_t id = *i;
+        listOfIds += wxString::Format("%d ",(int)id);
+    }
+    return listOfIds;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_id_set.h b/src/convModel/gc_id_set.h
new file mode 100644
index 0000000..5a35b6a
--- /dev/null
+++ b/src/convModel/gc_id_set.h
@@ -0,0 +1,26 @@
+// $Id: gc_id_set.h,v 1.4 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_ID_SET_H
+#define GC_ID_SET_H
+
+#include <set>
+#include "wx/string.h"
+
+class GCIdSet : public std::set<size_t>
+{
+  private:
+  public:
+    wxString    AsString()  const;
+};
+
+#endif  // GC_ID_SET_H
+
+//____________________________________________________________________________________
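GCIdSet is a std::set<size_t> whose AsString() renders the ids as a space-separated list in ascending order. The same join can be sketched with the standard library alone; IdsAsString below is an illustrative stand-in for the wxString::Format-based member above, not converter API.

    #include <cstddef>
    #include <iostream>
    #include <set>
    #include <sstream>
    #include <string>

    // Space-separated rendering of a set of ids, mirroring GCIdSet::AsString().
    std::string IdsAsString(const std::set<size_t>& ids)
    {
        std::ostringstream out;
        for (std::set<size_t>::const_iterator i = ids.begin(); i != ids.end(); ++i)
        {
            out << *i << " ";
        }
        return out.str();
    }

    int main()
    {
        std::set<size_t> ids;
        ids.insert(7);
        ids.insert(3);
        ids.insert(11);
        std::cout << IdsAsString(ids) << "\n";   // prints "3 7 11 " (sorted order)
        return 0;
    }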
diff --git a/src/convModel/gc_individual.cpp b/src/convModel/gc_individual.cpp
new file mode 100644
index 0000000..2537e10
--- /dev/null
+++ b/src/convModel/gc_individual.cpp
@@ -0,0 +1,198 @@
+// $Id: gc_individual.cpp,v 1.18 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_errhandling.h"
+#include "gc_individual.h"
+#include "gc_individual_err.h"
+#include "gc_locus.h"
+#include "gc_parse_sample.h"
+#include "gc_phase.h"
+#include "gc_region.h"
+#include "gc_sequential_data.h"
+#include "gc_strings.h"
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+gcSample::gcSample(wxString label)
+    :
+    m_label(label)
+{
+}
+
+gcSample::~gcSample()
+{
+}
+
+void
+gcSample::AddLocusData(const gcLocus * locusP, const GCSequentialData * dataP)
+{
+    assert(locusP != NULL);
+    std::map<const gcLocus*,const GCSequentialData *>::iterator iter;
+    iter = m_data.find(locusP);
+    if(iter != m_data.end())
+    {
+        throw gc_sample_locus_repeat(m_label,locusP->GetName());
+    }
+    m_data[locusP] = dataP;
+}
+
+const GCSequentialData &
+gcSample::GetData(const gcLocus * locusP) const
+{
+    std::map<const gcLocus*,const GCSequentialData *>::const_iterator iter;
+    iter = m_data.find(locusP);
+    if(iter == m_data.end())
+    {
+
+        throw gc_sample_missing_locus_data(GetLabel(),locusP->GetName());
+    }
+    return *((*iter).second);
+}
+
+wxString
+gcSample::GetLabel() const
+{
+    return m_label;
+}
+
+//------------------------------------------------------------------------------------
+
+GCIndividual::GCIndividual( wxString                name,
+                            const gcRegion &        regionRef)
+    :
+    m_name(name),
+    m_regionRef(regionRef)
+{
+}
+
+GCIndividual::~GCIndividual()
+{
+    for(std::vector<gcSample*>::iterator i=m_samples.begin(); i!=m_samples.end(); i++)
+    {
+        delete *i;
+    }
+}
+
+void
+GCIndividual::AddPhenotype(const gcPhenotype & pheno)
+{
+    m_phenotypeIds.insert(pheno.GetId());
+}
+
+const gcIdSet &
+GCIndividual::GetPhenotypeIds() const
+{
+    return m_phenotypeIds;
+}
+
+void
+GCIndividual::AddPhase(const gcLocus & locusRef, const gcUnphasedMarkers & phaseInfo)
+{
+    std::map<const gcLocus*,gcUnphasedMarkers>::iterator iter = m_phaseInfo.find(&locusRef);
+    if(iter == m_phaseInfo.end())
+    {
+        m_phaseInfo[&locusRef] = phaseInfo;
+    }
+    else
+    {
+        gcUnphasedMarkers & markers = (*iter).second;
+        markers.Merge(phaseInfo);
+    }
+}
+
+void
+GCIndividual::AddSample(wxString label, const gcLocus& locus, const GCSequentialData * data)
+{
+
+    assert(locus.GetRegionId() == m_regionRef.GetId());
+    gcSample * thisSample = GetSample(label);
+    if(thisSample == NULL)
+    {
+        thisSample = new gcSample(label);
+        m_samples.push_back(thisSample);
+    }
+
+    assert(thisSample != NULL);
+    thisSample->AddLocusData(&locus,data);
+}
+
+#if 0  // EWFIX.REMOVE
+const gcTraitSet &
+GCIndividual::GetGenotypeResolutions() const
+{
+    return m_traits;
+}
+#endif
+
+wxString
+GCIndividual::GetName() const
+{
+    return m_name;
+}
+
+const gcRegion &
+GCIndividual::GetRegion() const
+{
+    return m_regionRef;
+}
+
+size_t
+GCIndividual::GetNumSamples() const
+{
+    return m_samples.size();
+}
+
+const gcUnphasedMarkers *
+GCIndividual::GetUnphased(const gcLocus & locusRef) const
+{
+    std::map<const gcLocus*,gcUnphasedMarkers>::const_iterator iter = m_phaseInfo.find(&locusRef);
+
+    if(iter == m_phaseInfo.end())
+    {
+        return NULL;
+    }
+    return &((*iter).second);
+}
+
+gcSample *
+GCIndividual::GetSample(wxString label)
+{
+    for(std::vector<gcSample*>::iterator i = m_samples.begin(); i != m_samples.end(); i++)
+    {
+        gcSample * sample = *i;
+        if(sample->GetLabel() == label)
+        {
+            return sample;
+        }
+    }
+    return NULL;
+}
+
+const gcSample *
+GCIndividual::GetSample(size_t index) const
+{
+    assert(index < m_samples.size());
+    return m_samples[index];
+}
+
+void
+GCIndividual::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sIndividual %s of region %s with %d samples",  // EWDUMPOK
+               prefix.c_str(),
+               m_name.c_str(),
+               m_regionRef.GetName().c_str(),
+               (int)(m_samples.size()));
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_individual.h b/src/convModel/gc_individual.h
new file mode 100644
index 0000000..aff8b38
--- /dev/null
+++ b/src/convModel/gc_individual.h
@@ -0,0 +1,80 @@
+// $Id: gc_individual.h,v 1.13 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_INDIVIDUAL_H
+#define GC_INDIVIDUAL_H
+
+#include <map>
+#include <vector>
+
+#include "gc_phase.h"
+#include "gc_phenotype.h"
+#include "gc_set_util.h"
+#include "wx/string.h"
+
+class gcLocus;
+class gcPhenotype;
+class gcRegion;
+class GCSequentialData;
+
+class gcSample
+{
+  private:
+    wxString                                            m_label;
+    std::map<const gcLocus*, const GCSequentialData *>  m_data;
+
+  protected:
+
+  public:
+    gcSample(wxString label);
+    virtual ~gcSample();
+
+    void        AddLocusData(const gcLocus *, const GCSequentialData *);
+
+    const GCSequentialData &    GetData(const gcLocus *)    const;
+    wxString                    GetLabel()                  const;
+};
+
+class GCIndividual
+{
+  private:
+    wxString                                            m_name;
+    const gcRegion &                                    m_regionRef;
+    std::vector<gcSample*>                              m_samples;
+    std::map<const gcLocus*, gcUnphasedMarkers>         m_phaseInfo;
+
+    gcIdSet                                             m_phenotypeIds;
+
+    GCIndividual();                 // undefined
+
+  protected:
+    gcSample *                      GetSample(wxString label);
+
+  public:
+    GCIndividual(wxString name, const gcRegion &);
+    ~GCIndividual();
+
+    void    AddPhase(const gcLocus &, const gcUnphasedMarkers &);
+    void    AddPhenotype(const gcPhenotype&);
+    void    AddSample(wxString label, const gcLocus&,const GCSequentialData *);
+
+    const gcIdSet &             GetPhenotypeIds()               const;
+    wxString                    GetName()                       const;
+    size_t                      GetNumSamples()                 const;
+    const gcRegion &            GetRegion()                     const;
+    const gcSample *            GetSample(size_t hapIndex)      const;
+    const gcUnphasedMarkers  *  GetUnphased(const gcLocus &)    const;
+
+    void    DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+#endif  // GC_INDIVIDUAL_H
+
+//____________________________________________________________________________________
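Each GCIndividual owns a vector of gcSample objects, one per haplotype, and each sample maps a locus to its sequence data; AddSample creates the sample on first use and then stores the locus data in it. A simplified standalone sketch of that two-level ownership follows; Individual and Sample here key by strings purely for illustration, whereas the converter keys by gcLocus pointers and holds GCSequentialData.

    #include <cstddef>
    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    // One haplotype's data, keyed by locus name.
    struct Sample
    {
        std::string label;
        std::map<std::string, std::string> dataByLocus;
        explicit Sample(const std::string& l) : label(l) {}
    };

    class Individual
    {
      private:
        std::string          m_name;
        std::vector<Sample*> m_samples;   // owned, deleted in the destructor

        Sample* GetSample(const std::string& label)
        {
            for (size_t i = 0; i < m_samples.size(); ++i)
                if (m_samples[i]->label == label) return m_samples[i];
            return NULL;
        }

      public:
        explicit Individual(const std::string& name) : m_name(name) {}
        ~Individual()
        {
            for (size_t i = 0; i < m_samples.size(); ++i) delete m_samples[i];
        }

        // Mirrors GCIndividual::AddSample: create the sample on first use.
        void AddSample(const std::string& label,
                       const std::string& locus,
                       const std::string& data)
        {
            Sample* s = GetSample(label);
            if (s == NULL)
            {
                s = new Sample(label);
                m_samples.push_back(s);
            }
            s->dataByLocus[locus] = data;
        }

        size_t GetNumSamples() const { return m_samples.size(); }
    };

    int main()
    {
        Individual ind("ind1");
        ind.AddSample("hap-a", "locus1", "ACGT");
        ind.AddSample("hap-b", "locus1", "ACGA");
        std::cout << ind.GetNumSamples() << " samples\n";   // prints "2 samples"
        return 0;
    }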
diff --git a/src/convModel/gc_mapfile.cpp b/src/convModel/gc_mapfile.cpp
new file mode 100644
index 0000000..f16ec03
--- /dev/null
+++ b/src/convModel/gc_mapfile.cpp
@@ -0,0 +1,123 @@
+// $Id: gc_mapfile.cpp,v 1.11 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#if 0  // Potentially DEAD CODE (bobgian, Feb 2010)
+
+#include "gc_data.h"
+#include "gc_region.h"
+#include "gc_mapfile.h"
+#include "gc_locus.h"
+#include "tixml_util.h"
+#include "xml.h"
+#include "xml_strings.h"
+
+#include "wx/filename.h"
+#include "wx/log.h"
+
+size_t GCMapFile::s_mapFileCount = 0;
+
+GCMapFile::GCMapFile(wxString mapFileName)
+    :
+    m_mapFileId(s_mapFileCount++),
+    m_mapFileName(mapFileName),
+    m_xmlParserPointer( new XmlParser())
+
+{
+    m_xmlParserPointer->ParseFileData(std::string(mapFileName.c_str()));
+
+    TiXmlElement * topElem = m_xmlParserPointer->GetRootElement();
+
+    vector<TiXmlElement*> regionVec = ti_optionalChildren(topElem,xmlstr::XML_TAG_REGION);
+    for(size_t i=0; i < regionVec.size(); i++)
+    {
+        TiXmlElement * regionElem = regionVec[i];
+
+        size_t numHaps = ti_size_t_from_text(
+            ti_optionalChild(regionElem,xmlstr::XML_TAG_HAP_COUNT));
+
+        gcRegion & linkageGroup = AddLinkageGroup(numHaps);
+
+        TiXmlElement * spacingElem
+            = ti_optionalChild(regionElem,xmlstr::XML_TAG_SPACING);
+        vector<TiXmlElement*> blockVec
+            = ti_optionalChildren(spacingElem,xmlstr::XML_TAG_BLOCK);
+        for(size_t j=0; j < blockVec.size(); j++)
+        {
+            TiXmlElement * blockElem = blockVec[j];
+
+            wxString dataTypeString = ti_attributeValue(blockElem,xmlstr::XML_ATTRTYPE_TYPE);
+            GCDataType dataType = ProduceGCDataTypeOrBarf(dataTypeString);
+            size_t numSites = ti_size_t_from_text(
+                ti_optionalChild(blockElem,xmlstr::XML_TAG_NUM_SITES));
+            long offset = ti_long_from_text(
+                ti_optionalChild(blockElem,xmlstr::XML_TAG_OFFSET));
+            long mapPosition = ti_long_from_text(
+                ti_optionalChild(blockElem,xmlstr::XML_TAG_MAP_POSITION));
+            size_t length = ti_size_t_from_text(
+                ti_optionalChild(blockElem,xmlstr::XML_TAG_LENGTH));
+            wxString locationsString = ti_nodeText(
+                ti_optionalChild(blockElem,xmlstr::XML_TAG_LOCATIONS));
+
+            gcLocus & locus
+                = linkageGroup.AddLocus(dataType,numSites);
+            locus.SetOffset(offset);
+            locus.SetMapPosition(mapPosition);
+            locus.SetLength(length);
+            locus.SetLocations(locationsString);
+        }
+    }
+}
+
+GCMapFile::~GCMapFile()
+{
+}
+
+gcRegion &
+GCMapFile::AddLinkageGroup(size_t numHaps)
+{
+    gcRegion * group = new gcRegion(*this,numHaps);
+    m_linkageGroups.insert(group);
+    return *group;
+}
+
+size_t
+GCMapFile::GetId() const
+{
+    return m_mapFileId;
+}
+
+wxString
+GCMapFile::GetShortName() const
+{
+    wxFileName fname(GetName());
+    return fname.GetFullName(); // Full name == file name + extension
+}
+
+void
+GCMapFile::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sMap File: %s",prefix.c_str(),m_mapFileName.c_str());  // EWDUMPOK
+    for(linkageGroupSet::const_iterator i=m_linkageGroups.begin(); i != m_linkageGroups.end(); i++)
+    {
+        const gcRegion & group = **i;
+        group.DebugDump(prefix+gcstr::indent);
+    }
+    wxLogDebug("%s*********",prefix.c_str());   // EWDUMPOK
+}
+
+long
+GCMapFile::ExtractLongValue(TiXmlElement * ancestor, std::string tag, bool required)
+{
+    return -1;
+}
+
+#endif
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_mapfile.h b/src/convModel/gc_mapfile.h
new file mode 100644
index 0000000..211362b
--- /dev/null
+++ b/src/convModel/gc_mapfile.h
@@ -0,0 +1,66 @@
+// $Id: gc_mapfile.h,v 1.11 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_MAPFILE_H
+#define GC_MAPFILE_H
+
+#if 0  // Potentially DEAD CODE (bobgian, Feb 2010)
+
+#include "wx/string.h"
+
+class GCLocus;
+class GCLinkageGroup;
+class GCSequentialLocus;
+class TiXmlElement;
+class XmlParser;
+
+class GCMapFile
+{
+  private:
+    static size_t       s_mapFileCount;
+
+    const size_t        m_mapFileId;
+    wxString            m_mapFileName;
+
+    XmlParser *         m_xmlParserPointer;
+
+    linkageGroupSet     m_linkageGroups;
+
+    GCMapFile();        // undefined
+
+  protected:
+    GCLinkageGroup &    AddLinkageGroup(size_t numHaps);
+
+    //long                GetOffsetFromTiXml(TiXmlElement * blockElement);
+    //size_t              GetNumSitesFromTiXml(TiXmlElement * blockElement);
+    //wxString            GetLocationsFromTiXml(TiXmlElement * blockElement);
+    //GCDataType          GetDataTypeFromTiXml(TiXmlElement * blockElement);
+    //long                GetMapPositionFromTiXml(TiXmlElement * blockElement);
+
+    GCSequentialLocus & BuildLocus(TiXmlElement * blockElement);
+
+  public:
+    GCMapFile(wxString mapFileName);
+    ~GCMapFile();
+
+    size_t      GetId() const;
+    wxString    GetShortName() const;
+    wxString    GetName() const    {return m_mapFileName;};
+
+    void        DebugDump(wxString prefix=wxEmptyString) const;
+
+    static long     ExtractLongValue(TiXmlElement * element, std::string tag, bool required);
+};
+
+#endif
+
+#endif  // GC_MAPFILE_H
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_migration.cpp b/src/convModel/gc_migration.cpp
new file mode 100644
index 0000000..0a66ad3
--- /dev/null
+++ b/src/convModel/gc_migration.cpp
@@ -0,0 +1,252 @@
+// $Id: gc_migration.cpp,v 1.4 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_creation_info.h"           // JRM this doing anything?
+#include "gc_errhandling.h"
+#include "gc_data.h"                    // for ToWxString
+#include "gc_default.h"
+#include "gc_migration.h"
+#include "gc_strings.h"
+#include "gc_strings_mig.h"
+#include "gc_structures_err.h"
+#include "wx/log.h"
+#include "wx/string.h"
+#include "wx/tokenzr.h"
+
+//------------------------------------------------------------------------------------
+
+gcMigration::gcMigration()
+    :
+    m_blessed(false),
+    m_hasFrom(false),
+    m_fromId(gcdefault::badIndex),
+    m_hasTo(false),
+    m_toId(gcdefault::badIndex),
+    m_startValue(50.0),
+    m_method(migmethod_USER),
+    m_profile(migprofile_NONE),
+    m_constraint(migconstraint_UNCONSTRAINED)
+{
+    SetName(wxString::Format(gcstr_mig::internalName,(long)GetId()));
+}
+
+gcMigration::~gcMigration()
+{
+}
+
+void
+gcMigration::SetBlessed(bool blessed)
+{
+    m_blessed = blessed;
+}
+
+bool
+gcMigration::GetBlessed() const
+{
+    return m_blessed;
+}
+
+void
+gcMigration::DebugDump(wxString prefix) const
+{
+#if 0
+    wxLogDebug("%smatrix %s (matrix id %ld)", // JMDUMPOK
+               prefix.c_str(),
+               GetName().c_str(),
+               (long)GetId());
+    wxLogDebug("%sFrom %s",(prefix+gcstr::indent).c_str(),GetFromIdString().c_str());  // JMDUMPOK
+    wxLogDebug("%sTo %s",(prefix+gcstr::indent).c_str(),GetToIdString().c_str());  // JMDUMPOK
+    wxLogDebug("%s%s start value",(prefix+gcstr::indent).c_str(),GetStartValueString().c_str());   // JMDUMPOK
+#endif
+
+    wxLogVerbose("%smatrix %s (matrix id %ld)", // JMDUMPOK
+                 prefix.c_str(),
+                 GetName().c_str(),
+                 (long)GetId());
+    wxLogVerbose("%sFrom %s",(prefix+gcstr::indent).c_str(),GetFromIdString().c_str());  // JMDUMPOK
+    wxLogVerbose("%sTo %s",(prefix+gcstr::indent).c_str(),GetToIdString().c_str());  // JMDUMPOK
+    wxLogVerbose("%s%s start value",(prefix+gcstr::indent).c_str(),GetStartValueString().c_str());   // JMDUMPOK
+    wxLogVerbose("%s%s migration method",(prefix+gcstr::indent).c_str(),GetMethodString().c_str());   // JMDUMPOK
+    wxLogVerbose("%s%s migration profile",(prefix+gcstr::indent).c_str(),GetProfileAsString().c_str());   // JMDUMPOK
+    wxLogVerbose("%s%s migration constraint",(prefix+gcstr::indent).c_str(),GetConstraintString().c_str());   // JMDUMPOK
+}
+
+void
+gcMigration::SetFromId(size_t id)
+{
+    m_fromId  = id;
+    m_hasFrom = true;
+}
+
+bool
+gcMigration::HasFrom() const
+{
+    return m_hasFrom;
+}
+
+size_t
+gcMigration::GetFromId() const
+{
+    if(!HasFrom())
+    {
+        wxString msg = wxString::Format(gcerr::unsetFromId,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_fromId;
+}
+
+wxString
+gcMigration::GetFromIdString() const
+{
+    if(HasFrom())
+    {
+        return wxString::Format("%d",(int)GetFromId());
+    }
+    return gcstr::unknown;
+}
+
+void
+gcMigration::SetToId(size_t id)
+{
+    m_toId  = id;
+    m_hasTo = true;
+}
+
+bool
+gcMigration::HasTo() const
+{
+    return m_hasTo;
+}
+
+size_t
+gcMigration::GetToId() const
+{
+    if(!HasTo())
+    {
+        wxString msg = wxString::Format(gcerr::unsetToId,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_toId;
+}
+
+wxString
+gcMigration::GetToIdString() const
+{
+    if(HasTo())
+    {
+        return wxString::Format("%d",(int)GetToId());
+    }
+    return gcstr::unknown;
+}
+
+void
+gcMigration::SetStartValue(double val)
+{
+    m_startValue = val;
+}
+
+double
+gcMigration::GetStartValue() const
+{
+    return m_startValue;
+}
+
+wxString
+gcMigration::GetStartValueString() const
+{
+    return wxString::Format("%f",GetStartValue());
+}
+
+void
+gcMigration::SetMethod(migration_method val)
+{
+    m_method = val;
+}
+
+migration_method
+gcMigration::GetMethod() const
+{
+    return m_method;
+}
+
+wxString
+gcMigration::GetMethodString() const
+{
+    switch (m_method)
+    {
+        case migmethod_USER:
+            return gcstr_mig::migmethodUser;
+        case migmethod_FST:
+            return gcstr_mig::migmethodFST;
+        default:
+            return wxString::Format("Unknown Migration Method");
+    }
+}
+
+void
+gcMigration::SetProfile(migration_profile val)
+{
+    m_profile = val;
+}
+
+migration_profile
+gcMigration::GetProfile() const
+{
+    return m_profile;
+}
+
+wxString
+gcMigration::GetProfileAsString() const
+{
+    switch (m_profile)
+    {
+        case migprofile_NONE:
+            return gcstr_mig::migprofileNone;
+        case migprofile_FIXED:
+            return gcstr_mig::migprofileFixed;
+        case migprofile_PERCENTILE:
+            return gcstr_mig::migprofilePercentile;
+        default:
+            return wxString::Format("Unknown Migration Profile");
+    }
+}
+
+void
+gcMigration::SetConstraint(migration_constraint val)
+{
+    m_constraint = val;
+}
+
+migration_constraint
+gcMigration::GetConstraint() const
+{
+    return m_constraint;
+}
+
+wxString
+gcMigration::GetConstraintString() const
+{
+    switch (m_constraint)
+    {
+        case migconstraint_INVALID:
+            return gcstr_mig::migconstraintInvalid;
+        case migconstraint_CONSTANT:
+            return gcstr_mig::migconstraintConstant;
+        case migconstraint_SYMMETRIC:
+            return gcstr_mig::migconstraintSymmetric;
+        case migconstraint_UNCONSTRAINED:
+            return gcstr_mig::migconstraintUnconstained;
+        default:
+            return wxString::Format("Unknown Migration Constraint");
+    }
+}
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_migration.h b/src/convModel/gc_migration.h
new file mode 100644
index 0000000..c082f9e
--- /dev/null
+++ b/src/convModel/gc_migration.h
@@ -0,0 +1,85 @@
+// $Id: gc_migration.h,v 1.4 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_MIGRATION_H
+#define GC_MIGRATION_H
+
+#include "wx/string.h"
+#include "gc_creation_info.h"
+#include "gc_phase.h"
+#include "gc_quantum.h"
+#include "gc_types.h"
+
+class GCStructures;
+
+class gcMigration : public GCQuantum
+{
+    friend class GCStructures;
+
+  private:
+    bool                        m_blessed;  // true if user has edited this migration parameter
+
+    bool                        m_hasFrom;
+    size_t                      m_fromId;
+
+    bool                        m_hasTo;
+    size_t                      m_toId;
+
+    double                      m_startValue;
+    migration_method            m_method;
+    migration_profile           m_profile;
+    migration_constraint        m_constraint;
+
+    gcCreationInfo              m_creationInfo;  //JRM this doing anything?
+
+    void   SetCreationInfo(const gcCreationInfo &);
+
+  public:
+
+    gcMigration();
+    ~gcMigration();
+
+    void        SetBlessed(bool blessed);
+    bool        GetBlessed()            const;
+
+    bool        HasFrom()             const;
+    void        SetFromId(size_t id);
+    size_t      GetFromId()           const;
+    wxString    GetFromIdString()     const;
+
+    bool        HasTo()                const;
+    void        SetToId(size_t id);
+    size_t      GetToId()              const;
+    wxString    GetToIdString()        const;
+
+    void        SetStartValue(double val);
+    double      GetStartValue()          const;
+    wxString    GetStartValueString()    const;
+
+    void                 SetMethod(migration_method val);
+    migration_method     GetMethod()          const;
+    wxString             GetMethodString()    const;
+
+    void                 SetProfile(migration_profile val);
+    migration_profile    GetProfile()          const;
+    wxString             GetProfileAsString()    const;
+
+    void                 SetConstraint(migration_constraint val);
+    migration_constraint GetConstraint()          const;
+    wxString             GetConstraintString()    const;
+
+
+    void        DebugDump(wxString prefix=wxEmptyString) const;
+
+};
+
+#endif  // GC_MIGRATION_H
+
+//____________________________________________________________________________________
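gcMigration, like gcPanel and gcParent below, stores each optional id as a bool flag plus a value seeded with gcdefault::badIndex; the Get accessor treats an unset id as an error while the string accessor degrades to an "unknown" label. A minimal standalone sketch of that flag-plus-sentinel accessor pattern, using placeholder names and std::logic_error in place of gc_implementation_error:

    #include <cstddef>
    #include <iostream>
    #include <sstream>
    #include <stdexcept>
    #include <string>

    // Placeholder sentinel standing in for gcdefault::badIndex.
    static const size_t badIndex = static_cast<size_t>(-1);

    class EndpointId
    {
      private:
        bool   m_has;
        size_t m_id;

      public:
        EndpointId() : m_has(false), m_id(badIndex) {}

        bool Has() const { return m_has; }
        void Set(size_t id) { m_id = id; m_has = true; }

        // Mirrors gcMigration::GetFromId(): asking for an unset id is an error.
        size_t Get() const
        {
            if (!m_has)
                throw std::logic_error("endpoint id requested before it was set");
            return m_id;
        }

        // Mirrors GetFromIdString(): degrade to a label instead of throwing.
        std::string AsString() const
        {
            if (!m_has) return "unknown";
            std::ostringstream out;
            out << m_id;
            return out.str();
        }
    };

    int main()
    {
        EndpointId from;
        std::cout << "from = " << from.AsString() << "\n";   // "unknown"
        from.Set(3);
        std::cout << "from = " << from.Get() << "\n";        // 3
        return 0;
    }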
diff --git a/src/convModel/gc_panel.cpp b/src/convModel/gc_panel.cpp
new file mode 100644
index 0000000..19fb6d9
--- /dev/null
+++ b/src/convModel/gc_panel.cpp
@@ -0,0 +1,174 @@
+// $Id: gc_panel.cpp,v 1.2 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_creation_info.h"           // JRM this doing anything?
+#include "gc_errhandling.h"
+#include "gc_data.h"                    // for ToWxString
+#include "gc_default.h"
+#include "gc_panel.h"
+#include "gc_strings.h"
+#include "gc_structures_err.h"
+#include "wx/log.h"
+#include "wx/string.h"
+#include "wx/tokenzr.h"
+
+//------------------------------------------------------------------------------------
+
+gcPanel::gcPanel()
+    :
+    m_blessed(false),
+    m_hasRegion(false),
+    m_regionId(gcdefault::badIndex),
+    m_hasPop(false),
+    m_popId(gcdefault::badIndex),
+    m_nPanels(0)
+{
+}
+
+gcPanel::~gcPanel()
+{
+}
+
+void
+gcPanel::SetBlessed(bool blessed)
+{
+    m_blessed = blessed;
+}
+
+bool
+gcPanel::GetBlessed() const
+{
+    return m_blessed;
+}
+
+void
+gcPanel::DebugDump(wxString prefix) const
+{
+#if 0
+    wxLogDebug("%spanel %s (panel id %ld)", // JMDUMPOK
+               prefix.c_str(),
+               GetName().c_str(),
+               (long)GetId());
+    wxLogDebug("%sregion %s",(prefix+gcstr::indent).c_str(),GetRegionIdString().c_str());  // JMDUMPOK
+    wxLogDebug("%s%s population",(prefix+gcstr::indent).c_str(),GetPopIdString().c_str());  // JMDUMPOK
+    wxLogDebug("%s%s number of panels",(prefix+gcstr::indent).c_str(),GetNumPanelsString().c_str());   // JMDUMPOK
+#endif
+
+    wxLogVerbose("%spanel %s (panel id %ld)", // JMDUMPOK
+                 prefix.c_str(),
+                 GetName().c_str(),
+                 (long)GetId());
+    wxLogVerbose("%sregion %s",(prefix+gcstr::indent).c_str(),GetRegionIdString().c_str());  // JMDUMPOK
+    wxLogVerbose("%s%s population",(prefix+gcstr::indent).c_str(),GetPopIdString().c_str());  // JMDUMPOK
+    wxLogVerbose("%s%s number of panels",(prefix+gcstr::indent).c_str(),GetNumPanelsString().c_str());   // JMDUMPOK
+}
+
+void
+gcPanel::SetRegionId(size_t id)
+{
+    m_regionId  = id;
+    m_hasRegion = true;
+}
+
+void
+gcPanel::UnsetRegionId()
+{
+    m_regionId  = gcdefault::badIndex;
+    m_hasRegion = false;
+}
+
+bool
+gcPanel::HasRegion() const
+{
+    return m_hasRegion;
+}
+
+size_t
+gcPanel::GetRegionId() const
+{
+    if(!HasRegion())
+    {
+        wxString msg = wxString::Format(gcerr::unsetRegionId,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_regionId;
+}
+
+wxString
+gcPanel::GetRegionIdString() const
+{
+    if(HasRegion())
+    {
+        return wxString::Format("%d",(int)GetRegionId());
+    }
+    return gcstr::unknown;
+}
+
+void
+gcPanel::SetPopId(size_t id)
+{
+    m_popId  = id;
+    m_hasPop = true;
+}
+
+void
+gcPanel::UnsetPopId()
+{
+    m_popId  = gcdefault::badIndex;
+    m_hasPop = false;
+}
+
+bool
+gcPanel::HasPop() const
+{
+    return m_hasPop;
+}
+
+size_t
+gcPanel::GetPopId() const
+{
+    if(!HasPop())
+    {
+        wxString msg = wxString::Format(gcerr::unsetPopId,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_popId;
+}
+
+wxString
+gcPanel::GetPopIdString() const
+{
+    if(HasPop())
+    {
+        return wxString::Format("%d",(int)GetPopId());
+    }
+    return gcstr::unknown;
+}
+
+void
+gcPanel::SetNumPanels(long val)
+{
+    m_nPanels = val;
+}
+
+long
+gcPanel::GetNumPanels() const
+{
+    return m_nPanels;
+}
+
+wxString
+gcPanel::GetNumPanelsString() const
+{
+    return wxString::Format("%d",(int)GetNumPanels());
+}
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_panel.h b/src/convModel/gc_panel.h
new file mode 100644
index 0000000..79b68a7
--- /dev/null
+++ b/src/convModel/gc_panel.h
@@ -0,0 +1,72 @@
+// $Id: gc_panel.h,v 1.2 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PANEL_H
+#define GC_PANEL_H
+
+#include "wx/string.h"
+#include "gc_creation_info.h"
+#include "gc_phase.h"
+#include "gc_quantum.h"
+#include "gc_types.h"
+
+class GCStructures;
+
+class gcPanel : public GCQuantum
+{
+    friend class GCStructures;
+
+  private:
+    bool                        m_blessed;  // true if user has edited m_nPanels
+
+    bool                        m_hasRegion;
+    size_t                      m_regionId;
+
+    bool                        m_hasPop;
+    size_t                      m_popId;
+
+    long                        m_nPanels;
+
+    gcCreationInfo              m_creationInfo;  //JRM this doing anything?
+
+    void   SetCreationInfo(const gcCreationInfo &);
+
+  public:
+
+    gcPanel();
+    ~gcPanel();
+
+    void        SetBlessed(bool blessed);
+    bool        GetBlessed()            const;
+
+    bool        HasRegion()             const;
+    void        SetRegionId(size_t id);
+    void        UnsetRegionId();
+    size_t      GetRegionId()           const;
+    wxString    GetRegionIdString()     const;
+
+    bool        HasPop()                const;
+    void        SetPopId(size_t id);
+    void        UnsetPopId();
+    size_t      GetPopId()              const;
+    wxString    GetPopIdString()        const;
+
+    void        SetNumPanels(long val);
+    long        GetNumPanels()          const;
+    wxString    GetNumPanelsString()    const;
+
+
+    void        DebugDump(wxString prefix=wxEmptyString) const;
+
+};
+
+#endif  // GC_PANEL_H
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_parent.cpp b/src/convModel/gc_parent.cpp
new file mode 100644
index 0000000..3a37e02
--- /dev/null
+++ b/src/convModel/gc_parent.cpp
@@ -0,0 +1,268 @@
+// $Id: gc_parent.cpp,v 1.3 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_creation_info.h"           // JRM this doing anything?
+#include "gc_errhandling.h"
+#include "gc_data.h"                    // for ToWxString
+#include "gc_default.h"
+#include "gc_parent.h"
+#include "gc_strings.h"
+#include "gc_structures_err.h"
+#include "wx/log.h"
+#include "wx/string.h"
+#include "wx/tokenzr.h"
+
+//------------------------------------------------------------------------------------
+
+gcParent::gcParent()
+    :
+    m_blessed(false),
+    m_level(0),
+    m_index(0),
+    m_length(0),
+    m_hasChild1(false),
+    m_child1Id(gcdefault::badIndex),
+    m_hasChild2(false),
+    m_child2Id(gcdefault::badIndex),
+    m_hasParent(false),
+    m_parentId(gcdefault::badIndex)
+{
+}
+
+gcParent::~gcParent()
+{
+}
+
+void
+gcParent::SetBlessed(bool blessed)
+{
+    m_blessed = blessed;
+}
+
+bool
+gcParent::GetBlessed() const
+{
+    return m_blessed;
+}
+
+void
+gcParent::SetDispLevel(int level)
+{
+    m_level = level;
+}
+
+int
+gcParent::GetDispLevel() const
+{
+    return m_level;
+}
+
+void
+gcParent::SetDispIndex(int index)
+{
+    m_index = index;
+}
+
+int
+gcParent::GetDispIndex() const
+{
+    return m_index;
+}
+
+void
+gcParent::SetDispLength(int length)
+{
+    m_length = length;
+}
+
+int
+gcParent::GetDispLength() const
+{
+    return m_length;
+}
+
+void
+gcParent::DebugDump(wxString prefix) const
+{
+    wxLogVerbose("%sparent %s (panel id %ld)", // JMDUMPOK
+                 prefix.c_str(),
+                 GetName().c_str(),
+                 (long)GetId());
+    wxLogVerbose("%sparent %s",(prefix+gcstr::indent).c_str(),GetParentIdString().c_str());  // JMDUMPOK
+    wxLogVerbose("%schild1 %s",(prefix+gcstr::indent).c_str(),GetChild1IdString().c_str());  // JMDUMPOK
+    wxLogVerbose("%schild2 %s",(prefix+gcstr::indent).c_str(),GetChild2IdString().c_str());   // JMDUMPOK
+}
+
+void
+gcParent::SetChild1Id(size_t id)
+{
+    m_child1Id  = id;
+    m_hasChild1 = true;
+}
+
+void
+gcParent::ClearChild1Id()
+{
+    m_child1Id  = gcdefault::badIndex;
+    m_hasChild1 = false;
+}
+
+bool
+gcParent::HasChild1() const
+{
+    return m_hasChild1;
+}
+
+size_t
+gcParent::GetChild1Id() const
+{
+    if(!HasChild1())
+    {
+        wxString msg = wxString::Format(gcerr::unsetChild1Id,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_child1Id;
+}
+
+size_t
+gcParent::GetChild1Id()
+{
+    if(!HasChild1())
+    {
+        wxString msg = wxString::Format(gcerr::unsetChild1Id,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_child1Id;
+}
+
+wxString
+gcParent::GetChild1IdString() const
+{
+    if(HasChild1())
+    {
+        return wxString::Format("%d",(int)GetChild1Id());
+    }
+    return gcstr::unknown;
+}
+
+void
+gcParent::SetChild2Id(size_t id)
+{
+    m_child2Id  = id;
+    m_hasChild2 = true;
+}
+
+void
+gcParent::ClearChild2Id()
+{
+    m_child2Id  = gcdefault::badIndex;
+    m_hasChild2 = false;
+}
+
+bool
+gcParent::HasChild2() const
+{
+    return m_hasChild2;
+}
+
+size_t
+gcParent::GetChild2Id() const
+{
+    if(!HasChild2())
+    {
+        wxString msg = wxString::Format(gcerr::unsetChild2Id,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_child2Id;
+}
+
+size_t
+gcParent::GetChild2Id()
+{
+    if(!HasChild2())
+    {
+        wxString msg = wxString::Format(gcerr::unsetChild2Id,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_child2Id;
+}
+
+wxString
+gcParent::GetChild2IdString() const
+{
+    if(HasChild2())
+    {
+        return wxString::Format("%d",(int)GetChild2Id());
+    }
+    return gcstr::unknown;
+}
+
+void
+gcParent::SetParentId(size_t id)
+{
+    m_parentId  = id;
+    m_hasParent = true;
+}
+
+void
+gcParent::ClearParentId()
+{
+    m_parentId  = gcdefault::badIndex;
+    m_hasParent = false;
+}
+
+bool
+gcParent::HasParent() const
+{
+    return m_hasParent;
+}
+
+size_t
+gcParent::GetParentId() const
+{
+#if 0
+    // shut off for debugging  JRMHACK
+    if(!HasParent())
+    {
+        wxString msg = wxString::Format(gcerr::unsetParentId,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+#endif
+
+    return m_parentId;
+}
+
+size_t
+gcParent::GetParentId()
+{
+#if 0
+    // shut off for debugging  JRMHACK
+    if(!HasParent())
+    {
+        wxString msg = wxString::Format(gcerr::unsetParentId,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+#endif
+
+    return m_parentId;
+}
+
+wxString
+gcParent::GetParentIdString() const
+{
+    if(HasParent())
+    {
+        return wxString::Format("%d",(int)GetParentId());
+    }
+    return gcstr::unknown;
+}
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_parent.h b/src/convModel/gc_parent.h
new file mode 100644
index 0000000..abac7f4
--- /dev/null
+++ b/src/convModel/gc_parent.h
@@ -0,0 +1,84 @@
+// $Id: gc_parent.h,v 1.3 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PARENT_H
+#define GC_PARENT_H
+
+#include "wx/string.h"
+#include "gc_creation_info.h"
+#include "gc_phase.h"
+#include "gc_quantum.h"
+#include "gc_types.h"
+
+class GCStructures;
+
+class gcParent : public GCQuantum
+{
+    friend class GCStructures;
+
+  private:
+    bool                        m_blessed;
+    int                         m_level;      // for lam_conv display layout
+    int                         m_index;      // for lam_conv display layout
+    int                         m_length;     // for lam_conv display layout
+    bool                        m_hasChild1;
+    size_t                      m_child1Id;
+    bool                        m_hasChild2;
+    size_t                      m_child2Id;
+    bool                        m_hasParent;
+    size_t                      m_parentId;
+
+    gcCreationInfo              m_creationInfo;  //JRM this doing anything?
+
+    void   SetCreationInfo(const gcCreationInfo &);
+
+  public:
+
+    gcParent();
+    ~gcParent();
+
+    void        SetBlessed(bool blessed);
+    bool        GetBlessed()            const;
+
+    int         GetDispLevel()          const;
+    void        SetDispLevel(int level);
+    int         GetDispIndex()          const;
+    void        SetDispIndex(int index);
+    int         GetDispLength()         const;
+    void        SetDispLength(int length);
+
+    void        SetChild1Id(size_t id);
+    void        ClearChild1Id();
+    bool        HasChild1()             const;
+    size_t      GetChild1Id()           const;
+    size_t      GetChild1Id();
+    wxString    GetChild1IdString()     const;
+
+    void        SetChild2Id(size_t id);
+    void        ClearChild2Id();
+    bool        HasChild2()             const;
+    size_t      GetChild2Id()           const;
+    size_t      GetChild2Id();
+    wxString    GetChild2IdString()     const;
+
+    void        SetParentId(size_t id);
+    void        ClearParentId();
+    bool        HasParent()             const;
+    size_t      GetParentId()           const;
+    size_t      GetParentId();
+    wxString    GetParentIdString()     const;
+
+    void        DebugDump(wxString prefix=wxEmptyString) const;
+
+};
+
+#endif  // GC_PARENT_H
+
+//____________________________________________________________________________________
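gcParent describes one node of the divergence tree by id: two child ids and a parent id, each guarded by a has-flag in the real class. A simplified standalone sketch of wiring such nodes together through a map keyed by id; ParentNode and the literal ids below are illustrative only.

    #include <cstddef>
    #include <iostream>
    #include <map>

    // Placeholder sentinel standing in for gcdefault::badIndex.
    static const size_t badIndex = static_cast<size_t>(-1);

    // Simplified stand-in for gcParent: a divergence node that refers to its
    // two children and its own parent by id rather than by pointer.
    struct ParentNode
    {
        size_t id;
        size_t child1Id;
        size_t child2Id;
        size_t parentId;

        ParentNode(size_t i)
            : id(i), child1Id(badIndex), child2Id(badIndex), parentId(badIndex) {}
    };

    int main()
    {
        std::map<size_t, ParentNode> nodes;
        nodes.insert(std::make_pair(100, ParentNode(100)));   // ancestral parent
        nodes.insert(std::make_pair(101, ParentNode(101)));   // intermediate parent

        std::map<size_t, ParentNode>::iterator top = nodes.find(100);
        std::map<size_t, ParentNode>::iterator mid = nodes.find(101);

        // Link the intermediate parent under the ancestral one; in the converter
        // the child ids would be population or parent ids assigned elsewhere.
        top->second.child1Id = mid->second.id;
        mid->second.parentId = top->second.id;

        std::cout << "node " << mid->second.id
                  << " has parent " << mid->second.parentId << "\n";
        return 0;
    }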
diff --git a/src/convModel/gc_phase.cpp b/src/convModel/gc_phase.cpp
new file mode 100644
index 0000000..64374d8
--- /dev/null
+++ b/src/convModel/gc_phase.cpp
@@ -0,0 +1,158 @@
+// $Id: gc_phase.cpp,v 1.9 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_locus.h"
+#include "gc_phase.h"
+#include "gc_phase_err.h"
+#include "wx/log.h"
+#include "wx/tokenzr.h"
+
+//------------------------------------------------------------------------------------
+
+gcUnphasedMarkers::gcUnphasedMarkers()
+{
+}
+
+gcUnphasedMarkers::~gcUnphasedMarkers()
+{
+}
+
+wxString
+gcUnphasedMarkers::AsString() const
+// used for DebugDump and for outputting XML file
+{
+    wxString retVal;
+    for(std::set<long>::const_iterator iter = begin(); iter != end(); iter++)
+    {
+        if(iter != begin()) retVal += " ";
+        retVal += wxString::Format("%d",(int)(*iter));
+    }
+    return retVal;
+}
+
+size_t
+gcUnphasedMarkers::NumMarkers() const
+{
+    return std::set<long>::size();
+}
+
+void
+gcUnphasedMarkers::AddMarker(long marker)
+{
+    insert(marker);
+}
+
+void
+gcUnphasedMarkers::Merge(const gcUnphasedMarkers & unphased)
+{
+    for(std::set<long>::const_iterator i = unphased.begin(); i != unphased.end(); i++)
+    {
+        AddMarker(*i);
+    }
+}
+
+void
+gcUnphasedMarkers::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sunphased markers: %s",      // EWDUMPOK
+               prefix.c_str(),
+               AsString().c_str());
+}
+
+void
+gcUnphasedMarkers::ReadFromString(wxString line)
+{
+    wxStringTokenizer tokenizer(line);
+    while(tokenizer.HasMoreTokens())
+    {
+        wxString longToken = tokenizer.GetNextToken();
+        long longVal;
+        if(! longToken.ToLong(&longVal))
+        {
+            throw gc_phase_marker_not_legal(longToken);
+        }
+        AddMarker(longVal);
+    }
+}
+
+bool
+gcUnphasedMarkers::operator!=(const gcUnphasedMarkers& markers) const
+{
+    return (!( *this == markers) );
+}
+
+bool
+gcUnphasedMarkers::HasZero() const
+{
+    if(find(0) != end()) return true;
+    return false;
+}
+
+long
+gcUnphasedMarkers::Smallest() const
+{
+    assert(!empty());
+    return *(begin());
+}
+
+long
+gcUnphasedMarkers::Largest() const
+{
+    assert(!empty());
+    return *(rbegin());   // largest element; dereferencing end() would be undefined
+}
+
+void
+gcUnphasedMarkers::ShiftNegsUp()
+{
+    gcUnphasedMarkers tmpStorage;
+    for(iterator i = begin(); i != end(); )
+    {
+        assert(*i != 0);
+        if(*i < 0)
+        {
+            tmpStorage.insert((*i)+1);
+            erase(i++);
+        }
+        else
+        {
+            i++;
+        }
+    }
+    for(gcUnphasedMarkers::iterator i=tmpStorage.begin(); i != tmpStorage.end(); i++)
+    {
+        insert(*i);
+    }
+}
+
+void
+gcUnphasedMarkers::CheckAgainstLocations(const gcLocus& locusRef) const
+{
+    if(locusRef.HasLocations())
+    {
+        std::vector<long> locs = locusRef.GetLocations();
+        std::set<long> locSet;
+        for(size_t i=0; i < locs.size(); i++)
+        {
+            locSet.insert(locs[i]);
+        }
+        for(const_iterator iter=begin(); iter != end(); iter++)
+        {
+            if(locSet.find(*iter) == locSet.end())
+            {
+                throw gc_phase_not_location(*iter,locusRef.GetName());
+            }
+        }
+    }
+}
+
+//____________________________________________________________________________________
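For reference, the effect of ShiftNegsUp() above can be sketched with a tiny,
hypothetical driver (the function name ShiftNegsUpSketch and the marker values
are illustrative only; just the public gcUnphasedMarkers interface is assumed):

    #include "gc_phase.h"

    void ShiftNegsUpSketch()
    {
        gcUnphasedMarkers markers;
        markers.AddMarker(-3);
        markers.AddMarker(-1);
        markers.AddMarker(2);
        // AsString() now yields "-3 -1 2" (the set keeps markers sorted)
        markers.ShiftNegsUp();
        // each negative marker has been incremented by one: "-2 0 2"
    }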
diff --git a/src/convModel/gc_phase.h b/src/convModel/gc_phase.h
new file mode 100644
index 0000000..cd1067f
--- /dev/null
+++ b/src/convModel/gc_phase.h
@@ -0,0 +1,51 @@
+// $Id: gc_phase.h,v 1.10 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PHASE_H
+#define GC_PHASE_H
+
+#include <set>
+#include "wx/string.h"
+
+class gcLocus;
+
+class gcUnphasedMarkers : private std::set<long>
+// represents the markers within a locus whose phase is unknown
+{
+  private:
+
+  protected:
+
+  public:
+    gcUnphasedMarkers();
+    virtual ~gcUnphasedMarkers();
+
+    wxString        AsString()      const;
+    size_t          NumMarkers()    const;
+
+    long            Smallest()      const;
+    long            Largest ()      const;
+
+    void            AddMarker(long marker);
+    void            Merge(const gcUnphasedMarkers& );
+    void            ReadFromString(wxString line);
+
+    void            DebugDump(wxString prefix=wxEmptyString) const;
+
+    bool            operator!=(const gcUnphasedMarkers&) const;
+    bool            HasZero() const;
+
+    void            ShiftNegsUp();
+    void            CheckAgainstLocations(const gcLocus&) const;
+};
+
+#endif  // GC_PHASE_H
+
+//____________________________________________________________________________________
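A minimal sketch of driving the interface above from calling code (the helper
name and the input strings are hypothetical; only gc_phase.h and gc_phase_err.h
from this commit are assumed):

    #include "gc_phase.h"
    #include "gc_phase_err.h"   // gc_phase_marker_not_legal

    // parse two whitespace-separated marker lists and take their union
    gcUnphasedMarkers ParseUnphased(const wxString & lineA, const wxString & lineB)
    {
        gcUnphasedMarkers a;
        gcUnphasedMarkers b;
        a.ReadFromString(lineA);    // throws gc_phase_marker_not_legal on a bad token
        b.ReadFromString(lineB);
        a.Merge(b);
        return a;
    }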
diff --git a/src/convModel/gc_phase_info.cpp b/src/convModel/gc_phase_info.cpp
new file mode 100644
index 0000000..6fab35d
--- /dev/null
+++ b/src/convModel/gc_phase_info.cpp
@@ -0,0 +1,584 @@
+// $Id: gc_phase_info.cpp,v 1.13 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_data.h"
+#include "gc_phase_err.h"
+#include "gc_phase_info.h"
+#include "gc_strings.h"
+#include "gc_strings_phase.h"
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+gcPhaseRecord::gcPhaseRecord()
+    :
+    m_phaseSource(phaseSource_NONE_SET),
+    m_fileName(wxEmptyString),
+    m_hasLineNumber(false),
+    m_lineNumber(0),
+    m_individual(wxEmptyString),
+    m_sampleCountIfNoSamples(0)
+{
+    assert(m_samples.IsEmpty());
+    assert(m_unphasedInfo.empty());
+}
+
+gcPhaseRecord::~gcPhaseRecord()
+{
+}
+
+void
+gcPhaseRecord::AddPhenotypeId(size_t phenoId)
+{
+    m_phenotypeIds.insert(phenoId);
+}
+
+const gcIdSet &
+gcPhaseRecord::GetPhenotypeIds() const
+{
+    return m_phenotypeIds;
+}
+
+void
+gcPhaseRecord::MergePhenotypeIds(const gcPhaseRecord & otherRec)
+{
+    m_phenotypeIds.insert(otherRec.m_phenotypeIds.begin(),otherRec.m_phenotypeIds.end());
+}
+
+gcPhaseSource
+gcPhaseRecord::GetPhaseSource() const
+{
+    return m_phaseSource;
+}
+
+wxString
+gcPhaseRecord::GetDescriptiveName() const
+{
+    wxString indName = gcstr::unknown;
+    wxString lineName = gcstr::unknown;
+    wxString fileName = gcstr::unknown;
+    wxString sizeName = wxString::Format("%ld",(long)GetSampleCount());
+    if(HasIndividual())
+    {
+        indName = GetIndividual();
+    }
+    if(HasFileName())
+    {
+        fileName = GetFileName();
+    }
+    if(HasLineNumber())
+    {
+        lineName = wxString::Format("%ld",(long)(GetLineNumber()));
+    }
+    wxString samplesString = "";
+    if(HasSamples())    // GetSamples() asserts when only a sample count is stored
+    {
+        wxArrayString sampleNames = GetSamples();
+        for(size_t i=0; i < sampleNames.Count(); i++)
+        {
+            if(i != 0)
+            {
+                samplesString += ", ";
+            }
+            samplesString += wxString::Format("\"%s\"",sampleNames[i].c_str());
+        }
+    }
+
+    switch(m_phaseSource)
+    {
+        case phaseSource_NONE_SET:
+            // EWFIX -- better reply
+            return gcstr::unknown;
+            break;
+        case phaseSource_PHASE_FILE:
+            return wxString::Format(gcstr_phase::descPhaseFile,indName.c_str(),lineName.c_str(),fileName.c_str(),samplesString.c_str());
+            break;
+        case phaseSource_MULTI_PHASE_SAMPLE:
+            return wxString::Format(gcstr_phase::descMultiPhase,sizeName.c_str(),lineName.c_str(),fileName.c_str(),samplesString.c_str());
+            break;
+        case phaseSource_FILE_ADJACENCY:
+            return wxString::Format(gcstr_phase::descFileAdjacency,lineName.c_str(),fileName.c_str(),samplesString.c_str());
+            break;
+        case phaseSource_COUNT:
+            assert(false);
+            return gcstr::unknown;
+            break;
+    }
+    assert(false);
+    return wxT("");
+}
+
+bool
+gcPhaseRecord::HasFileName() const
+{
+    return (! (m_fileName.IsEmpty()));
+}
+
+const wxString &
+gcPhaseRecord::GetFileName() const
+{
+    assert(HasFileName());
+    return m_fileName;
+}
+
+bool
+gcPhaseRecord::HasLineNumber() const
+{
+    return m_hasLineNumber;
+}
+
+size_t
+gcPhaseRecord::GetLineNumber() const
+{
+    return m_lineNumber;
+}
+
+void
+gcPhaseRecord::SetLineNumber(size_t lineNumber)
+{
+    m_hasLineNumber = true;
+    m_lineNumber = lineNumber;
+}
+
+bool
+gcPhaseRecord::HasIndividual() const
+{
+    return (! (m_individual.IsEmpty()));
+}
+
+const wxString &
+gcPhaseRecord::GetIndividual() const
+{
+    assert(HasIndividual());
+    return m_individual;
+}
+
+bool
+gcPhaseRecord::HasSamples() const
+{
+    return (! (m_samples.IsEmpty()));
+}
+
+const wxArrayString &
+gcPhaseRecord::GetSamples() const
+{
+    assert(HasSamples());
+    return m_samples;
+}
+
+size_t
+gcPhaseRecord::GetSampleCount() const
+{
+    if(! HasSamples())
+    {
+        return m_sampleCountIfNoSamples;
+    }
+    return m_samples.Count();
+}
+
+bool
+gcPhaseRecord::HasAnyZeroes() const
+{
+    for(gcIndPhaseInfo::const_iterator i = m_unphasedInfo.begin(); i != m_unphasedInfo.end(); i++)
+    {
+        const gcUnphasedMarkers & markers = (*i).second;
+        if(markers.HasZero()) return true;
+    }
+    return false;
+}
+
+void
+gcPhaseRecord::AddUnphased(wxString locusName, const gcUnphasedMarkers & unphased)
+{
+    gcIndPhaseInfo::iterator iter = m_unphasedInfo.find(locusName);
+    if(iter == m_unphasedInfo.end())
+    {
+        m_unphasedInfo[locusName] = unphased;
+    }
+    else
+    {
+        gcUnphasedMarkers & oldUnphased = (*iter).second;
+        oldUnphased.Merge(unphased);
+    }
+}
+
+bool
+gcPhaseRecord::HasUnphased(wxString locusName) const
+{
+    gcIndPhaseInfo::const_iterator iter = m_unphasedInfo.find(locusName);
+    return (iter != m_unphasedInfo.end());
+}
+
+const gcUnphasedMarkers &
+gcPhaseRecord::GetUnphased(wxString locusName) const
+{
+    assert(HasUnphased(locusName));
+    gcIndPhaseInfo::const_iterator iter = m_unphasedInfo.find(locusName);
+    const gcUnphasedMarkers & markers = (*iter).second;
+    return markers;
+}
+
+wxArrayString
+gcPhaseRecord::GetUnphasedLocusNames() const
+{
+    wxArrayString names;
+    for(gcIndPhaseInfo::const_iterator i = m_unphasedInfo.begin(); i != m_unphasedInfo.end(); i++)
+    {
+        const wxString & name = (*i).first;
+        names.Add(name);
+    }
+    return names;
+}
+
+bool
+gcPhaseRecord::operator==(const gcPhaseRecord& rec) const
+{
+    if (GetPhaseSource() != rec.GetPhaseSource()) return false;
+
+    if (HasFileName() != rec.HasFileName()) return false;
+    if (GetFileName() != rec.GetFileName()) return false;
+
+    if (HasIndividual() != rec.HasIndividual()) return false;
+    if (GetIndividual() != rec.GetIndividual()) return false;
+
+    if (HasSamples() != rec.HasSamples()) return false;
+    if (GetSamples() != rec.GetSamples()) return false;
+    if (GetSampleCount() != rec.GetSampleCount()) return false;
+
+    wxArrayString locusNames = GetUnphasedLocusNames();
+    wxArrayString recLocusNames = rec.GetUnphasedLocusNames();
+    if (locusNames.Count() != recLocusNames.Count()) return false;
+    for(size_t i=0; i < locusNames.Count(); i++)
+    {
+        wxString locusName = locusNames[i];
+
+        if(HasUnphased(locusName) != rec.HasUnphased(locusName)) return false;
+        if(GetUnphased(locusName) != rec.GetUnphased(locusName)) return false;
+    }
+
+    return true;
+}
+
+bool
+gcPhaseRecord::operator!=(const gcPhaseRecord & rec) const
+{
+    return !(operator==(rec));
+}
+
+void
+gcPhaseRecord::DebugDump(wxString prefix) const
+{
+    wxString indName = "";
+    if(HasIndividual())
+    {
+        indName = GetIndividual();
+    }
+    wxLogDebug("%s%s:",prefix.c_str(),indName.c_str());
+
+    wxLogDebug("%s%sphasesource: %s",prefix.c_str(),gcstr::indent.c_str(),
+               ToWxString(GetPhaseSource()).c_str());
+
+    if( !(HasSamples()) )
+    {
+        wxLogDebug("%s%ssampleCount: %d",prefix.c_str(),gcstr::indent.c_str(),
+                   (int)GetSampleCount());
+    }
+    else
+    {
+        const wxArrayString & samples = GetSamples();
+        for(size_t i = 0 ; i < samples.Count(); i++)
+        {
+            wxLogDebug("%s%ssample:%s", prefix.c_str(),gcstr::indent.c_str(),
+                       samples[i].c_str());
+        }
+    }
+    wxLogDebug("%s%sfilename: %s",prefix.c_str(),gcstr::indent.c_str(),
+               GetFileName().c_str());
+
+    wxLogDebug("%s%sphenoIds: %s",prefix.c_str(),gcstr::indent.c_str(),GetPhenotypeIds().AsString().c_str());
+
+    wxArrayString locusNames = GetUnphasedLocusNames();
+    for(size_t i=0; i < locusNames.Count(); i++)
+    {
+        wxString locusName = locusNames[i];
+
+        if(HasUnphased(locusName))
+        {
+            wxLogDebug("%s%sunphased markers for %s: %s",
+                       prefix.c_str(),
+                       gcstr::indent.c_str(),
+                       locusName.c_str(),
+                       GetUnphased(locusName).AsString().c_str());
+        }
+
+    }
+
+}
+
+gcPhaseRecord
+gcPhaseRecord::MakeAdjacentPhaseRecord( wxString        fileName,
+                                        size_t          lineNumber,
+                                        wxArrayString   samples)
+{
+    gcPhaseRecord newRec;
+    newRec.m_phaseSource = phaseSource_FILE_ADJACENCY;
+    newRec.m_fileName = fileName;
+    newRec.SetLineNumber(lineNumber);
+    newRec.m_samples = samples;
+    return newRec;
+}
+
+gcPhaseRecord
+gcPhaseRecord::MakeAllelicPhaseRecord(  wxString        fileName,
+                                        size_t          lineNumber,
+                                        wxString        individualName,
+                                        size_t          numSamples)
+{
+    gcPhaseRecord newRec;
+    newRec.m_phaseSource = phaseSource_MULTI_PHASE_SAMPLE;
+    newRec.m_fileName = fileName;
+    newRec.SetLineNumber(lineNumber);
+    newRec.m_individual = individualName;
+    newRec.m_sampleCountIfNoSamples = numSamples;
+    assert(newRec.m_samples.IsEmpty());
+    return newRec;
+}
+
+gcPhaseRecord *
+gcPhaseRecord::MakeFullPhaseRecord(     wxString        fileName,
+                                        size_t          lineNumber,
+                                        wxString        individualName,
+                                        wxArrayString   samples)
+{
+    gcPhaseRecord * newRec = new gcPhaseRecord();
+    newRec->m_phaseSource = phaseSource_PHASE_FILE;
+    newRec->m_fileName = fileName;
+    newRec->SetLineNumber(lineNumber);
+    newRec->m_individual = individualName;
+    newRec->m_samples = samples;
+    return newRec;
+}
+
+//------------------------------------------------------------------------------------
+
+gcPhaseInfo::gcPhaseInfo()
+{
+}
+
+gcPhaseInfo::~gcPhaseInfo()
+{
+}
+
+bool
+gcPhaseInfo::AddRecordIndividual(const gcPhaseRecord & rec)
+{
+    if(rec.HasIndividual())
+        // nothing to do if it doesn't
+    {
+        const wxString & indName = rec.GetIndividual();
+        if(HasIndividualRecord(indName))
+            // need to merge info or complain if not possible
+        {
+            bool didReplace = MergeIndividualRecs(GetIndividualRecord(indName),rec);
+            return didReplace;
+        }
+        else
+        {
+            m_fromIndividual.insert(recordPair(indName,rec));
+            return true;
+        }
+    }
+    return false;
+}
+
+bool
+gcPhaseInfo::AddRecordSample(const gcPhaseRecord & rec)
+{
+
+    bool addedAnything = false;
+    // the samples half
+    if(rec.HasSamples())
+    {
+        wxArrayString samples = rec.GetSamples();
+        bool anyPresent = false;
+
+        for(size_t i=0; i < samples.Count(); i++)
+        {
+            if(HasSampleRecord(samples[i]))
+                // checking that if this sample name already occurs, it
+                // occurs in the same configuration
+            {
+                anyPresent = true;
+                const gcPhaseRecord & oldRecord = GetSampleRecord(samples[i]);
+                assert(oldRecord.HasSamples());
+                if(oldRecord.GetSamples() != samples)
+                {
+                    throw gc_phase_mismatch(oldRecord,rec);
+                }
+            }
+        }
+
+        if(anyPresent == false)
+        {
+            for(size_t i=0; i < samples.Count(); i++)
+            {
+                m_fromSample[samples[i]] = rec;
+                addedAnything = true;
+            }
+        }
+    }
+    return addedAnything;
+}
+
+bool
+gcPhaseInfo::MergeIndividualRecs(   const gcPhaseRecord & oldRec,
+                                    const gcPhaseRecord & newRec)
+{
+    assert(oldRec.HasIndividual());
+    assert(newRec.HasIndividual());
+    assert(oldRec.GetIndividual() == newRec.GetIndividual());
+
+    if(oldRec.GetSampleCount() != newRec.GetSampleCount())
+    {
+        throw gc_phase_mismatch(oldRec,newRec);
+    }
+
+    if(oldRec.HasSamples())
+    {
+        if(newRec.HasSamples())
+            // need to make sure they match
+        {
+            if(oldRec.GetSamples() != newRec.GetSamples())
+            {
+                throw gc_phase_mismatch(oldRec,newRec);
+            }
+        }
+    }
+    else
+    {
+        if(newRec.HasSamples())
+            // need to replace old rec with this rec
+        {
+            gcPhaseRecord replacement = newRec;
+            replacement.MergePhenotypeIds(oldRec);
+            m_fromIndividual[newRec.GetIndividual()] = replacement;
+            return true;
+        }
+    }
+    gcPhaseRecord replacement = oldRec;
+    replacement.MergePhenotypeIds(newRec);
+    m_fromIndividual[oldRec.GetIndividual()] = replacement;
+    return false;
+
+}
+
+void
+gcPhaseInfo::AddRecord(const gcPhaseRecord & phaseRecord)
+{
+    bool addedI = AddRecordIndividual(phaseRecord);
+    bool addedS = AddRecordSample(phaseRecord);
+
+    assert(     (phaseRecord.GetPhaseSource() != phaseSource_PHASE_FILE)
+                ||  (addedI == addedS) );
+
+#ifdef NDEBUG  // Silence compiler warning if variables not used.
+    (void)addedI;
+    (void)addedS;
+#endif // NDEBUG
+}
+
+void
+gcPhaseInfo::AddRecords(const gcPhaseInfo & rs)
+{
+    for(stringToRecord::const_iterator i = rs.m_fromIndividual.begin();
+        i != rs.m_fromIndividual.end();
+        i++)
+    {
+        const gcPhaseRecord & rec = (*i).second;
+        AddRecord(rec);
+    }
+    for(stringToRecord::const_iterator i = rs.m_fromSample.begin();
+        i != rs.m_fromSample.end();
+        i++)
+    {
+        const gcPhaseRecord & rec = (*i).second;
+        AddRecord(rec);
+    }
+}
+
+bool
+gcPhaseInfo::HasIndividualRecord(wxString name) const
+{
+    stringToRecord::const_iterator iter = m_fromIndividual.find(name);
+    return (iter != m_fromIndividual.end());
+}
+
+const gcPhaseRecord &
+gcPhaseInfo::GetIndividualRecord(wxString name) const
+{
+    stringToRecord::const_iterator iter = m_fromIndividual.find(name);
+    assert (iter != m_fromIndividual.end());
+    return (*iter).second;
+}
+
+bool
+gcPhaseInfo::HasSampleRecord(wxString name) const
+{
+    stringToRecord::const_iterator iter = m_fromSample.find(name);
+    return (iter != m_fromSample.end());
+}
+
+const gcPhaseRecord &
+gcPhaseInfo::GetSampleRecord(wxString name) const
+{
+    stringToRecord::const_iterator iter = m_fromSample.find(name);
+    assert (iter != m_fromSample.end());
+    return (*iter).second;
+}
+
+void
+gcPhaseInfo::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sIndividual phase records:",prefix.c_str());
+    for(stringToRecord::const_iterator i=m_fromIndividual.begin(); i != m_fromIndividual.end(); i++)
+    {
+        (*i).second.DebugDump(prefix+gcstr::indent);
+    }
+
+    wxLogDebug("%sSample phase records:",prefix.c_str());
+    for(stringToRecord::const_iterator i=m_fromSample.begin(); i != m_fromSample.end(); i++)
+    {
+        (*i).second.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+const stringToRecord &
+gcPhaseInfo::GetIndividualRecords() const
+{
+    return m_fromIndividual;
+}
+
+bool
+gcPhaseInfo::HasAnyZeroes() const
+{
+    for(stringToRecord::const_iterator i=m_fromIndividual.begin(); i != m_fromIndividual.end(); i++)
+    {
+        const gcPhaseRecord & rec = (*i).second;
+        if(rec.HasAnyZeroes()) return true;
+    }
+
+    for(stringToRecord::const_iterator i=m_fromSample.begin(); i != m_fromSample.end(); i++)
+    {
+        const gcPhaseRecord & rec = (*i).second;
+        if(rec.HasAnyZeroes()) return true;
+    }
+    return false;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_phase_info.h b/src/convModel/gc_phase_info.h
new file mode 100644
index 0000000..055a4e4
--- /dev/null
+++ b/src/convModel/gc_phase_info.h
@@ -0,0 +1,150 @@
+// $Id: gc_phase_info.h,v 1.12 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PHASE_INFO
+#define GC_PHASE_INFO
+
+#include <map>
+#include <set>
+
+#include "gc_genotype_resolution.h"
+#include "gc_phase.h"       // for gcUnphasedMarkers
+#include "gc_phase_err.h"
+#include "gc_set_util.h"
+#include "gc_types.h"
+#include "wx/arrstr.h"
+#include "wx/string.h"
+
+class gcPhaseInfo;
+
+typedef std::map<wxString,gcUnphasedMarkers>    gcIndPhaseInfo;
+typedef std::set<wxString>                      gcPhenotypeNames;
+
+class gcPhaseRecord
+{
+    friend class gcPhaseInfo;
+
+  private:
+    gcPhaseSource       m_phaseSource;
+    wxString            m_fileName;
+    bool                m_hasLineNumber;
+    size_t              m_lineNumber;
+    wxString            m_individual;
+    size_t              m_sampleCountIfNoSamples;
+    wxArrayString       m_samples;
+    gcIndPhaseInfo      m_unphasedInfo;
+    gcIdSet             m_phenotypeIds;
+
+  protected:
+    void                SetLineNumber(size_t lineNumber);
+
+  public:
+    // best created with static methods below
+    gcPhaseRecord();
+
+    // not a virtual destructor -- we don't want these things
+    // polymorphic
+    // EWFIX.P3 -- WHY ???
+    ~gcPhaseRecord();
+
+    void                    AddPhenotypeId (size_t phenoId);
+    const gcIdSet &         GetPhenotypeIds() const;
+    void                    MergePhenotypeIds(const gcPhaseRecord&);
+
+    gcPhaseSource           GetPhaseSource()    const;
+
+    wxString                GetDescriptiveName()const;
+
+    bool                    HasFileName()       const;
+    const wxString &        GetFileName()       const;
+
+    bool                    HasLineNumber()     const;
+    size_t                  GetLineNumber()     const;
+
+    bool                    HasIndividual()     const;
+    const wxString &        GetIndividual()     const;
+
+    bool                    HasSamples()        const;
+    const wxArrayString &   GetSamples()        const;
+    size_t                  GetSampleCount()    const;
+
+    bool                    HasAnyZeroes()      const;
+
+    void                        AddUnphased(wxString locusName, const gcUnphasedMarkers & );
+    bool                        HasUnphased(const wxString locusName) const;
+    const gcUnphasedMarkers &   GetUnphased(const wxString locusName) const;
+    wxArrayString               GetUnphasedLocusNames() const;
+
+    bool operator==(const gcPhaseRecord&)       const;
+    bool operator!=(const gcPhaseRecord&)       const;
+
+    void    DebugDump(wxString prefix=wxEmptyString) const;
+
+    static gcPhaseRecord MakeAdjacentPhaseRecord(   wxString        fileName,
+                                                    size_t          lineNumber,
+                                                    wxArrayString   samples);
+    static gcPhaseRecord MakeAllelicPhaseRecord(    wxString        fileName,
+                                                    size_t          lineNumber,
+                                                    wxString        individualName,
+                                                    size_t          numSamples);
+
+    // ARGH! only this one is a pointer because in cmdParseIndividual
+    // we need to make a bunch of them and then choose whether to use
+    // or lose them all at once
+    static gcPhaseRecord * MakeFullPhaseRecord(     wxString        fileName,
+                                                    size_t          lineNumber,
+                                                    wxString        individualName,
+                                                    wxArrayString   sampleNames);
+};
+
+typedef std::pair<wxString,gcPhaseRecord>       recordPair;
+typedef std::map<wxString,gcPhaseRecord>        stringToRecord;
+
+class gcPhaseInfo
+{
+  private:
+    stringToRecord      m_fromIndividual;
+    stringToRecord      m_fromSample;
+
+  protected:
+    // returns true if new record was added
+    bool    AddRecordIndividual (const gcPhaseRecord &);
+    // returns true if new record was added
+    bool    AddRecordSample     (const gcPhaseRecord &);
+
+    // returns true if old record was replaced by new
+    bool    MergeIndividualRecs (const gcPhaseRecord & curRec,
+                                 const gcPhaseRecord & newRec);
+  public:
+    gcPhaseInfo();
+    virtual ~gcPhaseInfo();
+
+    void    AddRecord(const gcPhaseRecord&);
+    void    AddRecords(const gcPhaseInfo &);
+
+    void    RemoveRecord(const gcPhaseRecord&);
+
+    bool                    HasIndividualRecord(wxString name)  const;
+    const gcPhaseRecord &   GetIndividualRecord(wxString name)  const;
+
+    bool                    HasSampleRecord(wxString name)      const;
+    const gcPhaseRecord &   GetSampleRecord(wxString name)      const;
+
+    void    DebugDump(wxString prefix=wxEmptyString) const;
+
+    const stringToRecord &  GetIndividualRecords() const;
+
+    bool    HasAnyZeroes() const;
+
+};
+
+#endif  // GC_PHASE_INFO
+
+//____________________________________________________________________________________
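For orientation, a small hypothetical driver for the record classes above (the
individual and sample names and the line number are invented; the file name
chrom1.mig is one of the converter examples shipped in doc/html/batch_converter):

    #include "gc_phase_info.h"

    void RegisterPhaseSketch(gcPhaseInfo & info)
    {
        wxArrayString samples;
        samples.Add("ind1_a");
        samples.Add("ind1_b");

        // samples ind1_a/ind1_b on line 12 of chrom1.mig belong to individual "ind1"
        gcPhaseRecord * rec = gcPhaseRecord::MakeFullPhaseRecord("chrom1.mig", 12, "ind1", samples);
        info.AddRecord(*rec);   // AddRecord copies the record into both indices
        delete rec;             // caller still owns the pointer MakeFullPhaseRecord returned

        if(info.HasSampleRecord("ind1_a"))
        {
            info.GetSampleRecord("ind1_a").DebugDump();
        }
    }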
diff --git a/src/convModel/gc_phenotype.cpp b/src/convModel/gc_phenotype.cpp
new file mode 100644
index 0000000..e49e35b
--- /dev/null
+++ b/src/convModel/gc_phenotype.cpp
@@ -0,0 +1,154 @@
+// $Id: gc_phenotype.cpp,v 1.6 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_default.h"
+#include "gc_phenotype.h"
+#include "gc_strings.h"
+#include "gc_trait_err.h"
+#include "wx/log.h"
+#include "wx/string.h"
+
+//------------------------------------------------------------------------------------
+
+gcHapProbability::gcHapProbability()
+    :   m_hasPenetrance(false),
+        m_penetrance(gcdefault::penetrance)
+{
+}
+
+gcHapProbability::~gcHapProbability()
+{
+}
+
+bool
+gcHapProbability::HasPenetrance() const
+{
+    return m_hasPenetrance;
+}
+
+double
+gcHapProbability::GetPenetrance() const
+{
+    assert(HasPenetrance());
+    return m_penetrance;
+}
+
+void
+gcHapProbability::SetPenetrance(double penetrance)
+{
+    if(penetrance < 0)
+    {
+        throw gc_haplotype_probability_negative(penetrance);
+    }
+    m_hasPenetrance = true;
+    m_penetrance = penetrance;
+}
+
+void
+gcHapProbability::UnsetPenetrance()
+{
+    m_hasPenetrance = false;
+}
+
+void
+gcHapProbability::AddAlleleId(size_t alleleId)
+{
+    m_alleleIds.push_back(alleleId);
+}
+
+const gcIdVec &
+gcHapProbability::GetAlleleIds() const
+{
+    return m_alleleIds;
+}
+
+void
+gcHapProbability::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%shap probability %lf for alleles %s",   // EWDUMPOK
+               prefix.c_str(),
+               GetPenetrance(),
+               GetAlleleIds().AsString().c_str());
+}
+
+//------------------------------------------------------------------------------------
+
+gcPhenotype::gcPhenotype()
+    :   m_hasTraitId(false),
+        m_traitId(gcdefault::badIndex),
+        m_hasExplicitName(false)
+{
+}
+
+gcPhenotype::~gcPhenotype()
+{
+}
+
+void
+gcPhenotype::AddHapProbability(const gcHapProbability & hp)
+{
+    m_hapProbabilities.push_back(hp);
+}
+
+const std::vector<gcHapProbability> &
+gcPhenotype::GetHapProbabilities() const
+{
+    return m_hapProbabilities;
+}
+
+bool
+gcPhenotype::HasTraitId() const
+{
+    return m_hasTraitId;
+}
+
+size_t
+gcPhenotype::GetTraitId() const
+{
+    assert(HasTraitId());
+    return m_traitId;
+}
+
+void
+gcPhenotype::SetTraitId(size_t traitId)
+{
+    m_hasTraitId = true;
+    m_traitId = traitId;
+}
+
+bool
+gcPhenotype::HasExplicitName() const
+{
+    return m_hasExplicitName;
+}
+
+void
+gcPhenotype::SetHasExplicitName()
+{
+    m_hasExplicitName = true;
+}
+
+void
+gcPhenotype::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sphenotype %s of trait %d",   // EWDUMPOK
+               prefix.c_str(),
+               GetName().c_str(),
+               (int)GetTraitId());
+    for(size_t i = 0; i < m_hapProbabilities.size(); i++)
+    {
+        const gcHapProbability & hp = m_hapProbabilities[i];
+        hp.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_phenotype.h b/src/convModel/gc_phenotype.h
new file mode 100644
index 0000000..4283629
--- /dev/null
+++ b/src/convModel/gc_phenotype.h
@@ -0,0 +1,77 @@
+// $Id: gc_phenotype.h,v 1.6 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PHENOTYPE_H
+#define GC_PHENOTYPE_H
+
+#include <vector>
+
+#include "gc_quantum.h"
+#include "gc_set_util.h"
+#include "wx/string.h"
+
+class GCStructures;
+
+class gcHapProbability
+{
+  private:
+    bool            m_hasPenetrance;
+    double          m_penetrance;
+    gcIdVec         m_alleleIds;
+
+  public:
+    gcHapProbability();
+    virtual ~gcHapProbability();
+
+    bool    HasPenetrance() const;
+    double  GetPenetrance() const;
+    void    SetPenetrance(double penetrance);
+    void    UnsetPenetrance();
+
+    void    AddAlleleId(size_t alleleId);
+    const gcIdVec & GetAlleleIds() const;
+
+    void    DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcPhenotype : public GCQuantum
+{
+    friend class GCStructures;
+
+  private:
+    bool                            m_hasTraitId;
+    size_t                          m_traitId;
+    bool                            m_hasExplicitName;
+
+    std::vector<gcHapProbability>   m_hapProbabilities;
+
+    void    SetTraitId(size_t traitId);
+    void    UnsetTraitId();
+
+  public:
+    gcPhenotype();
+    virtual ~gcPhenotype();
+
+    void    AddHapProbability(const gcHapProbability &);
+
+    bool    HasExplicitName() const;
+
+    const std::vector<gcHapProbability> & GetHapProbabilities() const;
+    bool    HasTraitId() const;
+    size_t  GetTraitId() const;
+
+    void    SetHasExplicitName();
+
+    void    DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+#endif  // GC_PHENOTYPE_H
+
+//____________________________________________________________________________________
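A short, hypothetical sketch of assembling a phenotype from the classes above
(the allele ids and penetrance values are invented, and SetTraitId() is left to
GCStructures, which the header declares as a friend):

    #include "gc_phenotype.h"

    void FillPhenotypeSketch(gcPhenotype & pheno)
    {
        gcHapProbability certain;
        certain.SetPenetrance(1.0);    // SetPenetrance() rejects negative values
        certain.AddAlleleId(3);

        gcHapProbability likely;
        likely.SetPenetrance(0.8);
        likely.AddAlleleId(4);

        pheno.AddHapProbability(certain);
        pheno.AddHapProbability(likely);
    }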
diff --git a/src/convModel/gc_set_util.cpp b/src/convModel/gc_set_util.cpp
new file mode 100644
index 0000000..480491b
--- /dev/null
+++ b/src/convModel/gc_set_util.cpp
@@ -0,0 +1,78 @@
+// $Id: gc_set_util.cpp,v 1.3 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_set_util.h"
+#include "wx/log.h"
+
+gcIdSet::gcIdSet()
+{
+}
+
+gcIdSet::~gcIdSet()
+{
+}
+
+void
+gcIdSet::DebugDump(wxString prefix) const
+{
+    wxString ids="";
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        size_t id = *i;
+        ids += wxString::Format("%d ",(int)id);
+    }
+    wxLogDebug("%s%s", prefix.c_str(), ids.c_str());    // EWDUMPOK
+}
+
+wxString
+gcIdSet::AsString() const
+{
+    wxString ids="";
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        size_t blockId = *i;
+        ids += wxString::Format("%d ",(int)blockId);
+    }
+    return ids;
+}
+
+gcIdVec::gcIdVec()
+{
+}
+
+gcIdVec::~gcIdVec()
+{
+}
+
+void
+gcIdVec::DebugDump(wxString prefix) const
+{
+    wxString ids="";
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        size_t id = *i;
+        ids += wxString::Format("%d ",(int)id);
+    }
+    wxLogDebug("%s%s", prefix.c_str(), ids.c_str());    // EWDUMPOK
+}
+
+wxString
+gcIdVec::AsString() const
+{
+    wxString ids="";
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        size_t blockId = *i;
+        ids += wxString::Format("%d ",(int)blockId);
+    }
+    return ids;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_set_util.h b/src/convModel/gc_set_util.h
new file mode 100644
index 0000000..04e0884
--- /dev/null
+++ b/src/convModel/gc_set_util.h
@@ -0,0 +1,39 @@
+// $Id: gc_set_util.h,v 1.5 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_SET_UTIL_H
+#define GC_SET_UTIL_H
+
+#include <set>
+#include <vector>
+
+#include "wx/string.h"
+
+class gcIdSet : public std::set<size_t>
+{
+  public:
+    gcIdSet() ;
+    ~gcIdSet() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+    wxString AsString() const;
+};
+
+class gcIdVec : public std::vector<size_t>
+{
+  public:
+    gcIdVec() ;
+    ~gcIdVec() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+    wxString AsString() const;
+};
+
+#endif  // GC_SET_UTIL_H
+
+//____________________________________________________________________________________
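Both containers above are thin wrappers that only add AsString() and
DebugDump(); a minimal sketch with arbitrary ids:

    #include "gc_set_util.h"

    void IdSetSketch()
    {
        gcIdSet ids;            // publicly a std::set<size_t>
        ids.insert(7);
        ids.insert(2);
        ids.insert(7);          // duplicate, ignored by the set
        wxString text = ids.AsString();   // "2 7 " -- sorted, space-terminated
        ids.DebugDump("ids: ");           // logs the same text via wxLogDebug
    }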
diff --git a/src/convModel/gc_structure_maps.cpp b/src/convModel/gc_structure_maps.cpp
new file mode 100644
index 0000000..3a826aa
--- /dev/null
+++ b/src/convModel/gc_structure_maps.cpp
@@ -0,0 +1,413 @@
+// $Id: gc_structure_maps.cpp,v 1.18 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_data.h"
+#include "gc_default.h"
+#include "gc_region.h"
+#include "gc_population.h"
+#include "gc_locus.h"
+#include "gc_panel.h"
+#include "gc_strings.h"
+#include "gc_structure_maps.h"
+#include "gc_trait.h"
+#include "wx/log.h"
+#include "wx/string.h"
+
+//------------------------------------------------------------------------------------
+
+gcDisplayOrder::gcDisplayOrder()
+{
+}
+
+gcDisplayOrder::~gcDisplayOrder()
+{
+}
+
+wxString
+gcDisplayOrder::AsString() const
+{
+    wxString ids="";
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        size_t blockId = *i;
+        ids += wxString::Format("%d ",(int)blockId);
+    }
+    return ids;
+}
+
+void
+gcDisplayOrder::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%s%s", prefix.c_str(), AsString().c_str()); // EWDUMPOK
+}
+
+//------------------------------------------------------------------------------------
+
+gcPopLocusIdPair::gcPopLocusIdPair()
+{
+}
+
+gcPopLocusIdPair::gcPopLocusIdPair(size_t popId, size_t locId)
+    : std::pair<size_t,size_t>(popId,locId)
+{
+}
+
+gcPopLocusIdPair::~gcPopLocusIdPair()
+{
+}
+
+void
+gcPopLocusIdPair::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%s<%d,%d>", // EWDUMPOK
+               prefix.c_str(),
+               (int)first,
+               (int)second);
+}
+
+//------------------------------------------------------------------------------------
+
+gcBlockSetMap::gcBlockSetMap()
+{
+}
+
+gcBlockSetMap::~gcBlockSetMap()
+{
+}
+
+void
+gcBlockSetMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sblock from:",prefix.c_str()); // EWDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        gcPopLocusIdPair idP = (*i).first;
+        gcIdSet set = (*i).second;
+        wxLogDebug("%s<%d,%d> -> %s",   // EWDUMPOK
+                   (prefix+gcstr::indent).c_str(),
+                   (int)(idP.first),
+                   (int)(idP.second),
+                   set.AsString().c_str());
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcRegionMap::gcRegionMap()
+{
+}
+
+gcRegionMap::~gcRegionMap()
+{
+}
+
+void
+gcRegionMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sregions:",prefix.c_str()); // EWDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        const gcRegion & region = (*i).second;
+        region.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusMap::gcLocusMap()
+{
+}
+
+gcLocusMap::~gcLocusMap()
+{
+}
+
+void
+gcLocusMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%ssegments:",prefix.c_str());   // EWDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        const gcLocus & locus = (*i).second;
+        locus.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+
+gcRegionPopIdPair::gcRegionPopIdPair()
+{
+}
+
+gcRegionPopIdPair::gcRegionPopIdPair(size_t regionId, size_t popId)
+    : std::pair<size_t,size_t>(regionId,popId)
+{
+}
+
+gcRegionPopIdPair::~gcRegionPopIdPair()
+{
+}
+
+void
+gcRegionPopIdPair::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%s<%d,%d>", // JMDUMPOK
+               prefix.c_str(),
+               (int)first,
+               (int)second);
+}
+
+//------------------------------------------------------------------------------------
+
+gcPanelMap::gcPanelMap()
+{
+}
+
+gcPanelMap::~gcPanelMap()
+{
+}
+
+void
+gcPanelMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%ssegments:",prefix.c_str());   // JMDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        const gcPanel & panel = (*i).second;
+        panel.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcParentMap::gcParentMap()
+{
+}
+
+gcParentMap::~gcParentMap()
+{
+}
+
+void
+gcParentMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%ssegments:",prefix.c_str());   // JMDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        const gcParent & parent = (*i).second;
+        parent.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcPopMap::gcPopMap()
+{
+}
+
+gcPopMap::~gcPopMap()
+{
+}
+
+void
+gcPopMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%spopulations:",prefix.c_str());    // EWDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        const gcPopulation & pop = (*i).second;
+        pop.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcPopMultiMap::gcPopMultiMap()
+{
+}
+
+gcPopMultiMap::~gcPopMultiMap()
+{
+}
+
+void
+gcPopMultiMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%spop correspondence:",prefix.c_str()); // EWDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        wxLogDebug("%spop %d file %d parsePop %d",  // EWDUMPOK
+                   (prefix+gcstr::indent).c_str(),(int)((*i).first),(int)((*i).second.first),(int)((*i).second.second));
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcTraitMap::gcTraitMap()
+{
+}
+
+gcTraitMap::~gcTraitMap()
+{
+}
+
+void
+gcTraitMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%strait classes:",prefix.c_str());  // EWDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        const gcTraitInfo & trait = (*i).second;
+        trait.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcAlleleMap::gcAlleleMap()
+{
+}
+
+gcAlleleMap::~gcAlleleMap()
+{
+}
+
+void
+gcAlleleMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%salleles:",prefix.c_str());  // EWDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        const gcTraitAllele & allele = (*i).second;
+        allele.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcPhenoMap::gcPhenoMap()
+{
+}
+
+gcPhenoMap::~gcPhenoMap()
+{
+}
+
+void
+gcPhenoMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sphenotypes:",prefix.c_str());  // EWDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        const gcPhenotype & pheno = (*i).second;
+        pheno.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcStringMap::gcStringMap()
+{
+}
+
+gcStringMap::~gcStringMap()
+{
+}
+
+void
+gcStringMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sstring map:",prefix.c_str()); // EWDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        wxLogDebug("%s%d:%s",   // EWDUMPOK
+                   (prefix+gcstr::indent).c_str(),
+                   (int)((*i).first),
+                   (*i).second.c_str());
+    }
+}
+//------------------------------------------------------------------------------------
+
+
+gcMigrationPair::gcMigrationPair()
+{
+}
+
+gcMigrationPair::gcMigrationPair(size_t fromId, size_t toId)
+    : std::pair<size_t,size_t>(fromId,toId)
+{
+}
+
+gcMigrationPair::~gcMigrationPair()
+{
+}
+
+void
+gcMigrationPair::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%s<%d,%d>", // JMDUMPOK
+               prefix.c_str(),
+               (int)first,
+               (int)second);
+}
+
+//------------------------------------------------------------------------------------
+
+gcMigrationMap::gcMigrationMap()
+{
+}
+
+gcMigrationMap::~gcMigrationMap()
+{
+}
+
+void
+gcMigrationMap::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%ssegments:",prefix.c_str());   // JMDUMPOK
+    for(const_iterator i=begin(); i != end(); i++)
+    {
+        const gcMigration & mig = (*i).second;
+        mig.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+constBlockVector::constBlockVector()
+{
+}
+
+constBlockVector::~constBlockVector()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+popVector::popVector()
+{
+}
+
+popVector::~popVector()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+locVector::locVector()
+{
+}
+
+locVector::~locVector()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_structure_maps.h b/src/convModel/gc_structure_maps.h
new file mode 100644
index 0000000..1f799e0
--- /dev/null
+++ b/src/convModel/gc_structure_maps.h
@@ -0,0 +1,198 @@
+// $Id: gc_structure_maps.h,v 1.15 2011/12/01 22:32:42 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRUCTURE_MAPS_H
+#define GC_STRUCTURE_MAPS_H
+
+#include <list>
+#include <map>
+#include <set>
+
+#include "gc_locus.h"
+#include "gc_migration.h"
+#include "gc_panel.h"
+#include "gc_parent.h"
+#include "gc_population.h"
+#include "gc_quantum.h"
+#include "gc_region.h"
+#include "gc_parent.h"
+#include "gc_phenotype.h"
+#include "gc_set_util.h"
+#include "gc_trait.h"
+#include "gc_trait_allele.h"
+
+#include "gc_loci_match.h"
+#include "gc_pop_match.h"
+
+#include "wx/string.h"
+
+class GCParseBlock;
+
+class gcDisplayOrder :  public std::list<size_t>
+{
+  public:
+    gcDisplayOrder() ;
+    ~gcDisplayOrder() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+    wxString AsString() const;
+};
+
+class gcPopLocusIdPair : public std::pair<size_t, size_t>
+{
+  public:
+    gcPopLocusIdPair() ;
+    gcPopLocusIdPair(size_t i,size_t j);
+    ~gcPopLocusIdPair() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcBlockSetMap : public std::map<gcPopLocusIdPair,gcIdSet>
+{
+  public:
+    gcBlockSetMap() ;
+    ~gcBlockSetMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcRegionMap : public std::map<size_t,gcRegion>
+{
+  public:
+    gcRegionMap() ;
+    virtual ~gcRegionMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcPopMap : public std::map<size_t,gcPopulation>
+{
+  public:
+    gcPopMap() ;
+    virtual ~gcPopMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcPopMultiMap : public std::multimap<size_t, std::pair< size_t, size_t> >
+{
+  public:
+    gcPopMultiMap() ;
+    virtual ~gcPopMultiMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcLocusMap : public std::map<size_t,gcLocus>
+{
+  public:
+    gcLocusMap() ;
+    virtual ~gcLocusMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+
+class gcRegionPopIdPair : public std::pair<size_t, size_t>
+{
+  public:
+    gcRegionPopIdPair() ;
+    gcRegionPopIdPair(size_t i,size_t j);
+    ~gcRegionPopIdPair() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcPanelMap : public std::map<gcRegionPopIdPair,gcPanel>
+{
+  public:
+    gcPanelMap() ;
+    virtual ~gcPanelMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcParentMap : public std::map<size_t,gcParent>
+{
+  public:
+    gcParentMap() ;
+    virtual ~gcParentMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcTraitMap : public std::map<size_t,gcTraitInfo>
+{
+  public:
+    gcTraitMap() ;
+    virtual ~gcTraitMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcAlleleMap : public std::map<size_t,gcTraitAllele>
+{
+  public:
+    gcAlleleMap() ;
+    virtual ~gcAlleleMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcPhenoMap : public std::map<size_t,gcPhenotype>
+{
+  public:
+    gcPhenoMap() ;
+    virtual ~gcPhenoMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcStringMap : public std::map<size_t,wxString>
+{
+  public:
+    gcStringMap() ;
+    virtual ~gcStringMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcMigrationPair : public std::pair<size_t, size_t>
+{
+  public:
+    gcMigrationPair() ;
+    gcMigrationPair(size_t i,size_t j);
+    virtual ~gcMigrationPair() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class gcMigrationMap : public std::map<gcMigrationPair,gcMigration>
+{
+  public:
+    gcMigrationMap() ;
+    virtual ~gcMigrationMap() ;
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+typedef std::vector<GCQuantum *>        objVector;
+typedef std::vector<const GCQuantum *>  constObjVector;
+
+class constBlockVector : public std::vector<const GCParseBlock *>
+{
+  public:
+    constBlockVector() ;
+    virtual ~constBlockVector() ;
+    //void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class popVector : public std::vector<gcPopulation *>
+{
+  public:
+    popVector() ;
+    virtual ~popVector() ;
+};
+
+class locVector : public std::vector<gcLocus *>
+{
+  public:
+    locVector() ;
+    virtual ~locVector() ;
+};
+
+#endif  // GC_STRUCTURE_MAPS_H
+
+//____________________________________________________________________________________
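Most of the types above are plain STL containers with a DebugDump() added; for
example, the (population id, locus id) -> block-id bookkeeping that GCStructures
uses later in this patch can be sketched as follows (the ids here are invented):

    #include "gc_structure_maps.h"

    void BlockLookupSketch()
    {
        gcBlockSetMap blocks;
        gcPopLocusIdPair key(0, 2);       // population 0, locus 2
        blocks[key].insert(10);           // blocks 10 and 11 carry data
        blocks[key].insert(11);           // for that population/locus pair

        gcBlockSetMap::const_iterator it = blocks.find(gcPopLocusIdPair(0, 2));
        if(it != blocks.end())
        {
            it->second.DebugDump("blocks for <0,2>: ");
        }
    }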
diff --git a/src/convModel/gc_structures.cpp b/src/convModel/gc_structures.cpp
new file mode 100644
index 0000000..036c354
--- /dev/null
+++ b/src/convModel/gc_structures.cpp
@@ -0,0 +1,3084 @@
+// $Id: gc_structures.cpp,v 1.66 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_creation_info.h"
+#include "gc_default.h"
+#include "gc_datastore.h"
+#include "gc_errhandling.h"
+#include "gc_file.h"
+#include "gc_genotype_resolution.h"
+#include "gc_loci_match.h"
+#include "gc_locus_err.h"
+#include "gc_parse.h"
+#include "gc_parse_block.h"
+#include "gc_pop_match.h"
+#include "gc_strings.h"
+#include "gc_strings_infile.h"
+#include "gc_strings_structures.h"
+#include "gc_structures.h"
+#include "gc_structures_err.h"
+#include "gc_trait_err.h"
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+const wxString &
+gcNameSet::infoString() const
+{
+    return gcstr_structures::objDict;
+}
+
+gcNameSet::gcNameSet()
+{
+}
+
+gcNameSet::~gcNameSet()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+const wxString &
+gcTraitNameSet::infoString() const
+{
+    return gcstr_structures::traitDict;
+}
+
+gcTraitNameSet::gcTraitNameSet()
+{
+}
+
+gcTraitNameSet::~gcTraitNameSet()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+GCStructures::GCStructures(const GCDataStore * dsp)
+    :
+    m_dataStoreP(dsp),
+    m_divergenceState(false),
+    m_divMigMatrixDefined(false),
+    m_migMatrixDefined(false),
+    m_panelsState(false)
+{
+}
+
+GCStructures::~GCStructures()
+{
+}
+
+gcIdSet
+GCStructures::GetBlockIds(size_t popId, size_t locusId) const
+{
+    gcIdSet outSet;
+    gcPopLocusIdPair p(popId,locusId);
+    gcBlockSetMap::const_iterator iter = m_blocks.find(p);
+    if(iter != m_blocks.end())
+    {
+        outSet = (*iter).second;
+    }
+    return outSet;
+}
+
+gcIdSet
+GCStructures::GetBlocksForLocus(size_t locusId) const
+{
+    gcIdSet blockIds;
+
+    for(gcPopMap::const_iterator i=m_pops.begin();
+        i != m_pops.end(); i++)
+    {
+        size_t popId = (*i).first;
+        gcPopLocusIdPair p(popId,locusId);
+        gcBlockSetMap::const_iterator biter = m_blocks.find(p);
+        if(biter != m_blocks.end())
+        {
+            const gcIdSet & blocks = (*biter).second;
+            if(!blocks.empty())
+            {
+                blockIds.insert(blocks.begin(),blocks.end());
+            }
+        }
+    }
+    return blockIds;
+}
+
+bool
+GCStructures::HaveBlocksForRegion(size_t regionId) const
+{
+    gcIdVec locusIds = GetLocusIdsForRegionByCreation(regionId);
+
+    if(locusIds.size() == 0)
+    {
+        return false;
+    }
+    else
+    {
+        for(gcIdVec::const_iterator liter=locusIds.begin(); liter != locusIds.end(); liter++)
+        {
+            size_t locusId = (*liter);
+            if(HaveBlocksForLocus(locusId))
+            {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+bool
+GCStructures::HaveBlocksForLocus(size_t locusId) const
+{
+    for(gcPopMap::const_iterator i=m_pops.begin();
+        i != m_pops.end(); i++)
+    {
+        size_t popId = (*i).first;
+        gcPopLocusIdPair p(popId,locusId);
+        gcBlockSetMap::const_iterator biter = m_blocks.find(p);
+        if(biter != m_blocks.end())
+        {
+            const gcIdSet & blocks = (*biter).second;
+            if(!blocks.empty())
+            {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+bool
+GCStructures::HaveBlocksForPop(size_t popId) const
+{
+    // EWFIX.P5 SPEED -- could make faster with additional structures
+    for(gcLocusMap::const_iterator i = m_loci.begin(); i != m_loci.end(); i++)
+    {
+        size_t locusId = (*i).first;
+        gcPopLocusIdPair p(popId,locusId);
+        gcBlockSetMap::const_iterator biter = m_blocks.find(p);
+        if(biter != m_blocks.end())
+        {
+            const gcIdSet & blocks = (*biter).second;
+            if(!blocks.empty())
+            {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+bool
+GCStructures::IsBlessedRegion(size_t regionId) const
+{
+    const gcRegion & region = GetRegion(regionId);
+    if(region.GetBlessed()) return true;
+    gcIdVec loci = GetLocusIdsForRegionByCreation(regionId);
+    for(gcIdVec::const_iterator iter=loci.begin(); iter!=loci.end();iter++)
+    {
+        const gcLocus & locus = GetLocus(*iter);
+        if(locus.GetBlessed()) return true;
+    }
+    return false;
+}
+
+bool
+GCStructures::IsBlessedLocus(size_t locusId) const
+{
+    return GetLocus(locusId).GetBlessed();
+}
+
+bool
+GCStructures::IsBlessedPop(size_t popId) const
+{
+    return GetPop(popId).GetBlessed();
+}
+
+bool
+GCStructures::GetDivergenceState() const
+{
+    return  m_divergenceState;
+}
+
+bool
+GCStructures::GetDivergenceState()
+{
+    return  m_divergenceState;
+}
+
+void
+GCStructures::SetDivergenceState(bool state)
+{
+    m_divergenceState = state;
+}
+
+bool
+GCStructures::GetPanelsState() const
+{
+    return  m_panelsState;
+}
+
+bool
+GCStructures::GetPanelsState()
+{
+    return  m_panelsState;
+}
+
+void
+GCStructures::SetPanelsState(bool state)
+{
+    m_panelsState = state;
+}
+
+
+bool
+GCStructures::GetDivMigMatrixDefined() const
+{
+    return  m_divMigMatrixDefined;
+}
+
+bool
+GCStructures::GetDivMigMatrixDefined()
+{
+    return  m_divMigMatrixDefined;
+}
+
+void
+GCStructures::SetDivMigMatrixDefined(bool state)
+{
+    m_divMigMatrixDefined = state;
+}
+
+bool
+GCStructures::GetMigMatrixDefined() const
+{
+    return  m_migMatrixDefined;
+}
+
+bool
+GCStructures::GetMigMatrixDefined()
+{
+    return  m_migMatrixDefined;
+}
+
+void
+GCStructures::SetMigMatrixDefined(bool state)
+{
+    m_migMatrixDefined = state;
+}
+
+constObjVector
+GCStructures::GetConstDisplayableRegions() const
+{
+    constObjVector toReturn;
+    for(gcDisplayOrder::const_iterator i=m_regionDisplay.begin();
+        i != m_regionDisplay.end();
+        i++)
+    {
+        size_t regionId = *i;
+        if(HaveBlocksForRegion(regionId) || IsBlessedRegion(regionId))
+        {
+            toReturn.push_back(&(GetRegion(regionId)));
+        }
+    }
+    return toReturn;
+}
+
+objVector
+GCStructures::GetDisplayableRegions()
+{
+    objVector toReturn;
+    for(gcDisplayOrder::const_iterator i=m_regionDisplay.begin();
+        i != m_regionDisplay.end();
+        i++)
+    {
+        size_t regionId = *i;
+        if(HaveBlocksForRegion(regionId) || IsBlessedRegion(regionId))
+        {
+            toReturn.push_back(&(GetRegion(regionId)));
+        }
+    }
+    return toReturn;
+}
+
+gcDisplayOrder
+GCStructures::GetDisplayableRegionIds() const
+{
+    gcDisplayOrder toReturn;
+    for(gcDisplayOrder::const_iterator i=m_regionDisplay.begin();
+        i != m_regionDisplay.end();
+        i++)
+    {
+        size_t regionId = *i;
+        if(HaveBlocksForRegion(regionId) || IsBlessedRegion(regionId))
+        {
+            toReturn.push_back(regionId);
+        }
+    }
+    return toReturn;
+}
+
+constObjVector
+GCStructures::GetConstDisplayableLoci() const
+{
+    constObjVector regionsInOrder = GetConstDisplayableRegions();
+
+    constObjVector toReturn;
+    for(gcRegionMap::const_iterator giter=m_regions.begin(); giter != m_regions.end(); giter++)
+    {
+        const gcRegion & region = (*giter).second;
+        gcIdVec loci = GetLocusIdsForRegionByMapPosition(region.GetId());
+        for(gcIdVec::const_iterator liter=loci.begin(); liter != loci.end(); liter++)
+        {
+            size_t locusId = *liter;
+            if(HaveBlocksForLocus(locusId) || IsBlessedLocus(locusId))
+            {
+                toReturn.push_back(&(GetLocus(locusId)));
+            }
+        }
+    }
+    return toReturn;
+}
+
+objVector
+GCStructures::GetDisplayableLoci()
+{
+    objVector regionsInOrder = GetDisplayableRegions();
+
+    objVector toReturn;
+    for(gcRegionMap::const_iterator giter=m_regions.begin(); giter != m_regions.end(); giter++)
+    {
+        const gcRegion & region = (*giter).second;
+        gcIdVec loci = GetLocusIdsForRegionByMapPosition(region.GetId());
+        for(gcIdVec::const_iterator liter=loci.begin(); liter != loci.end(); liter++)
+        {
+            size_t locusId = *liter;
+            if(HaveBlocksForLocus(locusId) || IsBlessedLocus(locusId))
+            {
+                toReturn.push_back(&(GetLocus(locusId)));
+            }
+        }
+    }
+    return toReturn;
+}
+
+gcDisplayOrder
+GCStructures::GetDisplayableLocusIds() const
+{
+    gcDisplayOrder toReturn;
+
+    for(gcRegionMap::const_iterator giter=m_regions.begin(); giter != m_regions.end(); giter++)
+    {
+        const gcRegion & region = (*giter).second;
+        gcIdVec loci = GetLocusIdsForRegionByMapPosition(region.GetId());
+        for(gcIdVec::const_iterator liter=loci.begin(); liter != loci.end(); liter++)
+        {
+            size_t locusId = *liter;
+            if(HaveBlocksForLocus(locusId) || IsBlessedLocus(locusId))
+            {
+                toReturn.push_back(locusId);
+            }
+        }
+    }
+    return toReturn;
+}
+
+#if 0
+constObjVector
+GCStructures::GetConstDisplayableLociFor(size_t regionId) const
+{
+    constObjVector toReturn;
+    const gcRegion & region = GetRegion(regionId);
+    gcIdVec loci = GetLocusIdsForRegionByMapPosition(region.GetId());
+    for(gcIdVec::const_iterator liter=loci.begin(); liter != loci.end(); liter++)
+    {
+        size_t locusId = *liter;
+        if(HaveBlocksForLocus(locusId) || IsBlessedLocus(locusId))
+        {
+            toReturn.push_back(&(GetLocus(locusId)));
+        }
+    }
+    return toReturn;
+}
+#endif
+
+constObjVector
+GCStructures::GetConstDisplayableLociInMapOrderFor(size_t regionId) const
+{
+    constObjVector toReturn;
+    const gcRegion & region = GetRegion(regionId);
+    gcIdVec loci = GetLocusIdsForRegionByMapPosition(region.GetId());
+    for(gcIdVec::const_iterator liter=loci.begin(); liter != loci.end(); liter++)
+    {
+        size_t locusId = *liter;
+        if(HaveBlocksForLocus(locusId) || IsBlessedLocus(locusId))
+        {
+            toReturn.push_back(&(GetLocus(locusId)));
+        }
+    }
+    return toReturn;
+}
+
+constObjVector
+GCStructures::GetConstDisplayableLinkedLociInMapOrderFor(size_t regionId) const
+{
+    constObjVector toReturn;
+    const gcRegion & region = GetRegion(regionId);
+    gcIdVec loci = GetLocusIdsForRegionByMapPosition(region.GetId());
+    for(gcIdVec::const_iterator liter=loci.begin(); liter != loci.end(); liter++)
+    {
+        size_t locusId = *liter;
+        if(HaveBlocksForLocus(locusId) || IsBlessedLocus(locusId))
+        {
+            const gcLocus & locusRef = GetLocus(locusId);
+            if(locusRef.GetLinked())
+            {
+                toReturn.push_back(&locusRef);
+            }
+        }
+    }
+    return toReturn;
+}
+
+objVector
+GCStructures::GetDisplayableLociFor(size_t regionId)
+{
+
+    objVector toReturn;
+    const gcRegion & region = GetRegion(regionId);
+    const GCLocusInfoMap & loci = region.GetLocusInfoMap();
+    for(GCLocusInfoMap::const_iterator liter=loci.begin(); liter != loci.end(); liter++)
+    {
+        size_t locusId = (*liter).first;
+        if(HaveBlocksForLocus(locusId) || IsBlessedLocus(locusId))
+        {
+            toReturn.push_back(&(GetLocus(locusId)));
+        }
+    }
+    return toReturn;
+}
+
+constObjVector
+GCStructures::GetConstDisplayablePops()  const
+{
+    constObjVector toReturn;
+    for(gcDisplayOrder::const_iterator i=m_popDisplay.begin();
+        i != m_popDisplay.end();
+        i++)
+    {
+        size_t popId = *i;
+        if(HaveBlocksForPop(popId) || IsBlessedPop(popId))
+        {
+            toReturn.push_back(&(GetPop(popId)));
+        }
+    }
+    return toReturn;
+}
+
+gcDisplayOrder
+GCStructures::GetDisplayablePopIds() const
+{
+    gcDisplayOrder toReturn;
+    for(gcDisplayOrder::const_iterator i=m_popDisplay.begin();
+        i != m_popDisplay.end();
+        i++)
+    {
+        size_t popId = *i;
+        if(HaveBlocksForPop(popId) || IsBlessedPop(popId))
+        {
+            toReturn.push_back(popId);
+        }
+    }
+    return toReturn;
+}
+
+objVector
+GCStructures::GetDisplayablePops()
+{
+    objVector toReturn;
+    for(gcDisplayOrder::const_iterator i=m_popDisplay.begin();
+        i != m_popDisplay.end();
+        i++)
+    {
+        size_t popId = *i;
+        if(HaveBlocksForPop(popId) || IsBlessedPop(popId))
+        {
+            toReturn.push_back(&(GetPop(popId)));
+        }
+    }
+    return toReturn;
+}
+
+gcDisplayOrder
+GCStructures::GetParentIds() const
+{
+    gcDisplayOrder toReturn;
+    for(gcParentMap::const_iterator i=m_parents.begin();
+        i != m_parents.end();
+        i++)
+    {
+        size_t parentId = (*i).first;
+        toReturn.push_back(parentId);
+    }
+    return toReturn;
+}
+
+objVector
+GCStructures::GetParents()
+{
+    objVector toReturn;
+    for(gcParentMap::const_iterator i=m_parents.begin();
+        i != m_parents.end();
+        i++)
+    {
+        size_t parentId = (*i).first;
+        toReturn.push_back(&(GetParent(parentId)));
+    }
+    return toReturn;
+}
+
+constObjVector
+GCStructures::GetConstParents() const
+{
+    constObjVector toReturn;
+    for(gcParentMap::const_iterator i=m_parents.begin();
+        i != m_parents.end();
+        i++)
+    {
+        size_t parentId = (*i).first;
+        toReturn.push_back(&(GetParent(parentId)));
+    }
+    return toReturn;
+}
+
+constObjVector
+GCStructures::GetConstTraits() const
+{
+    constObjVector toReturn;
+    for(gcTraitMap::const_iterator i=m_traitClasses.begin();
+        i != m_traitClasses.end();
+        i++)
+    {
+        const gcTraitInfo & trait = (*i).second;
+        toReturn.push_back(&trait);
+    }
+    return toReturn;
+}
+
+gcTraitAllele &
+GCStructures::GetAllele(size_t id)
+{
+    gcAlleleMap::iterator iter = m_alleles.find(id);
+    assert(iter != m_alleles.end());
+    return (*iter).second;
+}
+
+const gcTraitAllele &
+GCStructures::GetAllele(size_t id) const
+{
+    gcAlleleMap::const_iterator iter = m_alleles.find(id);
+    assert(iter != m_alleles.end());
+    return (*iter).second;
+}
+
+gcRegion &
+GCStructures::GetRegion(size_t id)
+{
+    gcRegionMap::iterator iter = m_regions.find(id);
+    assert(iter != m_regions.end());
+    return (*iter).second;
+}
+
+const gcRegion &
+GCStructures::GetRegion(size_t id) const
+{
+    gcRegionMap::const_iterator iter = m_regions.find(id);
+    assert(iter != m_regions.end());
+    return (*iter).second;
+}
+
+gcLocus &
+GCStructures::GetLocus(size_t id)
+{
+    gcLocusMap::iterator iter = m_loci.find(id);
+    assert(iter != m_loci.end());
+    return (*iter).second;
+}
+
+const gcLocus &
+GCStructures::GetLocus(size_t id) const
+{
+    gcLocusMap::const_iterator iter = m_loci.find(id);
+    assert(iter != m_loci.end());
+    return (*iter).second;
+}
+
+gcPanel &
+GCStructures::GetPanel(size_t id)
+{
+    for (gcPanelMap::iterator i=m_panels.begin();i != m_panels.end();i++)
+    {
+        if((*i).second.GetId() == id)
+        {
+            return (*i).second;
+        }
+    }
+    wxString badId = wxString::Format("%i",(int)id);
+    throw missing_panel_id(badId);
+}
+
+const gcPanel &
+GCStructures::GetPanel(size_t id) const
+{
+    for (gcPanelMap::const_iterator i=m_panels.begin();i != m_panels.end();i++)
+    {
+        if((*i).second.GetId() == id)
+        {
+            return (*i).second;
+        }
+    }
+    wxString badId = wxString::Format("%i",(int)id);
+    throw missing_panel_id(badId);
+}
+
+gcParent &
+GCStructures::GetParent(size_t id)
+{
+    for (gcParentMap::iterator i=m_parents.begin();i != m_parents.end();i++)
+    {
+        if((*i).second.GetId() == id)
+        {
+            return (*i).second;
+        }
+    }
+    wxString badId = wxString::Format("%i",(int)id);
+    throw missing_parent_id(badId);
+}
+
+const gcParent &
+GCStructures::GetParent(size_t id) const
+{
+    for (gcParentMap::const_iterator i=m_parents.begin();i != m_parents.end();i++)
+    {
+        if((*i).second.GetId() == id)
+        {
+            return (*i).second;
+        }
+    }
+    wxString badId = wxString::Format("%i",(int)id);
+    throw missing_parent_id(badId);
+}
+
+gcPhenotype &
+GCStructures::GetPhenotype(size_t id)
+{
+    gcPhenoMap::iterator iter = m_phenotypes.find(id);
+    assert(iter != m_phenotypes.end());
+    return (*iter).second;
+}
+
+const gcPhenotype &
+GCStructures::GetPhenotype(size_t id) const
+{
+    gcPhenoMap::const_iterator iter = m_phenotypes.find(id);
+    assert(iter != m_phenotypes.end());
+    return (*iter).second;
+}
+
+gcPopulation &
+GCStructures::GetPop(size_t id)
+{
+    gcPopMap::iterator iter = m_pops.find(id);
+    assert(iter != m_pops.end());
+    return (*iter).second;
+}
+
+const gcPopulation &
+GCStructures::GetPop(size_t id) const
+{
+    gcPopMap::const_iterator iter = m_pops.find(id);
+    assert(iter != m_pops.end());
+    return (*iter).second;
+}
+
+gcTraitInfo &
+GCStructures::GetTrait(size_t id)
+{
+    assert(m_traitClasses.find(id) != m_traitClasses.end());
+    return m_traitClasses[id];
+}
+
+const gcTraitInfo &
+GCStructures::GetTrait(size_t id) const
+{
+    gcTraitMap::const_iterator iter = m_traitClasses.find(id);
+    assert(iter != m_traitClasses.end());
+    return (*iter).second;
+}
+
+void
+GCStructures::AssignAllele(gcTraitAllele & allele, gcTraitInfo & trait)
+{
+    if(allele.HasTraitId())
+    {
+        size_t oldTraitId = allele.GetTraitId();
+        gcTraitInfo & oldTrait = GetTrait(oldTraitId);
+        oldTrait.RemoveAllele(allele);
+    }
+    allele.SetTraitId(trait.GetId());
+    trait.AddAllele(allele);
+}
+
+void
+GCStructures::AssignLocus(size_t locusId, size_t regionId)
+{
+    gcLocus & locus = GetLocus(locusId);
+    gcRegion & newRegion = GetRegion(regionId);
+    return AssignLocus(locus,newRegion);
+}
+
+void
+GCStructures::AssignLocus(gcLocus & locus, gcRegion & newRegion)
+{
+    if(locus.HasRegion())
+    {
+        size_t regionId = locus.GetRegionId();
+        gcRegion & regionRef = GetRegion(regionId);
+        regionRef.RemoveLocusId(locus.GetId());
+    }
+
+    locus.SetRegionId(newRegion.GetId());
+    newRegion.AddLocus(locus);
+}
+
+void
+GCStructures::AssignPhenotype(gcPhenotype & phenotype, gcTraitInfo & trait)
+{
+    if(phenotype.HasTraitId())
+    {
+        size_t oldTraitId = phenotype.GetTraitId();
+        gcTraitInfo & oldTrait = GetTrait(oldTraitId);
+        oldTrait.RemovePhenotype(phenotype);
+    }
+    phenotype.SetTraitId(trait.GetId());
+    trait.AddPhenotype(phenotype);
+}
+
+void
+GCStructures::AssignTrait(size_t traitId, size_t regionId)
+{
+    gcTraitInfo & trait = GetTrait(traitId);
+    gcRegion & newRegion = GetRegion(regionId);
+    AssignTrait(trait,newRegion);
+}
+
+void
+GCStructures::AssignTrait(gcTraitInfo & trait, gcRegion & newRegion)
+{
+    if(trait.HasRegionId())
+    {
+        size_t oldRegionId = trait.GetRegionId();
+        gcRegion & oldRegion = GetRegion(oldRegionId);
+        oldRegion.RemoveTraitId(trait.GetId());
+    }
+    newRegion.AddTraitId(trait.GetId());
+}
+
+size_t
+GCStructures::GetPopForBlock(size_t blockId) const
+{
+    for(gcBlockSetMap::const_iterator i=m_blocks.begin(); i != m_blocks.end(); i++)
+    {
+        gcPopLocusIdPair popLoc = (*i).first;
+        const gcIdSet & blocks = (*i).second;
+        if(blocks.find(blockId) != blocks.end())
+        {
+            return popLoc.first;
+        }
+    }
+    assert(false);
+    return gcdefault::badIndex;
+}
+
+size_t
+GCStructures::GetLocusForBlock(size_t blockId) const
+{
+    for(gcBlockSetMap::const_iterator i=m_blocks.begin(); i != m_blocks.end(); i++)
+    {
+        gcPopLocusIdPair popLoc = (*i).first;
+        const gcIdSet & blocks = (*i).second;
+        if(blocks.find(blockId) != blocks.end())
+        {
+            return popLoc.second;
+        }
+    }
+    assert(false);
+    return gcdefault::badIndex;
+}
+
+void
+GCStructures::AssignBlockToPop(size_t blockId, size_t popId)
+{
+    size_t oldLocus = GetLocusForBlock(blockId);
+    AssignBlock(blockId,popId,oldLocus);
+}
+
+void
+GCStructures::AssignBlockToLocus(size_t blockId, size_t locusId)
+{
+    size_t oldPop = GetPopForBlock(blockId);
+    AssignBlock(blockId,oldPop,locusId);
+}
+
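+// Assign blockId to the (popId, locusId) cell: any previous assignment is
+// removed first, and an empty block set is created for the pair if needed.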
+void
+GCStructures::AssignBlock(size_t blockId, size_t popId, size_t locusId)
+{
+    RemoveBlockAssignment(blockId);
+
+    gcPopLocusIdPair plPair(popId,locusId);
+    gcBlockSetMap::iterator iter = m_blocks.find(plPair);
+    if(iter == m_blocks.end())
+        // we need to insert a new, empty set
+    {
+        m_blocks[plPair] = gcIdSet();
+        iter = m_blocks.find(plPair);
+    }
+    gcIdSet & blockSet = (*iter).second;
+    blockSet.insert(blockId);
+}
+
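+// Detach blockId from whichever (pop, locus) block set currently holds it;
+// a block is expected to appear in at most one set (see the assert below).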
+void
+GCStructures::RemoveBlockAssignment(size_t blockId)
+{
+    size_t foundCount = 0;
+    for(gcBlockSetMap::iterator i=m_blocks.begin(); i != m_blocks.end(); i++)
+    {
+        gcIdSet & blockSetRef = (*i).second;
+        gcIdSet::iterator j = blockSetRef.find(blockId);
+        if(j != blockSetRef.end())
+        {
+            foundCount++;
+            blockSetRef.erase(j);
+        }
+    }
+    assert(foundCount < 2);
+}
+
+gcTraitAllele &
+GCStructures::FetchOrMakeAllele(gcTraitInfo& trait,wxString name)
+{
+    if(HasAllele(name))
+    {
+        return GetAllele(trait,name);
+    }
+    return MakeAllele(name);
+}
+
+gcLocus &
+GCStructures::FetchOrMakeLocus(gcRegion & region, wxString name, const gcCreationInfo & createInfo)
+{
+    if(HasLocus(name))
+    {
+        return GetLocus(region,name);
+    }
+    return MakeLocus(region,name,true, createInfo); // true == blessed
+}
+
+gcPopulation &
+GCStructures::FetchOrMakePop(wxString name)
+{
+    if(HasPop(name))
+    {
+        return GetPop(name);
+    }
+    return MakePop(name,true);  // true == blessed
+}
+
+gcRegion &
+GCStructures::FetchOrMakeRegion(wxString name)
+{
+    wxLogVerbose("****FetchOrMakeRegion name: %s", name.c_str());  // JMDBG
+    if(HasRegion(name))
+    {
+        return GetRegion(name);
+    }
+    return MakeRegion(name,true);   // true == blessed
+}
+
+gcTraitInfo &
+GCStructures::FetchOrMakeTrait(wxString name)
+{
+    if(HasTrait(name))
+    {
+        return GetTrait(name);
+    }
+    return MakeTrait(name);
+}
+
+gcTraitAllele &
+GCStructures::MakeAllele(wxString name)
+{
+    wxString nameToUse = m_traitNames.ReserveOrMakeName(name,gcstr::allele);
+    gcTraitAllele newAllele;
+    newAllele.SetName(nameToUse);
+    size_t newId = newAllele.GetId();
+    // don't do any setting after this step --
+    // it won't propagate
+    m_alleles[newId] = newAllele;
+    return m_alleles[newId];
+}
+
+gcLocus &
+GCStructures::MakeLocus(size_t regionId, wxString name, bool blessed, const gcCreationInfo & createInfo)
+{
+    gcRegion & region = GetRegion(regionId);
+    return MakeLocus(region,name,blessed,createInfo);
+}
+
+gcLocus &
+GCStructures::MakeLocus(gcRegion & region, wxString name, bool blessed, const gcCreationInfo & creationInfo)
+{
+    wxString nameToUse = m_names.ReserveOrMakeName(name,gcstr::locus);
+    gcLocus newLocus;
+    newLocus.SetName(nameToUse);
+    newLocus.SetBlessed(blessed);
+    newLocus.SetCreationInfo(creationInfo);
+    size_t newId = newLocus.GetId();
+    // don't make any direct changes to newLocus after
+    // this step -- it won't propagate
+    m_loci[newId] = newLocus;
+    AssignLocus(m_loci[newId],region);
+    return m_loci[newId];
+}
+
+gcPopulation &
+GCStructures::MakePop(wxString name,bool blessed)
+{
+    wxString nameToUse = m_names.ReserveOrMakeName(name,gcstr::population);
+    gcPopulation newPop;
+    newPop.SetName(nameToUse);
+    newPop.SetBlessed(blessed);
+    size_t newId = newPop.GetId();
+    // don't do any setting after this step --
+    // it won't propagate
+    m_pops[newId] = newPop;
+    m_popDisplay.push_back(newId);
+    return m_pops[newId];
+}
+
+gcPhenotype &
+GCStructures::MakePhenotype(wxString name)
+{
+    wxString nameToUse = m_traitNames.ReserveOrMakeName(name,gcstr::phenotype);
+    gcPhenotype newPhenotype;
+    newPhenotype.SetName(nameToUse);
+    size_t newId = newPhenotype.GetId();
+    // don't do any setting after this step --
+    // it won't propagate
+    m_phenotypes[newId] = newPhenotype;
+    return m_phenotypes[newId];
+}
+
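+// Create a new region under a reserved unique name; as a side effect, one
+// panel is created for every existing population paired with the new region.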
+gcRegion &
+GCStructures::MakeRegion(wxString name,bool blessed)
+{
+    wxLogVerbose("****MakeRegion name: %s", name.c_str());  // JMDBG
+    wxString nameToUse = m_names.ReserveOrMakeName(name,gcstr::region);
+    gcRegion newRegion;
+    newRegion.SetName(nameToUse);
+    newRegion.SetBlessed(blessed);
+    wxLogVerbose("****MakeRegion nameToUse: %s blessed: %i", nameToUse.c_str(), (int)blessed);  // JMDBG
+    size_t newId = newRegion.GetId();
+
+    // create panels for each region / population pair
+    for(gcPopMap::const_iterator i=m_pops.begin(); i != m_pops.end(); i++)
+    {
+        size_t popId = (*i).first;
+        wxString pname;
+        pname.Printf(wxT("panel:%s:%s"), newRegion.GetName().c_str(), (*i).second.GetName().c_str());
+        wxString panelNameToUse = m_names.ReserveOrMakeName(pname,gcstr::panel);
+        wxLogVerbose("***MakeRegion create Panel name: %s", panelNameToUse.c_str());  // JMDBG
+        MakePanel(panelNameToUse, blessed, newId, popId);   // stored in m_panels as a side effect
+    }
+
+    // don't do any setting after this step --
+    // it won't propagate
+    m_regions[newId] = newRegion;
+    m_regionDisplay.push_back(newId);
+    return m_regions[newId];
+}
+
+gcPanel &
+GCStructures::CreatePanel(size_t regionId, size_t popId )
+{
+    wxString pname;
+    pname.Printf(wxT("panel:%s:%s"), GetRegion(regionId).GetName().c_str(), GetPop(popId).GetName().c_str());
+    wxString panelNameToUse = m_names.ReserveOrMakeName(pname,gcstr::panel);
+    wxLogVerbose("***CreatePanel name: %s", panelNameToUse.c_str());  // JMDBG
+    return MakePanel(panelNameToUse, false, regionId, popId);
+}
+
+gcPanel &
+GCStructures::MakePanel(wxString name, bool blessed, size_t regionId, size_t popId )
+{
+    gcPanel newPanel;
+    newPanel.SetName(name);
+    newPanel.SetBlessed(blessed);
+    newPanel.SetRegionId(regionId);
+    newPanel.SetPopId(popId);
+    wxLogVerbose("****MakePanel name: %s", name.c_str());  // JMDBG
+    size_t newId = newPanel.GetId();
+
+    gcRegionPopIdPair regPopPair = gcRegionPopIdPair(regionId, popId);
+
+    // don't do any setting after this step --
+    // it won't propagate
+    m_panels[regPopPair] = newPanel;
+    ////m_panelDisplay.push_back(newId);
+    return m_panels[regPopPair];
+}
+
+gcParent &
+GCStructures::MakeParent(wxString name)
+{
+    wxString nameToUse = m_names.ReserveOrMakeName(name,gcstr::parent);
+    gcParent newParent;
+    newParent.SetName(nameToUse);
+    newParent.SetDispLevel(0);
+    wxLogVerbose("****MakeParent name: %s", name.c_str());  // JMDBG
+    size_t newId = newParent.GetId();
+    // don't do any setting after this step --
+    // it won't propagate
+    m_parents[newId] = newParent;
+    return m_parents[newParent.GetId()];
+}
+
+gcMigration &
+GCStructures::MakeMigration(bool blessed, size_t fromId, size_t toId )
+{
+    gcMigration newMigration;
+
+    newMigration.m_blessed = blessed;
+    newMigration.m_hasFrom = true;
+    newMigration.m_fromId = fromId;
+    newMigration.m_hasTo = true;
+    newMigration.m_toId = toId;
+    newMigration.m_startValue = 50.0;
+    newMigration.m_method = migmethod_USER;
+    newMigration.m_profile = migprofile_NONE;
+    newMigration.m_constraint = migconstraint_UNCONSTRAINED;
+
+    newMigration.SetName(wxString::Format(_T("cell%i:%i "), (int)fromId, (int)toId)); //JMDBG
+
+    wxLogVerbose("****MakeMigration from: %i to: %i", (int)fromId, (int)toId );  // JMDBG
+    size_t newId = newMigration.GetId();
+
+    gcMigrationPair fromToPair = gcMigrationPair(fromId, toId);
+
+    // don't do any setting after this step --
+    // it won't propagate
+    m_migrations[fromToPair] = newMigration;
+    return m_migrations[fromToPair];
+}
+
+gcTraitInfo &
+GCStructures::MakeTrait(wxString name)
+{
+    wxString nameToUse = m_traitNames.ReserveOrMakeName(name,gcstr::trait);
+    gcTraitInfo newTrait;
+    newTrait.SetName(nameToUse);
+    // don't do any setting after this step --
+    // it won't propagate
+    assert(m_traitClasses.find(newTrait.GetId()) == m_traitClasses.end());
+    m_traitClasses[newTrait.GetId()] = newTrait;
+    return m_traitClasses[newTrait.GetId()];
+}
+
+void
+GCStructures::Rename(GCQuantum & object, wxString newName)
+{
+    wxString oldName = object.GetName();
+    wxLogVerbose("old name: %s \nnew name: %s", oldName.c_str(), newName.c_str());  // JMDBG
+    if(newName.Cmp(oldName) == 0) return;
+
+    assert(m_names.HasName(oldName) || m_traitNames.HasName(oldName));
+    assert(! (m_names.HasName(oldName) && m_traitNames.HasName(oldName)));
+
+    wxLogVerbose("checking m_names");  // JMDBG
+    if(m_names.HasName(oldName))
+    {
+        wxLogVerbose("in m_names");  // JMDBG
+        m_names.ReserveName(newName);
+        object.SetName(newName);
+        m_names.FreeName(oldName);
+    }
+
+    wxLogVerbose("checking m_traitNames");  // JMDBG
+    if(m_traitNames.HasName(oldName))
+    {
+        wxLogVerbose("in m_traitNames");  // JMDBG
+        m_traitNames.ReserveName(newName);
+        object.SetName(newName);
+        m_traitNames.FreeName(oldName);
+    }
+}
+
+void
+GCStructures::DebugDump(wxString prefix) const
+{
+    wxLogDebug(gcstr::structureDump,prefix.c_str());    // EWDUMPOK
+    m_pops.DebugDump((prefix+gcstr::indent).c_str());
+    m_regions.DebugDump((prefix+gcstr::indent).c_str());
+    m_loci.DebugDump((prefix+gcstr::indent).c_str());
+    m_traitClasses.DebugDump((prefix+gcstr::indent).c_str());
+    m_blocks.DebugDump((prefix+gcstr::indent).c_str());
+    m_files.DebugDump((prefix+gcstr::indent).c_str());
+}
+
+gcRegion &
+GCStructures::GetRegion(wxString name)
+{
+    for(gcRegionMap::iterator i=m_regions.begin();
+        i != m_regions.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    missing_region e(wxString::Format(gcerr::missingRegion,name.c_str()).c_str());
+    throw e;
+}
+
+const gcRegion &
+GCStructures::GetRegion(wxString name) const
+{
+    for(gcRegionMap::const_iterator i=m_regions.begin();
+        i != m_regions.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    missing_region e(wxString::Format(gcerr::missingRegion,name.c_str()).c_str());
+    throw e;
+}
+
+bool
+GCStructures::HasRegion(wxString name) const
+{
+    for(gcRegionMap::const_iterator i=m_regions.begin();
+        i != m_regions.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+gcTraitAllele &
+GCStructures::GetAllele(wxString name)
+{
+    for(gcAlleleMap::iterator i=m_alleles.begin();
+        i != m_alleles.end();
+        i++)
+    {
+        if((*i).second.GetName().Cmp(name) == 0)    // alleles case sensitive
+        {
+            return (*i).second;
+        }
+    }
+    throw gc_missing_allele(name);
+}
+
+const gcTraitAllele &
+GCStructures::GetAllele(wxString name) const
+{
+    for(gcAlleleMap::const_iterator i=m_alleles.begin();
+        i != m_alleles.end();
+        i++)
+    {
+        if((*i).second.GetName().Cmp(name) == 0)  // alleles case sensitive
+        {
+            return (*i).second;
+        }
+    }
+    throw gc_missing_allele(name);
+}
+
+gcTraitAllele &
+GCStructures::GetAllele(gcTraitInfo& trait, wxString alleleName)
+{
+    gcTraitAllele & allele = GetAllele(alleleName);
+    assert(allele.HasTraitId());
+    if(allele.GetTraitId() != trait.GetId())
+    {
+        const gcTraitInfo & oldTrait = GetTrait(allele.GetTraitId());
+        throw gc_allele_trait_mismatch(alleleName,trait.GetName(),oldTrait.GetName());
+    }
+    return allele;
+}
+
+bool
+GCStructures::HasAllele(wxString name) const
+{
+    for(gcAlleleMap::const_iterator i=m_alleles.begin();
+        i != m_alleles.end();
+        i++)
+    {
+        if((*i).second.GetName().Cmp(name) == 0)    // alleles case sensitive
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+gcLocus &
+GCStructures::GetLocus(wxString name)
+{
+    for(gcLocusMap::iterator i=m_loci.begin();
+        i != m_loci.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    throw gc_missing_locus(name);
+}
+
+const gcLocus &
+GCStructures::GetLocus(wxString name) const
+{
+    for(gcLocusMap::const_iterator i=m_loci.begin();
+        i != m_loci.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    throw gc_missing_locus(name);
+}
+
+gcLocus &
+GCStructures::GetLocus(gcRegion& region, wxString locusName)
+{
+    gcLocus & locus = GetLocus(locusName);
+    assert(locus.HasRegion());
+    if(locus.GetRegionId() != region.GetId())
+    {
+        const gcRegion & oldRegion = GetRegion(locus.GetRegionId());
+        throw gc_locus_region_mismatch(locusName,region.GetName(),oldRegion.GetName());
+    }
+    return locus;
+}
+
+gcPanel &
+GCStructures::GetPanel(wxString name)
+{
+    for(gcPanelMap::iterator i=m_panels.begin();i != m_panels.end();i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    throw missing_panel(name);
+}
+
+gcMigration &
+GCStructures::GetMigration(size_t id)
+{
+    for (gcMigrationMap::iterator i=m_migrations.begin();i != m_migrations.end();i++)
+    {
+        if((*i).second.GetId() == id)
+        {
+            return (*i).second;
+        }
+    }
+    wxString badId = wxString::Format("%i",(int)id);
+    throw missing_migration_id(badId);
+}
+
+const gcMigration &
+GCStructures::GetMigration(size_t id) const
+{
+    for (gcMigrationMap::const_iterator i=m_migrations.begin();i != m_migrations.end();i++)
+    {
+        if((*i).second.GetId() == id)
+        {
+            return (*i).second;
+        }
+    }
+    wxString badId = wxString::Format("%i",(int)id);
+    throw missing_migration_id(badId);
+}
+
+bool
+GCStructures::HasMigration(size_t fromId, size_t toId) const
+{
+    gcMigrationPair p(fromId, toId);
+    gcMigrationMap::const_iterator iter = m_migrations.find(p);
+    return iter != m_migrations.end();
+}
+
+gcMigration &
+GCStructures::GetMigration(size_t fromId, size_t toId)
+{
+    gcMigrationPair p(fromId, toId);
+    gcMigrationMap::iterator iter = m_migrations.find(p);
+    if(iter != m_migrations.end())
+    {
+        return (*iter).second;
+    }
+
+    wxString fromName;
+    if (IsPop(fromId))
+    {
+        fromName = GetPop(fromId).GetName();
+    }
+    else
+    {
+        if(IsParent(fromId))
+        {
+            fromName = GetParent(fromId).GetName();
+        }
+        else
+        {
+            fromName = wxString::Format("Bad from ID:%i", (int)fromId);
+        }
+    }
+
+    wxString toName;
+    if (IsPop(toId))
+    {
+        toName = GetPop(toId).GetName();
+    }
+    else
+    {
+        if(IsParent(toId))
+        {
+            toName = GetParent(toId).GetName();
+        }
+        else
+        {
+            toName = wxString::Format("Bad to ID:%i", (int)toId);
+        }
+    }
+    throw missing_migration(fromName, toName);
+}
+
+const gcMigration &
+GCStructures::GetMigration(size_t fromId, size_t toId) const
+{
+    gcMigrationPair p(fromId, toId);
+    gcMigrationMap::const_iterator iter = m_migrations.find(p);
+    if(iter != m_migrations.end())
+    {
+        return (*iter).second;
+    }
+
+    wxString fromName;
+    if (IsPop(fromId))
+    {
+        fromName = GetPop(fromId).GetName();
+    }
+    else
+    {
+        if(IsParent(fromId))
+        {
+            fromName = GetParent(fromId).GetName();
+        }
+        else
+        {
+            fromName = wxString::Format("Bad from ID:%i", (int)fromId);
+        }
+    }
+
+    wxString toName;
+    if (IsPop(toId))
+    {
+        toName = GetPop(toId).GetName();
+    }
+    else
+    {
+        if(IsParent(toId))
+        {
+            toName = GetParent(toId).GetName();
+        }
+        else
+        {
+            toName = wxString::Format("Bad to ID:%i", (int)toId);
+        }
+    }
+    throw missing_migration(fromName, toName);
+}
+
+const gcPanel &
+GCStructures::GetPanel(wxString name) const
+{
+    for(gcPanelMap::const_iterator i=m_panels.begin();i != m_panels.end();i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    throw missing_panel(name);
+}
+
+bool
+GCStructures::HasPanel(size_t regionId, size_t popId) const
+{
+    gcRegionPopIdPair p(regionId, popId);
+    gcPanelMap::const_iterator iter = m_panels.find(p);
+    return iter != m_panels.end();
+}
+
+gcPanel &
+GCStructures::GetPanel(size_t regionId, size_t popId)
+{
+    gcRegionPopIdPair p(regionId, popId);
+    gcPanelMap::iterator iter = m_panels.find(p);
+    if(iter != m_panels.end())
+    {
+        return (*iter).second;
+    }
+    wxString regionName = GetRegion(regionId).GetName();
+    wxString popName = GetPop(popId).GetName();
+    throw missing_panel(regionName, popName);
+}
+
+const gcPanel &
+GCStructures::GetPanel(size_t regionId, size_t popId) const
+{
+    gcRegionPopIdPair p(regionId, popId);
+    gcPanelMap::const_iterator iter = m_panels.find(p);
+    if(iter != m_panels.end())
+    {
+        return (*iter).second;
+    }
+    wxString regionName = GetRegion(regionId).GetName();
+    wxString popName = GetPop(popId).GetName();
+    throw missing_panel(regionName, popName);
+}
+
+gcParent &
+GCStructures::GetParent(wxString name)
+{
+    for(gcParentMap::iterator i=m_parents.begin();i != m_parents.end();i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    throw missing_parent(name);
+}
+
+const gcParent &
+GCStructures::GetParent(wxString name) const
+{
+    for(gcParentMap::const_iterator i=m_parents.begin();i != m_parents.end();i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    throw missing_parent(name);
+}
+
+gcPhenotype &
+GCStructures::GetPhenotype(wxString name)
+{
+    for(gcPhenoMap::iterator i=m_phenotypes.begin();
+        i != m_phenotypes.end();
+        i++)
+    {
+        if((*i).second.GetName().Cmp(name) == 0)  // phenotypes case sensitive
+        {
+            return (*i).second;
+        }
+    }
+    throw gc_missing_phenotype(name);
+}
+
+const gcPhenotype &
+GCStructures::GetPhenotype(wxString name) const
+{
+    for(gcPhenoMap::const_iterator i=m_phenotypes.begin();
+        i != m_phenotypes.end();
+        i++)
+    {
+        if((*i).second.GetName().Cmp(name) == 0)  // phenotypes case sensitive
+        {
+            return (*i).second;
+        }
+    }
+    throw gc_missing_phenotype(name);
+}
+
+bool
+GCStructures::HasLocus(wxString name) const
+{
+    for(gcLocusMap::const_iterator i=m_loci.begin();
+        i != m_loci.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+gcPopulation &
+GCStructures::GetPop(wxString name)
+{
+    for(gcPopMap::iterator i=m_pops.begin();
+        i != m_pops.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    throw gc_missing_population(name);
+}
+
+const gcPopulation &
+GCStructures::GetPop(wxString name) const
+{
+    for(gcPopMap::const_iterator i=m_pops.begin();
+        i != m_pops.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    throw gc_missing_population(name);
+}
+
+bool
+GCStructures::HasPop(wxString name) const
+{
+    for(gcPopMap::const_iterator i=m_pops.begin();
+        i != m_pops.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+gcTraitInfo &
+GCStructures::GetTrait(wxString name)
+{
+    for(gcTraitMap::iterator i=m_traitClasses.begin();
+        i != m_traitClasses.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    missing_trait e(wxString::Format(gcerr::missingTrait,name.c_str()).c_str());
+    throw e;
+}
+
+const gcTraitInfo &
+GCStructures::GetTrait(wxString name) const
+{
+    for(gcTraitMap::const_iterator i=m_traitClasses.begin();
+        i != m_traitClasses.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return (*i).second;
+        }
+    }
+    missing_trait e(wxString::Format(gcerr::missingTrait,name.c_str()).c_str());
+    throw e;
+}
+
+bool
+GCStructures::HasTrait(wxString name) const
+{
+    for(gcTraitMap::const_iterator i=m_traitClasses.begin();
+        i != m_traitClasses.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+long
+GCStructures::GetPopDisplayIndexOf(size_t popId) const
+{
+    long popIndex = 0;
+    for(    gcDisplayOrder::const_iterator i = m_popDisplay.begin();
+            i != m_popDisplay.end();
+            i++, popIndex++)
+    {
+        if((*i) == popId) return popIndex;
+    }
+    return gcdefault::badDisplayIndex;
+}
+
+long
+GCStructures::GetLocusDisplayIndexOf(size_t locusId) const
+{
+    long locusIndex = 0;
+    for(    gcDisplayOrder::const_iterator i = m_regionDisplay.begin();
+            i != m_regionDisplay.end();
+            i++)
+    {
+        size_t regionId = (*i);
+        const gcRegion & region = GetRegion(regionId);
+
+        const GCLocusInfoMap & loci = region.GetLocusInfoMap();
+
+        if(loci.size() == 0)
+        {
+            locusIndex++;
+        }
+        else
+        {
+
+            for(GCLocusInfoMap::const_iterator liter=loci.begin();
+                liter != loci.end(); liter++)
+            {
+                if(locusId == ((*liter).first))
+                {
+                    return locusIndex;
+                }
+                locusIndex++;
+            }
+        }
+
+    }
+    return gcdefault::badDisplayIndex;
+}
+
+bool
+GCStructures::RegionHasAnyLinkedLoci(size_t regionId) const
+{
+    const gcRegion & region = GetRegion(regionId);
+    const GCLocusInfoMap & loci = region.GetLocusInfoMap();
+    for(GCLocusInfoMap::const_iterator i=loci.begin(); i != loci.end(); i++)
+    {
+        size_t locusId = (*i).first;
+        const gcLocus & locus = GetLocus(locusId);
+        if(locus.GetLinked())
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool
+GCStructures::RegionHasAnyUnLinkedLoci(size_t regionId) const
+{
+    const gcRegion & region = GetRegion(regionId);
+    const GCLocusInfoMap & loci = region.GetLocusInfoMap();
+    for(GCLocusInfoMap::const_iterator i=loci.begin(); i != loci.end(); i++)
+    {
+        size_t locusId = (*i).first;
+        const gcLocus & locus = GetLocus(locusId);
+        if(!(locus.GetLinked()))
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
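+// Break a region apart so each of its loci lands in its own new (unblessed)
+// region, carrying over the effective population size and, where blocks exist,
+// the per-population panel counts; the original region is unblessed at the end.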
+void
+GCStructures::FragmentRegion(size_t regionId)
+{
+    gcRegion & regionRef = GetRegion(regionId);
+    gcIdVec loci = GetLocusIdsForRegionByCreation(regionId);
+    for(gcIdVec::const_iterator i=loci.begin(); i != loci.end(); i++)
+    {
+        size_t locusId = *i;
+        gcLocus & locusRef = GetLocus(locusId);
+
+        wxLogVerbose("****FragmentRegion ");  // JMDBG
+        gcRegion & newRegion = MakeRegion("",false); // false == not blessed
+        if(regionRef.HasEffectivePopulationSize())
+        {
+            newRegion.SetEffectivePopulationSize(regionRef.GetEffectivePopulationSize());
+        }
+
+        // propagate panel size out to all the new panels
+        for(gcPopMap::const_iterator i=m_pops.begin(); i != m_pops.end(); i++)
+        {
+            size_t popId = (*i).first;
+            if (HasBlock(locusId, popId))
+            {
+                GetPanel(newRegion.GetId(),popId).SetNumPanels(GetPanel(regionId,popId).GetNumPanels());
+                GetPanel(newRegion.GetId(),popId).SetBlessed(true);
+            }
+        }
+
+        AssignLocus(locusRef,newRegion);
+    }
+
+    // unbless old region
+    regionRef.SetBlessed(false);
+}
+
+void
+GCStructures::LocusToOwnRegion(size_t locusId)
+{
+    gcLocus & locusRef = GetLocus(locusId);
+
+    wxLogVerbose("****LocusToOwnRegion ");  // JMDBG
+    gcRegion & newRegion = MakeRegion("",false); // false == not blessed
+    if(locusRef.HasRegion())
+    {
+        gcRegion & regionRef = GetRegion(locusRef.GetRegionId());
+        if(regionRef.HasEffectivePopulationSize())
+        {
+            newRegion.SetEffectivePopulationSize(regionRef.GetEffectivePopulationSize());
+        }
+    }
+    AssignLocus(locusRef,newRegion);
+}
+
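+// Merge the listed regions into the first one: effective population sizes must
+// not conflict and blessed panel sizes must agree for each population; all loci
+// are then reassigned to the first region and the emptied regions are unblessed.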
+void
+GCStructures::MergeRegions(gcIdVec regions)
+{
+    if(regions.size() < 2) return;
+
+    // testing mergeability
+
+    // population size set
+    bool haveEffPopSize = false;
+    double effPopSize = 1;
+
+    for(gcIdVec::iterator iter=regions.begin(); iter != regions.end(); iter++)
+    {
+        size_t regionId = *iter;
+        gcRegion * regionP = &GetRegion(regionId);
+
+        // effective population size
+        if(regionP->HasEffectivePopulationSize())
+        {
+            double pSize = regionP->GetEffectivePopulationSize();
+            if(haveEffPopSize)
+            {
+                if(effPopSize != pSize)
+                {
+                    throw effective_pop_size_clash(effPopSize,pSize);
+                }
+            }
+            else
+            {
+                haveEffPopSize = true;
+                effPopSize = pSize;
+            }
+        }
+    }
+
+    // test panel size and blessed state
+    gcDisplayOrder popIds =  GetDisplayablePopIds();
+
+    for(gcDisplayOrder::iterator piter=popIds.begin(); piter !=popIds.end(); piter++)
+    {
+        size_t popId = *piter;
+        gcPopulation * curPop = &GetPop(popId);
+        gcRegion * region1;
+        gcRegion * region2;
+
+        bool isBlessed = false;
+        long panelSize = 0;
+        //size_t lastId = 0;
+
+        for(gcIdVec::iterator iter=regions.begin(); iter != regions.end(); iter++)
+        {
+            size_t regionId = *iter;
+            wxLogVerbose("In MergeRegions testing region: %i pop: %i", regionId, popId);
+            if (HasPanel(regionId, popId))
+            {
+                const gcPanel &panelId =  GetPanel(regionId, popId);
+                long numP = panelId.GetNumPanels();
+                wxLogVerbose("In MergeRegions region: %i pop: %i HasPanel numP: %i", regionId, popId, numP);
+                if (panelId.GetBlessed())
+                {
+                    if(!isBlessed)
+                    {
+                        // blessed overrides unblessed, no matter what the value
+                        panelSize = numP;
+                        isBlessed = true;
+                        region1 = &GetRegion(regionId);
+                    }
+                    else
+                    {
+                        if (panelSize != numP)
+                        {
+                            wxLogVerbose("In MergeRegions panelSize: %i numP: %i", panelSize, numP);
+                            // can't merge panels that are both blessed and have different number of members
+                            region2 = &GetRegion(regionId);
+                            throw panel_size_clash(curPop->GetName(),region1->GetName(), region2->GetName());
+                        }
+                    }
+                }
+                else
+                {
+                    if (numP > 0)
+                    {
+                        // this is a bug - you can't have >0 panel size that isn't blessed
+                        region1 = &GetRegion(regionId);
+                        throw panel_blessed_error(region1->GetName(), curPop->GetName());
+                    }
+                }
+            }
+        }
+    }
+
+    // everything worked so combine panels
+    for(gcDisplayOrder::iterator piter=popIds.begin(); piter !=popIds.end(); piter++)
+    {
+        size_t popId = *piter;
+
+        bool isFirstRegion = true;
+        gcPanel * firstPanel = NULL;
+        gcPanel * curPanel = NULL;
+        for(gcIdVec::iterator iter=regions.begin(); iter != regions.end(); iter++)
+        {
+            size_t regionId = *iter;
+            wxLogVerbose("In MergeRegions - regionId: %i popId: %i", (int)regionId, (int)popId);
+            if (isFirstRegion)
+            {
+                // everything propagates to the first region selected
+                isFirstRegion = false;
+                if (HasPanel(regionId, popId))
+                {
+                    firstPanel = &GetPanel(regionId, popId);
+                    wxLogVerbose(" first panel regionId: %i popId: %i", (int)regionId, (int)popId);
+                    firstPanel->SetBlessed(true);
+                }
+                else
+                {
+                    // corner case - first region did not contain this pop so create it,
+                    // and remember it so later panel sizes have something to merge into
+                    firstPanel = &CreatePanel(regionId, popId);
+                }
+            }
+            else
+            {
+                // largest panel size wins
+                if (HasPanel(regionId, popId))
+                {
+                    curPanel = &GetPanel(regionId, popId);
+                    if (curPanel->GetNumPanels() > firstPanel->GetNumPanels())
+                    {
+                        firstPanel->SetNumPanels(curPanel->GetNumPanels());
+                    }
+                }
+            }
+        }
+    }
+
+    // combine regions
+    bool firstItem = true;
+    gcRegion * firstRegionPointer = NULL;
+    for(gcIdVec::iterator iter=regions.begin(); iter != regions.end(); iter++)
+    {
+        size_t regionId = *iter;
+        gcRegion * regionP = &GetRegion(regionId);
+        assert(regionP != NULL);
+        if(firstItem)
+        {
+            firstRegionPointer = regionP;
+        }
+        else
+        {
+            gcRegion & regionRef = *regionP;
+            const GCLocusInfoMap & lmap = regionRef.GetLocusInfoMap();
+            gcIdSet locusIds;
+            for(GCLocusInfoMap::const_iterator miter=lmap.begin(); miter != lmap.end(); miter++)
+            {
+                locusIds.insert((*miter).first);
+            }
+            for(gcIdSet::const_iterator liter=locusIds.begin(); liter != locusIds.end(); liter++)
+            {
+                size_t locusId = *liter;
+                AssignLocus(locusId,firstRegionPointer->GetId());    // EWFIX.P5 SPEED
+            }
+
+            // unbless region that's now empty
+            regionRef.SetBlessed(false);
+        }
+        firstItem = false;
+    }
+    if(haveEffPopSize)
+    {
+        firstRegionPointer->SetEffectivePopulationSize(effPopSize);
+    }
+}
+
+void
+GCStructures::MergeLoci(gcIdVec locusIds)
+// this operation can fail part way through. If that happens
+// in the batch converter, the thrown error will result in
+// aborting the program (cleanly, one would hope). If we're
+// in the gui, the thrown error should be caught in
+// gcUpdatingDialog::OnButton 's call to DoUpdateData
+{
+    // find all blocks that are assigned to one of these loci and
+    // assign them all to the first
+    if(locusIds.size() < 2) return;
+
+    bool firstItem = true;
+    gcLocus * firstLocusPointer = NULL;
+    for(gcIdVec::iterator iter = locusIds.begin(); iter != locusIds.end(); iter++)
+    {
+        size_t locusId = *iter;
+        gcLocus * locusP = &(GetLocus(locusId));
+        assert(locusP != NULL);
+        if(firstItem)
+        {
+            firstLocusPointer = locusP;
+        }
+        else
+        {
+
+            firstLocusPointer->MergeWith(*locusP);
+
+            for(gcPopMap::const_iterator piter=m_pops.begin(); piter != m_pops.end(); piter++)
+            {
+                size_t popId = (*piter).first;
+                gcPopLocusIdPair oldPair(popId,locusP->GetId());
+                gcBlockSetMap::iterator biter = m_blocks.find(oldPair);
+                if(biter != m_blocks.end())
+                {
+                    gcIdSet blocks = (*biter).second;
+                    for(gcIdSet::iterator bsetIter=blocks.begin(); bsetIter != blocks.end(); bsetIter++)
+                    {
+                        size_t blockId = *bsetIter;
+                        AssignBlock(blockId,popId,firstLocusPointer->GetId());  // EWFIX.P5 SPEED
+                    }
+                }
+            }
+            // unbless locus that's now empty
+            locusP->SetBlessed(false);
+        }
+        firstItem = false;
+    }
+
+}
+
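+// Reassign every block belonging to the later populations to the first
+// population in the list, then unbless the emptied populations.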
+void
+GCStructures::MergePops(gcIdVec popIds)
+{
+    // find all blocks that are assigned to one of these pops and
+    // assign them all to the first
+    if(popIds.size() < 2) return;
+
+    bool firstItem = true;
+    gcPopulation * firstPopP = NULL;
+    for(gcIdVec::iterator iter = popIds.begin(); iter != popIds.end(); iter++)
+    {
+        size_t popId = *iter;
+        gcPopulation * popP = &(GetPop(popId));
+        assert(popP != NULL);
+        if (popP->GetBlessed())
+        {
+            wxLogVerbose("****in GCStructures::MergePops popP: %i Blessed: true before merge", popId);  // JMDBG
+        }
+        else
+        {
+            wxLogVerbose("****in GCStructures::MergePops popP: %i Blessed: false before merge", popId);  // JMDBG
+        }
+
+        if(firstItem)
+        {
+            firstPopP = popP;
+        }
+        else
+        {
+            for(gcLocusMap::const_iterator liter=m_loci.begin(); liter != m_loci.end(); liter++)
+            {
+                size_t locusId = (*liter).first;
+                gcPopLocusIdPair oldPair(popP->GetId(),locusId);
+                gcBlockSetMap::iterator biter = m_blocks.find(oldPair);
+                if(biter != m_blocks.end())
+                {
+                    gcIdSet blocks = (*biter).second;
+                    for(gcIdSet::iterator bsetIter=blocks.begin(); bsetIter != blocks.end(); bsetIter++)
+                    {
+                        size_t blockId = *bsetIter;
+                        AssignBlock(blockId,firstPopP->GetId(),locusId);
+                    }
+                }
+            }
+            // unbless pop that's now empty
+            popP->SetBlessed(false);
+        }
+        firstItem = false;
+        if (popP->GetBlessed())
+        {
+            wxLogVerbose("****in GCStructures::MergePops popP: %i Blessed: true after merge", popId);  // JMDBG
+        }
+        else
+        {
+            wxLogVerbose("****in GCStructures::MergePops popP: %i Blessed: false after merge", popId);  // JMDBG
+        }
+    }
+
+}
+
+bool
+GCStructures::HasBlock(size_t locusId, size_t popId) const
+{
+    gcPopLocusIdPair p(popId,locusId);
+    gcBlockSetMap::const_iterator iter = m_blocks.find(p);
+    return iter != m_blocks.end();
+}
+
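+// Remove every block id listed in blocksInFile from the per-(pop, locus)
+// block sets, erasing any set that ends up empty.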
+void
+GCStructures::RemoveBlocks(gcIdSet blocksInFile)
+{
+
+    // check in structures
+    for(gcBlockSetMap::iterator iter=m_blocks.begin(); iter != m_blocks.end(); )
+    {
+        gcIdSet & bSet = (*iter).second;
+        for(gcIdSet::iterator biter=bSet.begin(); biter != bSet.end(); )
+        {
+            size_t blockId = *biter;
+            if(blocksInFile.find(blockId) != blocksInFile.end())
+            {
+                bSet.erase(biter++);
+            }
+            else
+            {
+                biter++;
+            }
+        }
+        if(bSet.empty())
+        {
+            m_blocks.erase(iter++);
+        }
+        else
+        {
+            iter++;
+        }
+    }
+}
+
+void
+GCStructures::RemoveBlocksForLocus(size_t locusId)
+{
+    for(gcBlockSetMap::iterator iter=m_blocks.begin(); iter != m_blocks.end(); )
+    {
+        gcPopLocusIdPair p = (*iter).first;
+        if(p.second == locusId)
+        {
+            m_blocks.erase(iter++);
+        }
+        else
+        {
+            iter++;
+        }
+    }
+}
+
+void
+GCStructures::RemoveBlocksForPop(size_t popId)
+{
+    for(gcBlockSetMap::iterator iter=m_blocks.begin(); iter != m_blocks.end(); )
+    {
+        gcPopLocusIdPair p = (*iter).first;
+        if(p.first == popId)
+        {
+            m_blocks.erase(iter++);
+        }
+        else
+        {
+            iter++;
+        }
+    }
+}
+
+gcIdVec
+GCStructures::GetLocusIdsForRegionByCreation(size_t regionId) const
+{
+    const gcRegion & region = GetRegion(regionId);
+    const GCLocusInfoMap & loci = region.GetLocusInfoMap();
+    gcIdVec vec;
+    for(GCLocusInfoMap::const_iterator iter=loci.begin(); iter != loci.end(); iter++)
+    {
+        size_t locusId = (*iter).first;
+        if(iter != loci.begin())
+        {
+            assert(locusId > vec.back());
+        }
+        vec.push_back(locusId);
+    }
+    return vec;
+}
+
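+// Return the region's locus ids ordered by map position; loci without a map
+// position sort together under the gcdefault::badMapPosition sentinel.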
+gcIdVec
+GCStructures::GetLocusIdsForRegionByMapPosition(size_t regionId) const
+{
+    const gcRegion & region = GetRegion(regionId);
+    const GCLocusInfoMap & loci = region.GetLocusInfoMap();
+
+    std::multimap<long,size_t> mapByPos;
+
+    for(GCLocusInfoMap::const_iterator iter=loci.begin(); iter != loci.end(); iter++)
+    {
+        size_t locusId = (*iter).first;
+
+        const gcLocus & locus = GetLocus(locusId);
+        long pos = gcdefault::badMapPosition;
+        if(locus.HasMapPosition())
+        {
+            pos = locus.GetMapPosition();
+        }
+        mapByPos.insert(std::pair<long, size_t>(pos,locusId));
+    }
+
+    gcIdVec vec;
+    for(std::multimap<long,size_t>::iterator i=mapByPos.begin(); i!=mapByPos.end(); i++)
+    {
+        size_t id = (*i).second;
+        vec.push_back(id);
+    }
+    return vec;
+}
+
+void
+GCStructures::RemoveRegion(gcRegion & region)
+{
+
+    // remove all blocks for this region
+    gcIdVec locusIds = GetLocusIdsForRegionByCreation(region.GetId());
+    for(gcIdVec::const_iterator iter=locusIds.begin(); iter != locusIds.end(); iter++)
+    {
+        size_t locusId = (*iter);
+        RemoveLocus(GetLocus(locusId));
+    }
+
+    // remove from display and master map
+    // no need to delete, it lives there
+    gcDisplayOrder::iterator displayIter =
+        find(m_regionDisplay.begin(),m_regionDisplay.end(),region.GetId());
+    m_regionDisplay.erase(displayIter);
+    gcRegionMap::iterator masterIter = m_regions.find(region.GetId());
+    m_regions.erase(masterIter);
+
+}
+
+void
+GCStructures::RemoveLocus(gcLocus & locus)
+{
+    // we do everything by id in this class
+    size_t locId = locus.GetId();
+
+    // remove all blocks for this locus
+    RemoveBlocksForLocus(locId);
+
+    // remove this locus from its region
+    if(locus.HasRegion())
+    {
+        size_t oldRegionId = locus.GetRegionId();
+        gcRegion & oldRegion = GetRegion(oldRegionId);
+        oldRegion.RemoveLocusId(locId);
+    }
+
+    // remove from master map -- no need to delete, it
+    // lives there
+    gcLocusMap::iterator masterIter = m_loci.find(locId);
+    m_loci.erase(masterIter);
+
+}
+
+void
+GCStructures::RemoveMigration(size_t fromId, size_t toId)
+{
+    gcMigrationPair p(fromId, toId);
+    gcMigrationMap::iterator masterIter = m_migrations.find(p);
+    m_migrations.erase(masterIter);
+}
+
+void
+GCStructures::RemovePop(gcPopulation & pop)
+{
+    // we do everything by id in this class
+    size_t popId = pop.GetId();
+
+    // remove all blocks for this pop
+    RemoveBlocksForPop(popId);
+
+    // remove from display and master map
+    gcDisplayOrder::iterator displayIter =
+        find(m_popDisplay.begin(),m_popDisplay.end(),popId);
+    m_popDisplay.erase(displayIter);
+    gcPopMap::iterator masterIter = m_pops.find(popId);
+    m_pops.erase(masterIter);
+
+}
+
+void
+GCStructures::RemovePanel(size_t regionId, size_t popId)
+{
+    gcRegionPopIdPair p(regionId, popId);
+    // remove from master map
+    gcPanelMap::iterator masterIter = m_panels.find(p);
+    m_panels.erase(masterIter);
+}
+
+void
+GCStructures::RemoveParent(size_t parentId)
+{
+    const gcParent & parRef = GetParent(parentId);
+    m_names.FreeName(parRef.GetName());
+    gcParentMap::iterator masterIter = m_parents.find(parentId);
+    m_parents.erase(masterIter);
+}
+
+
+void
+GCStructures::RemoveRegions(objVector regions)
+{
+    for(objVector::iterator i=regions.begin(); i != regions.end(); i++)
+    {
+        GCQuantum * object = *i;
+        gcRegion * regionP = dynamic_cast<gcRegion *>(object);
+        assert(regionP != NULL);
+        RemoveRegion(*regionP);
+    }
+}
+
+void
+GCStructures::RemoveLoci(objVector loci)
+{
+    for(objVector::iterator iter = loci.begin(); iter != loci.end(); iter++)
+    {
+        GCQuantum * object = *iter;
+        gcLocus * locP = dynamic_cast<gcLocus *>(object);
+        assert(locP != NULL);
+        RemoveLocus(*locP);
+    }
+}
+
+void
+GCStructures::RemovePops(objVector pops)
+{
+    for(objVector::iterator iter = pops.begin(); iter != pops.end(); iter++)
+    {
+        GCQuantum * object = *iter;
+        gcPopulation * popP = dynamic_cast<gcPopulation *>(object);
+        assert(popP != NULL);
+        RemovePop(*popP);
+    }
+}
+
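+// Delete all parents (freeing their reserved names), then clear the parent
+// reference from every population.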
+void
+GCStructures::RemoveParents()
+{
+    for (gcParentMap::iterator i=m_parents.begin(); i != m_parents.end(); )
+    {
+        size_t parentId = (*i).second.GetId();
+        ++i;    // advance before RemoveParent erases this entry from m_parents
+        RemoveParent(parentId);
+    }
+
+    // clean up the populations
+    for (gcPopMap::const_iterator i=m_pops.begin();i != m_pops.end();i++)
+    {
+        GetPop((*i).second.GetId()).ClearParent();
+    }
+}
+
+bool
+GCStructures::GetFileSelection(size_t fileId) const
+{
+    gcFileMap::const_iterator iter = m_files.find(fileId);
+    assert(iter != m_files.end());
+    const gcFileInfo & fileInfo = (*iter).second;
+    return fileInfo.GetSelected();
+}
+
+void
+GCStructures::SetFileSelection(size_t fileId, bool value)
+{
+    gcFileMap::iterator iter = m_files.find(fileId);
+    assert(iter != m_files.end());
+    gcFileInfo & fileInfo = (*iter).second;
+    return fileInfo.SetSelected(value);
+}
+
+size_t
+GCStructures::SelectedFileCount() const
+{
+    size_t count = 0;
+    for(gcFileMap::const_iterator iter=m_files.begin(); iter != m_files.end(); iter++)
+    {
+        const gcFileInfo & fileInfo = (*iter).second;
+        if(fileInfo.GetSelected())
+        {
+            count++;
+        }
+    }
+    return count;
+}
+
+void
+GCStructures::AllFileSelectionsTo(bool value)
+{
+    for(gcFileMap::iterator iter=m_files.begin(); iter != m_files.end(); iter++)
+    {
+        gcFileInfo & fileInfo = (*iter).second;
+        fileInfo.SetSelected(value);
+    }
+}
+
+const GCLocusMatcher &
+GCStructures::GetLocusMatcher(const GCFile & fileRef) const
+{
+    gcFileMap::const_iterator iter = m_files.find(fileRef.GetId());
+    assert(iter != m_files.end());
+    const gcFileInfo & fileInfo = (*iter).second;
+    return fileInfo.GetLocMatcher();
+}
+
+const GCPopMatcher &
+GCStructures::GetPopMatcher(const GCFile & fileRef) const
+{
+    gcFileMap::const_iterator iter = m_files.find(fileRef.GetId());
+    assert(iter != m_files.end());
+    const gcFileInfo & fileInfo = (*iter).second;
+    return fileInfo.GetPopMatcher();
+}
+
+void
+GCStructures::SetLocusMatcher(const GCFile & fileRef, const GCLocusMatcher & l)
+{
+    gcFileMap::iterator iter = m_files.find(fileRef.GetId());
+    assert(iter != m_files.end());
+    gcFileInfo & fileInfo = (*iter).second;
+    fileInfo.SetLocMatcher(l);
+}
+
+void
+GCStructures::SetPopMatcher(const GCFile & fileRef, const GCPopMatcher & p)
+{
+    gcFileMap::iterator iter = m_files.find(fileRef.GetId());
+    assert(iter != m_files.end());
+    gcFileInfo & fileInfo = (*iter).second;
+    fileInfo.SetPopMatcher(p);
+}
+
+const gcPhenoMap &
+GCStructures::GetPhenotypeMap() const
+{
+    return m_phenotypes;
+}
+
+// EWFIX.P3 -- this should share code with ::GetParse(const GCFile&) const
+const GCParse &
+GCStructures::GetParse(size_t fileId) const
+{
+    gcFileMap::const_iterator iter = m_files.find(fileId);
+    assert(iter != m_files.end());
+    const gcFileInfo & fileInfo = (*iter).second;
+    size_t parseId = fileInfo.GetParse();
+    for(size_t i=0; i < m_dataStoreP->GetDataFile(fileId).GetParseCount(); i++)
+    {
+        const GCParse & parse = m_dataStoreP->GetDataFile(fileId).GetParse(i);
+        if(parse.GetId() == parseId)
+        {
+            return parse;
+        }
+    }
+    wxString msg = wxString::Format(gcerr::missingParse,m_dataStoreP->GetDataFile(fileId).GetName().c_str());
+    gc_implementation_error e(msg.c_str());
+    // implementation error -- you should have checked HasParse first!
+    throw e;
+}
+
+const GCParse &
+GCStructures::GetParse(const GCFile & file) const
+{
+    gcFileMap::const_iterator iter = m_files.find(file.GetId());
+    assert(iter != m_files.end());
+    const gcFileInfo & fileInfo = (*iter).second;
+    size_t parseId = fileInfo.GetParse();
+    for(size_t i=0; i < file.GetParseCount(); i++)
+    {
+        const GCParse & parse = file.GetParse(i);
+        if(parse.GetId() == parseId)
+        {
+            return parse;
+        }
+    }
+    wxString msg = wxString::Format(gcerr::missingParse,file.GetName().c_str());
+    gc_implementation_error e(msg.c_str());
+    // implementation error -- you should have checked HasParse first!
+    throw e;
+}
+
+bool
+GCStructures::HasParse(const GCFile & file) const
+{
+    gcFileMap::const_iterator iter = m_files.find(file.GetId());
+    assert(iter != m_files.end());
+    const gcFileInfo & fileInfo = (*iter).second;
+    return fileInfo.HasParse();
+}
+
+bool
+GCStructures::HasUnparsedFiles() const
+{
+    for(gcFileMap::const_iterator iter=m_files.begin(); iter != m_files.end(); iter++)
+    {
+        const gcFileInfo & fileInfo = (*iter).second;
+        if(!(fileInfo.HasParse())) return true;
+    }
+    return false;
+}
+
+void
+GCStructures::SetParse(const GCParse & parse)
+{
+    const GCFile & fileRef = parse.GetFileRef();
+    gcFileMap::iterator iter = m_files.find(fileRef.GetId());
+    assert(iter != m_files.end());
+    gcFileInfo & fileInfo = (*iter).second;
+    fileInfo.SetParse(parse.GetId());
+
+    // EWFIX.BUG.588
+    if(parse.GetHasSpacesInNames())
+    {
+        m_dataStoreP->GCWarning(wxString::Format(gcerr_infile::shortSampleName,fileRef.GetShortName().c_str()));
+    }
+}
+
+void
+GCStructures::UnsetParse(const GCParse & parse)
+{
+    const GCFile & fileRef = parse.GetFileRef();
+    gcFileMap::iterator iter = m_files.find(fileRef.GetId());
+    assert(iter != m_files.end());
+    gcFileInfo & fileInfo = (*iter).second;
+    if(fileInfo.HasParse())
+    {
+        if(fileInfo.GetParse() == parse.GetId())
+        {
+            fileInfo.UnsetParse();
+        }
+    }
+
+    for(size_t popIndex = 0; popIndex < parse.GetPopCount(); popIndex++)
+    {
+        for(size_t locIndex = 0; locIndex < parse.GetLociCount(); locIndex++)
+        {
+            const GCParseBlock & block = parse.GetBlock(popIndex,locIndex);
+            RemoveBlockAssignment(block.GetId());
+        }
+    }
+}
+
+void
+GCStructures::AddFile(const GCFile & file)
+{
+    gcFileInfo g;
+    g.UnsetParse();
+    g.SetSelected(false);
+    m_files[file.GetId()] = g;
+}
+
+void
+GCStructures::RemoveFile(size_t fileId)
+{
+    gcFileMap::iterator iter = m_files.find(fileId);
+    assert(iter != m_files.end());
+    m_files.erase(iter);
+}
+
+size_t
+GCStructures::GetHapFileAdjacent(size_t fileId) const
+{
+    assert(HasHapFileAdjacent(fileId));
+
+    gcFileMap::const_iterator iter = m_files.find(fileId);
+    assert(iter != m_files.end());
+    const gcFileInfo & fileInfo = (*iter).second;
+
+    return fileInfo.GetAdjacentHapAssignment();
+}
+
+void
+GCStructures::SetHapFileAdjacent(size_t fileId, size_t numHaps)
+{
+    gcFileMap::iterator iter = m_files.find(fileId);
+    assert(iter != m_files.end());
+    gcFileInfo & fileInfo = (*iter).second;
+
+    // EWFIX.P4
+    // this part is here to throw an error if this would cause
+    // a problem
+    const GCParse & parse = GetParse(fileId);
+    gcPhaseInfo * info = parse.GetPhaseRecordsForAdjacency(numHaps);
+    delete info;
+
+    fileInfo.SetAdjacentHapAssignment(numHaps);
+}
+
+void
+GCStructures::UnsetHapFileAdjacent(size_t fileId)
+{
+    gcFileMap::iterator iter = m_files.find(fileId);
+    assert(iter != m_files.end());
+    gcFileInfo & fileInfo = (*iter).second;
+
+    fileInfo.UnsetAdjacentHapAssignment();
+}
+
+bool
+GCStructures::HasHapFileAdjacent(size_t fileId) const
+{
+    gcFileMap::const_iterator iter = m_files.find(fileId);
+    assert(iter != m_files.end());
+    const gcFileInfo & fileInfo = (*iter).second;
+    return fileInfo.HasAdjacentHapAssignment();
+}
+
+void
+GCStructures::UnsetGenoFile(size_t fileId)
+{
+    gcFileMap::iterator iter = m_files.find(fileId);
+    assert(iter != m_files.end());
+    gcFileInfo & fileInfo = (*iter).second;
+
+    fileInfo.UnsetGenoFile();
+}
+
+bool
+GCStructures::HasGenoFile(size_t fileId) const
+{
+    gcFileMap::const_iterator iter = m_files.find(fileId);
+    assert(iter != m_files.end());
+    const gcFileInfo & fileInfo = (*iter).second;
+    return fileInfo.HasGenoFile();
+}
+
+void
+GCStructures::VerifyLocusSeparations(const gcRegion& regionRef) const
+{
+    constObjVector loci =
+        GetConstDisplayableLociInMapOrderFor(regionRef.GetId());
+
+    bool havePrevious = false;
+    long lastExtent = 0;
+    size_t lastLocusId = 0;
+
+    for(constObjVector::const_iterator i=loci.begin(); i != loci.end(); i++)
+    {
+        const gcLocus * locusP = dynamic_cast<const gcLocus *>(*i);
+        assert(locusP != NULL);
+
+        if(locusP->GetLinked())
+            // no need to check unlinked loci, they are their own region
+        {
+            long start = 0;
+            long stop = 0;
+            if(locusP->HasMapPosition())
+                // EWFIX.P3 -- should we insist ?
+            {
+                start += locusP->GetMapPosition();
+                stop  += locusP->GetMapPosition();
+            }
+            if(locusP->HasOffset())
+                // EWFIX.P3 -- should we insist ?
+            {
+                start += locusP->GetOffset();
+                stop  += locusP->GetOffset();
+            }
+            stop   += locusP->GetLength();
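+            // the locus is treated as the inclusive span [start, start + length - 1],
+            // hence the subtraction below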
+            stop -= 1;    // EWFIX explain why
+
+            if(havePrevious)
+            {
+                if(start <= lastExtent)
+                {
+                    long end = locusP->GetMapPosition()+locusP->GetLength()-1;
+                    long lastStart = GetLocus(lastLocusId).GetMapPosition();
+                    throw gc_locus_overlap(locusP->GetName(),start,end,
+                                           GetLocus(lastLocusId).GetName(),lastStart,lastExtent);
+                }
+            }
+
+            havePrevious = true;
+            lastExtent = stop;
+            lastLocusId = locusP->GetId();
+        }
+    }
+}
+
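+// Returns true if any displayable locus reports a map position, offset, location,
+// or unphased-marker position equal to zero, or if the data store's phase
+// information contains any zeroes.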
+bool
+GCStructures::AnyZeroes() const
+{
+    constObjVector loci = GetConstDisplayableLoci();
+
+    for(constObjVector::const_iterator i = loci.begin(); i != loci.end(); i++)
+    {
+        const GCQuantum * q = *i;
+        const gcLocus * locusP = dynamic_cast<const gcLocus*>(q);
+        assert(locusP != NULL);
+
+        if(locusP->HasMapPosition())
+        {
+            if (locusP->GetMapPosition() == 0) return true;
+        }
+
+        if(locusP->HasOffset())
+        {
+            if (locusP->GetOffset() == 0) return true;
+        }
+
+        if(locusP->HasLocationZero()) return true;
+
+        if (locusP->HasUnphasedMarkers())
+        {
+            const gcUnphasedMarkers * phaseInfo = locusP->GetUnphasedMarkers();
+            if(phaseInfo != NULL)
+            {
+                if(phaseInfo->HasZero()) return true;
+            }
+        }
+
+    }
+
+    assert(m_dataStoreP != NULL);
+    return m_dataStoreP->PhaseInfoHasAnyZeroes();
+}
+
+int
+GCStructures::GetPopCount()
+{
+    return m_pops.size();
+}
+
+int
+GCStructures::GetPopCount() const
+{
+    return m_pops.size();
+}
+
+int
+GCStructures::GetParentCount()
+{
+    return m_parents.size();
+}
+
+int
+GCStructures::GetParentCount() const
+{
+    return m_parents.size();
+}
+
+bool
+GCStructures::IsPop(size_t id)
+{
+    gcPopMap::iterator iter = m_pops.find(id);
+    if (iter == m_pops.end())
+    {
+        return false;
+    }
+    return true;
+}
+
+bool
+GCStructures::IsPop(size_t id) const
+{
+    gcPopMap::const_iterator iter = m_pops.find(id);
+    if (iter == m_pops.end())
+    {
+        return false;
+    }
+    return true;
+}
+
+bool
+GCStructures::IsPop(wxString name)
+{
+    for(gcPopMap::iterator i=m_pops.begin();
+        i != m_pops.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool
+GCStructures::IsPop(wxString name) const
+{
+    for(gcPopMap::const_iterator i=m_pops.begin();
+        i != m_pops.end();
+        i++)
+    {
+        if((*i).second.GetName().CmpNoCase(name) == 0)
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool
+GCStructures::IsParent(size_t id)
+{
+    gcParentMap::iterator iter = m_parents.find(id);
+    if (iter == m_parents.end())
+    {
+        return false;
+    }
+    return true;
+}
+
+bool
+GCStructures::IsParent(size_t id) const
+{
+    gcParentMap::const_iterator iter = m_parents.find(id);
+    if (iter == m_parents.end())
+    {
+        return false;
+    }
+    return true;
+}
+
+int
+GCStructures::GetUnusedPopCount()
+{
+    int count = 0;
+    gcDisplayOrder ids = GetDisplayablePopIds();
+    for(gcDisplayOrder::iterator iter=ids.begin(); iter != ids.end(); iter++)
+    {
+        size_t id = *iter;
+        wxLogVerbose("GCStructures::GetUnusedPopCount pop id:%i name:%s", id, GetPop(id).GetName().c_str());  // JMDBG
+        if (!GetPop(id).HasParent())
+        {
+            count++;
+            wxLogVerbose("     has no parent");
+        }
+        else
+        {
+            wxLogVerbose("     has parent: %i", (int)GetPop(id).GetParentId());
+        }
+    }
+    wxLogVerbose("GCStructures::GetUnusedPopCount count: %i", count);
+    return count;
+}
+
+int
+GCStructures::GetUnusedPopCount() const
+{
+    int count = 0;
+    gcDisplayOrder ids = GetDisplayablePopIds();
+    for(gcDisplayOrder::iterator iter=ids.begin(); iter != ids.end(); iter++)
+    {
+        size_t id = *iter;
+        if (!GetPop(id).HasParent())
+        {
+            count++;
+            wxLogVerbose("GCStructures::GetUnusedPopCount const pop id:%i name:%s has no parent", id, GetPop(id).GetName().c_str());  // JMDBG
+        }
+        else
+        {
+            wxLogVerbose("GCStructures::GetUnusedPopCount const pop id:%i name:%s has parent: %i", id, GetPop(id).GetName().c_str(), (int)GetPop(id).GetParentId());// JMDBG
+        }
+    }
+    wxLogVerbose("GCStructures::GetUnusedPopCount count: %i", count);
+    return count;
+}
+
+int
+GCStructures::GetUnusedParentCount()
+{
+    int count = 0;
+    gcDisplayOrder ids = GetParentIds();
+    for(gcDisplayOrder::iterator iter=ids.begin(); iter != ids.end(); iter++)
+    {
+        size_t id = *iter;
+        if (!GetParent(id).HasParent())
+        {
+            count++;
+            wxLogVerbose("GCStructures::GetUnusedParentCount par id: %i name:%s has no parent", id, GetParent(id).GetName().c_str());  // JMDBG
+        }
+        else
+        {
+            wxLogVerbose("GCStructures::GetUnusedParentCount par id: %i name:%s has parent: %s", id, GetParent(id).GetName().c_str(), (int)GetParent(id).GetParentId());  // JMDBG
+        }
+    }
+    wxLogVerbose("GCStructures::GetUnusedParentCount count: %i", count);
+    return count;
+}
+
+int
+GCStructures::GetUnusedParentCount() const
+{
+    int count = 0;
+    gcDisplayOrder ids = GetParentIds();
+    for(gcDisplayOrder::iterator iter=ids.begin(); iter != ids.end(); iter++)
+    {
+        size_t id = *iter;
+        wxLogVerbose("GCStructures::GetUnusedParentCount const par id: %i name:%s", id, GetParent(id).GetName().c_str());  // JMDBG
+        if (!GetParent(id).HasParent())
+        {
+            count++;
+            wxLogVerbose("     has no parent");
+        }
+        else
+        {
+            wxLogVerbose("     has parent: &s", (int)GetParent(id).GetParentId());
+        }
+    }
+    wxLogVerbose("GCStructures::GetUnusedParentCount count: %i", count);
+    return count;
+}
+
+void
+GCStructures::ClearPopDisplayOrder()
+{
+    gcDisplayOrder ids = GetDisplayablePopIds() ;
+    for(gcDisplayOrder::iterator iter=ids.begin(); iter != ids.end(); iter++)
+    {
+        GetPop(*iter).SetDispOrder(0);
+    }
+}
+
+size_t
+GCStructures::FindTopParent()
+{
+    size_t retid = gcdefault::badIndex;
+    gcDisplayOrder ids = GetParentIds();
+    for(gcDisplayOrder::iterator iter=ids.begin(); iter != ids.end(); iter++)
+    {
+        if (!GetParent(*iter).HasParent())
+        {
+            retid = *iter;
+        }
+    }
+    assert(retid != gcdefault::badIndex);
+    return retid;
+}
+
+bool
+GCStructures::HasParents() const
+{
+    if (m_parents.size() > 0)
+    {
+        return true;
+    }
+    else
+    {
+        return false;
+    }
+}
+
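+// Builds the complete migration matrix over all displayable populations and parents
+// (matrix order: populations first, then parents), creating any pairwise migration
+// that does not already exist.  Once fewer than two parents remain unused, entries
+// that no longer apply are removed: a parent with no parent of its own (the top of
+// the divergence tree) loses all of its migrations, and every other parent loses
+// the migrations to and from its own two children.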
+void
+GCStructures::MakeMigrationMatrix()
+{
+    // convenience to make the code easier to read
+    gcDisplayOrder popids = GetDisplayablePopIds();
+    gcDisplayOrder parids = GetParentIds();
+
+    int matrixDim = popids.size() + parids.size();
+
+    assert(matrixDim > 0);
+
+    size_t idex;
+    size_t jdex;
+
+    // build whole matrix
+    for (int i=0; i<matrixDim; i++)
+    {
+        for (int j=0; j<matrixDim; j++)
+        {
+            if (j!=i)
+            {
+                if (i < (int)popids.size())
+                {
+                    int count = 0;
+                    for(gcDisplayOrder::const_iterator iter = popids.begin(); iter != popids.end(); iter++)
+                    {
+                        if (count == i)
+                        {
+                            idex = *iter;
+                        }
+                        count ++;
+                    }
+                }
+                else
+                {
+                    int ioff = i - popids.size();
+                    int count = 0;
+                    for(gcDisplayOrder::const_iterator iter = parids.begin(); iter != parids.end(); iter++)
+                    {
+                        if (count == ioff)
+                        {
+                            idex = *iter;
+                        }
+                        count ++;
+                    }
+                }
+
+                if (j < (int) popids.size())
+                {
+                    int count = 0;
+                    for(gcDisplayOrder::const_iterator jter = popids.begin(); jter != popids.end(); jter++)
+                    {
+                        if (count == j)
+                        {
+                            jdex = *jter;
+                        }
+                        count ++;
+                    }
+                }
+                else
+                {
+                    int joff = j - popids.size();
+                    int count = 0;
+                    for(gcDisplayOrder::const_iterator jter = parids.begin(); jter != parids.end(); jter++)
+                    {
+                        if (count == joff)
+                        {
+                            jdex = *jter;
+                        }
+                        count ++;
+                    }
+                }
+                if (!HasMigration(idex, jdex))
+                {
+                    gcMigration Mig1 = MakeMigration(true, idex, jdex );
+                }
+                if (!HasMigration(jdex, idex))
+                {
+                    gcMigration Mig2 = MakeMigration(true, jdex, idex );
+                }
+            }
+        }
+    }
+
+    if (GetUnusedParentCount() < 2)
+    {
+        // divergence linkages finished so need to remove some of the matrix members
+
+        // creation order list - used to decide which children have been used
+        // and are thus no longer available for migration
+        size_t* orderids = new size_t[matrixDim];
+        for (int i=0; i<matrixDim; i++)
+        {
+            orderids[i] = gcdefault::badIndex;
+        }
+
+        // populations can always migrate between each other before parents are declared
+        int mdx = 0;
+        for(gcDisplayOrder::const_iterator iter = popids.begin(); iter != popids.end(); iter++)
+        {
+            orderids[mdx] = *iter;
+            mdx++;
+        }
+
+        // now remove those that disappear as parents are declared
+        for(gcDisplayOrder::const_iterator iter = parids.begin(); iter != parids.end(); iter++)
+        {
+            orderids[mdx] = *iter;
+            const gcParent & parent = GetParent(*iter);
+
+            if (!parent.HasParent())
+            {
+                // no migrations possible at top
+                for(gcDisplayOrder::const_iterator jter = popids.begin(); jter != popids.end(); jter++)
+                {
+                    RemoveMigration(*iter, *jter );
+                    RemoveMigration(*jter, *iter );
+                }
+
+                for(gcDisplayOrder::const_iterator jter = parids.begin(); jter != parids.end(); jter++)
+                {
+                    if (*jter != *iter)
+                    {
+                        RemoveMigration(*iter, *jter );
+                        RemoveMigration(*jter, *iter );
+                    }
+                }
+            }
+            else
+            {
+                size_t child1Id = parent.GetChild1Id();
+                size_t child2Id = parent.GetChild2Id();
+                for (int i=0; i<matrixDim; i++)
+                {
+                    if (child1Id == orderids[i])
+                    {
+                        RemoveMigration(*iter, child1Id );
+                        RemoveMigration(child1Id, *iter );
+                    }
+                    if (child2Id == orderids[i])
+                    {
+                        RemoveMigration(*iter, child2Id );
+                        RemoveMigration(child2Id, *iter );
+                    }
+                }
+            }
+            mdx ++;
+        }
+        delete[] orderids;
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_structures.h b/src/convModel/gc_structures.h
new file mode 100644
index 0000000..328acc9
--- /dev/null
+++ b/src/convModel/gc_structures.h
@@ -0,0 +1,331 @@
+// $Id: gc_structures.h,v 1.38 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRUCTURES_H
+#define GC_STRUCTURES_H
+
+#include <set>
+#include <map>
+#include <vector>
+
+#include "gc_dictionary.h"
+#include "gc_file_info.h"
+#include "gc_locus.h"
+#include "gc_migration.h"
+#include "gc_panel.h"
+#include "gc_parent.h"
+#include "gc_phase_info.h"
+#include "gc_population.h"
+#include "gc_structure_maps.h"
+#include "gc_region.h"
+#include "gc_trait.h"
+#include "wx/string.h"
+
+class gcTraitAllele;
+
+class gcNameSet : public gcDictionary
+{
+  protected:
+    virtual const wxString &    infoString() const;
+  public:
+    gcNameSet();
+    virtual ~gcNameSet();
+};
+
+class gcTraitNameSet : public gcDictionary
+{
+  protected:
+    virtual const wxString &    infoString() const;
+  public:
+    gcTraitNameSet();
+    virtual ~gcTraitNameSet();
+};
+
+class GCStructures
+{
+  private:
+    const GCDataStore * m_dataStoreP;
+    gcNameSet           m_names;
+    gcTraitNameSet      m_traitNames;
+    gcBlockSetMap       m_blocks;
+    gcRegionMap         m_regions;
+    gcLocusMap          m_loci;
+    gcPanelMap          m_panels;
+    gcParentMap         m_parents;
+    gcPopMap            m_pops;
+    gcTraitMap          m_traitClasses;
+    gcAlleleMap         m_alleles;
+    gcPhenoMap          m_phenotypes;
+    gcFileMap           m_files;
+    gcMigrationMap      m_migrations;
+
+    gcDisplayOrder      m_popDisplay;
+    gcDisplayOrder      m_regionDisplay;
+
+    bool                m_divergenceState;
+    bool                m_panelsState;
+    bool                m_divMigMatrixDefined;
+    bool                m_migMatrixDefined;
+
+
+  protected:
+    bool                HaveBlocksForRegion(size_t regionId) const;
+    bool                HaveBlocksForLocus(size_t locusId) const;
+    bool                HaveBlocksForPop(size_t popId) const;
+    bool                IsBlessedRegion(size_t locusId) const;
+    bool                IsBlessedLocus(size_t locusId) const;
+    bool                IsBlessedPop(size_t popId) const;
+
+  public:
+    GCStructures(const GCDataStore *);
+    ~GCStructures();
+
+    bool AnyZeroes() const;
+
+    void AssignBlockToPop(size_t blockId, size_t popId);
+    void AssignBlockToLocus(size_t blockId, size_t locusId);
+
+    void AssignBlock(size_t blockId, size_t popId, size_t locusId);
+    void AssignLocus(size_t locusId, size_t regionId);
+    void AssignTrait(size_t traitId, size_t regionId);
+
+    void AssignAllele(gcTraitAllele &, gcTraitInfo &);
+    void AssignLocus(gcLocus &, gcRegion &);
+    void AssignPhenotype(gcPhenotype &, gcTraitInfo &);
+    void AssignTrait(gcTraitInfo &, gcRegion &);
+
+    void RemoveBlockAssignment(size_t blockId);
+
+    void DebugDump(wxString prefix=wxEmptyString) const;
+
+    bool GetDivergenceState();
+    bool GetDivergenceState() const;
+    void SetDivergenceState(bool state);
+
+    bool GetPanelsState();
+    bool GetPanelsState() const;
+    void SetPanelsState(bool state);
+
+    bool GetDivMigMatrixDefined();
+    bool GetDivMigMatrixDefined() const;
+    void SetDivMigMatrixDefined(bool state);
+
+    bool GetMigMatrixDefined();
+    bool GetMigMatrixDefined() const;
+    void SetMigMatrixDefined(bool state);
+
+    gcIdSet     GetBlockIds(size_t popId, size_t locusId) const;
+    gcIdSet     GetBlocksForLocus(size_t locusId) const;
+
+    gcIdSet GetPanelIdsForRegions(gcIdSet regionIds) const;
+
+    gcDisplayOrder  GetDisplayableLocusIds() const;
+    gcDisplayOrder  GetDisplayablePopIds() const;
+    gcDisplayOrder  GetDisplayableRegionIds() const;
+    gcDisplayOrder  GetParentIds() const;
+
+    objVector   GetDisplayableRegions() ;
+    objVector   GetDisplayableLoci() ;
+    objVector   GetDisplayableLociFor(size_t regionId) ;
+    objVector   GetDisplayablePops() ;
+    objVector   GetParents() ;
+
+    constObjVector  GetConstDisplayableRegions()   const;
+    constObjVector  GetConstDisplayableLoci()     const;
+
+#if 0
+    constObjVector  GetConstDisplayableLociFor(size_t regionId) const ;
+#endif
+
+    constObjVector  GetConstDisplayableLociInMapOrderFor(size_t regionId) const ;
+    constObjVector  GetConstDisplayableLinkedLociInMapOrderFor(size_t regionId) const ;
+    constObjVector  GetConstDisplayablePops()  const;
+    constObjVector  GetConstParents()  const;
+
+    constObjVector  GetConstTraits() const;
+
+    gcIdVec         GetLocusIdsForRegionByCreation(size_t regionId) const;
+    gcIdVec         GetLocusIdsForRegionByMapPosition(size_t regionId) const;
+
+    gcTraitAllele &     GetAllele(size_t id);
+    gcRegion &          GetRegion(size_t id);
+    gcLocus &           GetLocus(size_t id);
+    gcPhenotype &       GetPhenotype(size_t id);
+    gcPopulation &      GetPop(size_t id);
+    gcTraitInfo &       GetTrait(size_t id);
+    gcPanel &           GetPanel(size_t id);
+    gcParent &          GetParent(size_t id);
+    gcMigration &       GetMigration(size_t id);
+
+    const gcTraitAllele &   GetAllele(size_t id) const;
+    const gcRegion &        GetRegion(size_t id) const;
+    const gcLocus &         GetLocus(size_t id) const;
+    const gcPhenotype &     GetPhenotype(size_t id) const;
+    const gcPopulation &    GetPop(size_t id) const;
+    const gcTraitInfo &     GetTrait(size_t id) const;
+    const gcPanel &         GetPanel(size_t id) const;
+    const gcPanel &         GetPanel(size_t regionId, size_t popId) const;
+    const gcParent &        GetParent(size_t id) const;
+    const gcMigration &     GetMigration(size_t id) const;
+    const gcMigration &     GetMigration(size_t fromId, size_t toId) const;
+
+    gcTraitAllele &             GetAllele(gcTraitInfo&,wxString name);
+    gcTraitAllele &             GetAllele(wxString name);
+    gcRegion &                  GetRegion(wxString name);
+    gcLocus &                   GetLocus(gcRegion&,wxString name);
+    gcLocus &                   GetLocus(wxString name);
+    gcPhenotype &               GetPhenotype(wxString name);
+    gcPopulation &              GetPop(wxString name);
+    gcTraitInfo &               GetTrait(wxString name);
+    gcPanel &                   GetPanel(wxString name);
+    gcPanel &                   GetPanel(size_t regionId, size_t popId);
+    gcParent &                  GetParent(wxString name);
+    gcMigration &               GetMigration(wxString name);
+    gcMigration &               GetMigration(size_t fromId, size_t toId);
+
+    const gcTraitAllele &       GetAllele(wxString name) const;
+    const gcRegion &            GetRegion(wxString name) const;
+    const gcLocus &             GetLocus(wxString name) const;
+    const gcPhenotype &         GetPhenotype(wxString name) const;
+    const gcPopulation &        GetPop(wxString name) const;
+    const gcTraitInfo &         GetTrait(wxString name) const;
+    const gcPanel &             GetPanel(wxString name) const;
+    const gcParent &            GetParent(wxString name) const;
+
+    bool  HasParents() const;
+    bool  HasPanel(size_t regionId, size_t popId) const;
+    bool  HasBlock(size_t locusId, size_t popId) const;
+    bool  HasMigration(size_t fromId, size_t toId) const;
+
+    const GCPopMatcher &        GetPopMatcher(const GCFile&) const;
+    const GCLocusMatcher &      GetLocusMatcher(const GCFile&) const;
+    void                        SetPopMatcher(const GCFile&, const GCPopMatcher &);
+    void                        SetLocusMatcher(const GCFile&, const GCLocusMatcher &);
+
+    const gcPhenoMap &          GetPhenotypeMap() const;
+
+    bool            HasAllele(wxString name) const;
+    bool            HasLocus(wxString name) const;
+    bool            HasPop(wxString name) const;
+    bool            HasRegion(wxString name) const;
+    bool            HasTrait(wxString name) const;
+    bool            HasPanel(wxString name) const;
+    bool            HasMigration(wxString name) const;
+
+    long GetPopDisplayIndexOf(size_t popId) const;
+    long GetLocusDisplayIndexOf(size_t locusId) const;
+
+    size_t          GetPopForBlock(size_t blockId) const;
+    size_t          GetLocusForBlock(size_t blockId) const;
+
+    gcTraitAllele & FetchOrMakeAllele(gcTraitInfo&, wxString name);
+    gcLocus &       FetchOrMakeLocus(gcRegion&, wxString name, const gcCreationInfo&);
+    //gcPhenotype &   FetchOrMakePhenotype();
+    gcPanel &       FetchOrMakePanel(wxString name);
+    gcPopulation &  FetchOrMakePop(wxString name);
+    gcRegion &      FetchOrMakeRegion(wxString name);
+    gcTraitInfo &   FetchOrMakeTrait(wxString name);
+
+    gcTraitAllele & MakeAllele(wxString name);
+    gcRegion &      MakeRegion(wxString name=wxEmptyString,bool blessed=false);
+    gcLocus &       MakeLocus(size_t regionId, wxString name, bool blessed, const gcCreationInfo&);
+    gcLocus &       MakeLocus(gcRegion &, wxString name, bool blessed, const gcCreationInfo&);
+    gcPhenotype &   MakePhenotype(wxString name=wxEmptyString);
+    gcPopulation &  MakePop(wxString name=wxEmptyString, bool blessed=false);
+    gcTraitInfo &   MakeTrait(wxString name);
+    gcPanel &       MakePanel(wxString name, bool blessed, size_t m_regionId, size_t m_popId);
+    gcParent &      MakeParent(wxString name);
+    gcMigration &   MakeMigration(bool blessed, size_t m_fromId, size_t m_toId);
+
+    gcPanel & CreatePanel(size_t m_regionId, size_t m_popId);
+
+    void Rename(GCQuantum & object, wxString newName);
+
+    bool RegionHasAnyLinkedLoci(size_t regionId) const;
+    bool RegionHasAnyUnLinkedLoci(size_t regionId) const;
+
+    void FragmentRegion(size_t regionId);
+    void LocusToOwnRegion(size_t locusId);
+
+    void MergeLoci(gcIdVec locusIds);
+    void MergePops(gcIdVec popIds);
+    void MergeRegions(gcIdVec regionIds);
+    //void MergePanels(gcIdVec panelIds);
+
+    void RemoveBlocks(gcIdSet blockIds);
+    void RemoveBlocksForLocus(size_t locusId);
+    void RemoveBlocksForPop(size_t popId);
+    void RemoveRegion(gcRegion & region);
+    void RemoveLocus(gcLocus & locus);
+    void RemovePop(gcPopulation & pop);
+    void RemoveRegions(objVector regions);
+    void RemoveLoci(objVector loci);
+    void RemovePops(objVector pops);
+    void RemoveFile(size_t fileId);
+    void RemovePanel(size_t regionId, size_t popId);
+    void RemoveParent(size_t parentId);
+    void RemoveParents();
+    void RemoveMigration(size_t fromId, size_t toId);
+
+    bool    GetFileSelection(size_t fileId) const;
+    void    SetFileSelection(size_t fileId, bool selected);
+    size_t  SelectedFileCount() const;
+    void    AllFileSelectionsTo(bool selectValue);
+
+    void AddFile(const GCFile &);
+    bool HasUnparsedFiles() const;
+
+    void SetParse(const GCParse & parse);
+    void UnsetParse(const GCParse & parse);
+
+    bool            HasParse(const GCFile &) const;
+    const GCParse & GetParse(const GCFile &) const;
+    const GCParse & GetParse(size_t fileId ) const;
+
+    void    SetHapFileAdjacent( size_t fileId, size_t numHaps);
+    bool    HasHapFileAdjacent(size_t fileId) const;
+    size_t  GetHapFileAdjacent(size_t fileId) const;
+    void    UnsetHapFileAdjacent(size_t fileId);
+
+    bool    HasGenoFile(size_t fileId) const;
+    void    UnsetGenoFile(size_t fileId);
+
+    void    VerifyLocusSeparations(const gcRegion&) const;    // throws if not separated
+
+    int GetPopCount();
+    int GetPopCount() const;
+
+    int GetParentCount();
+    int GetParentCount() const;
+
+    bool IsPop(size_t id);
+    bool IsPop(size_t id) const;
+    bool IsPop(wxString name);
+    bool IsPop(wxString name) const;
+
+    bool IsParent(size_t id);
+    bool IsParent(size_t id) const;
+
+    int GetUnusedPopCount();
+    int GetUnusedPopCount() const;
+
+    int GetUnusedParentCount();
+    int GetUnusedParentCount() const;
+
+    void ClearPopDisplayOrder();
+
+    size_t FindTopParent();
+
+    void MakeMigrationMatrix();
+};
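+
+// Illustrative sketch only (not part of the upstream sources): one plausible way a
+// caller might drive the file-selection interface declared above, assuming an already
+// populated GCStructures instance named `structures` and a GCFile named `file`:
+//
+//     structures.AddFile(file);                          // register the file
+//     structures.SetFileSelection(file.GetId(), true);   // mark it selected
+//     if (structures.SelectedFileCount() > 0)
+//     {
+//         structures.AllFileSelectionsTo(false);         // clear every selection again
+//     }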
+
+#endif  // GC_STRUCTURES_H
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_trait.cpp b/src/convModel/gc_trait.cpp
new file mode 100644
index 0000000..9656d54
--- /dev/null
+++ b/src/convModel/gc_trait.cpp
@@ -0,0 +1,130 @@
+// $Id: gc_trait.cpp,v 1.12 2011/06/22 18:22:22 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_default.h"
+#include "gc_genotype_resolution.h"
+#include "gc_phenotype.h"
+#include "gc_strings_trait.h"
+#include "gc_trait.h"
+#include "gc_trait_allele.h"
+#include "gc_trait_err.h"
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+gcTraitInfo::gcTraitInfo()
+    :
+    m_hasRegion(false),
+    m_regionId(gcdefault::badIndex)
+{
+}
+
+gcTraitInfo::~gcTraitInfo()
+{
+}
+
+void
+gcTraitInfo::AddAllele(const gcTraitAllele& allele)
+{
+    if(m_alleleIds.find(allele.GetId()) != m_alleleIds.end())
+    {
+        throw gc_trait_allele_name_reuse(allele.GetName());
+    }
+    m_alleleIds.insert(allele.GetId());
+}
+
+void
+gcTraitInfo::RemoveAllele(const gcTraitAllele& allele)
+{
+    gcIdSet::iterator iter = m_alleleIds.find(allele.GetId());
+    assert(iter != m_alleleIds.end());
+    m_alleleIds.erase(iter);
+}
+
+bool
+gcTraitInfo::HasAlleleId(size_t alleleId) const
+{
+    return (!(m_alleleIds.find(alleleId) == m_alleleIds.end()));
+}
+
+void
+gcTraitInfo::AddPhenotype(const gcPhenotype& pheno)
+{
+    if(m_phenotypeIds.find(pheno.GetId()) != m_phenotypeIds.end())
+    {
+        throw gc_trait_phenotype_name_reuse(pheno.GetName(),GetName());
+    }
+    m_phenotypeIds.insert(pheno.GetId());
+}
+
+void
+gcTraitInfo::RemovePhenotype(const gcPhenotype& pheno)
+{
+    gcIdSet::iterator iter = m_phenotypeIds.find(pheno.GetId());
+    assert(iter != m_phenotypeIds.end());
+    m_phenotypeIds.erase(iter);
+}
+
+bool
+gcTraitInfo::HasPhenotype(const gcPhenotype & phenotype) const
+{
+    return (!(m_phenotypeIds.find(phenotype.GetId()) == m_phenotypeIds.end()));
+}
+
+const gcIdSet &
+gcTraitInfo::GetAlleleIds() const
+{
+    return m_alleleIds;
+}
+
+const gcIdSet &
+gcTraitInfo::GetPhenotypeIds() const
+{
+    return m_phenotypeIds;
+}
+
+size_t
+gcTraitInfo::GetRegionId() const
+{
+    return m_regionId;
+}
+
+bool
+gcTraitInfo::HasRegionId() const
+{
+    return m_hasRegion;
+}
+
+void
+gcTraitInfo::SetRegionId(size_t id)
+{
+    m_hasRegion = true;
+    m_regionId = id;
+}
+
+void
+gcTraitInfo::UnsetRegionId()
+{
+    m_hasRegion = false;
+}
+
+void
+gcTraitInfo::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%strait \"%s\" has alleles: %s, phenotypes: %s",    // EWDUMPOK
+               prefix.c_str(),
+               GetName().c_str(),
+               m_alleleIds.AsString().c_str(),
+               m_phenotypeIds.AsString().c_str());
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_trait.h b/src/convModel/gc_trait.h
new file mode 100644
index 0000000..f215dfc
--- /dev/null
+++ b/src/convModel/gc_trait.h
@@ -0,0 +1,63 @@
+// $Id: gc_trait.h,v 1.12 2011/06/22 18:22:22 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_TRAIT_H
+#define GC_TRAIT_H
+
+#include <map>
+#include <set>
+#include "gc_quantum.h"
+#include "gc_set_util.h"
+#include "wx/string.h"
+
+class GCGenotypeResolution;
+class gcPhenotype;
+class GCStructures;
+class gcTraitAllele;
+
+class gcTraitInfo : public GCQuantum
+{
+    friend class GCStructures;
+
+  private:
+    bool                    m_hasRegion;
+    size_t                  m_regionId;
+
+    gcIdSet                 m_alleleIds;
+    gcIdSet                 m_phenotypeIds;
+
+    void SetRegionId(size_t id);
+    void UnsetRegionId();
+
+    void AddAllele(const gcTraitAllele &);
+    void RemoveAllele(const gcTraitAllele &);
+
+    void AddPhenotype(const gcPhenotype&);
+    void RemovePhenotype(const gcPhenotype&);
+
+  public:
+    gcTraitInfo();
+    ~gcTraitInfo();
+
+    const gcIdSet & GetAlleleIds() const;
+    bool            HasAlleleId(size_t alleleId) const;
+
+    const gcIdSet & GetPhenotypeIds() const;
+    bool            HasPhenotype(const gcPhenotype &)const;
+
+    size_t      GetRegionId()       const   ;
+    bool        HasRegionId()       const   ;
+
+    void        DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+#endif  // GC_TRAIT_H
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_trait_allele.cpp b/src/convModel/gc_trait_allele.cpp
new file mode 100644
index 0000000..8f7615f
--- /dev/null
+++ b/src/convModel/gc_trait_allele.cpp
@@ -0,0 +1,66 @@
+// $Id: gc_trait_allele.cpp,v 1.4 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_default.h"
+#include "gc_trait_allele.h"
+#include "gc_trait_err.h"
+
+//------------------------------------------------------------------------------------
+
+gcTraitAllele::gcTraitAllele()
+    :
+    m_hasTraitId(false),
+    m_traitId(gcdefault::badIndex)
+{
+}
+
+gcTraitAllele::~gcTraitAllele()
+{
+}
+
+bool
+gcTraitAllele::HasTraitId() const
+{
+    return m_hasTraitId;
+}
+
+size_t
+gcTraitAllele::GetTraitId() const
+{
+    assert(HasTraitId());
+    return m_traitId;
+}
+
+void
+gcTraitAllele::SetTraitId(size_t traitId)
+{
+    m_hasTraitId = true;
+    m_traitId = traitId;
+}
+
+void
+gcTraitAllele::UnsetTraitId()
+{
+    m_hasTraitId = false;
+}
+
+void
+gcTraitAllele::SetName(wxString alleleName)
+{
+    if(alleleName.Find(' ') != wxNOT_FOUND)
+    {
+        throw gc_trait_allele_name_spaces(alleleName);
+    }
+    GCQuantum::SetName(alleleName);
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_trait_allele.h b/src/convModel/gc_trait_allele.h
new file mode 100644
index 0000000..43121c0
--- /dev/null
+++ b/src/convModel/gc_trait_allele.h
@@ -0,0 +1,43 @@
+// $Id: gc_trait_allele.h,v 1.5 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_TRAIT_ALLELE_H
+#define GC_TRAIT_ALLELE_H
+
+#include "gc_quantum.h"
+#include "wx/string.h"
+
+class gcTraitAllele : public GCQuantum
+{
+    friend class GCStructures;
+
+  private:
+    bool                            m_hasTraitId;
+    size_t                          m_traitId;
+
+    void    SetTraitId(size_t traitId);
+    void    UnsetTraitId();
+
+  protected:
+
+  public:
+    gcTraitAllele();
+    virtual ~gcTraitAllele();
+
+    bool    HasTraitId()    const;
+    size_t  GetTraitId()    const;
+
+    virtual void    SetName(wxString name); // overrides gc_quantum version
+    // so name can be checked for spaces
+};
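+
+// Illustrative sketch only (not part of the upstream sources): SetName() rejects
+// names containing spaces, so a caller would be expected to guard or catch, e.g.:
+//
+//     gcTraitAllele allele;
+//     try
+//     {
+//         allele.SetName("height modifier");             // contains a space
+//     }
+//     catch (const gc_trait_allele_name_spaces & e)
+//     {
+//         // fall back to a sanitized name
+//     }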
+
+#endif  // GC_TRAIT_ALLELE_H
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_types.cpp b/src/convModel/gc_types.cpp
new file mode 100644
index 0000000..5f2a567
--- /dev/null
+++ b/src/convModel/gc_types.cpp
@@ -0,0 +1,107 @@
+// $Id: gc_types.cpp,v 1.9 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_types.h"
+
+gcGeneralDataType::gcGeneralDataType()
+    : std::set<gcSpecificDataType> ()
+{
+}
+
+gcGeneralDataType::~gcGeneralDataType()
+{
+}
+
+bool
+gcGeneralDataType::HasAllelic() const
+{
+    if(find(sdatatype_MICROSAT) != end()) return true;
+    if(find(sdatatype_KALLELE) != end()) return true;
+    return false;
+}
+
+bool
+gcGeneralDataType::HasNucleic() const
+{
+    if(find(sdatatype_DNA) != end()) return true;
+    if(find(sdatatype_SNP) != end()) return true;
+    return false;
+}
+
+bool
+gcGeneralDataType::CompatibleWith(const gcGeneralDataType& dtype) const
+{
+    for(const_iterator i = begin(); i != end() ; i++ )
+    {
+        gcSpecificDataType s1 = *i;
+        for(const_iterator j = dtype.begin(); j != dtype.end() ; j++ )
+        {
+            gcSpecificDataType s2 = *j;
+
+            if(s1 == s2) return true;
+
+        }
+    }
+
+    return false;
+}
+
+void
+gcGeneralDataType::Intersect(const gcGeneralDataType & dtype)
+{
+    gcGeneralDataType::iterator i = begin();
+    while(i != end())
+    {
+        if(dtype.find(*i) == dtype.end())
+        {
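+            // erase the element the iterator currently points at; the post-increment
+            // advances the iterator first, so it remains valid after the erase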
+            erase(i++);
+        }
+        else
+        {
+            i++;
+        }
+    }
+}
+
+void
+gcGeneralDataType::Disallow(gcSpecificDataType stype)
+{
+    gcGeneralDataType::iterator i = find(stype);
+    if(i != end())
+    {
+        erase(i);
+    }
+}
+
+void
+gcGeneralDataType::Union(const gcGeneralDataType & dtype)
+{
+    for(gcGeneralDataType::const_iterator i = dtype.begin(); i != dtype.end(); i++)
+    {
+        Union(*i);
+    }
+}
+
+void
+gcGeneralDataType::Union(const gcSpecificDataType dtype)
+{
+    insert(dtype);
+}
+
+gcGeneralDataType &
+gcGeneralDataType::operator=(const gcSpecificDataType dtype)
+{
+    clear();
+    Union(dtype);
+    return *this;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convModel/gc_types.h b/src/convModel/gc_types.h
new file mode 100644
index 0000000..cce6881
--- /dev/null
+++ b/src/convModel/gc_types.h
@@ -0,0 +1,132 @@
+// $Id: gc_types.h,v 1.30 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_TYPES_H
+#define GC_TYPES_H
+
+#include <map>
+#include <list>
+#include <set>
+#include <string>
+#include "Converter_types.h"
+
+enum GCFileFormat
+{
+    // should include each type of file converter can read
+    format_NONE_SET,
+    format_PHYLIP,
+    format_MIGRATE
+};
+
+enum gcSpecificDataType
+{
+    //  data types as used in lamarc
+    sdatatype_NONE_SET,
+    sdatatype_DNA,
+    sdatatype_SNP,
+    sdatatype_KALLELE,
+    sdatatype_MICROSAT
+};
+
+class gcGeneralDataType : public std::set<gcSpecificDataType>
+{
+  public:
+    gcGeneralDataType();
+    virtual ~gcGeneralDataType();
+    bool HasAllelic() const;
+    bool HasNucleic() const;
+    bool CompatibleWith(const gcGeneralDataType&) const;
+    void Disallow(const gcSpecificDataType);
+    void Intersect(const gcGeneralDataType&);
+    void Union(const gcGeneralDataType&);
+    void Union(const gcSpecificDataType);
+    gcGeneralDataType & operator=(const gcSpecificDataType);
+};
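+
+// Illustrative sketch only (not part of the upstream sources): gcGeneralDataType is a
+// std::set of specific types, so set-style composition works as declared above:
+//
+//     gcGeneralDataType nucleic;
+//     nucleic.Union(sdatatype_DNA);
+//     nucleic.Union(sdatatype_SNP);
+//
+//     gcGeneralDataType snpOnly;
+//     snpOnly = sdatatype_SNP;                    // operator= clears, then adds
+//
+//     bool ok = nucleic.CompatibleWith(snpOnly);  // true: both contain sdatatype_SNP
+//     nucleic.Intersect(snpOnly);                 // nucleic now holds only sdatatype_SNP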
+
+enum GCInterleaving
+{
+    // are data sequences presented all at once or for interleaved comparison
+    interleaving_NONE_SET,
+    interleaving_SEQUENTIAL,
+    interleaving_INTERLEAVED,
+    interleaving_MOOT
+};
+
+enum gcPhaseSource
+{
+    phaseSource_NONE_SET,
+    phaseSource_PHASE_FILE,
+    phaseSource_MULTI_PHASE_SAMPLE,
+    phaseSource_FILE_ADJACENCY,
+    phaseSource_COUNT
+};
+
+enum loc_match
+{
+    locmatch_DEFAULT,
+    locmatch_SINGLE,
+    locmatch_LINKED,
+    locmatch_VECTOR
+};
+
+enum pop_match
+{
+    popmatch_DEFAULT,
+    popmatch_NAME,
+    popmatch_SINGLE,
+    popmatch_VECTOR
+};
+
+enum matrix_type
+{
+    matrixtype_MIGRATION,
+    matrixtype_DIVERGENCE
+};
+
+enum matrix_cell_type
+{
+    matrixcelltype_EMPTY,
+    matrixcelltype_INVALID,
+    matrixcelltype_CORNER,
+    matrixcelltype_LABEL,
+    matrixcelltype_VALUE
+};
+
+enum matrix_cell_source
+{
+    matrixcellsource_NONE,
+    matrixcellsource_POP,
+    matrixcellsource_PARENT
+};
+
+enum migration_method
+{
+    migmethod_USER,
+    migmethod_FST
+};
+
+enum migration_profile
+{
+    migprofile_NONE,
+    migprofile_FIXED,
+    migprofile_PERCENTILE
+};
+
+enum migration_constraint
+{
+    migconstraint_INVALID,
+    migconstraint_CONSTANT,
+    migconstraint_SYMMETRIC,
+    migconstraint_UNCONSTRAINED
+};
+
+#endif  // GC_TYPES_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_genotype_resolution.cpp b/src/convParse/gc_genotype_resolution.cpp
new file mode 100644
index 0000000..e909b29
--- /dev/null
+++ b/src/convParse/gc_genotype_resolution.cpp
@@ -0,0 +1,117 @@
+// $Id: gc_genotype_resolution.cpp,v 1.9 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_genotype_resolution.h"
+#include "gc_strings.h"
+#include "gc_trait_err.h"
+#include "wx/log.h"
+
+GCHaplotypeProbability::GCHaplotypeProbability()
+    :
+    m_penetrance(0) // EWFIX.P3
+{
+}
+
+GCHaplotypeProbability::GCHaplotypeProbability(double penetrance, wxArrayString alleleNames)
+    :
+    m_penetrance(penetrance)
+{
+    if(penetrance < 0)
+    {
+        throw gc_haplotype_probability_negative(penetrance);
+    }
+    // EWFIX.P4 -- we take all allele names, even if they
+    // are misspellings. Perhaps we should require a definition ?
+    m_alleleNames = alleleNames;
+}
+
+GCHaplotypeProbability::~GCHaplotypeProbability()
+{
+}
+
+double
+GCHaplotypeProbability::GetPenetrance() const
+{
+    return m_penetrance;
+}
+
+wxString
+GCHaplotypeProbability::GetAllelesAsString() const
+{
+    wxString asString = " ";
+    for(size_t i=0; i < m_alleleNames.GetCount(); i++)
+    {
+        asString += m_alleleNames[i];
+        asString += " ";
+    }
+
+    return asString;
+}
+
+void
+GCHaplotypeProbability::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sprobability %f for alleles %s",prefix.c_str(),GetPenetrance(),GetAllelesAsString().c_str());
+}
+
+//------------------------------------------------------------------------------------
+
+GCGenotypeResolution::GCGenotypeResolution()
+    :
+    m_traitName(wxEmptyString)
+{
+}
+
+GCGenotypeResolution::GCGenotypeResolution(wxString traitName)
+    :
+    m_traitName(traitName)
+{
+}
+
+void
+GCGenotypeResolution::foo() const
+{
+    wxLogDebug("EWFIX -- deleting %p",this);
+}
+
+GCGenotypeResolution::~GCGenotypeResolution()
+{
+    foo();
+}
+
+void
+GCGenotypeResolution::AppendHap(double penetrance, wxArrayString alleles)
+{
+    m_probabilities.push_back(GCHaplotypeProbability(penetrance,alleles));
+}
+
+wxString
+GCGenotypeResolution::GetTraitName() const
+{
+    return m_traitName;
+}
+
+const std::vector<GCHaplotypeProbability> &
+GCGenotypeResolution::GetHapProbs() const
+{
+    return m_probabilities;
+}
+
+void
+GCGenotypeResolution::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%s:resolution for trait %s",prefix.c_str(),GetTraitName().c_str());
+    for(size_t i=0; i < m_probabilities.size(); i++)
+    {
+        m_probabilities[i].DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_genotype_resolution.h b/src/convParse/gc_genotype_resolution.h
new file mode 100644
index 0000000..d2858ef
--- /dev/null
+++ b/src/convParse/gc_genotype_resolution.h
@@ -0,0 +1,58 @@
+// $Id: gc_genotype_resolution.h,v 1.8 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_GENOTYPE_RESOLUTION_H
+#define GC_GENOTYPE_RESOLUTION_H
+
+#include <set>
+#include <vector>
+#include "wx/arrstr.h"
+#include "wx/string.h"
+
+class GCHaplotypeProbability
+{
+  private:
+    double          m_penetrance;
+    wxArrayString   m_alleleNames;
+
+  public:
+    GCHaplotypeProbability();       // should only be used by stl containers
+    GCHaplotypeProbability(double penetrance, wxArrayString alleleNames);
+    ~GCHaplotypeProbability();
+
+    double      GetPenetrance() const;
+    wxString    GetAllelesAsString() const;
+    void    DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class GCGenotypeResolution
+{
+  private:
+    wxString                               m_traitName;
+    std::vector<GCHaplotypeProbability>    m_probabilities;
+
+  public:
+    GCGenotypeResolution();                 // should only be used by stl containers
+    GCGenotypeResolution(wxString traitName);
+    void foo() const;
+    virtual ~GCGenotypeResolution();
+
+    void AppendHap(double penetrance, wxArrayString alleleNames);
+    wxString GetTraitName() const;
+    const std::vector<GCHaplotypeProbability> & GetHapProbs() const;
+
+    void    DebugDump(wxString prefix=wxEmptyString) const;
+};
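+
+// Illustrative sketch only (not part of the upstream sources): building a resolution
+// for a hypothetical trait with one equally weighted haplotype entry:
+//
+//     GCGenotypeResolution res("colour");         // "colour" is a made-up trait name
+//     wxArrayString hap;
+//     hap.Add("A");
+//     hap.Add("a");
+//     res.AppendHap(0.5, hap);
+//     const std::vector<GCHaplotypeProbability> & probs = res.GetHapProbs();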
+
+typedef std::set<const GCGenotypeResolution*>   gcTraitSet;
+
+#endif  // GC_GENOTYPE_RESOLUTION_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_loci_match.cpp b/src/convParse/gc_loci_match.cpp
new file mode 100644
index 0000000..1db5936
--- /dev/null
+++ b/src/convParse/gc_loci_match.cpp
@@ -0,0 +1,165 @@
+// $Id: gc_loci_match.cpp,v 1.16 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_datastore.h"
+#include "gc_default.h"
+#include "gc_errhandling.h"
+#include "gc_loci_match.h"
+#include "gc_parse.h"
+#include "gc_strings.h"
+#include "wx/filename.h"
+
+//------------------------------------------------------------------------------------
+
+GCLocusSpec::GCLocusSpec(bool blessedLocus, bool blessedRegion, wxString locusName, wxString regionName)
+    :
+    m_blessedLocus(blessedLocus),
+    m_blessedRegion(blessedRegion),
+    m_locusName(locusName),
+    m_regionName(regionName)
+{
+}
+
+GCLocusSpec::~GCLocusSpec()
+{
+}
+
+bool
+GCLocusSpec::GetBlessedLocus() const
+{
+    return m_blessedLocus;
+}
+
+bool
+GCLocusSpec::GetBlessedRegion() const
+{
+    return m_blessedRegion;
+}
+
+wxString
+GCLocusSpec::GetLocusName() const
+{
+    return m_locusName;
+}
+
+wxString
+GCLocusSpec::GetRegionName() const
+{
+    return m_regionName;
+}
+
+//------------------------------------------------------------------------------------
+
+GCLocusMatcher::GCLocusMatcher()
+    :
+    m_locMatchType(locmatch_DEFAULT)
+{
+    m_locNames.Empty();
+}
+
+GCLocusMatcher::GCLocusMatcher(loc_match locMatchType)
+    :
+    m_locMatchType(locMatchType)
+{
+    assert(m_locMatchType == locmatch_DEFAULT || m_locMatchType == locmatch_LINKED);
+    m_locNames.Empty();
+}
+
+GCLocusMatcher::GCLocusMatcher(loc_match locMatchType, wxString name)
+    :
+    m_locMatchType(locMatchType)
+{
+    assert(m_locMatchType == locmatch_SINGLE);
+    m_locNames.Empty();
+    m_locNames.Add(name);
+}
+
+GCLocusMatcher::GCLocusMatcher(loc_match locMatchType, wxArrayString locNames)
+    :
+    m_locMatchType(locMatchType),
+    m_locNames(locNames)
+{
+    assert(m_locMatchType == locmatch_VECTOR);
+}
+
+GCLocusMatcher::~GCLocusMatcher()
+{
+}
+
+GCLocusSpec
+GCLocusMatcher::GetLocSpec(size_t index, const GCParse & parse) const
+{
+    if(!HandlesThisManyLoci(index))
+    {
+        wxString msg = wxString::Format(gcerr::tooFewLociInSpec,(int)index);
+        gc_implementation_error e(msg.c_str());
+        throw e;
+    }
+    wxString shortName = parse.GetFileRef().GetShortName();
+    wxString locName = wxString::Format(gcstr::locusNameFromFile,
+                                        (int)index+1,
+                                        shortName.c_str());
+    wxString regName = wxString::Format(gcstr::regionNameFromFile,
+                                        shortName.c_str());
+
+    switch(m_locMatchType)
+    {
+        case locmatch_DEFAULT:
+            regName = wxString::Format("%s_%d",regName.c_str(),(int)index+1); // EWFIX.STRING
+            return GCLocusSpec(false,false,locName,regName);
+            break;
+        case locmatch_SINGLE:
+            return GCLocusSpec(true,false,m_locNames[0],gcdefault::regionName);
+            break;
+        case locmatch_LINKED:
+            return GCLocusSpec(false,false,locName,regName);
+            break;
+        case locmatch_VECTOR:
+            return GCLocusSpec(true,false,m_locNames[index],gcdefault::regionName);
+            break;
+    }
+    assert(false);
+    return GCLocusSpec(false,false,gcerr::emptyName,gcerr::emptyName);
+}
+
+bool
+GCLocusMatcher::HandlesThisManyLoci(size_t count) const
+{
+    switch(m_locMatchType)
+    {
+        case locmatch_DEFAULT:
+        case locmatch_SINGLE:
+        case locmatch_LINKED:
+            return true;
+            break;
+        case locmatch_VECTOR:
+            return (count <= m_locNames.size());
+            break;
+    }
+    assert(false);
+    return false;
+
+}
+
+loc_match
+GCLocusMatcher::GetLocMatchType() const
+{
+    return m_locMatchType;
+}
+
+const wxArrayString &
+GCLocusMatcher::GetLociNames() const
+{
+    return m_locNames;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_loci_match.h b/src/convParse/gc_loci_match.h
new file mode 100644
index 0000000..c9e1cfe
--- /dev/null
+++ b/src/convParse/gc_loci_match.h
@@ -0,0 +1,59 @@
+// $Id: gc_loci_match.h,v 1.11 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_LOCI_MATCH_H
+#define GC_LOCI_MATCH_H
+
+#include "gc_types.h"
+#include "wx/arrstr.h"
+
+class GCParse;
+
+class GCLocusSpec
+{
+  private:
+    bool            m_blessedLocus;
+    bool            m_blessedRegion;
+    wxString        m_locusName;
+    wxString        m_regionName;
+    GCLocusSpec();    // undefined
+
+  public:
+    GCLocusSpec(bool blessedLocus, bool blessedRegion, wxString locusName, wxString regionName);
+    virtual ~GCLocusSpec();
+
+    bool        GetBlessedLocus()   const ;
+    bool        GetBlessedRegion()  const ;
+    wxString    GetLocusName()      const ;
+    wxString    GetRegionName()     const ;
+};
+
+class GCLocusMatcher
+{
+  protected:
+    loc_match       m_locMatchType;
+    wxArrayString   m_locNames;
+
+  public:
+    GCLocusMatcher();
+    GCLocusMatcher(loc_match locMatchType);
+    GCLocusMatcher(loc_match locMatchType, wxString name);
+    GCLocusMatcher(loc_match locMatchType, wxArrayString names);
+    ~GCLocusMatcher();
+    GCLocusSpec GetLocSpec(size_t index, const GCParse & parse) const ;
+    bool        HandlesThisManyLoci(size_t count) const ;
+    loc_match   GetLocMatchType() const ;
+
+    const wxArrayString &   GetLociNames()  const;
+};
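+
+// Illustrative sketch only (not part of the upstream sources): a vector-style matcher
+// accepts at most as many loci as it was given names for:
+//
+//     wxArrayString names;
+//     names.Add("locus1");
+//     names.Add("locus2");
+//     GCLocusMatcher matcher(locmatch_VECTOR, names);
+//     bool twoOk   = matcher.HandlesThisManyLoci(2);    // true
+//     bool threeOk = matcher.HandlesThisManyLoci(3);    // false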
+
+#endif  // GC_LOCI_MATCH_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_locus.cpp b/src/convParse/gc_locus.cpp
new file mode 100644
index 0000000..25335df
--- /dev/null
+++ b/src/convParse/gc_locus.cpp
@@ -0,0 +1,678 @@
+// $Id: gc_locus.cpp,v 1.35 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_creation_info.h"
+#include "gc_errhandling.h"
+#include "gc_data.h"            // for ToWxString
+#include "gc_default.h"
+#include "gc_locus.h"
+#include "gc_locus_err.h"
+#include "gc_strings.h"
+#include "gc_strings_locus.h"
+#include "gc_structures_err.h"
+#include "wx/log.h"
+#include "wx/string.h"
+#include "wx/tokenzr.h"
+
+//------------------------------------------------------------------------------------
+
+gcLocus::gcLocus()
+    :
+    m_blessed(false),
+    m_hasRegion(false),
+    m_regionId(gcdefault::badIndex),
+    m_inducedByFile(false),
+    m_fileId(gcdefault::badIndex),
+    m_dataType(sdatatype_NONE_SET),
+    m_hasNumMarkers(false),
+    m_numMarkers(0),
+    m_hasTotalLength(false),
+    m_totalLength(0),
+    m_hasLinkedUserValue(false),
+    m_linkedUserValue(false),
+    m_hasOffset(false),
+    m_offset(1),
+    m_hasMapPosition(false),
+    m_mapPosition(1),
+    m_hasUnphasedMarkers(false)
+{
+    assert(m_unphasedMarkers.NumMarkers() == 0);
+    m_locations.clear();    // shouldn't be necessary
+}
+
+gcLocus::~gcLocus()
+{
+}
+
+bool
+gcLocus::GetBlessed() const
+{
+    return m_blessed;
+}
+
+void
+gcLocus::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%slocus %s (locus id %ld)", // EWDUMPOK
+               prefix.c_str(),
+               GetName().c_str(),
+               (long)GetId());
+    wxLogDebug("%sdatatype %s",(prefix+gcstr::indent).c_str(),ToWxString(GetDataType()).c_str());  // EWDUMPOK
+    wxLogDebug("%s%s markers",(prefix+gcstr::indent).c_str(),GetNumMarkersString().c_str());  // EWDUMPOK
+    wxLogDebug("%s%s total length",(prefix+gcstr::indent).c_str(),GetLengthString().c_str());   // EWDUMPOK
+    wxLogDebug("%s%s offset",(prefix+gcstr::indent).c_str(),GetOffsetString().c_str()); // EWDUMPOK
+    wxLogDebug("%s%s map position",(prefix+gcstr::indent).c_str(),GetMapPositionString().c_str());  // EWDUMPOK
+    wxLogDebug("%slocations:%s",(prefix+gcstr::indent).c_str(),GetLocationsAsString().c_str()); // EWDUMPOK
+    if(GetLinked())
+    {
+        wxLogDebug("%slinked",(prefix+gcstr::indent).c_str());  // EWDUMPOK
+    }
+    else
+    {
+        wxLogDebug("%sunlinked",(prefix+gcstr::indent).c_str());    // EWDUMPOK
+    }
+}
+
+bool
+gcLocus::HasLocations() const
+{
+    return !(m_locations.empty());
+}
+
+std::vector<long>
+gcLocus::GetLocations() const
+{
+    return m_locations;
+}
+
+wxString
+gcLocus::GetLocationsAsString() const
+{
+    wxString locationString = " ";
+    for(size_t index=0; index < m_locations.size(); index++)
+    {
+        locationString += wxString::Format("%ld ",m_locations[index]);
+    }
+    return locationString;
+}
+
+bool
+gcLocus::HasLocationZero() const
+{
+    for(size_t i = 0; i < m_locations.size(); i++)
+    {
+        long loc = m_locations[i];
+        if(loc == 0) return true;
+    }
+    return false;
+}
+
+wxString
+gcLocus::GetDataTypeString() const
+{
+    return ToWxString(m_dataType);
+}
+
+gcSpecificDataType
+gcLocus::GetDataType() const
+{
+    return m_dataType;
+}
+
+void
+gcLocus::SetDataType(gcSpecificDataType type)
+{
+    m_dataType = type;
+}
+
+bool
+gcLocus::HasLength() const
+{
+    if(HasTotalLength())
+    {
+        return true;
+    }
+    if(GetDataType() == sdatatype_DNA)
+    {
+        return HasNumMarkers();
+    }
+    return false;
+}
+
+size_t
+gcLocus::GetLength() const
+{
+    if(HasTotalLength())
+    {
+        return GetTotalLength();
+    }
+    if(HasNumMarkers())
+    {
+        if(GetDataType() == sdatatype_DNA)
+        {
+            return GetNumMarkers();
+        }
+        if(GetNumMarkers() == 1)
+        {
+            return 1;
+        }
+        else
+        {
+            throw gc_locus_without_length(GetName());
+        }
+    }
+    return gcdefault::badLength;
+}
+
+wxString
+gcLocus::GetLengthString() const
+{
+    if(HasLength())
+    {
+        return wxString::Format("%d",(int)GetLength());
+    }
+    return gcstr::unknown;
+}
+
+bool
+gcLocus::HasTotalLength() const
+{
+    return m_hasTotalLength;
+}
+
+size_t
+gcLocus::GetTotalLength() const
+{
+    assert(HasTotalLength());
+    return m_totalLength;
+}
+
+wxString
+gcLocus::GetTotalLengthString() const
+{
+    if(HasTotalLength())
+    {
+        return wxString::Format("%d",(int)GetTotalLength());
+    }
+    return gcstr::unknown;
+}
+
+void
+gcLocus::SetTotalLength(size_t totalLength)
+{
+    if(HasNumMarkers())
+    {
+        if(totalLength < GetNumMarkers())
+        {
+            throw gc_locus_err(wxString::Format(gcerr::lengthTooShort,(int)totalLength,(int)GetNumMarkers()).c_str());
+        }
+    }
+    m_hasTotalLength = true;
+    m_totalLength = totalLength;
+}
+
+void
+gcLocus::UnsetTotalLength()
+{
+    m_hasTotalLength = false;
+}
+
+bool
+gcLocus::HasNumMarkers() const
+{
+    return m_hasNumMarkers;
+}
+
+size_t
+gcLocus::GetNumMarkers() const
+{
+    if(!HasNumMarkers())
+    {
+        wxString msg = wxString::Format(gcerr_locus::unsetNumMarkers,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_numMarkers;
+}
+
+wxString
+gcLocus::GetNumMarkersString() const
+{
+    if(HasNumMarkers())
+    {
+        return wxString::Format("%d",(int)GetNumMarkers());
+    }
+    return gcstr::unknown;
+}
+
+void
+gcLocus::SetNumMarkers(size_t numMarkers)
+{
+    if(numMarkers == 0)
+    {
+        throw gc_num_markers_zero();
+    }
+    m_hasNumMarkers = true;
+    m_numMarkers = numMarkers;
+}
+
+bool
+gcLocus::HasOffset() const
+{
+    return m_hasOffset;
+}
+
+long
+gcLocus::GetOffset() const
+{
+    if(!HasOffset())
+    {
+        wxString msg = wxString::Format(gcerr_locus::unsetOffset,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_offset;
+}
+
+wxString
+gcLocus::GetOffsetString() const
+{
+    if(HasOffset())
+    {
+        return wxString::Format("%d",(int)GetOffset());
+    }
+    return gcstr::unknown;
+}
+
+void
+gcLocus::UnsetNumMarkers()
+{
+    m_hasNumMarkers = false;
+}
+
+bool
+gcLocus::HasRegion() const
+{
+    return m_hasRegion;
+}
+
+size_t
+gcLocus::GetRegionId() const
+{
+    if(!HasRegion())
+    {
+        wxString msg = wxString::Format(gcerr::unsetRegionId,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_regionId;
+}
+
+void
+gcLocus::SetFileId(size_t id)
+{
+    m_inducedByFile = true;
+    m_fileId = id;
+}
+
+void
+gcLocus::UnsetFileId()
+{
+    m_inducedByFile = false;
+}
+
+void
+gcLocus::SetRegionId(size_t id)
+{
+    m_hasRegion = true;
+    m_regionId = id;
+}
+
+void
+gcLocus::UnsetRegionId()
+{
+    m_hasRegion = false;
+}
+
+bool
+gcLocus::HasLinkedUserValue() const
+{
+    return m_hasLinkedUserValue;
+}
+
+bool
+gcLocus::GetLinkedUserValue() const
+{
+    if(!HasLinkedUserValue())
+    {
+        wxString msg = wxString::Format(gcerr_locus::unsetLinkedUserValue,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_linkedUserValue;
+}
+
+bool
+gcLocus::GetLinked() const
+{
+    if(HasLinkedUserValue())
+    {
+        return GetLinkedUserValue();
+    }
+    if(HasTotalLength())
+    {
+        return true;
+    }
+    if(HasLocations())
+    {
+        return true;
+    }
+    switch(GetDataType())
+    {
+        case sdatatype_NONE_SET:
+            // EWFIX.P4 -- should be able to determine values
+            // by reading data structures
+            return true;
+            break;
+        case sdatatype_DNA:
+        case sdatatype_SNP:
+            return true;
+            break;
+        case sdatatype_KALLELE:
+        case sdatatype_MICROSAT:
+            return false;
+            break;
+    }
+    assert(false);
+    return true;
+}
+
+wxString
+gcLocus::GetLinkedString() const
+{
+    bool val = GetLinked();
+    if(val) return gcstr::linkageYes;
+    return gcstr::linkageNo;
+}
+
+wxString
+gcLocus::GetLinkedUserValueString() const
+{
+    if(HasLinkedUserValue())
+    {
+        bool val = GetLinkedUserValue();
+        if(val) return gcstr::linkageYes;
+        return gcstr::linkageNo;
+    }
+    else
+    {
+        return gcstr::unknown;
+    }
+}
+
+void
+gcLocus::SetLinkedUserValue(bool linked)
+{
+    if(! linked)
+    {
+        if(GetDataType() == sdatatype_DNA || GetDataType() == sdatatype_SNP)
+        {
+            throw gc_unlinked_nuc();
+        }
+    }
+    m_hasLinkedUserValue = true;
+    m_linkedUserValue = linked;
+}
+
+bool
+gcLocus::HasMapPosition() const
+{
+    return m_hasMapPosition;
+}
+
+long
+gcLocus::GetMapPosition() const
+{
+    if(!HasMapPosition())
+    {
+        wxString msg = wxString::Format(gcerr::unsetMapPosition,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return m_mapPosition;
+}
+
+wxString
+gcLocus::GetMapPositionString() const
+{
+    if(HasMapPosition())
+    {
+        return wxString::Format("%d",(int)GetMapPosition());
+    }
+    return gcstr::unknown;
+}
+
+void
+gcLocus::SetMapPosition(long mapPosition)
+{
+    // we don't validate the value here because, while using
+    // the GUI, the user may need to pass through transiently
+    // impossible states
+    m_hasMapPosition = true;
+    m_mapPosition = mapPosition;
+}
+
+void
+gcLocus::UnsetMapPosition()
+{
+    m_hasMapPosition = false;
+}
+
+bool
+gcLocus::HasUnphasedMarkers() const
+{
+    return m_hasUnphasedMarkers;
+}
+
+const gcUnphasedMarkers *
+gcLocus::GetUnphasedMarkers() const
+{
+    assert(HasUnphasedMarkers());
+    return &m_unphasedMarkers;
+}
+
+wxString
+gcLocus::GetUnphasedMarkersAsString() const
+{
+    assert(HasUnphasedMarkers());
+    return m_unphasedMarkers.AsString();
+}
+
+void
+gcLocus::SetUnphasedMarkers(gcUnphasedMarkers markers)
+{
+    m_hasUnphasedMarkers = true;
+    m_unphasedMarkers = markers;
+}
+
+void
+gcLocus::SetBlessed(bool blessed)
+{
+    m_blessed = blessed;
+}
+
+void
+gcLocus::SetCreationInfo(const gcCreationInfo & creationInfo)
+{
+    m_creationInfo = creationInfo;
+}
+
+void
+gcLocus::SetOffset(long offset)
+{
+    // we don't validate the value here because, while using
+    // the GUI, the user may need to pass through transiently
+    // impossible states
+    m_offset = offset;
+    m_hasOffset = true;
+}
+
+void
+gcLocus::SetLocations(wxString locationString)
+{
+    wxStringTokenizer tokenizer(locationString);
+    m_locations.clear();
+    while(tokenizer.HasMoreTokens())
+    {
+        wxString token = tokenizer.GetNextToken();
+        long location;
+        if(!token.ToLong(&location))
+        {
+            wxString msg = wxString::Format(gcerr::notALocation,locationString.c_str());
+            throw gc_data_error(msg.c_str());
+        }
+        // cannot check the first one!
+        if(!m_locations.empty())
+        {
+            long previous = m_locations.back();
+            if(location <= previous)
+            {
+                throw gc_locations_out_of_order(GetName(),previous,location);
+            }
+        }
+        m_locations.push_back(location);
+    }
+    if(HasNumMarkers())
+    {
+        if(m_locations.size() != GetNumMarkers())
+        {
+            throw gc_set_locations_err(GetName(),locationString,GetNumMarkers(),m_locations.size());
+        }
+    }
+    else
+    {
+        SetNumMarkers(m_locations.size());
+    }
+}
+
+void
+gcLocus::LocusMergeLogic(bool doSettings, gcLocus & locusRef)
+{
+    // data type
+    if(GetDataType() != sdatatype_NONE_SET)
+    {
+        if(locusRef.GetDataType() != sdatatype_NONE_SET)
+        {
+            if(locusRef.GetDataType() != GetDataType())
+            {
+                throw gc_locus_user_data_type_mismatch(GetName(),locusRef.GetName(),GetDataType(),locusRef.GetDataType());
+            }
+        }
+    }
+
+    // num Markers
+    if(HasNumMarkers())
+    {
+        if(locusRef.HasNumMarkers())
+        {
+            if(locusRef.GetNumMarkers() != GetNumMarkers())
+            {
+                throw gc_locus_site_count_mismatch(GetName(),locusRef.GetName(),GetNumMarkers(),locusRef.GetNumMarkers());
+            }
+        }
+    }
+
+    // EWFIX.P3 -- this is probably not correct
+    // total length
+    if(HasTotalLength())
+    {
+        if(locusRef.HasTotalLength())
+        {
+            if(locusRef.GetLength() != GetLength())
+            {
+                throw gc_locus_length_mismatch(GetName(),locusRef.GetName(),GetLength(),locusRef.GetLength());
+            }
+        }
+    }
+
+    // user-set linked value
+    if(HasLinkedUserValue())
+    {
+        if(locusRef.HasLinkedUserValue())
+        {
+            if(locusRef.GetLinkedUserValue() != GetLinkedUserValue())
+            {
+                throw gc_locus_user_linked_mismatch(GetName(),locusRef.GetName(),GetLinkedUserValue(),locusRef.GetLinkedUserValue());
+            }
+        }
+    }
+
+    // offset
+    if(HasOffset())
+    {
+        if(locusRef.HasOffset())
+        {
+            if(locusRef.GetOffset() != GetOffset())
+            {
+                throw gc_locus_offset_mismatch(GetName(),locusRef.GetName(),GetOffset(),locusRef.GetOffset());
+            }
+        }
+    }
+
+    // map position
+    if(HasMapPosition())
+    {
+        if(locusRef.HasMapPosition())
+        {
+            if(locusRef.GetMapPosition() != GetMapPosition())
+            {
+                throw gc_locus_map_position_mismatch(GetName(),locusRef.GetName(),GetMapPosition(),locusRef.GetMapPosition());
+            }
+        }
+    }
+
+    // this is a real merge
+    if(doSettings)
+    {
+        // order is important -- setting data type and then num Markers can
+        // result in setting the length
+        if(GetDataType() == sdatatype_NONE_SET)             SetDataType(locusRef.GetDataType());
+        if(!HasNumMarkers() && locusRef.HasNumMarkers())    SetNumMarkers(locusRef.GetNumMarkers());
+        if(!HasTotalLength() && locusRef.HasTotalLength())  SetTotalLength(locusRef.GetTotalLength());
+        if(!HasLinkedUserValue() && locusRef.HasLinkedUserValue()) SetLinkedUserValue(locusRef.GetLinkedUserValue());
+        if(!HasOffset() && locusRef.HasOffset())            SetOffset(locusRef.GetOffset());
+        if(!HasMapPosition() && locusRef.HasMapPosition())  SetMapPosition(locusRef.GetMapPosition());
+    }
+}
+
+bool
+gcLocus::CanMergeWith(gcLocus & locus)
+{
+    try
+    {
+        LocusMergeLogic(false,locus);
+    }
+    catch (const gc_locus_err& e)
+    {
+        return false;
+    }
+    return true;
+}
+
+void
+gcLocus::MergeWith(gcLocus & locus)
+{
+    LocusMergeLogic(true,locus);
+}
+
+wxString
+gcLocus::GetLongName() const
+{
+    return wxString::Format("%s %s",GetName().c_str(),m_creationInfo.GetDescriptiveName().c_str());
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_locus.h b/src/convParse/gc_locus.h
new file mode 100644
index 0000000..0f16247
--- /dev/null
+++ b/src/convParse/gc_locus.h
@@ -0,0 +1,130 @@
+// $Id: gc_locus.h,v 1.21 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_LOCUS_H
+#define GC_LOCUS_H
+
+#include "wx/string.h"
+#include "gc_creation_info.h"
+#include "gc_phase.h"
+#include "gc_quantum.h"
+#include "gc_types.h"
+
+class GCStructures;
+
+class gcLocus : public GCQuantum
+{
+    friend class GCStructures;
+
+  private:
+
+    bool                        m_blessed;
+
+    bool                        m_hasRegion;
+    size_t                      m_regionId;
+
+    bool                        m_inducedByFile;
+    size_t                      m_fileId;
+
+    gcSpecificDataType          m_dataType;
+
+    bool                        m_hasNumMarkers;
+    size_t                      m_numMarkers;
+
+    bool                        m_hasTotalLength;
+    size_t                      m_totalLength;
+
+    bool                        m_hasLinkedUserValue;
+    bool                        m_linkedUserValue;
+
+    bool                        m_hasOffset;
+    long                        m_offset;
+
+    bool                        m_hasMapPosition;
+    long                        m_mapPosition;
+
+    bool                        m_hasUnphasedMarkers;
+    gcUnphasedMarkers           m_unphasedMarkers;
+
+    gcCreationInfo              m_creationInfo;
+
+    std::vector<long>           m_locations;
+
+    void    SetBlessed(bool blessed);
+    void    SetCreationInfo(const gcCreationInfo &);
+    void    SetFileId(size_t id);
+    void    UnsetFileId();
+    void    SetRegionId(size_t id);
+    void    UnsetRegionId();
+
+  protected:
+    void    LocusMergeLogic(bool doSettings, gcLocus & locus);
+
+  public:
+    gcLocus();
+    ~gcLocus();
+
+    bool                GetBlessed()            const;
+    gcSpecificDataType  GetDataType()           const;
+    wxString            GetDataTypeString()     const;
+    size_t              GetLength()             const;
+    wxString            GetLengthString()       const;
+    bool                GetLinked()             const;
+    wxString            GetLinkedString()       const;
+    bool                GetLinkedUserValue()    const;
+    wxString            GetLinkedUserValueString()const;
+    std::vector<long>   GetLocations()          const;
+    wxString            GetLocationsAsString()  const;
+    wxString            GetLongName()           const;
+    long                GetMapPosition()        const;
+    wxString            GetMapPositionString()  const;
+    size_t              GetNumMarkers()         const;
+    wxString            GetNumMarkersString()   const;
+    long                GetOffset()             const;
+    wxString            GetOffsetString()       const;
+    size_t              GetRegionId()           const;
+    size_t              GetTotalLength()        const;
+    wxString            GetTotalLengthString()  const;
+    const gcUnphasedMarkers * GetUnphasedMarkers()    const;
+    wxString            GetUnphasedMarkersAsString()    const;
+
+    bool        HasLength()         const ;
+    bool        HasLocations()      const ;
+    bool        HasLocationZero()   const ;
+    bool        HasLinkedUserValue()const ;
+    bool        HasMapPosition()    const ;
+    bool        HasNumMarkers()     const ;
+    bool        HasOffset()         const ;
+    bool        HasRegion()         const ;
+    bool        HasTotalLength()    const ;
+    bool        HasUnphasedMarkers()const ;
+
+    void    SetDataType(gcSpecificDataType type);
+    void    SetLinkedUserValue(bool linked);
+    void    SetLocations(wxString locationString);
+    void    SetMapPosition(long position);
+    void    SetNumMarkers(size_t numMarkers);
+    void    SetOffset(long offset);
+    void    SetTotalLength(size_t length);
+    void    SetUnphasedMarkers(gcUnphasedMarkers);
+
+    void    UnsetNumMarkers();
+    void    UnsetMapPosition();
+    void    UnsetTotalLength();
+
+    void    DebugDump(wxString prefix=wxEmptyString) const;
+
+    bool    CanMergeWith(gcLocus & locus) ;
+    void    MergeWith(gcLocus & locus) ;
+};
+
+#endif  // GC_LOCUS_H
+
+//____________________________________________________________________________________
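
The gcLocus class declared above is the converter's per-segment record: each optional attribute is tracked by a has/value pair, and SetLocations() both parses a whitespace-separated, strictly increasing position list and fills in the marker count when it is not yet set. A minimal usage sketch follows; it assumes only the wxWidgets build environment and the gc_* headers from this tree, and the concrete values are invented for illustration.

    #include "gc_locus.h"

    static void SketchLocusSetup()
    {
        gcLocus locus;
        locus.SetDataType(sdatatype_DNA);   // enum value used throughout gc_locus.cpp
        locus.SetLocations("5 17 42");      // increasing positions; also sets 3 markers
        locus.SetTotalLength(100);          // must be >= the number of markers
        locus.SetOffset(1);

        bool   linked = locus.GetLinked();  // true: nucleotide data with a total length
        size_t length = locus.GetLength();  // 100, taken from the total length
        locus.DebugDump();                  // logs the state through wxLogDebug
        (void)linked; (void)length;
    }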
diff --git a/src/convParse/gc_migrate.cpp b/src/convParse/gc_migrate.cpp
new file mode 100644
index 0000000..b0d268f
--- /dev/null
+++ b/src/convParse/gc_migrate.cpp
@@ -0,0 +1,386 @@
+// $Id: gc_migrate.cpp,v 1.33 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_data.h"
+#include "gc_datastore.h"
+#include "gc_errhandling.h"
+#include "gc_file.h"
+#include "gc_file_util.h"
+#include "gc_infile_err.h"
+#include "gc_migrate.h"
+#include "gc_parse_block.h"
+#include "gc_strings_infile.h"
+
+#include "wx/log.h"
+#include "wx/tokenzr.h"
+#include "wx/txtstrm.h"
+#include "wx/wfstream.h"
+
+//------------------------------------------------------------------------------------
+
+GCMigrateParser::GCMigrateParser(const GCDataStore& ds)
+    :   GCParser(ds)
+{
+}
+
+GCParse *
+GCMigrateParser::Parse( GCFile &            fileRef,
+                        gcGeneralDataType   dataType,
+                        GCInterleaving      interleaving)
+{
+    SetUpStreams(fileRef.GetName());
+    assert( !( dataType.HasAllelic() && dataType.HasNucleic()));
+    if(dataType.HasAllelic()) return AlleleParse(fileRef,dataType,interleaving);
+    if(dataType.HasNucleic()) return NucParse(fileRef,dataType,interleaving);
+    assert(false);
+    return NULL;
+}
+
+GCParse *
+GCMigrateParser::NucParse(GCFile & fileRef, gcGeneralDataType dataType, GCInterleaving interleaving)
+{
+
+    GCParse & parseData = MakeParse(fileRef,format_MIGRATE,dataType,interleaving);
+
+    gcSpecificDataType  dataTypeSpecInFile;
+    size_t              numPops;
+    size_t              numLoci;
+    wxString            comment;
+
+    try
+    {
+        ParseMigrateFirstLine(dataTypeSpecInFile,numPops,numLoci,comment);
+        SetDataTypeFromFile(parseData,dataTypeSpecInFile);
+
+        std::vector<size_t> locusLengths = ParseMigrateLocusLengths();
+        for(size_t i=0;
+            i < locusLengths.size();
+            i++)
+        {
+            AddLocus(parseData,i,locusLengths[i]);
+        }
+
+        for(size_t popIndex = 0;
+            popIndex < numPops;
+            popIndex++)
+        {
+            wxString popComment;
+            std::vector<size_t> numSamples = ParseMigratePopulationInfo(popComment,locusLengths.size());
+            assert(numSamples.size() == locusLengths.size());
+            AddPop(parseData,popIndex,popComment);
+
+            for(size_t locIndex = 0;
+                locIndex < numLoci;
+                locIndex++)
+            {
+                FillData(parseData,popIndex,locIndex,interleaving,numSamples[locIndex]);
+            }
+        }
+        CheckNoExtraData();
+        return &parseData;
+    }
+    catch(gc_eof& e)
+    {
+        if(CompleteParse(parseData))
+        {
+            return &parseData;
+        }
+        else
+        {
+            delete &parseData;
+            e.setFile(fileRef.GetName());
+            throw;
+        }
+    }
+    catch(gc_infile_err& f)
+    {
+        delete &parseData;
+        f.setFile(fileRef.GetName());
+        f.setRow(m_linesRead);
+        throw;
+    }
+    assert(false);
+    return NULL;
+
+}
+
+GCParse *
+GCMigrateParser::AlleleParse(   GCFile &            fileRef,
+                                gcGeneralDataType   dataType,
+                                GCInterleaving      interleaving)
+{
+    gcSpecificDataType  dataTypeSpecInFile;
+    size_t              numPops;
+    size_t              numSites;
+    wxString            delimiter;
+    wxString            comment;
+
+    ParseMigrateFirstLine(dataTypeSpecInFile,numPops,numSites,delimiter,comment);
+    GCParse & parseData = MakeParse(fileRef,format_MIGRATE,dataType,interleaving,delimiter);
+    SetDataTypeFromFile(parseData,dataTypeSpecInFile);
+
+    try
+    {
+        AddLocus(parseData,0,numSites);
+        for(size_t popIndex = 0;
+            popIndex < numPops;
+            popIndex++)
+        {
+            wxString popComment;
+            std::vector<size_t> numSamples = ParseMigratePopulationInfo(popComment,1);    // EWFIX.P3 -- constant
+            assert(numSamples.size() == 1);
+            AddPop(parseData,popIndex,popComment);
+            FillData(parseData,popIndex,0,interleaving,numSamples[0]);
+        }
+        CheckNoExtraData();
+        return &parseData;
+    }
+    catch(gc_eof& e)
+    {
+        if(CompleteParse(parseData))
+        {
+            return &parseData;
+        }
+        else
+        {
+            delete &parseData;
+            e.setFile(fileRef.GetName());
+            throw;
+        }
+    }
+    catch(gc_infile_err& f)
+    {
+        delete &parseData;
+        f.setFile(fileRef.GetName());
+        f.setRow(m_linesRead);
+        throw;
+    }
+    assert(false);
+    return NULL;
+}
+
+GCMigrateParser::~GCMigrateParser()
+{
+}
+
+void
+GCMigrateParser::ParseMigrateFirstLine(
+    gcSpecificDataType &        dataTypeSpecInFile,
+    size_t &            numPopsRef,
+    size_t &            numLociRef,
+    wxString &          comment)
+{
+    wxString firstLine = ReadLine();
+    wxStringTokenizer tokenizer(firstLine);
+
+    dataTypeSpecInFile = sdatatype_NONE_SET;
+    wxString word = tokenizer.GetNextToken();
+    if(!word.IsNumber() && word.Len() == 1)
+        // we're looking for an optional single char token indicating
+        // the data type in this file. If it's a number, then the
+        // token is not here
+    {
+        if(word.IsSameAs("a",false)) dataTypeSpecInFile = sdatatype_KALLELE;
+        if(word.IsSameAs("e",false)) dataTypeSpecInFile = sdatatype_KALLELE;
+        if(word.IsSameAs("m",false)) dataTypeSpecInFile = sdatatype_MICROSAT;
+        if(word.IsSameAs("n",false)) dataTypeSpecInFile = sdatatype_SNP;
+        if(word.IsSameAs("s",false)) dataTypeSpecInFile = sdatatype_DNA;
+        if(dataTypeSpecInFile == sdatatype_NONE_SET)
+        {
+            wxString msg = wxString::Format(gcerr_migrate::firstToken,word.c_str());
+            m_dataStore.GCWarning(msg);
+        }
+        word = tokenizer.GetNextToken();
+    }
+
+    // OK. Now word should be a number indicating the number of populations
+    long longVal;
+    if(!word.ToLong(&longVal))
+    {
+        throw gc_migrate_bad_pop_count(word);
+    }
+    if(longVal <= 0)
+    {
+        throw gc_migrate_bad_pop_count(word);
+    }
+    numPopsRef = (size_t)longVal;
+
+    // The next word should be a number indicating the number of loci
+    word = tokenizer.GetNextToken();
+    if(!word.ToLong(&longVal) || longVal <= 0)
+    {
+        throw gc_migrate_bad_locus_count(word);
+    }
+    numLociRef = (size_t)longVal;
+
+    comment = tokenizer.GetString();
+}
+
+void
+GCMigrateParser::ParseMigrateFirstLine( gcSpecificDataType& dataTypeSpecInFile,
+                                        size_t &            numPopsRef,
+                                        size_t &            numLociRef,
+                                        wxString &          delimiter,
+                                        wxString &          comment)
+{
+    // this establishes the default values, namely that no
+    // delimiter is specified.
+    delimiter.Empty();
+    ParseMigrateFirstLine(dataTypeSpecInFile,numPopsRef,numLociRef,comment);
+
+    wxStringTokenizer tokenizer(comment);
+    if(tokenizer.HasMoreTokens())
+    {
+        wxString mayBeDelimiter = tokenizer.GetNextToken();
+        if(IsLegalDelimiter(mayBeDelimiter))
+        {
+            delimiter = mayBeDelimiter;
+            comment = tokenizer.GetString();
+        }
+    }
+}
+
+bool
+GCMigrateParser::IsLegalDelimiter(wxString delimCandidate)
+{
+    if(delimCandidate.Length() != 1) return false;
+    if(delimCandidate[0] == gcstr_migrate::missingData)
+    {
+        throw gc_migrate_bad_delimiter(delimCandidate);
+        return false;
+    }
+    return true;
+}
+
+std::vector<size_t>
+GCMigrateParser::ParseMigrateLocusLengths()
+{
+    wxString lociLengthLine = ReadLine();
+    wxStringTokenizer tokenizer(lociLengthLine);
+    std::vector<size_t> locusLengths;
+
+    size_t index = 0;
+    while(tokenizer.CountTokens() != 0)
+    {
+        wxString token = tokenizer.GetNextToken();
+        long longVal;
+        if(!token.ToLong(&longVal))
+        {
+            throw gc_migrate_locus_length_not_positive(token);
+        }
+        if(longVal <= 0)
+        {
+            throw gc_migrate_locus_length_not_positive(token);
+        }
+        size_t locusLength = (size_t)longVal;
+        locusLengths.push_back(locusLength);
+        index++;
+    }
+    return locusLengths;
+}
+
+std::vector<size_t>
+GCMigrateParser::ParseMigratePopulationInfo(wxString & populationName, size_t locusCount)
+{
+    std::vector<size_t> numSamplesForEachLocus;
+
+    wxString line = ReadLine();
+    wxStringTokenizer tokenizer(line);
+    wxString lastToken = wxEmptyString;
+    bool shouldUseLastToken = false;
+
+    try
+    {
+        for(size_t i = 0;
+            i < locusCount ;
+            i++)
+        {
+            lastToken = tokenizer.GetNextToken();
+            long longVal;
+            if(!lastToken.ToLong(&longVal))
+            {
+                throw gc_migrate_missing_sequence_count(lastToken);
+            }
+            if(longVal <= 0)
+            {
+                throw gc_migrate_bad_sequence_count(lastToken);
+            }
+            size_t sequenceCount = (size_t)longVal;
+            numSamplesForEachLocus.push_back(sequenceCount);
+        }
+    }
+    catch (const gc_migrate_missing_sequence_count & e)
+    {
+        if(numSamplesForEachLocus.size() == 1)
+        {
+            for(size_t i=1;
+                i < locusCount;
+                i++)
+            {
+                numSamplesForEachLocus.push_back(numSamplesForEachLocus[0]);
+            }
+            shouldUseLastToken = true;
+        }
+        else
+        {
+            throw gc_migrate_too_few_sequence_lengths(locusCount,line);
+        }
+
+    }
+    assert(numSamplesForEachLocus.size() == locusCount);
+
+    populationName = tokenizer.GetString();
+    if(shouldUseLastToken)
+    {
+        populationName = wxString::Format("%s %s",
+                                          lastToken.c_str(),
+                                          populationName.c_str());
+    }
+
+    populationName.Trim(true);
+    populationName.Trim(false);
+    return numSamplesForEachLocus;
+}
+
+bool
+GCMigrateParser::CompleteParse(GCParse & parseData)
+{
+    // check we have pops
+    size_t pcount = parseData.GetPopCount();
+    if(pcount < 1) return false;
+
+    // check we have a locus
+    size_t lcount = parseData.GetLociCount();
+    if(lcount < 1) return false;
+
+    // check we have a block for each
+    constBlockVector blocks = parseData.GetBlocks();
+    if(blocks.size() != pcount * lcount) return false;
+
+    // check block has correct number of sequences
+    constBlockVector::const_iterator i;
+    for(i=blocks.begin(); i != blocks.end(); i++)
+    {
+        const GCParseBlock * blockP = *i;
+        if(blockP == NULL) return false;
+        size_t expectedNumSequences = blockP->GetExpectedNumSequences();
+        const GCParseSamples & samples = blockP->GetSamples();
+        if(samples.size() != expectedNumSequences) return false;
+
+        // check block has correct number of sites
+        if(blockP->HasIncompleteSequences()) return false;
+    }
+
+    return true;
+}
+
+//____________________________________________________________________________________
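
Taken together, ParseMigrateFirstLine(), ParseMigrateLocusLengths() and ParseMigratePopulationInfo() above define the MIGRATE header layout this parser accepts. A sketch of that layout, with every concrete number and name invented for illustration:

    // First line:     [datatype letter] <#populations> <#loci> <free-form title>
    //                  letters: s = DNA, n = SNP, m = microsat, a or e = k-allele;
    //                  for allelic data a single-character delimiter may appear
    //                  at the start of the title text.
    // Second line:     one length per locus (nucleotide parse only), e.g.
    //                      120 85 40
    // Per population:  per-locus sample counts followed by the population name,
    //                      6 6 6 Population-One
    //                  a single leading count is reused for every locus when the
    //                  following token is not a number.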
diff --git a/src/convParse/gc_migrate.h b/src/convParse/gc_migrate.h
new file mode 100644
index 0000000..548e820
--- /dev/null
+++ b/src/convParse/gc_migrate.h
@@ -0,0 +1,61 @@
+// $Id: gc_migrate.h,v 1.17 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_MIGRATE_H
+#define GC_MIGRATE_H
+
+#include "gc_parser.h"
+#include "gc_types.h"
+
+class GCFile;
+
+class GCMigrateParser : public GCParser
+{
+  private:
+    GCMigrateParser();  // undefined
+
+  protected:
+    void    ParseMigrateFirstLine(
+        gcSpecificDataType& dataTypeSpecInFile,
+        size_t &            numPops,
+        size_t &            numLoci,
+        wxString &          comment);
+    void    ParseMigrateFirstLine(
+        gcSpecificDataType& dataTypeSpecInFile,
+        size_t &            numPops,
+        size_t &            numLoci,
+        wxString &          delimiter,
+        wxString &          comment);
+
+    std::vector<size_t> ParseMigratePopulationInfo(wxString & popName, size_t numLoci);
+    std::vector<size_t> ParseMigrateLocusLengths();
+
+    GCParse * NucParse(     GCFile &        fileRef,
+                            gcGeneralDataType   dataType,
+                            GCInterleaving  interleaving);
+    GCParse * AlleleParse(  GCFile &            fileRef,
+                            gcGeneralDataType   dataType,
+                            GCInterleaving      interleaving);
+
+    bool            IsLegalDelimiter(wxString delimCandidate);
+    bool            CompleteParse(GCParse&);
+
+  public:
+    GCMigrateParser(const GCDataStore& dataStore);
+    virtual ~GCMigrateParser();
+
+    GCParse * Parse(GCFile &            fileRef,
+                    gcGeneralDataType   dataType,
+                    GCInterleaving      interleaving);
+};
+
+#endif  // GC_MIGRATE_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_parse.cpp b/src/convParse/gc_parse.cpp
new file mode 100644
index 0000000..e17428b
--- /dev/null
+++ b/src/convParse/gc_parse.cpp
@@ -0,0 +1,413 @@
+// $Id: gc_parse.cpp,v 1.23 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_data.h"
+#include "gc_default.h"
+#include "gc_file.h"
+#include "gc_file_util.h"
+#include "gc_infile_err.h"
+#include "gc_parse.h"
+#include "gc_parse_block.h"
+#include "gc_parse_locus.h"
+#include "gc_parse_pop.h"
+#include "gc_parse_sample.h"
+#include "gc_strings.h"
+#include "gc_types.h"
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+GCParse::GCParse(   GCFile &            file,
+                    GCFileFormat        format,
+                    gcGeneralDataType   dtype,
+                    GCInterleaving      interleaving,
+                    wxString            delim)
+    :
+    m_filePointer(&file),
+    m_format(format),
+    m_dataType(dtype),
+    m_interleaving(interleaving),
+    m_delimiter(delim),
+    m_multiLineSeenInFile(false),
+    m_hasSpacesInNames(false)
+{
+}
+
+GCParse::~GCParse()
+{
+    for(GCParseLoci::iterator i=m_loci.begin(); i != m_loci.end(); i++)
+    {
+        delete *i;
+    }
+    for(GCParsePops::iterator i=m_pops.begin(); i != m_pops.end(); i++)
+    {
+        delete *i;
+    }
+    for(GCParseBlocks::iterator i=m_blocks.begin(); i != m_blocks.end(); i++)
+    {
+        delete *i;
+    }
+}
+
+wxString
+GCParse::GetName() const
+{
+    assert(m_filePointer != NULL);
+    return wxString::Format(gcstr::parseSettingsForFile,
+                            m_filePointer->GetShortName().c_str(),
+                            GetSettings().c_str());
+}
+
+wxString
+GCParse::GetSettings() const
+{
+    wxString desc =
+        wxString::Format(gcstr::parseSettings,
+                         GetFormatString().c_str(),
+                         GetDataTypeString().c_str(),
+                         GetInterleavingString().c_str());
+
+    return desc;
+}
+
+gcGeneralDataType
+GCParse::GetDataType() const
+{
+    return m_dataType;
+}
+
+GCFileFormat
+GCParse::GetFormat() const
+{
+    return m_format;
+}
+
+bool
+GCParse::GetHasSpacesInNames() const
+{
+    return m_hasSpacesInNames;
+}
+
+GCInterleaving
+GCParse::GetInterleaving() const
+{
+    return m_interleaving;
+}
+
+wxString
+GCParse::GetDelimiter() const
+{
+    return m_delimiter;
+}
+
+bool
+GCParse::GetMultiLineSeenInFile() const
+{
+    return m_multiLineSeenInFile;
+}
+
+void
+GCParse::SetDataTypeFromFile(gcSpecificDataType dtype)
+{
+    if(dtype != sdatatype_NONE_SET)
+    {
+        gcGeneralDataType::iterator i = m_dataType.find(dtype);
+        if(i == m_dataType.end())
+        {
+            throw gc_parse_data_type_spec_mismatch(dtype,GetDataType());
+        }
+        m_dataType.clear();
+        m_dataType.insert(dtype);
+    }
+}
+
+const GCParseLocus &
+GCParse::GetParseLocus(size_t locusIndex) const
+{
+    assert(locusIndex < m_loci.size());
+    return *(m_loci[locusIndex]);
+}
+
+GCParseLocus &
+GCParse::GetParseLocus(size_t locusIndex)
+{
+    assert(locusIndex < m_loci.size());
+    return *(m_loci[locusIndex]);
+}
+
+const GCParsePop &
+GCParse::GetParsePop(size_t popIndex) const
+{
+    assert(popIndex < m_pops.size());
+    return *(m_pops[popIndex]);
+}
+
+constBlockVector
+GCParse::GetBlocks() const
+{
+    constBlockVector retVal;
+    for(GCParseBlocks::const_iterator i = m_blocks.begin();
+        i != m_blocks.end(); i ++)
+    {
+        const GCParseBlock * blockP = *i;
+        retVal.push_back(blockP);
+    }
+    return retVal;
+}
+
+const GCParseBlock &
+GCParse::GetBlock(size_t popId, size_t locusId) const
+{
+    // rather wasteful, but correct
+    for(GCParseBlocks::const_iterator i = m_blocks.begin();
+        i != m_blocks.end(); i ++)
+    {
+        const GCParseBlock & block = **i;
+        size_t blockPopId = block.GetPopRef().GetIndexInParse();
+        size_t blockLocId = block.GetLocusRef().GetIndexInParse();
+        if((popId == blockPopId) && (locusId == blockLocId))
+        {
+            return block;
+        }
+    }
+    wxString msg = wxString::Format(gcerr::noBlockForPopLocus,(int)popId,(int)locusId);
+    gc_implementation_error e(msg.c_str());
+    throw e;
+}
+
+const GCFile &
+GCParse::GetFileRef() const
+{
+    return *m_filePointer;
+}
+
+size_t
+GCParse::GetPopCount() const
+{
+    return m_pops.size();
+}
+
+size_t
+GCParse::GetLociCount() const
+{
+    return m_loci.size();
+}
+
+void
+GCParse::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sGCParse:%s",prefix.c_str(),GetSettings().c_str());    // EWDUMPOK
+    wxLogDebug("%sPopulations:",(prefix+gcstr::indent).c_str());    // EWDUMPOK
+    for(size_t i = 0; i < m_pops.size() ; i++)
+    {
+        const GCParsePop & popRef = GetParsePop(i);
+        wxLogDebug("%s%5d:\"%s\"",  // EWDUMPOK
+                   (prefix+gcstr::indent+gcstr::indent).c_str(),
+                   (int)(popRef.GetIndexInParse()),
+                   (popRef.GetName()).c_str());
+    }
+    wxLogDebug("%sLoci:",(prefix+gcstr::indent).c_str());   // EWDUMPOK
+    for(size_t i = 0; i < m_loci.size() ; i++)
+    {
+        const GCParseLocus & locRef = GetParseLocus(i);
+        wxLogDebug("%s%5d:%5d markers of type %s",    // EWDUMPOK
+                   (prefix+gcstr::indent+gcstr::indent).c_str(),
+                   (int)(locRef.GetIndexInParse()),
+                   (int)(locRef.GetNumMarkers()),
+                   (ToWxString(locRef.GetDataType())).c_str());
+    }
+    wxLogDebug("%sBlocks:",(prefix+gcstr::indent).c_str()); // EWDUMPOK
+    for(size_t i = 0; i < m_blocks.size() ; i++)
+    {
+        const GCParseBlock & blockRef = *(m_blocks[i]);
+        blockRef.DebugDump(prefix+gcstr::indent+gcstr::indent);
+    }
+}
+
+gcIdSet
+GCParse::IdsOfAllBlocks() const
+{
+    gcIdSet blockIds;
+    for(size_t i=0; i < m_blocks.size(); i++)
+    {
+        const GCParseBlock & block = *(m_blocks[i]);
+        blockIds.insert(block.GetId());
+    }
+    return blockIds;
+}
+
+wxString
+GCParse::GetFormatString() const
+{
+    return wxString::Format(gcstr::parseFormat,ToWxString(GetFormat()).c_str());
+}
+
+wxString
+GCParse::GetDataTypeString() const
+{
+    return wxString::Format(gcstr::parseDataType,ToWxString(GetDataType()).c_str());
+}
+
+wxString
+GCParse::GetInterleavingString() const
+{
+    GCInterleaving il = GetInterleaving();
+    if(il == interleaving_MOOT) il = interleaving_SEQUENTIAL; // EWFIX.P3 -- make blank ??
+    return wxString::Format(gcstr::parseInterleaving,ToWxString(il).c_str());
+}
+
+gcPhaseInfo *
+GCParse::GetDefaultPhaseRecords() const
+{
+    gcPhaseInfo * phaseRecords = new gcPhaseInfo();
+
+    const wxString & fileName = GetFileRef().GetName();
+    for(GCParseBlocks::const_iterator i = m_blocks.begin(); i != m_blocks.end(); i++)
+    {
+        const GCParseBlock * pbP = *i;
+        const GCParseSamples & samples = pbP->GetSamples();
+        for(GCParseSamples::const_iterator j=samples.begin(); j != samples.end(); j++)
+        {
+            const GCParseSample * sampP = *j;
+            if(sampP->GetSequencesPerLabel() > 1)
+            {
+                gcPhaseRecord rec
+                    = gcPhaseRecord::MakeAllelicPhaseRecord(fileName,
+                                                            sampP->GetLine(),
+                                                            sampP->GetLabel(),
+                                                            sampP->GetSequencesPerLabel());
+                phaseRecords->AddRecord(rec);
+            }
+        }
+    }
+    return phaseRecords;
+}
+
+gcPhaseInfo *
+GCParse::GetPhaseRecordsForAdjacency(size_t adj) const
+{
+    gcPhaseInfo * phaseRecords = new gcPhaseInfo();
+
+    const wxString & fileName = GetFileRef().GetName();
+    for(GCParseBlocks::const_iterator i = m_blocks.begin(); i != m_blocks.end(); i++)
+    {
+        const GCParseBlock * pbP = *i;
+        const GCParseSamples & samples = pbP->GetSamples();
+        wxArrayString holdingArray;
+        const GCParseSample * sampP = NULL;
+        for(GCParseSamples::const_iterator j=samples.begin(); j != samples.end(); j++)
+        {
+            sampP = *j;
+            if(sampP->GetSequencesPerLabel() > 1)
+            {
+                delete phaseRecords;
+                throw gc_adjacent_phase_resolution_for_multisample_input(GetFileRef().GetName());
+            }
+            else
+            {
+                holdingArray.Add(sampP->GetLabel());
+                if(holdingArray.Count() == adj)
+                {
+                    gcPhaseRecord rec
+                        = gcPhaseRecord::MakeAdjacentPhaseRecord(fileName,sampP->GetLine(),holdingArray);
+                    phaseRecords->AddRecord(rec);
+                    holdingArray.Empty();
+                }
+            }
+        }
+        if(! holdingArray.IsEmpty())
+        {
+            assert(sampP != NULL);
+            size_t lineNum = sampP->GetLine();
+            wxString fname = GetFileRef().GetName();
+            size_t numSamples = samples.size();
+            delete phaseRecords;
+            throw gc_individual_sample_adj_mismatch(lineNum,fname,numSamples,adj);
+        }
+    }
+    return phaseRecords;
+}
+
+void
+GCParse::SetCannotBeMsat()
+{
+    m_dataType.Disallow(sdatatype_MICROSAT);
+}
+
+void
+GCParse::SetHasSpacesInNames()
+{
+    m_hasSpacesInNames = true;
+}
+
+GCParseVec::GCParseVec()
+    :
+    std::vector<GCParse*>()
+{
+}
+
+GCParseVec::~GCParseVec()
+{
+}
+
+void
+GCParseVec::NukeContents()
+{
+    for(iterator i=begin(); i != end(); i++)
+    {
+        delete *i;
+    }
+}
+
+bool
+GCParseVec::MungeParses(GCParseVec::iterator i1, GCParseVec::iterator i2)
+{
+    GCParse & p1 = **(i1);
+    GCParse & p2 = **(i2);
+    if(p1.GetFormat() != p2.GetFormat()) return false;
+    if(p1.GetDataType() != p2.GetDataType()) return false;
+
+    if(p1.GetMultiLineSeenInFile()) return false;
+    if(p2.GetMultiLineSeenInFile()) return false;
+
+    p1.m_interleaving = interleaving_MOOT;
+    return true;
+}
+
+bool
+GCParseVec::MungeParses()
+{
+    bool mungedAnything = false;
+    std::vector<GCParse*>::iterator outerIter = begin();
+    while(outerIter != end())
+    {
+        std::vector<GCParse*>::iterator innerIter = outerIter;
+        innerIter++;
+        while(innerIter != end())
+        {
+            if(MungeParses(outerIter,innerIter))
+            {
+                mungedAnything = true;
+                delete *innerIter;
+                innerIter = erase(innerIter);   // erase() returns the next valid iterator
+            }
+            else
+            {
+                innerIter++;
+            }
+        }
+        outerIter++;
+    }
+    return mungedAnything;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_parse.h b/src/convParse/gc_parse.h
new file mode 100644
index 0000000..ceecbaf
--- /dev/null
+++ b/src/convParse/gc_parse.h
@@ -0,0 +1,111 @@
+// $Id: gc_parse.h,v 1.17 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PARSE_H
+#define GC_PARSE_H
+
+#include <vector>
+
+#include "gc_phase_info.h"
+#include "gc_quantum.h"
+#include "gc_structure_maps.h"
+#include "gc_types.h"
+#include "wx/string.h"
+
+class GCDataStore;
+class GCFile;
+class GCParseBlock;
+class GCParseLocus;
+class GCParsePop;
+class GCParser;
+
+typedef std::vector<GCParseBlock*>  GCParseBlocks;
+typedef std::vector<GCParsePop*>    GCParsePops;
+typedef std::vector<GCParseLocus*>  GCParseLoci;
+
+class GCParse : public GCQuantum
+{
+    friend class GCParser;
+    friend class GCParseVec;
+
+  private:
+    const GCFile *      m_filePointer;      // we don't own this
+    GCFileFormat        m_format;
+    gcGeneralDataType   m_dataType;
+    GCInterleaving      m_interleaving;
+    wxString            m_delimiter;
+    bool                m_multiLineSeenInFile;
+    bool                m_hasSpacesInNames;
+    GCParsePops         m_pops;             // we own the contents
+    GCParseLoci         m_loci;             // we own the contents
+    GCParseBlocks       m_blocks;           // we own the contents
+
+    GCParse();      // undefined
+
+  protected:
+    GCParseLocus &      GetParseLocus(size_t locusIndex) ;
+    void                SetDataTypeFromFile(gcSpecificDataType dtype);
+    void                SetHasSpacesInNames();
+
+  public:
+    GCParse(    GCFile &            fileRef,
+                GCFileFormat        format,
+                gcGeneralDataType   dataType,
+                GCInterleaving      interleaving,
+                wxString            delim=wxEmptyString);
+    ~GCParse();
+
+    wxString                GetSettings()   const;
+    gcGeneralDataType       GetDataType()   const ;
+    GCFileFormat            GetFormat()     const ;
+    GCInterleaving          GetInterleaving() const ;
+    wxString                GetDelimiter()  const ;
+    bool                    GetMultiLineSeenInFile() const ;
+    virtual wxString        GetName() const;
+    const GCParseLocus &    GetParseLocus(size_t locusIndex) const ;
+    const GCParsePop   &    GetParsePop  (size_t popIndex)   const;
+
+    constBlockVector        GetBlocks() const;
+    const GCParseBlock &    GetBlock(size_t popId, size_t locusId) const;
+
+    const GCFile &          GetFileRef()    const ;
+    size_t                  GetPopCount()   const ;
+    size_t                  GetLociCount()  const ;
+    bool                    GetHasSpacesInNames() const;
+
+    void DebugDump(wxString prefix=wxEmptyString) const;
+
+    gcIdSet     IdsOfAllBlocks() const;
+
+    wxString    GetFormatString() const;
+    wxString    GetDataTypeString() const;
+    wxString    GetInterleavingString() const;
+
+    gcPhaseInfo *   GetDefaultPhaseRecords() const;
+    gcPhaseInfo *   GetPhaseRecordsForAdjacency(size_t adj) const;
+
+    void    SetCannotBeMsat();
+};
+
+class GCParseVec : public std::vector<GCParse*>
+{
+  private:
+    bool MungeParses(GCParseVec::iterator,GCParseVec::iterator);
+
+  public:
+    GCParseVec();
+    virtual ~GCParseVec();
+    bool MungeParses();
+    void NukeContents();
+};
+
+#endif  // GC_PARSE_H
+
+//____________________________________________________________________________________
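
GCParse, declared above, is a passive container: a GCParser subclass fills m_pops, m_loci and m_blocks, and consumers read them back through the const accessors. The completeness check performed by GCMigrateParser::CompleteParse() can be written against this public interface alone; the following standalone sketch mirrors that logic (it is not upstream code, and the function name is illustrative).

    #include "gc_parse.h"
    #include "gc_parse_block.h"

    static bool ParseLooksComplete(const GCParse & parse)
    {
        if(parse.GetPopCount() < 1 || parse.GetLociCount() < 1) return false;

        // one block is expected for every population/locus pair
        constBlockVector blocks = parse.GetBlocks();
        if(blocks.size() != parse.GetPopCount() * parse.GetLociCount()) return false;

        for(constBlockVector::const_iterator i = blocks.begin(); i != blocks.end(); i++)
        {
            const GCParseBlock * blockP = *i;
            if(blockP == NULL) return false;

            // every block must hold the promised number of samples, all full length
            if(blockP->GetSamples().size() != blockP->GetExpectedNumSequences()) return false;
            if(blockP->HasIncompleteSequences()) return false;
        }
        return true;
    }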
diff --git a/src/convParse/gc_parse_block.cpp b/src/convParse/gc_parse_block.cpp
new file mode 100644
index 0000000..3742dc6
--- /dev/null
+++ b/src/convParse/gc_parse_block.cpp
@@ -0,0 +1,166 @@
+// $Id: gc_parse_block.cpp,v 1.8 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_default.h"
+#include "gc_parse.h"
+#include "gc_parse_block.h"
+#include "gc_parse_locus.h"
+#include "gc_parse_pop.h"
+#include "gc_parse_sample.h"
+#include "gc_strings.h"
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+GCParseSamples::GCParseSamples()
+{
+}
+
+GCParseSamples::~GCParseSamples()
+{
+}
+
+void
+GCParseSamples::DebugDump(wxString prefix) const
+{
+    for(GCParseSamples::const_iterator i=begin(); i != end(); i++)
+    {
+        (**i).DebugDump(prefix);
+    }
+}
+
+#if 0
+GCParseBlock::GCParseBlock()
+    :
+    m_parse(NULL),
+    m_indexInParse(gcdefault::badIndex),
+    m_popPointer(NULL),
+    m_locusPointer(NULL)
+{
+}
+#endif
+
+GCParseBlock::GCParseBlock( GCParse *               parseParent,
+                            size_t                  indexInParse,
+                            size_t                  expectedNumSequences,
+                            const GCParsePop &      popRef,
+                            const GCParseLocus &    locusRef)
+    :
+    m_parse(parseParent),
+    m_indexInParse(indexInParse),
+    m_expectedNumSequences(expectedNumSequences),
+    m_popPointer(&popRef),
+    m_locusPointer(&locusRef)
+{
+}
+
+GCParseBlock::~GCParseBlock()
+{
+    for(GCParseSamples::iterator i = m_samples.begin(); i != m_samples.end(); i++)
+    {
+        delete *i;
+    }
+}
+
+size_t
+GCParseBlock::GetExpectedNumSequences() const
+{
+    return m_expectedNumSequences;
+}
+
+size_t
+GCParseBlock::GetIndexInParse() const
+{
+    return m_indexInParse;
+}
+
+GCParse &
+GCParseBlock::GetParse()
+{
+    return *m_parse;
+}
+
+const GCParse &
+GCParseBlock::GetParse() const
+{
+    return *m_parse;
+}
+
+const GCParseLocus &
+GCParseBlock::GetLocusRef() const
+{
+    return *m_locusPointer;
+}
+
+const GCParsePop &
+GCParseBlock::GetPopRef() const
+{
+    return *m_popPointer;
+}
+
+GCParseSample &
+GCParseBlock::FindSample(size_t indexInBlock)
+{
+    assert(indexInBlock < m_samples.size());
+    return *(m_samples[indexInBlock]);
+}
+
+const GCParseSample &
+GCParseBlock::FindSample(size_t indexInBlock) const
+{
+    assert(indexInBlock < m_samples.size());
+    return *(m_samples[indexInBlock]);
+}
+
+const GCParseSamples &
+GCParseBlock::GetSamples() const
+{
+    return m_samples;
+}
+
+bool
+GCParseBlock::HasIncompleteSequences() const
+{
+    for(size_t i=0; i < m_samples.size(); i++)
+    {
+        const GCParseSample & parseSample = *(m_samples[i]);
+        if(parseSample.GetLength() < m_locusPointer->GetNumMarkers())
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+void
+GCParseBlock::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sid %5d; pop %5d; locus %5d",  // EWDUMPOK
+               prefix.c_str(),
+               (int)(GetIndexInParse()),
+               (int)(GetPopRef().GetIndexInParse()),
+               (int)(GetLocusRef().GetIndexInParse()));
+    for(size_t i=0; i < m_samples.size(); i++)
+    {
+        const GCParseSample & sampleRef = *(m_samples[i]);
+        sampleRef.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+void
+GCParseBlock::SetCannotBeMsat()
+{
+    assert(m_parse != NULL);
+    m_parse->SetCannotBeMsat();
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_parse_block.h b/src/convParse/gc_parse_block.h
new file mode 100644
index 0000000..57a70be
--- /dev/null
+++ b/src/convParse/gc_parse_block.h
@@ -0,0 +1,73 @@
+// $Id: gc_parse_block.h,v 1.10 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PARSE_BLOCK_H
+#define GC_PARSE_BLOCK_H
+
+#include <vector>
+
+#include "gc_quantum.h"
+#include "wx/string.h"
+
+class GCParse;
+class GCParseLocus;
+class GCParsePop;
+class GCParser;
+class GCParseSample;
+
+class GCParseSamples : public std::vector<GCParseSample*>
+{
+  public:
+    GCParseSamples();
+    virtual ~GCParseSamples();
+    void DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+class GCParseBlock : public GCQuantum
+{
+    friend class GCParser;
+  private:
+    GCParse *               m_parse;
+    size_t                  m_indexInParse;
+    size_t                  m_expectedNumSequences;
+    const GCParsePop *      m_popPointer;
+    const GCParseLocus *    m_locusPointer;
+    GCParseSamples          m_samples;
+
+  protected:
+    GCParseSample & FindSample(size_t indexInBlock);
+    GCParse &       GetParse();
+
+  public:
+    GCParseBlock(   GCParse *               parseParent,
+                    size_t                  indexInParse,
+                    size_t                  expectedNumSequences,
+                    const GCParsePop &      popRef,
+                    const GCParseLocus &    locusRef);
+    virtual ~GCParseBlock();
+
+    const GCParseSample &   FindSample(size_t indexInBlock) const;
+
+    size_t                  GetExpectedNumSequences()       const;
+    size_t                  GetIndexInParse()               const;
+    const GCParseLocus &    GetLocusRef()                   const;
+    const GCParse &         GetParse()                      const;
+    const GCParsePop &      GetPopRef()                     const;
+    const GCParseSamples &  GetSamples()                    const;
+
+    bool                    HasIncompleteSequences()        const;
+    void                    DebugDump(wxString prefix=wxEmptyString)   const;
+
+    void                    SetCannotBeMsat();
+};
+
+#endif  // GC_PARSE_BLOCK_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_parse_locus.cpp b/src/convParse/gc_parse_locus.cpp
new file mode 100644
index 0000000..2be66a4
--- /dev/null
+++ b/src/convParse/gc_parse_locus.cpp
@@ -0,0 +1,93 @@
+// $Id: gc_parse_locus.cpp,v 1.13 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_default.h"
+#include "gc_file.h"
+#include "gc_parse.h"
+#include "gc_parse_locus.h"
+#include "gc_strings_parse_locus.h"
+
+//------------------------------------------------------------------------------------
+
+GCParseLocus::GCParseLocus( const GCParse *     parse,
+                            size_t              indexInParse,
+                            size_t              lineNumber,
+                            size_t              numMarkers)
+    :
+    m_parse(parse),
+    m_indexInParse(indexInParse),
+    m_lineNumber(lineNumber),
+    m_numMarkers(numMarkers)
+{
+}
+
+GCParseLocus::~GCParseLocus()
+{
+}
+
+const GCParse &
+GCParseLocus::GetParse() const
+{
+    return *m_parse;
+}
+
+size_t
+GCParseLocus::GetIndexInParse() const
+{
+    return m_indexInParse;
+}
+
+size_t
+GCParseLocus::GetLineNumber() const
+{
+    return m_lineNumber;
+}
+
+size_t
+GCParseLocus::GetNumMarkers() const
+{
+    return m_numMarkers;
+}
+
+gcGeneralDataType
+GCParseLocus::GetDataType() const
+{
+    assert(m_parse != NULL);
+    return m_parse->GetDataType();
+}
+
+#if 0
+gcSpecificDataType
+GCParseLocus::GetSpecificDataType() const
+{
+    assert(m_parse != NULL);
+    return m_parse->GetDataTypeSpecFromFile();
+}
+#endif
+
+wxString
+GCParseLocus::GetName() const
+{
+    long segCount = 1 + (long)m_indexInParse;
+    wxString fileName = m_parse->GetFileRef().GetName();
+    return wxString::Format(gcstr_parselocus::nameShort,segCount,fileName.c_str());
+}
+
+wxString
+GCParseLocus::GetLongName() const
+{
+    long segCount = 1 + (long)m_indexInParse;
+    wxString fileName = m_parse->GetFileRef().GetName();
+    return wxString::Format(gcstr_parselocus::nameLong,segCount,fileName.c_str());
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_parse_locus.h b/src/convParse/gc_parse_locus.h
new file mode 100644
index 0000000..235aeac
--- /dev/null
+++ b/src/convParse/gc_parse_locus.h
@@ -0,0 +1,52 @@
+// $Id: gc_parse_locus.h,v 1.13 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PARSE_LOCUS_H
+#define GC_PARSE_LOCUS_H
+
+#include "gc_types.h"
+#include "wx/string.h"
+
+class GCParse;
+
+class GCParseLocus
+{
+  private:
+    GCParseLocus();     // undefined
+
+    const GCParse *     m_parse;
+    size_t              m_indexInParse;
+    size_t              m_lineNumber;
+    size_t              m_numMarkers;
+
+  public:
+    GCParseLocus(   const GCParse *     parse,
+                    size_t              indexInParse,
+                    size_t              lineNumber,
+                    size_t              numMarkers);
+    ~GCParseLocus();
+
+    const GCParse &     GetParse()              const ;
+    size_t              GetIndexInParse()       const ;
+    size_t              GetLineNumber()         const ;
+    size_t              GetNumMarkers()         const ;
+    gcGeneralDataType   GetDataType()           const ;
+
+#if 0
+    gcSpecificDataType  GetSpecificDataType()   const ;
+#endif
+
+    wxString            GetName()               const ;
+    wxString            GetLongName()           const ;
+};
+
+#endif  // GC_PARSE_LOCUS_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_parse_pop.cpp b/src/convParse/gc_parse_pop.cpp
new file mode 100644
index 0000000..343a018
--- /dev/null
+++ b/src/convParse/gc_parse_pop.cpp
@@ -0,0 +1,51 @@
+// $Id: gc_parse_pop.cpp,v 1.5 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_default.h"
+#include "gc_parse_pop.h"
+
+//------------------------------------------------------------------------------------
+
+GCParsePop::GCParsePop( const GCParse * parse,
+                        size_t          indexInParse,
+                        wxString        name)
+    :
+    m_parse(parse),
+    m_indexInParse(indexInParse),
+    m_name(name)
+{
+    assert(m_parse != NULL);
+}
+
+GCParsePop::~GCParsePop()
+{
+}
+
+const GCParse &
+GCParsePop::GetParse() const
+{
+    return *m_parse;
+}
+
+size_t
+GCParsePop::GetIndexInParse() const
+{
+    return m_indexInParse;
+}
+
+wxString
+GCParsePop::GetName() const
+{
+    return m_name;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_parse_pop.h b/src/convParse/gc_parse_pop.h
new file mode 100644
index 0000000..2f779b7
--- /dev/null
+++ b/src/convParse/gc_parse_pop.h
@@ -0,0 +1,37 @@
+// $Id: gc_parse_pop.h,v 1.6 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PARSE_POP_H
+#define GC_PARSE_POP_H
+
+#include "wx/string.h"
+
+class GCParse;
+
+class GCParsePop
+{
+  private:
+    GCParsePop();           // undefined
+
+    const GCParse *         m_parse;
+    const size_t            m_indexInParse;
+    const wxString          m_name;
+  public:
+    GCParsePop(const GCParse * parse, size_t indexInParse, wxString name);
+    ~GCParsePop();
+
+    const GCParse & GetParse()          const;
+    size_t          GetIndexInParse()   const;
+    wxString        GetName()           const;
+};
+
+#endif  // GC_PARSE_POP_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_parse_sample.cpp b/src/convParse/gc_parse_sample.cpp
new file mode 100644
index 0000000..1fa5cb3
--- /dev/null
+++ b/src/convParse/gc_parse_sample.cpp
@@ -0,0 +1,121 @@
+// $Id: gc_parse_sample.cpp,v 1.13 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_errhandling.h"
+#include "gc_parse_block.h"
+#include "gc_parse_locus.h"
+#include "gc_parse_sample.h"
+#include "gc_sequential_data.h"
+#include "gc_strings.h"
+#include "wx/log.h"
+#include "wx/tokenzr.h"
+
+//------------------------------------------------------------------------------------
+
+GCParseSample::GCParseSample(GCParseBlock & block, size_t line, wxString label)
+    :
+    m_block(&block),
+    m_lineInFile(line),
+    m_label(label)
+{
+}
+
+GCParseSample::~GCParseSample()
+{
+    for(std::vector<GCSequentialData*>::iterator i = m_data.begin(); i != m_data.end(); i++)
+    {
+        delete *i;
+    }
+}
+
+GCParseBlock &
+GCParseSample::GetBlock()
+{
+    assert(m_block != NULL);
+    return *m_block;
+}
+
+const GCParseBlock &
+GCParseSample::GetBlock() const
+{
+    assert(m_block != NULL);
+    return *m_block;
+}
+
+const GCSequentialData &
+GCParseSample::GetData(size_t index) const
+{
+    if(index >= m_data.size())
+    {
+        wxString msg = wxString::Format(gcerr::tooBigDataIndex,
+                                        (int)index,
+                                        (int)(m_data.size()));
+        gc_implementation_error e(msg.c_str());
+        throw e;
+    }
+    assert(index < m_data.size());
+    return *(m_data[index]);
+}
+
+wxString
+GCParseSample::GetLabel() const
+{
+    return m_label;
+}
+
+size_t
+GCParseSample::GetLine() const
+{
+    return m_lineInFile;
+}
+
+bool
+GCParseSample::allLengthsEqual() const
+{
+    if(m_data.empty()) return true;
+    size_t firstLength = (m_data[0])->GetNumMarkers();
+    std::vector<GCSequentialData*>::const_iterator iter;
+    for(iter = m_data.begin(); iter != m_data.end(); iter++)
+    {
+        const GCSequentialData & seq = **iter;
+        if(seq.GetNumMarkers() != firstLength) return false;
+    }
+    return true;
+}
+
+size_t
+GCParseSample::GetLength() const
+{
+    if(m_data.empty()) return 0;
+    assert(allLengthsEqual());
+    const GCSequentialData & seq = *(m_data[0]);
+    return seq.GetNumMarkers();
+}
+
+size_t
+GCParseSample::GetSequencesPerLabel() const
+{
+    return m_data.size();
+}
+
+void
+GCParseSample::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%s%10s:", prefix.c_str(), m_label.c_str()); // EWDUMPOK
+    for(size_t i=0; i < m_data.size(); i++)
+    {
+        const GCSequentialData & seq = *(m_data[i]);
+        seq.DebugDump(prefix+gcstr::indent);
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_parse_sample.h b/src/convParse/gc_parse_sample.h
new file mode 100644
index 0000000..d0a7647
--- /dev/null
+++ b/src/convParse/gc_parse_sample.h
@@ -0,0 +1,54 @@
+// $Id: gc_parse_sample.h,v 1.9 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PARSE_SAMPLE_H
+#define GC_PARSE_SAMPLE_H
+
+#include <vector>
+
+#include "gc_types.h"
+#include "wx/string.h"
+
+class GCParseBlock;
+class GCParser;
+class GCSequentialData;
+
+class GCParseSample
+{
+    friend class GCParser;
+  private:
+    GCParseSample();    // undefined
+
+  protected:
+    GCParseBlock * const            m_block;
+    size_t                          m_lineInFile;
+    wxString                        m_label;
+    std::vector<GCSequentialData*>  m_data;
+
+    bool allLengthsEqual()  const;
+
+  public:
+    GCParseSample(GCParseBlock &, size_t lineInFile, wxString label);
+    virtual ~GCParseSample();
+
+    GCParseBlock &              GetBlock();
+    const GCParseBlock &        GetBlock()                      const;
+    const GCSequentialData &    GetData(size_t index)           const;
+    wxString                    GetLabel()                      const;
+    size_t                      GetLine()                       const;
+    size_t                      GetLength()                     const;
+    size_t                      GetSequencesPerLabel()          const;
+
+    void                        DebugDump(wxString prefix=wxEmptyString)   const;
+};
+
+#endif  // GC_PARSE_SAMPLE_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_parser.cpp b/src/convParse/gc_parser.cpp
new file mode 100644
index 0000000..26c2c3d
--- /dev/null
+++ b/src/convParse/gc_parser.cpp
@@ -0,0 +1,368 @@
+// $Id: gc_parser.cpp,v 1.28 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "cnv_strings.h"
+#include "gc_default.h"
+#include "gc_file.h"
+#include "gc_file_util.h"
+#include "gc_infile_err.h"
+#include "gc_parse.h"
+#include "gc_parse_block.h"
+#include "gc_parse_locus.h"
+#include "gc_parse_pop.h"
+#include "gc_parse_sample.h"
+#include "gc_parser.h"
+#include "gc_sequential_data.h"
+#include "gc_strings.h"
+
+#include "wx/log.h"
+#include "wx/wfstream.h"
+#include "wx/tokenzr.h"
+#include "wx/txtstrm.h"
+
+//------------------------------------------------------------------------------------
+
+GCParser::GCParser(const GCDataStore& dataStore)
+    :
+    m_dataStore(dataStore),
+    m_linesRead(0),
+    m_fileStreamPointer(NULL),
+    m_textStreamPointer(NULL)
+{
+}
+
+GCParser::~GCParser()
+{
+    delete m_textStreamPointer;
+    delete m_fileStreamPointer;
+}
+
+void
+GCParser::SetUpStreams(wxString fileName)
+{
+    if(! ::wxFileExists(fileName))
+    {
+        throw gc_file_missing_error(fileName);
+    }
+
+    m_fileStreamPointer = new wxFileInputStream(fileName);
+
+    if(!m_fileStreamPointer->Ok())
+    {
+        throw gc_file_read_error(fileName);
+    }
+
+    m_textStreamPointer = new wxTextInputStream(*m_fileStreamPointer);
+}
+
+bool
+GCParser::HasContent(const wxString& line) const
+{
+    wxString mungeMe = line;
+    mungeMe.Trim(true);
+    return(!(mungeMe.IsEmpty()));
+}
+
+wxString
+GCParser::ReadLine(bool skipBlankLines)
+{
+    m_linesRead++;
+    wxString line =  ReadLineSafely(m_fileStreamPointer,m_textStreamPointer);
+    while(skipBlankLines && (!HasContent(line)) )
+    {
+        m_linesRead++;
+        line =  ReadLineSafely(m_fileStreamPointer,m_textStreamPointer);
+    }
+    return line;
+}
+
+void
+GCParser::CheckNoExtraData()
+{
+    try
+    {
+        wxString line = ReadLine();
+    }
+    catch(const gc_eof&)
+    {
+        // we want this to happen, so return
+        return;
+    }
+
+    // Oops! we didn't reach end-of-file, so there must be extra data
+    throw gc_extra_file_data();
+}
+
+// FillDataInterleaved / FillDataNonInterleaved read the sequence data for one
+// (population, locus) block.  In the interleaved layout the first line of
+// every sequence appears before any continuation lines; in the sequential
+// layout each sequence is read to completion before the next one starts.
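+//
+// A rough sketch of the two layouts for two ten-site sequences split across
+// two lines of five bases each (labels and bases are hypothetical, for
+// illustration only):
+//
+//   interleaved:              sequential:
+//     sampleA   ACGTA           sampleA   ACGTA
+//     sampleB   TTGCA           CGTAC
+//     CGTAC                     sampleB   TTGCA
+//     TGCAT                     TGCAT
+//
+// In both layouts the label occupies a fixed-width field at the start of the
+// first line for each sequence; continuation lines carry data only.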
+
+void
+GCParser::FillDataInterleaved(  GCParseBlock &         block,
+                                GCParseLocus &         locus,
+                                size_t                 expectedNumSequences)
+{
+
+    for(size_t sequenceIndex = 0; sequenceIndex < expectedNumSequences; sequenceIndex++)
+        // read in first line for each sequence
+    {
+        wxString line = ReadLine();
+        wxString label = line.Left(gcdefault::migrateSequenceNameLength);
+        label = label.Strip();
+        wxString data  = line.Remove(0,gcdefault::migrateSequenceNameLength);
+
+        GCParseSample & sample = MakeSample(block,sequenceIndex,m_linesRead,label);
+        AddDataToSample(sample,locus,data);
+
+    }
+
+    while(block.HasIncompleteSequences())
+    {
+        block.GetParse().m_multiLineSeenInFile = true;
+        for(size_t sequenceIndex = 0; sequenceIndex < expectedNumSequences; sequenceIndex++)
+        {
+            wxString data  = ReadLine();
+            GCParseSample & sample = block.FindSample(sequenceIndex);
+            AddDataToSample(sample,locus,data);
+        }
+    }
+}
+
+void
+GCParser::FillDataNonInterleaved(   GCParseBlock &          block,
+                                    GCParseLocus &         locus,
+                                    size_t                  expectedNumSequences)
+{
+    for(size_t sequenceIndex = 0; sequenceIndex < expectedNumSequences; sequenceIndex++)
+        // read in first line for each sequence
+    {
+        wxString line  = ReadLine();
+        wxString label = line.Left(gcdefault::migrateSequenceNameLength);
+        label = label.Strip();
+        wxString data  = line.Remove(0,gcdefault::migrateSequenceNameLength);
+
+        GCParseSample & sample = MakeSample(block,sequenceIndex,m_linesRead,label);
+        AddDataToSample(sample,locus,data);
+        while(sample.GetLength() < block.GetLocusRef().GetNumMarkers())
+        {
+            block.GetParse().m_multiLineSeenInFile = true;
+            data  = ReadLine();
+            try
+            {
+                AddDataToSample(sample,locus,data);
+            }
+            catch(const gc_illegal_dna_character& e)
+            {
+                throw gc_too_few_markers(e.what());
+            }
+        }
+    }
+}
+
+void
+GCParser::FillData( GCParse &               parseData,
+                    size_t                  popIndex,
+                    size_t                  locIndex,
+                    GCInterleaving          interleaving,
+                    size_t                  expectedNumSequences)
+{
+    const GCParsePop   & popRef   = parseData.GetParsePop(popIndex);
+    GCParseLocus & locusRef = parseData.GetParseLocus(locIndex);
+    GCParseBlock & blockRef = AddBlock(parseData,popRef,locusRef,expectedNumSequences);
+
+    if(interleaving == interleaving_SEQUENTIAL)
+    {
+        FillDataNonInterleaved(blockRef,locusRef,expectedNumSequences);
+        return;
+    }
+
+    if(interleaving == interleaving_INTERLEAVED)
+    {
+        FillDataInterleaved(blockRef,locusRef,expectedNumSequences);
+        return;
+    }
+
+    assert(false);
+}
+
+GCParse &
+GCParser::MakeParse(    GCFile &            fileRef,
+                        GCFileFormat        format,
+                        gcGeneralDataType   dataType,
+                        GCInterleaving      interleaving,
+                        wxString            delimiter)
+{
+    if(format == format_MIGRATE && delimiter.IsEmpty())
+        // EWFIX.P3 -- refactor, this is the wrong place for this
+        //
+        // this means it cannot be microsat
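+        //
+        // For illustration (hypothetical data types): a Migrate file whose
+        // candidate types are {DNA, MICROSAT} but which has no delimiter
+        // simply drops MICROSAT from the set; if MICROSAT was the only
+        // candidate, the missing-delimiter error below is thrown instead.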
+    {
+        gcGeneralDataType::iterator i = dataType.find(sdatatype_MICROSAT);
+        if(i != dataType.end())
+        {
+            dataType.erase(i);
+            if(dataType.empty())
+            {
+                throw gc_migrate_missing_msat_delimiter(fileRef.GetName());
+            }
+        }
+    }
+
+    return *(new GCParse(fileRef,format,dataType,interleaving,delimiter));
+}
+
+GCParseBlock &
+GCParser::AddBlock(GCParse & parse, const GCParsePop & pop, const GCParseLocus & loc,
+                   size_t numSequences)
+{
+    size_t nextIndex = parse.m_blocks.size();
+    parse.m_blocks.push_back(new GCParseBlock(&parse,nextIndex,numSequences,pop,loc));
+    return *(parse.m_blocks[nextIndex]);
+}
+
+GCParseLocus &
+GCParser::AddLocus(GCParse & parse, size_t expectedLocusIndex, size_t numMarkers)
+{
+    size_t nextIndex = parse.m_loci.size();
+    assert(nextIndex == expectedLocusIndex);
+    parse.m_loci.push_back(
+        new GCParseLocus(&parse,nextIndex,m_linesRead,numMarkers));
+    return *(parse.m_loci[nextIndex]);
+}
+
+GCParsePop &
+GCParser::AddPop(GCParse & parse, size_t expectedPopIndex, wxString name)
+{
+    size_t nextIndex = parse.m_pops.size();
+    assert(nextIndex == expectedPopIndex);
+    parse.m_pops.push_back( new GCParsePop(&parse,nextIndex,name));
+    return *(parse.m_pops[nextIndex]);
+}
+
+GCParseSample &
+GCParser::MakeSample(   GCParseBlock &  block,
+                        size_t          indexInBlock,
+                        size_t          lineInFile,
+                        wxString        label)
+{
+    assert(indexInBlock == block.m_samples.size());
+    GCParseSample * parseSample = new GCParseSample(block,lineInFile,label);
+    block.m_samples.push_back(parseSample);
+
+    if(label.Find(' ') != wxNOT_FOUND)
+    {
+        block.GetParse().SetHasSpacesInNames();
+    }
+
+    return *parseSample;
+}
+
+void
+GCParser::AddDataToSample(GCParseSample & sample, GCParseLocus & locus, wxString data)
+{
+    gcGeneralDataType dataType = sample.GetBlock().GetParse().GetDataType();
+    assert(dataType.HasAllelic() ^ dataType.HasNucleic());
+    if(dataType.HasAllelic())
+    {
+        AddAllelicDataToSample(sample,locus,data);
+    }
+
+    if(dataType.HasNucleic())
+    {
+        AddNucDataToSample(sample,locus,data);
+    }
+}
+
+void
+GCParser::AddNucDataToSample(GCParseSample & sample, GCParseLocus & locus, wxString data)
+{
+
+    if(sample.m_data.size() == 0)
+    {
+        sample.m_data.push_back(new GCNucData(sample.GetBlock()));
+    }
+
+    assert(sample.m_data.size() == 1);  // no haps for Nuc data input
+    GCSequentialData * seqData = sample.m_data[0];
+    GCNucData * nucData = dynamic_cast<GCNucData*>(seqData);
+    assert(nucData != NULL);
+    (*nucData).AddMarker(data);
+
+}
+
+void
+GCParser::AddAllelicDataToSample(GCParseSample & sample, GCParseLocus & locus, wxString data)
+{
+    wxString delim = sample.GetBlock().GetParse().GetDelimiter();
+
+    wxStringTokenizer outerTokenizer(data);
+    while(outerTokenizer.HasMoreTokens())
+    {
+        wxString thisToken = outerTokenizer.GetNextToken();
+        wxArrayString tokens = makeKalleleTokens(thisToken,delim);
+
+        if(sample.m_data.size() == 0)
+        {
+            for(size_t i=0; i < tokens.Count(); i++)
+            {
+                sample.m_data.push_back(new GCAllelicData(sample.GetBlock()));
+            }
+        }
+        if(sample.m_data.size() != tokens.Count())
+        {
+            throw gc_token_count_mismatch(delim,thisToken,sample.GetLabel(),tokens.Count(),sample.m_data.size());
+        }
+
+        for(size_t i=0; i < tokens.Count(); i++)
+        {
+            GCSequentialData * sd = sample.m_data[i];
+            GCAllelicData * adata = dynamic_cast<GCAllelicData*>(sd);
+            if(adata == NULL)
+            {
+                gc_implementation_error e(gcerr::badSequentialDataCast.c_str());
+                throw e;
+            }
+            adata->AddMarker(tokens[i]);
+        }
+    }
+}
+
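+// Splits one whitespace-delimited token into the alleles for each phase of a
+// sample.  With an empty delimiter every character becomes its own token;
+// otherwise the delimiter is treated as the separator.  For example
+// (hypothetical input, for illustration only), "123.127" with delimiter "."
+// yields the two tokens "123" and "127".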
+wxArrayString
+GCParser::makeKalleleTokens(wxString tokensTogether, wxString delim)
+{
+    wxArrayString retArr;
+    if(delim.IsEmpty())
+        // break up into characters
+    {
+        for(size_t i = 0; i < tokensTogether.Len(); i++)
+        {
+            retArr.Add(tokensTogether[i]);
+        }
+    }
+    else
+    {
+        tokensTogether.Replace(delim," ",true);
+        wxStringTokenizer tokenizer(tokensTogether);
+        while(tokenizer.HasMoreTokens())
+        {
+            wxString nextToken = tokenizer.GetNextToken();
+            retArr.Add(nextToken);
+        }
+    }
+    return retArr;
+}
+
+void
+GCParser::SetDataTypeFromFile(GCParse & parse, gcSpecificDataType type)
+{
+    parse.SetDataTypeFromFile(type);
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_parser.h b/src/convParse/gc_parser.h
new file mode 100644
index 0000000..902610d
--- /dev/null
+++ b/src/convParse/gc_parser.h
@@ -0,0 +1,70 @@
+// $Id: gc_parser.h,v 1.14 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PARSER_H
+#define GC_PARSER_H
+
+#include "gc_types.h"
+#include "wx/string.h"
+
+class wxFileInputStream;
+class wxTextInputStream;
+class GCParseBlock;
+class GCParseSample;
+
+class GCParser
+{
+  private:
+    GCParser();         // undefined
+
+  protected:
+    const GCDataStore &                 m_dataStore;
+    size_t                              m_linesRead;
+    wxFileInputStream *                 m_fileStreamPointer;
+    wxTextInputStream *                 m_textStreamPointer;
+
+    void SetUpStreams(wxString fileName);
+    bool HasContent(const wxString&) const;
+    wxString ReadLine(bool skipBlankLines=true);
+
+    void CheckNoExtraData();
+    virtual bool CompleteParse(GCParse&) = 0;
+
+    void FillData              (GCParse&, size_t popIndex, size_t locIndex, GCInterleaving, size_t expectedSequences);
+    void FillDataInterleaved   (GCParseBlock &,  GCParseLocus &, size_t expectedSequences);
+    void FillDataNonInterleaved(GCParseBlock &,  GCParseLocus &, size_t expectedSequences);
+
+    GCParseBlock &  AddBlock(   GCParse & , const GCParsePop &, const GCParseLocus &, size_t expectedSequences);
+    GCParseLocus &  AddLocus(   GCParse & , size_t expectedIndex, size_t locusLength);
+    GCParsePop &    AddPop(     GCParse & , size_t expectedIndex, wxString comment);
+    GCParse &       MakeParse(  GCFile &            fileRef,
+                                GCFileFormat        format,
+                                gcGeneralDataType   dataType,
+                                GCInterleaving      interleaving,
+                                wxString            delimiter=wxEmptyString);
+    GCParseSample & MakeSample( GCParseBlock &      block,
+                                size_t              indexInBlock,
+                                size_t              lineInFile,
+                                wxString            label);
+    void AddDataToSample(GCParseSample &, GCParseLocus &, wxString data);
+    void AddNucDataToSample(GCParseSample &, GCParseLocus &, wxString data);
+    void AddAllelicDataToSample(GCParseSample &, GCParseLocus &, wxString data);
+    wxArrayString makeKalleleTokens(wxString tokensTogether, wxString delim);
+
+    void SetDataTypeFromFile(GCParse & parse, gcSpecificDataType type);
+
+  public:
+    GCParser(const GCDataStore&);
+    virtual ~GCParser();
+};
+
+#endif  // GC_PARSER_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_phylip.cpp b/src/convParse/gc_phylip.cpp
new file mode 100644
index 0000000..e6cb27e
--- /dev/null
+++ b/src/convParse/gc_phylip.cpp
@@ -0,0 +1,190 @@
+// $Id: gc_phylip.cpp,v 1.29 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_errhandling.h"
+#include "gc_data.h"                // for ToWxString
+#include "gc_datastore.h"
+#include "gc_default.h"             // for gcdefault::migrateSequenceNameLength
+#include "gc_file.h"
+#include "gc_file_util.h"
+#include "gc_infile_err.h"
+#include "gc_parse_block.h"
+#include "gc_phylip.h"
+#include "gc_strings.h"
+#include "gc_strings_parse.h"
+
+#include "wx/log.h"
+#include "wx/tokenzr.h"
+#include "wx/txtstrm.h"
+#include "wx/wfstream.h"
+
+//------------------------------------------------------------------------------------
+
+GCPhylipParser::GCPhylipParser(const GCDataStore& ds)
+    :   GCParser(ds)
+{
+}
+
+GCPhylipParser::~GCPhylipParser()
+{
+}
+
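+// Reads the first line of a PHYLIP file.  A minimal sketch of the header this
+// routine expects (hypothetical values, for illustration only):
+//
+//     10 834 W
+//
+// i.e. the number of sequences, the number of sites, and an optional "W"
+// token indicating that a weights line follows.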
+void
+GCPhylipParser::ParseTopPhylipLine(  size_t* numSequences,
+                                     size_t* numSites,
+                                     bool* hasWeights)
+{
+    assert(m_textStreamPointer != NULL);
+    wxString line = ReadLine();
+    wxStringTokenizer tokenizer(line);
+
+    wxString sequenceString = tokenizer.GetNextToken();
+    wxString siteString     = tokenizer.GetNextToken();
+
+    long longVal;
+    if(!sequenceString.ToLong(&longVal))
+    {
+        throw gc_phylip_first_token(sequenceString);
+    }
+    if(longVal <= 0)
+    {
+        throw gc_phylip_first_token(sequenceString);
+    }
+    *numSequences = (size_t)longVal;
+
+    if(!siteString.ToLong(&longVal))
+    {
+        throw gc_phylip_second_token(siteString);
+    }
+    if(longVal <= 0)
+    {
+        throw gc_phylip_second_token(siteString);
+    }
+    *numSites = (size_t)longVal;
+
+    *hasWeights = false;
+    if(tokenizer.HasMoreTokens())
+    {
+        wxString weightToken = tokenizer.GetNextToken();
+        if(weightToken.CmpNoCase("w") == 0) *hasWeights = true;
+    }
+}
+
+bool
+GCPhylipParser::ParsePhylipWeightsLine(size_t numSites, wxString fileName)
+// we don't do anything with this information, but
+// we need to make sure we skip over all of it.
+{
+    wxLogMessage(gcerr_parse::ignoringPhylipWeights,fileName.c_str());
+    wxString line = ReadLine();
+    wxString label = line.Left(gcdefault::migrateSequenceNameLength);
+    wxString data  = line.Remove(0,gcdefault::migrateSequenceNameLength);
+
+    // remove any whitespace characters from data
+    data.Replace(" ","",true);
+    data.Replace("\t","",true);
+
+    while (data.Length() < numSites)
+    {
+        line = ReadLine();
+        line.Replace(" ","",true);
+        line.Replace("\t","",true);
+        data += line;
+    }
+
+    return true;
+}
+
+GCParse *
+GCPhylipParser::Parse(GCFile & fileRef, gcGeneralDataType dataTypes, GCInterleaving interleaving)
+{
+    SetUpStreams(fileRef.GetName());
+    GCParse & parseData = MakeParse(fileRef,format_PHYLIP,dataTypes,interleaving);
+
+    try
+    {
+        size_t numSequences;
+        size_t numSites;
+        bool hasWeights;
+
+        ParseTopPhylipLine(&numSequences,&numSites,&hasWeights);
+        if(hasWeights)
+            // skipping this line
+        {
+            ParsePhylipWeightsLine(numSites,fileRef.GetName());
+        }
+
+        AddPop(parseData,0,wxEmptyString); // no pop names in phylip
+        AddLocus(parseData,0,numSites);
+        FillData(parseData,0,0,interleaving,numSequences);
+        CheckNoExtraData();
+        return &parseData;
+    }
+    catch(gc_infile_err& e)
+    {
+        delete &parseData;
+        e.setFile(fileRef.GetName());
+        e.setRow(m_linesRead);
+        throw;
+    }
+    catch(gc_eof& f)
+    {
+        if(CompleteParse(parseData))
+        {
+            return &parseData;
+        }
+        else
+        {
+            delete &parseData;
+            f.setFile(fileRef.GetName());
+            throw;
+        }
+    }
+
+    assert(false);
+    return NULL;
+}
+
+bool
+GCPhylipParser::CompleteParse(GCParse& parseData)
+{
+    // check we have a pop
+    size_t pcount = parseData.GetPopCount();
+    if(pcount != 1) return false;
+    const GCParsePop & pop = parseData.GetParsePop(0);
+
+    // Silence compiler warning about the unreferenced variable.
+    (void)pop;
+
+    // check we have a locus
+    size_t lcount = parseData.GetLociCount();
+    if(lcount != 1) return false;
+    //const GCParseLocus & loc = parseData.GetParseLocus(0);
+
+    // check we have a single block
+    constBlockVector blocks = parseData.GetBlocks();
+    if(blocks.size() != 1) return false;
+
+    // check block has correct number of sequences
+    const GCParseBlock * blockP = blocks[0];
+    if(blockP == NULL) return false;
+    size_t expectedNumSequences = blockP->GetExpectedNumSequences();
+    const GCParseSamples & samples = blockP->GetSamples();
+    if(samples.size() != expectedNumSequences) return false;
+
+    // check block has correct number of sites
+    if(blockP->HasIncompleteSequences()) return false;
+
+    return true;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_phylip.h b/src/convParse/gc_phylip.h
new file mode 100644
index 0000000..30bfe9c
--- /dev/null
+++ b/src/convParse/gc_phylip.h
@@ -0,0 +1,46 @@
+// $Id: gc_phylip.h,v 1.16 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PHYLIP_H
+#define GC_PHYLIP_H
+
+#include "gc_parser.h"
+#include "gc_types.h"
+
+class GCFile;
+class GCParse;
+
+class GCPhylipParser : public GCParser
+{
+  private:
+    GCPhylipParser();           // undefined
+
+  protected:
+    void ParseTopPhylipLine(size_t              *   numSequences,
+                            size_t              *   numSites,
+                            bool                *   hasWeights);
+    bool ParsePhylipWeightsLine(size_t numSites, wxString fileName);
+    bool CompleteParse  (GCParse&);
+
+  public:
+    GCPhylipParser(const GCDataStore&);
+    virtual ~GCPhylipParser();
+
+    GCParse * Parse(GCFile &            fileRef,
+                    gcGeneralDataType   dataType,
+                    GCInterleaving      interleaving);
+
+    void BadFirstToken  (wxString token) const;
+    void BadSecondToken (wxString token) const;
+};
+
+#endif  // GC_PHYLIP_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_pop_match.cpp b/src/convParse/gc_pop_match.cpp
new file mode 100644
index 0000000..94c45c3
--- /dev/null
+++ b/src/convParse/gc_pop_match.cpp
@@ -0,0 +1,159 @@
+// $Id: gc_pop_match.cpp,v 1.19 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_errhandling.h"
+#include "gc_file.h"
+#include "gc_parse.h"
+#include "gc_parse_pop.h"
+#include "gc_pop_match.h"
+#include "gc_strings.h"
+#include "wx/filename.h"
+#include "wx/string.h"
+
+//------------------------------------------------------------------------------------
+
+GCPopSpec::GCPopSpec(bool blessed, wxString name)
+    :
+    m_blessed(blessed),
+    m_name(name)
+{
+}
+
+GCPopSpec::~GCPopSpec()
+{
+}
+
+bool
+GCPopSpec::GetBlessed() const
+{
+    return m_blessed;
+}
+
+wxString
+GCPopSpec::GetName() const
+{
+    return m_name;
+}
+
+//------------------------------------------------------------------------------------
+
+GCPopMatcher::GCPopMatcher()
+    :
+    m_popMatchType(popmatch_DEFAULT)
+{
+    m_popNames.Empty();
+}
+
+GCPopMatcher::GCPopMatcher(pop_match pMatchType)
+    :
+    m_popMatchType(pMatchType)
+{
+    assert(m_popMatchType == popmatch_DEFAULT || m_popMatchType == popmatch_NAME);
+    m_popNames.Empty();
+}
+
+GCPopMatcher::GCPopMatcher(pop_match pMatchType, wxString name)
+    :
+    m_popMatchType(pMatchType)
+{
+    assert(m_popMatchType == popmatch_SINGLE);
+    m_popNames.Empty();
+    m_popNames.Add(name);
+}
+
+GCPopMatcher::GCPopMatcher(pop_match pMatchType, wxArrayString names)
+    :
+    m_popMatchType(pMatchType),
+    m_popNames(names)
+{
+    assert(m_popMatchType == popmatch_VECTOR);
+    assert(!(m_popNames.IsEmpty()));
+}
+
+GCPopMatcher::~GCPopMatcher()
+{
+}
+
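+// Chooses the population name (and whether it is user-"blessed") for the
+// population at the given index of a parse, according to the match type:
+// popmatch_DEFAULT uses the parsed name, synthesizing one from the file name
+// when it is empty; popmatch_NAME uses the parsed name verbatim;
+// popmatch_SINGLE maps every population onto the one user-supplied name; and
+// popmatch_VECTOR indexes into the user-supplied list of names.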
+GCPopSpec
+GCPopMatcher::GetPopSpec(size_t index, const GCParse& parse) const
+{
+    if(!HandlesThisManyPops(index))
+    {
+        wxString msg = wxString::Format(gcerr::tooFewPopsInSpec,(int)index);
+        gc_implementation_error e(msg.c_str());
+        throw e;
+    }
+
+    wxString shortName = parse.GetFileRef().GetShortName();
+    wxString popName = parse.GetParsePop(index).GetName();
+    if(popName.IsEmpty())
+    {
+        popName = wxString::Format(gcstr::populationNameFromFile,
+                                   (int)index+1,
+                                   shortName.c_str());
+    }
+
+    switch(m_popMatchType)
+    {
+        case popmatch_DEFAULT:
+            return GCPopSpec(false,popName);
+            break;
+        case popmatch_NAME:
+            return GCPopSpec(false,parse.GetParsePop(index).GetName());
+            break;
+        case popmatch_SINGLE:
+            assert(m_popNames.size() == 1);
+            return GCPopSpec(true,m_popNames[0]);
+            break;
+        case popmatch_VECTOR:
+            return GCPopSpec(true,m_popNames[index]);
+            break;
+    };
+    assert(false);
+    return GCPopSpec(false,gcerr::emptyName);
+}
+
+bool
+GCPopMatcher::HandlesThisManyPops(size_t count) const
+{
+    switch(m_popMatchType)
+    {
+        case popmatch_DEFAULT:
+            return true;
+            break;
+        case popmatch_NAME:
+            return true;
+            break;
+        case popmatch_SINGLE:
+            return true;
+            break;
+        case popmatch_VECTOR:
+            return(count <= m_popNames.size());
+            break;
+    };
+    assert(false);
+    return false;
+}
+
+pop_match
+GCPopMatcher::GetPopMatchType() const
+{
+    return m_popMatchType;
+}
+
+const wxArrayString &
+GCPopMatcher::GetPopNames() const
+{
+    return m_popNames;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_pop_match.h b/src/convParse/gc_pop_match.h
new file mode 100644
index 0000000..5b54030
--- /dev/null
+++ b/src/convParse/gc_pop_match.h
@@ -0,0 +1,56 @@
+// $Id: gc_pop_match.h,v 1.10 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_POP_MATCH_H
+#define GC_POP_MATCH_H
+
+#include "gc_types.h"
+#include "wx/arrstr.h"
+
+class GCFile;
+class GCParse;
+
+class GCPopSpec
+{
+  private:
+    bool            m_blessed;
+    wxString        m_name;
+    GCPopSpec();    // undefined
+
+  public:
+    GCPopSpec(bool blessed, wxString name);
+    ~GCPopSpec();
+
+    bool        GetBlessed()    const ;
+    wxString    GetName()       const ;
+};
+
+class GCPopMatcher
+{
+  protected:
+    pop_match       m_popMatchType;
+    wxArrayString   m_popNames;
+
+  public:
+    GCPopMatcher();
+    GCPopMatcher(pop_match popMatchType);
+    GCPopMatcher(pop_match popMatchType, wxString name);
+    GCPopMatcher(pop_match popMatchType, wxArrayString names);
+    ~GCPopMatcher();
+    GCPopSpec   GetPopSpec(size_t index, const GCParse &) const;
+    bool        HandlesThisManyPops(size_t count) const;
+    pop_match   GetPopMatchType() const ;
+
+    const wxArrayString &   GetPopNames() const;
+};
+
+#endif  // GC_POP_MATCH_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_population.cpp b/src/convParse/gc_population.cpp
new file mode 100644
index 0000000..51ec7f9
--- /dev/null
+++ b/src/convParse/gc_population.cpp
@@ -0,0 +1,120 @@
+// $Id: gc_population.cpp,v 1.13 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_population.h"
+#include "gc_default.h"
+#include "gc_strings.h"
+#include "gc_strings_pop.h"
+#include "gc_errhandling.h"
+#include "wx/log.h"
+
+gcPopulation::gcPopulation()
+    :
+    m_blessed(false),
+    m_parentID(gcdefault::badIndex),
+    m_displayOrder(0),
+    m_index(0)
+{
+    SetName(wxString::Format(gcstr_pop::internalName,(long)GetId()));
+}
+
+gcPopulation::~gcPopulation()
+{
+}
+
+bool
+gcPopulation::GetBlessed() const
+{
+    return m_blessed;
+}
+
+void
+gcPopulation::SetBlessed(bool blessed)
+{
+    m_blessed = blessed;
+}
+
+int
+gcPopulation::GetDispIndex() const
+{
+    return m_index;
+}
+
+void
+gcPopulation::SetDispIndex(int index)
+{
+    m_index = index;
+}
+
+void
+gcPopulation::SetParentId(size_t id)
+{
+    m_parentID = id;
+}
+
+size_t
+gcPopulation::GetParentId() const
+{
+    if(!HasParent())
+    {
+        wxString msg = wxString::Format(gcerr::unsetParentId,GetName().c_str());
+        throw gc_implementation_error(msg.c_str());
+    }
+    return(m_parentID);
+}
+
+bool
+gcPopulation::HasParent() const
+{
+    if (m_parentID == gcdefault::badIndex)
+    {
+        return false;
+    }
+    return true;
+}
+
+void
+gcPopulation::ClearParent()
+{
+    m_parentID = gcdefault::badIndex;
+}
+
+void
+gcPopulation::SetDispOrder(int order)
+{
+    m_displayOrder = order;
+}
+
+int
+gcPopulation::GetDispOrder() const
+{
+    return m_displayOrder;
+}
+
+bool
+gcPopulation::HasDispOrder() const
+{
+    if (m_displayOrder > 0)
+    {
+        return true;
+    }
+    return false;
+}
+
+void
+gcPopulation::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%spopulation %s (pop id %ld)",  // EWDUMPOK
+               prefix.c_str(),
+               GetName().c_str(),
+               (long)GetId());
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_population.h b/src/convParse/gc_population.h
new file mode 100644
index 0000000..08f7a19
--- /dev/null
+++ b/src/convParse/gc_population.h
@@ -0,0 +1,53 @@
+// $Id: gc_population.h,v 1.15 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_POPULATION_H
+#define GC_POPULATION_H
+
+#include "gc_quantum.h"
+#include "wx/string.h"
+
+class GCStructures;
+
+class gcPopulation : public GCQuantum
+{
+    friend class GCStructures;
+
+  private:
+    bool    m_blessed;
+    size_t  m_parentID;      // used by divergence
+    int     m_displayOrder;  // used by divergence
+    int     m_index;         // used by lam_conv
+
+    void    SetBlessed(bool blessed);
+
+  public:
+    gcPopulation();
+    virtual ~gcPopulation();
+    bool    GetBlessed() const ;
+
+    int     GetDispIndex()          const;
+    void    SetDispIndex(int index);
+
+    void    SetParentId(size_t id);
+    void    ClearParent();
+    size_t  GetParentId() const;
+    bool    HasParent() const;
+
+    void    SetDispOrder(int order);
+    int     GetDispOrder() const;
+    bool    HasDispOrder() const;
+
+    void    DebugDump(wxString prefix=wxEmptyString) const;
+};
+
+#endif  // GC_POPULATION_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_region.cpp b/src/convParse/gc_region.cpp
new file mode 100644
index 0000000..f8bed61
--- /dev/null
+++ b/src/convParse/gc_region.cpp
@@ -0,0 +1,308 @@
+// $Id: gc_region.cpp,v 1.18 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_errhandling.h"
+#include "gc_locus.h"
+#include "gc_region.h"
+#include "gc_strings.h"
+#include "gc_strings_region.h"
+#include "wx/log.h"
+
+GCMapPosition::GCMapPosition()
+    :
+    m_hasPosition(false),
+    m_position(0)
+{
+}
+
+GCMapPosition::~GCMapPosition()
+{
+}
+
+long
+GCMapPosition::GetPosition() const
+{
+    if(!m_hasPosition)
+    {
+        gc_implementation_error e(gcerr::regionNoPositionToGet.c_str());
+        throw e;
+    }
+    return m_position;
+}
+
+bool
+GCMapPosition::HasPosition() const
+{
+    return m_hasPosition;
+}
+
+void
+GCMapPosition::SetPosition(long position)
+{
+    m_hasPosition = true;
+    m_position = position;
+}
+
+void
+GCMapPosition::UnsetPosition()
+{
+    m_hasPosition = false;
+}
+
+wxString
+GCMapPosition::AsString() const
+{
+    if(HasPosition())
+    {
+        return wxString::Format(gcstr_region::mapPosition,GetPosition());
+    }
+    return gcstr::mapPositionUnset;
+}
+
+void
+GCMapPosition::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%s%s",prefix.c_str(),AsString().c_str());   // EWDUMPOK
+}
+
+GCLocusInfoMap::GCLocusInfoMap()
+{
+}
+
+GCLocusInfoMap::~GCLocusInfoMap()
+{
+}
+
+wxString
+GCLocusInfoMap::AsString() const
+{
+    wxString retString = "";
+    for(const_iterator i = begin(); i != end(); i++)
+    {
+        size_t locusId = (*i).first;
+        GCMapPosition mapPosition = (*i).second;
+        retString += wxString::Format(gcstr_region::locusMapPosition,(int)locusId,mapPosition.AsString().c_str());
+    }
+    return retString;
+}
+
+GCTraitInfoSet::GCTraitInfoSet()
+{
+}
+
+GCTraitInfoSet::~GCTraitInfoSet()
+{
+}
+
+wxString
+GCTraitInfoSet::AsString() const
+{
+    wxString retString = "";
+    for(const_iterator i = begin(); i != end(); i++)
+    {
+        retString += wxString::Format(gcstr_region::traitIndexListMember,(int)(*i));
+    }
+    return retString;
+}
+
+gcRegion::gcRegion()
+    :
+    m_name(wxString::Format(gcstr_region::internalName,(long)m_objId)),
+    m_blessed(false),
+    m_hasEffectivePopulationSize(false),
+    m_effectivePopulationSize(0.0)
+{
+}
+
+gcRegion::~gcRegion()
+{
+}
+
+void
+gcRegion::AddLocus(gcLocus & locus)
+{
+    if(m_loci.find(locus.GetId()) != m_loci.end())
+    {
+        wxString msg = wxString::Format(gcerr::duplicateLocusInRegion,
+                                        locus.GetName().c_str(),
+                                        GetName().c_str());
+        gc_implementation_error e(msg.c_str());
+        throw e;
+    }
+
+    m_loci[locus.GetId()] = GCMapPosition();
+}
+
+void
+gcRegion::AddLocus(gcLocus & locus, long mapPosition)
+{
+    AddLocus(locus);
+    m_loci[locus.GetId()].SetPosition(mapPosition);
+}
+
+void
+gcRegion::AddTraitId(size_t traitId)
+{
+    if(m_traits.find(traitId) != m_traits.end())
+    {
+        wxString msg = wxString::Format(gcerr::regionTraitAlreadyAdded,
+                                        (int)traitId);
+        gc_implementation_error e(msg.c_str());
+        throw e;
+    }
+    m_traits.insert(traitId);
+}
+
+void
+gcRegion::RemoveLocusId(size_t locusId)
+{
+    GCLocusInfoMap::iterator iter = m_loci.find(locusId);
+    if(iter == m_loci.end())
+    {
+        wxString msg = wxString::Format(gcerr::regionNoSuchLocus,
+                                        (int)locusId,
+                                        (int)GetId());
+        gc_implementation_error e(msg.c_str());
+        throw e;
+    }
+    m_loci.erase(iter);
+}
+
+void
+gcRegion::RemoveTraitId(size_t traitId)
+{
+    GCTraitInfoSet::iterator iter = m_traits.find(traitId);
+    if(iter == m_traits.end())
+    {
+        wxString msg = wxString::Format(gcerr::regionNoSuchTrait,
+                                        (int)traitId,
+                                        (int)GetId());
+        gc_implementation_error e(msg.c_str());
+        throw e;
+    }
+    m_traits.erase(iter);
+}
+
+bool
+gcRegion::GetBlessed() const
+{
+    return m_blessed;
+}
+
+void
+gcRegion::SetBlessed(bool blessed)
+{
+    m_blessed = blessed;
+}
+
+wxString
+gcRegion::GetName() const
+{
+    return m_name;
+}
+
+void
+gcRegion::SetName(wxString newName)
+{
+    m_name = newName;
+}
+
+bool
+gcRegion::HasEffectivePopulationSize() const
+{
+    return m_hasEffectivePopulationSize;
+}
+
+double
+gcRegion::GetEffectivePopulationSize() const
+{
+    if(!(HasEffectivePopulationSize()))
+    {
+        wxString msg = wxString::Format(gcerr::regionNoEffectivePopSize,
+                                        (int)GetId());
+        gc_implementation_error e(msg.c_str());
+        throw e;
+    }
+    return m_effectivePopulationSize;
+}
+
+void
+gcRegion::SetEffectivePopulationSize(double effectivePopulationSize)
+{
+    if( ! (effectivePopulationSize > 0))
+    {
+        wxString msg = wxString::Format(gcerr::badEffectivePopSize,
+                                        GetName().c_str(),
+                                        effectivePopulationSize);
+        gc_data_error e(msg.c_str());
+        throw e;
+    }
+
+    m_hasEffectivePopulationSize = true;
+    m_effectivePopulationSize = effectivePopulationSize;
+}
+
+const GCLocusInfoMap &
+gcRegion::GetLocusInfoMap() const
+{
+    return m_loci;
+}
+
+size_t
+gcRegion::GetLocusCount() const
+{
+    return m_loci.size();
+}
+
+const GCTraitInfoSet &
+gcRegion::GetTraitInfoSet() const
+{
+    return m_traits;
+}
+
+void
+gcRegion::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sregion \"%s\", (id %ld)",  // EWDUMPOK
+               prefix.c_str(),
+               GetName().c_str(),
+               (long)GetId());
+
+    if(HasEffectivePopulationSize())
+    {
+        wxLogDebug("%seffecive population size %f", // EWDUMPOK
+                   (prefix+gcstr::indent).c_str(),
+                   GetEffectivePopulationSize());
+    }
+
+    wxLogDebug("%sloci:%s", // EWDUMPOK
+               (prefix+gcstr::indent).c_str(),
+               m_loci.AsString().c_str());
+
+    wxLogDebug("%straits:%s",   // EWDUMPOK
+               (prefix+gcstr::indent).c_str(),
+               m_traits.AsString().c_str());
+}
+
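+// Two regions can be merged unless both specify an effective population size
+// and those sizes differ.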
+bool
+gcRegion::CanMergeWith(const gcRegion & regionRef) const
+{
+    if(HasEffectivePopulationSize() && regionRef.HasEffectivePopulationSize())
+    {
+        if(GetEffectivePopulationSize() != regionRef.GetEffectivePopulationSize())
+        {
+            return false;
+        }
+    }
+
+    return true;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_region.h b/src/convParse/gc_region.h
new file mode 100644
index 0000000..7038ec5
--- /dev/null
+++ b/src/convParse/gc_region.h
@@ -0,0 +1,102 @@
+// $Id: gc_region.h,v 1.15 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_REGION_H
+#define GC_REGION_H
+
+#include <map>
+#include <set>
+
+#include "gc_quantum.h"
+#include "gc_types.h"
+#include "wx/string.h"
+
+class gcLocus;
+class GCStructures;
+
+class GCMapPosition
+{
+  private:
+    bool        m_hasPosition;
+    long        m_position;
+  public:
+    GCMapPosition();
+    ~GCMapPosition();
+
+    long        GetPosition() const;
+    bool        HasPosition() const;
+    void        SetPosition(long position);
+    void        UnsetPosition();
+    void        DebugDump(wxString prefix=wxEmptyString) const;
+    wxString    AsString() const;
+};
+
+class GCLocusInfoMap : public std::map<size_t, GCMapPosition>
+{
+  public:
+    GCLocusInfoMap() ;
+    ~GCLocusInfoMap() ;
+    wxString AsString() const;
+};
+
+class GCTraitInfoSet : public std::set<size_t>
+{
+  public:
+    GCTraitInfoSet() ;
+    ~GCTraitInfoSet() ;
+    wxString AsString() const;
+};
+
+class gcRegion : public GCQuantum
+{
+    friend class GCStructures;
+
+  private:
+    wxString                            m_name;
+    bool                                m_blessed;
+    bool                                m_hasEffectivePopulationSize;
+    double                              m_effectivePopulationSize;
+
+    GCLocusInfoMap                      m_loci;
+    GCTraitInfoSet                      m_traits;
+
+    void        AddLocus(gcLocus &);
+    void        AddLocus(gcLocus &, long mapPosition);
+    void        AddTraitId(size_t traitClassId);
+    void        RemoveLocusId(size_t locusId);
+    void        RemoveTraitId(size_t traitClassId);
+    void        SetName(wxString newName);
+    void        SetBlessed(bool blessed);
+
+  protected:
+
+    const GCLocusInfoMap &  GetLocusInfoMap() const ;
+
+  public:
+    gcRegion();
+    ~gcRegion();
+
+    bool        GetBlessed()                const ;
+    wxString    GetName()                   const ;
+    double      GetEffectivePopulationSize()const ;
+    bool        HasEffectivePopulationSize()const ;
+    size_t      GetLocusCount()             const ;
+
+    const GCTraitInfoSet &  GetTraitInfoSet() const ;
+
+    void    DebugDump(wxString=wxEmptyString) const;
+
+    void    SetEffectivePopulationSize(double effectivePopulationSize);
+    bool    CanMergeWith(const gcRegion&) const;
+};
+
+#endif  // GC_REGION_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_sequential_data.cpp b/src/convParse/gc_sequential_data.cpp
new file mode 100644
index 0000000..53840de
--- /dev/null
+++ b/src/convParse/gc_sequential_data.cpp
@@ -0,0 +1,187 @@
+// $Id: gc_sequential_data.cpp,v 1.25 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_errhandling.h" // EWFIX.P4 -- can this be taken out
+#include "gc_data.h"
+#include "gc_infile_err.h"
+#include "gc_parse.h"
+#include "gc_parse_block.h"
+#include "gc_parse_locus.h"
+#include "gc_sequential_data.h"
+#include "gc_strings.h"
+
+#include "wx/log.h"
+#include "wx/regex.h"
+#include "wx/tokenzr.h"
+
+//------------------------------------------------------------------------------------
+
+GCSequentialData::GCSequentialData(GCParseBlock & block)
+    :
+    m_block(block)
+{
+}
+
+GCSequentialData::~GCSequentialData()
+{
+}
+
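+// Sanity check run after every AddMarker call: throws gc_too_many_markers as
+// soon as a sample accumulates more markers than its parse locus declares.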
+void
+GCSequentialData::CheckMarkerCount()
+{
+    size_t currentMarkerCount = GetNumMarkers();
+    size_t wantedMarkerCount = GetParseBlock().GetLocusRef().GetNumMarkers();
+    if(currentMarkerCount > wantedMarkerCount)
+    {
+        throw gc_too_many_markers(currentMarkerCount,wantedMarkerCount);
+    }
+}
+
+void
+GCSequentialData::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sdata: %s",prefix.c_str(),GetData().c_str());  // EWDUMPOK
+}
+
+GCParseBlock &
+GCSequentialData::GetParseBlock()
+{
+    return m_block;
+}
+
+const GCParseBlock &
+GCSequentialData::GetParseBlock() const
+{
+    return m_block;
+}
+
+//------------------------------------------------------------------------------------
+
+GCAllelicData::GCAllelicData(GCParseBlock& block)
+    :
+    GCSequentialData(block)
+{
+}
+
+GCAllelicData::~GCAllelicData()
+{
+}
+
+void
+GCAllelicData::AddMarker(wxString data)
+{
+    const GCParseBlock & b = GetParseBlock();
+    const GCParse & p = b.GetParse();
+    assert(!(p.GetDataType().empty()));
+    long longVal;
+    wxString questionMark("?");
+    if (data != questionMark)
+    {
+        if(!data.ToLong(&longVal))
+        {
+            GetParseBlock().SetCannotBeMsat();
+        }
+        else
+        {
+            if(longVal <= 0)
+            {
+                GetParseBlock().SetCannotBeMsat();
+            }
+        }
+        if (p.GetDataType().empty())
+        {
+            throw gc_illegal_msat(data);
+        }
+    }
+
+    m_data.Add(data);
+    CheckMarkerCount();
+}
+
+size_t
+GCAllelicData::GetNumMarkers() const
+{
+    return m_data.Count();
+}
+
+wxString
+GCAllelicData::GetData() const
+{
+    wxString dataOut;
+    for(size_t index=0; index < m_data.Count(); index++)
+    {
+        dataOut += wxString::Format(" %s",m_data[index].c_str());
+    }
+    return dataOut;
+}
+
+wxString
+GCAllelicData::GetData(size_t markerIndex) const
+{
+    assert(markerIndex < m_data.Count());
+    return wxString::Format(" %s ",m_data[markerIndex].c_str());
+}
+
+//------------------------------------------------------------------------------------
+
+GCNucData::GCNucData(GCParseBlock& block)
+    :
+    GCSequentialData(block)
+{
+}
+
+GCNucData::~GCNucData()
+{
+}
+
+void
+GCNucData::AddMarker(wxString data)
+{
+    // don't move the dash from its position just after the
+    // caret -- otherwise the regex compiler will think it's
+    // indicating a character range
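+    //
+    // The character class accepts the nucleotides A/C/G/T/U, the IUPAC
+    // ambiguity codes, gap ('-'), the unknown codes '?', 'O' and 'X', and
+    // whitespace, case-insensitively; anything else is reported as an
+    // illegal DNA character.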
+    wxRegEx illegalData("[^-ACGTUMRWSYKVHDBNOX? \t]",wxRE_ICASE);
+    if(illegalData.Matches(data))
+    {
+        size_t start;
+        size_t length;
+        illegalData.GetMatch(&start,&length);
+        throw gc_illegal_dna_character(data[start],start+1,data);
+    }
+
+    wxString dataNoWhiteSpace = data;
+    dataNoWhiteSpace.Replace(" ","",true);
+    dataNoWhiteSpace.Replace("\t","",true);
+    m_data+=dataNoWhiteSpace;
+    CheckMarkerCount();
+}
+
+size_t
+GCNucData::GetNumMarkers() const
+{
+    return m_data.Len();
+}
+
+wxString
+GCNucData::GetData() const
+{
+    return m_data;
+}
+
+wxString
+GCNucData::GetData(size_t markerIndex) const
+{
+    assert(markerIndex < m_data.Len());
+    return wxString::Format(" %c ",m_data[markerIndex]);
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/gc_sequential_data.h b/src/convParse/gc_sequential_data.h
new file mode 100644
index 0000000..ca67c7e
--- /dev/null
+++ b/src/convParse/gc_sequential_data.h
@@ -0,0 +1,78 @@
+// $Id: gc_sequential_data.h,v 1.16 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_SEQUENTIAL_DATA_H
+#define GC_SEQUENTIAL_DATA_H
+
+#include "gc_types.h"
+#include "wx/arrstr.h"
+#include "wx/string.h"
+
+class GCParseBlock;
+class GCParser;
+
+class GCSequentialData
+{
+  private:
+    GCParseBlock &          m_block;
+
+  protected:
+    GCSequentialData();     // undefined
+    GCSequentialData(GCParseBlock &);
+    GCParseBlock &    GetParseBlock();
+
+  public:
+    virtual ~GCSequentialData();
+
+    virtual void        AddMarker(wxString data) = 0;
+    void        CheckMarkerCount();
+    virtual size_t      GetNumMarkers()  const = 0;
+    virtual wxString    GetData() const = 0;
+    virtual wxString    GetData(size_t siteIndex) const = 0;
+
+    void    DebugDump(wxString prefix=wxEmptyString) const;
+    const GCParseBlock &    GetParseBlock() const;
+};
+
+class GCAllelicData : public GCSequentialData
+{
+  private:
+    wxArrayString           m_data;
+
+  public:
+    GCAllelicData();        // undefined
+    GCAllelicData(GCParseBlock&);
+    virtual ~GCAllelicData();
+
+    void        AddMarker(wxString data);
+    size_t      GetNumMarkers() const;
+    wxString    GetData() const;
+    wxString    GetData(size_t siteIndex) const;
+};
+
+class GCNucData : public GCSequentialData
+{
+  private:
+    wxString                m_data;
+
+  public:
+    GCNucData();            // undefined
+    GCNucData(GCParseBlock&);
+    virtual ~GCNucData();
+
+    void        AddMarker(wxString data);
+    size_t      GetNumMarkers() const;
+    wxString    GetData() const;
+    wxString    GetData(size_t siteIndex) const;
+};
+
+#endif  // GC_SEQUENTIAL_DATA_H
+
+//____________________________________________________________________________________
diff --git a/src/convParse/tixml_util.cpp b/src/convParse/tixml_util.cpp
new file mode 100644
index 0000000..3837f43
--- /dev/null
+++ b/src/convParse/tixml_util.cpp
@@ -0,0 +1,178 @@
+// $Id: tixml_util.cpp,v 1.10 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "errhandling.h"
+#include "tinyxml.h"
+#include "tixml_base.h"
+#include "tixml_util.h"
+#include "cnv_strings.h"
+
+//------------------------------------------------------------------------------------
+
+#if 0
+TiXmlElement *
+tiwx_singleElement(TiXmlElement* ancestor, wxString nodeName,bool required)
+{
+    TiXmlElement * elem = NULL;
+    try
+    {
+        elem = ti_singleElement(ancestor,nodeName,required);
+    }
+    catch(incorrect_xml_missing_tag f)
+    {
+        assert(required);
+        wxString msg = wxString::Format(cnvstr::ERR_MISSING_TAG,nodeName.c_str());
+        incorrect_xml e(msg.c_str());
+        throw e;
+        return elem;
+    }
+    catch(incorrect_xml_extra_tag g)
+    {
+        wxString msg = wxString::Format(cnvstr::ERR_EXTRA_TAG,nodeName.c_str());
+        incorrect_xml e(msg.c_str());
+        throw e;
+        return NULL;
+    }
+}
+#endif
+
+TiXmlElement *
+tiwx_optionalChild(TiXmlElement* ancestor, wxString nodeName)
+{
+    try
+    {
+        //TiXmlElement * elem = ti_optionalChild(ancestor,nodeName.c_str());
+        TiXmlElement * elem = ti_optionalChild(ancestor,(const char *)nodeName.mb_str());// JRM hack
+        return elem;
+    }
+    catch(incorrect_xml_extra_tag g)
+    {
+        //wxString msg = wxString::Format(cnvstr::ERR_EXTRA_TAG,nodeName.c_str());
+        //incorrect_xml e(msg.c_str());
+        wxString msg = wxString::Format(cnvstr::ERR_EXTRA_TAG,(const char *)nodeName.mb_str());// JRM hack
+        incorrect_xml e((const char *)msg.mb_str());// JRM hack
+        throw e;
+        return NULL;
+    }
+}
+
+TiXmlElement *
+tiwx_requiredChild(TiXmlElement* ancestor, wxString nodeName)
+{
+    try
+    {
+        //TiXmlElement * elem = ti_requiredChild(ancestor,nodeName.c_str());
+        TiXmlElement * elem = ti_requiredChild(ancestor,(const char *)nodeName.mb_str());// JRM hack
+        return elem;
+    }
+    catch(incorrect_xml_missing_tag f)
+    {
+        //wxString msg = wxString::Format(cnvstr::ERR_MISSING_TAG,nodeName.c_str());
+        //incorrect_xml e(msg.c_str());
+        wxString msg = wxString::Format(cnvstr::ERR_MISSING_TAG,(const char *)nodeName.mb_str());// JRM hack
+        incorrect_xml e((const char *)msg.mb_str());// JRM hack
+        throw e;
+        return NULL;
+    }
+    catch(incorrect_xml_extra_tag g)
+    {
+        //wxString msg = wxString::Format(cnvstr::ERR_EXTRA_TAG,nodeName.c_str());
+        //incorrect_xml e(msg.c_str());
+        wxString msg = wxString::Format(cnvstr::ERR_EXTRA_TAG,(const char *)nodeName.mb_str());// JRM hack
+        incorrect_xml e((const char *)msg.mb_str());// JRM hack
+        throw e;
+        return NULL;
+    }
+}
+
+wxString
+tiwx_nodeText(TiXmlElement * node)
+{
+    return wxString(ti_nodeText(node).c_str());
+}
+
+wxString
+tiwx_attributeValue(TiXmlElement * node, wxString attrName)
+{
+    //return wxString(ti_attributeValue(node,attrName.c_str()).c_str());
+    return wxString(ti_attributeValue(node,(const char *)attrName.mb_str()).c_str());// JRM hack
+}
+
+double
+tiwx_double_from_text(TiXmlElement * node)
+{
+    double value;
+    try
+    {
+        value = ti_double_from_text(node);
+    }
+    catch (incorrect_xml_not_double f)
+    {
+        wxString msg = wxString::Format(cnvstr::ERR_NOT_DOUBLE,f.text().c_str());
+        //incorrect_xml e(msg.c_str());
+        incorrect_xml e((const char *)msg.mb_str());// JRM hack
+        throw e;
+    }
+    return value;
+}
+
+long
+tiwx_long_from_text(TiXmlElement * node) throw (incorrect_xml)
+{
+    long value;
+    try
+    {
+        value = ti_long_from_text(node);
+    }
+    catch (incorrect_xml_not_long f)
+    {
+        wxString msg = wxString::Format(cnvstr::ERR_NOT_LONG,f.text().c_str());
+        //incorrect_xml e(msg.c_str());
+        incorrect_xml e((const char *)msg.mb_str());// JRM hack
+        throw e;
+    }
+    return value;
+}
+
+size_t
+tiwx_size_t_from_text(TiXmlElement * node) throw (incorrect_xml)
+{
+    size_t value;
+    try
+    {
+        value = ti_size_t_from_text(node);
+    }
+    catch (incorrect_xml_not_size_t f)
+    {
+        wxString msg = wxString::Format(cnvstr::ERR_NOT_SIZE_T,f.text().c_str());
+        //incorrect_xml e(msg.c_str());
+        incorrect_xml e((const char *)msg.mb_str());// JRM hack
+        throw e;
+    }
+    return value;
+}
+
+std::vector<TiXmlElement *>
+tiwx_optionalChildren(TiXmlElement* ancestor, wxString nodeName)
+{
+    //return ti_optionalChildren(ancestor,nodeName.c_str());
+    return ti_optionalChildren(ancestor,(const char *)nodeName.mb_str());// JRM hack
+}
+
+std::vector<TiXmlElement *>
+tiwx_requiredChildren(TiXmlElement* ancestor, wxString nodeName)
+{
+    //return ti_requiredChildren(ancestor,nodeName.c_str());
+    return ti_requiredChildren(ancestor,(const char *)nodeName.mb_str());// JRM hack
+}
+
+//____________________________________________________________________________________
diff --git a/src/convParse/tixml_util.h b/src/convParse/tixml_util.h
new file mode 100644
index 0000000..458ba77
--- /dev/null
+++ b/src/convParse/tixml_util.h
@@ -0,0 +1,36 @@
+// $Id: tixml_util.h,v 1.9 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef TIXML_UTIL_H
+#define TIXML_UTIL_H
+
+#include <string>
+#include <vector>
+
+#include "errhandling.h"
+#include "wx/string.h"
+
+class TiXmlElement;
+
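+// wxString-friendly wrappers around the tixml_base helpers; see
+// tixml_util.cpp for the implementations.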
+TiXmlElement *          tiwx_optionalChild(TiXmlElement* ancestor, wxString nodeName);
+TiXmlElement *          tiwx_requiredChild(TiXmlElement* ancestor, wxString nodeName);
+wxString                tiwx_nodeText(TiXmlElement *);
+wxString                tiwx_attributeValue(TiXmlElement*,wxString attributeName);
+
+double                  tiwx_double_from_text(TiXmlElement *);
+long                    tiwx_long_from_text(TiXmlElement *) throw (incorrect_xml);
+size_t                  tiwx_size_t_from_text(TiXmlElement *) throw (incorrect_xml);
+
+std::vector<TiXmlElement *>  tiwx_optionalChildren(TiXmlElement* ancestor, wxString nodeName);
+std::vector<TiXmlElement *>  tiwx_requiredChildren(TiXmlElement* ancestor, wxString nodeName);
+
+#endif  // TIXML_UTIL_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/cnv_strings.cpp b/src/convStrings/cnv_strings.cpp
new file mode 100644
index 0000000..cf29109
--- /dev/null
+++ b/src/convStrings/cnv_strings.cpp
@@ -0,0 +1,124 @@
+// $Id: cnv_strings.cpp,v 1.19 2012/02/17 22:02:34 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "cnv_strings.h"
+#include "wx/intl.h"
+
+// attribute names -- we don't use wxTRANSLATE because these are input files
+const wxString cnvstr::ATTR_DATATYPE                = "datatype";
+const wxString cnvstr::ATTR_FORMAT                  = "format";
+const wxString cnvstr::ATTR_PROXIMITY               = "marker-proximity";
+const wxString cnvstr::ATTR_SEQUENCEALIGNMENT       = "sequence-alignment";
+const wxString cnvstr::ATTR_TYPE                    = "type";
+const wxString cnvstr::ATTR_VERSION                 = "version";
+
+// attribute values -- we don't use wxTRANSLATE because these are input files
+const wxString cnvstr::ATTR_VAL_BYADJACENCY         = "byAdjacency";
+const wxString cnvstr::ATTR_VAL_BYLIST              = "byList";
+const wxString cnvstr::ATTR_VAL_BYNAME              = "byName";
+const wxString cnvstr::ATTR_VAL_DEFAULT             = "default";
+const wxString cnvstr::ATTR_VAL_KNOWN               = "known";
+const wxString cnvstr::ATTR_VAL_LINKED              = "linked";
+const wxString cnvstr::ATTR_VAL_SINGLE              = "single";
+const wxString cnvstr::ATTR_VAL_UNKNOWN             = "unknown";
+const wxString cnvstr::ATTR_VAL_UNLINKED            = "unlinked";
+
+// tag names -- we don't use wxTRANSLATE because these are input files
+const wxString cnvstr::TAG_ADDCOMMENT               = "lamarc-header-comment";
+const wxString cnvstr::TAG_ALLELE                   = "allele";
+const wxString cnvstr::TAG_ALLELES                  = "alleles";
+const wxString cnvstr::TAG_CONVERTER_CMD            = "lamarc-converter-cmd";
+const wxString cnvstr::TAG_DIVERGENCE               = "divergence";
+const wxString cnvstr::TAG_DIVERGENCES              = "divergences";
+const wxString cnvstr::TAG_DIV_ANCESTOR             = "ancestor";
+const wxString cnvstr::TAG_DIV_CHILD1               = "child1";
+const wxString cnvstr::TAG_DIV_CHILD2               = "child2";
+const wxString cnvstr::TAG_EFFECTIVE_POPSIZE        = "effective-popsize";
+const wxString cnvstr::TAG_FIRST_POSITION_SCANNED   = "first-position-scanned";
+const wxString cnvstr::TAG_GENO_RESOLUTIONS         = "genotype-resolutions";
+const wxString cnvstr::TAG_HAPLOTYPES               = "haplotypes";
+const wxString cnvstr::TAG_HAS_PHENOTYPE            = "has-phenotype";
+const wxString cnvstr::TAG_INDIVIDUAL               = "individual";
+const wxString cnvstr::TAG_INDIVIDUALS              = "individuals";
+const wxString cnvstr::TAG_INDIVIDUALS_FROM_SAMPLES = "individuals-from-samples";
+const wxString cnvstr::TAG_INFILE                   = "infile";
+const wxString cnvstr::TAG_INFILES                  = "infiles";
+const wxString cnvstr::TAG_MAP_POSITION             = "map-position";
+const wxString cnvstr::TAG_MARKERS                  = "markers";
+const wxString cnvstr::TAG_NAME                     = "name";
+const wxString cnvstr::TAG_OUTFILE                  = "outfile";
+const wxString cnvstr::TAG_PANEL                    = "panel";
+const wxString cnvstr::TAG_PANELS                   = "panels";
+const wxString cnvstr::TAG_PANEL_NAME               = "panel-name";
+const wxString cnvstr::TAG_PANEL_POP                = "panel-pop";
+const wxString cnvstr::TAG_PANEL_REGION             = "panel-region";
+const wxString cnvstr::TAG_PANEL_SIZE               = "panel-size";
+const wxString cnvstr::TAG_PENETRANCE               = "penetrance";
+const wxString cnvstr::TAG_PHASE                    = "phase";
+const wxString cnvstr::TAG_PHENOTYPE                = "phenotype";
+const wxString cnvstr::TAG_POPULATION               = "population";
+const wxString cnvstr::TAG_POPULATIONS              = "populations";
+const wxString cnvstr::TAG_POP_MATCHING             = "population-matching";
+const wxString cnvstr::TAG_POP_NAME                 = "population-name";
+const wxString cnvstr::TAG_REGION                   = "region";
+const wxString cnvstr::TAG_REGIONS                  = "regions";
+const wxString cnvstr::TAG_SAMPLE                   = "sample";
+const wxString cnvstr::TAG_SAMPLES_PER_INDIVIDUAL   = "samples-per-individual";
+const wxString cnvstr::TAG_SCANNED_DATA_POSITIONS   = "locations";
+const wxString cnvstr::TAG_SCANNED_LENGTH           = "length";
+const wxString cnvstr::TAG_SEGMENT                  = "segment";
+const wxString cnvstr::TAG_SEGMENTS                 = "segments";
+const wxString cnvstr::TAG_SEGMENTS_MATCHING        = "segments-matching";
+const wxString cnvstr::TAG_SEGMENT_NAME             = "segment-name";
+const wxString cnvstr::TAG_TRAIT                    = "trait";
+const wxString cnvstr::TAG_TRAIT_LOCATION           = "trait-location";
+const wxString cnvstr::TAG_TRAIT_NAME               = "trait-name";
+const wxString cnvstr::TAG_TRAITS                   = "traits";
+const wxString cnvstr::TAG_TRAIT_INFO               = "trait-info";
+const wxString cnvstr::TAG_UNRESOLVED_MARKERS       = "unresolved-markers";
+
+// errors -- use wxTRANSLATE to enable internationalization
+const wxString cnvstr::ERR_BAD_TOP_TAG      = wxTRANSLATE("Expected top level tag <lamarc-converter-cmd> but got \"%s\"");
+const wxString cnvstr::ERR_BYNAME_POP_MATCHER_NO_VALUE = wxTRANSLATE("\"byName\" population matcher should have no text within tag");
+const wxString cnvstr::ERR_DATA_LENGTH_REQUIRED = wxTRANSLATE("SNP data requires <length> tag");
+const wxString cnvstr::ERR_DNA_LOCATIONS    = wxTRANSLATE("Can't set locations for segment %s (near line %d) because it is DNA data.");
+const wxString cnvstr::ERR_EMPTY_POP_NAME   = wxTRANSLATE("Empty population name");
+const wxString cnvstr::ERR_EXTRA_TAG        = wxTRANSLATE("extra xml tag \"%s\"");
+const wxString cnvstr::ERR_HAP_DATA_SIZE_MISMATCH = wxTRANSLATE("Expected %d haplotypes but data has %d");
+const wxString cnvstr::ERR_LENGTH_REQUIRED_WITH_LOCATIONS = wxTRANSLATE("If you provide a <locations> tag for this data type, you must provide a <length> tag as well.");
+const wxString cnvstr::ERR_LOCATIONS_REQUIRED_WITH_LENGTH = wxTRANSLATE("If you provide a <length> tag to a <segment> for this data type, you must provide a <locations> tag as well.");
+const wxString cnvstr::ERR_LOCATIONS_REQUIRE_OFFSET = wxTRANSLATE("if you specify locations for a segment, you must also specify the first position scanned.");
+const wxString cnvstr::ERR_LOCATION_SITE_MISMATCH= wxTRANSLATE("Number of items in <location> tag at line %d should be %d, the number of sites in the segment");
+const wxString cnvstr::ERR_MAP_POSITION_REQUIRED = wxTRANSLATE("<map-position> tag required for any segment in a multi-segment region");
+const wxString cnvstr::ERR_MISSING_FILE     = wxTRANSLATE("File \"%s\" does not exist or is not readable.");
+const wxString cnvstr::ERR_MISSING_TAG      = wxTRANSLATE("missing xml tag \"%s\"");
+const wxString cnvstr::ERR_NAME_REPEAT      = wxTRANSLATE("Identifier \"%s\" in line %d conflicts with previous definition in line %d. All population, region, and segment names must be distinct.");
+const wxString cnvstr::ERR_NOT_DOUBLE       = wxTRANSLATE("\"%s\" is not a real number");
+const wxString cnvstr::ERR_NOT_LONG         = wxTRANSLATE("\"%s\" is not an integer");
+const wxString cnvstr::ERR_NOT_SIZE_T       = wxTRANSLATE("\"%ld\" is not non-negative");
+const wxString cnvstr::ERR_NO_DATATYPE      = wxTRANSLATE("<infile> tag at line %d requires the datatype attribute.");
+const wxString cnvstr::ERR_NO_FORMAT        = wxTRANSLATE("<infile> tag at line %d requires the format attribute.");
+const wxString cnvstr::ERR_NO_INTERLEAVING  = wxTRANSLATE("<infile> tag at line %d requires the sequence-alignment attribute.");
+const wxString cnvstr::ERR_NO_NUCLEOTIDES_UNLINKED = wxTRANSLATE("data type \"%s\" may not be set \"%s\"");
+const wxString cnvstr::ERR_NO_SUCH_POP_NAME = wxTRANSLATE("Population \"%s\" not defined");
+const wxString cnvstr::ERR_NO_UNKNOWN_DATATYPES = wxTRANSLATE("<%s> tag at line %d may not use the 'unknown' datatype \"%s\".");
+const wxString cnvstr::ERR_OFFSET_REQUIRED = wxTRANSLATE("Segment tag at line %d requires the <first-position-scanned> tag when you are using <locations>.");
+const wxString cnvstr::ERR_ROW_WRAP         = wxTRANSLATE("near line %d:%s");
+const wxString cnvstr::ERR_SHORT_DATA_LENGTH= wxTRANSLATE("Data length for segment at line %d is shorter than number of sites");
+const wxString cnvstr::ERR_TRAIT_REPEAT     = wxTRANSLATE("Region \"%s\" redefines trait \"%s\" in line %d");
+const wxString cnvstr::ERR_UNKNOWN_SEGMENT_MATCHER  = wxTRANSLATE("Unknown segment matching type \"%s\"");
+const wxString cnvstr::ERR_UNKNOWN_POP_MATCHER    = wxTRANSLATE("Unknown population matching type \"%s\" in line %d");
+const wxString cnvstr::ERR_UNRECOGNIZED_TAG = wxTRANSLATE("Unrecognized tag: \"%s\" in line %d");
+
+// warnings -- use wxTRANSLATE to enable internationalization
+const wxString cnvstr::WARN_NO_LOCATIONS  = wxTRANSLATE("No location data set for segment %s. You will not be allowed to estimate recombination using this data.");
+const wxString cnvstr::WARN_NO_LENGTH     = wxTRANSLATE("No length data set for segment %s. You will not be allowed to estimate recombination using this data.");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/cnv_strings.h b/src/convStrings/cnv_strings.h
new file mode 100644
index 0000000..b2049e1
--- /dev/null
+++ b/src/convStrings/cnv_strings.h
@@ -0,0 +1,127 @@
+// $Id: cnv_strings.h,v 1.18 2012/02/17 22:02:34 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef CNV_STR_H
+#define CNV_STR_H
+
+#include "wx/string.h"
+
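+// XML attribute names, tag names, and translatable error/warning messages
+// used when parsing converter command files; values are defined in
+// cnv_strings.cpp.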
+class cnvstr
+{
+  public:
+    static const wxString ATTR_DATATYPE;
+    static const wxString ATTR_FORMAT;
+    static const wxString ATTR_PROXIMITY;
+    static const wxString ATTR_SEQUENCEALIGNMENT;
+    static const wxString ATTR_TYPE;
+    static const wxString ATTR_VERSION;
+
+    static const wxString ATTR_VAL_BYADJACENCY;
+    static const wxString ATTR_VAL_BYLIST;
+    static const wxString ATTR_VAL_BYNAME;
+    static const wxString ATTR_VAL_DEFAULT;
+    static const wxString ATTR_VAL_KNOWN;
+    static const wxString ATTR_VAL_LINKED;
+    static const wxString ATTR_VAL_SINGLE;
+    static const wxString ATTR_VAL_UNKNOWN;
+    static const wxString ATTR_VAL_UNLINKED;
+
+    static const wxString TAG_ADDCOMMENT;
+    static const wxString TAG_ALLELE;
+    static const wxString TAG_ALLELES;
+    static const wxString TAG_CONVERTER_CMD;
+    static const wxString TAG_DIVERGENCE;
+    static const wxString TAG_DIVERGENCES;
+    static const wxString TAG_DIV_ANCESTOR;
+    static const wxString TAG_DIV_CHILD1;
+    static const wxString TAG_DIV_CHILD2;
+    static const wxString TAG_EFFECTIVE_POPSIZE;
+    static const wxString TAG_FIRST_POSITION_SCANNED;
+    static const wxString TAG_GENO_RESOLUTIONS;
+    static const wxString TAG_HAPLOTYPES;
+    static const wxString TAG_HAS_PHENOTYPE;
+    static const wxString TAG_INDIVIDUAL;
+    static const wxString TAG_INDIVIDUALS;
+    static const wxString TAG_INDIVIDUALS_FROM_SAMPLES;
+    static const wxString TAG_INFILE;
+    static const wxString TAG_INFILES;
+    static const wxString TAG_MAP_POSITION;
+    static const wxString TAG_MARKERS;
+    static const wxString TAG_NAME;
+    static const wxString TAG_OUTFILE;
+    static const wxString TAG_PANEL;
+    static const wxString TAG_PANELS;
+    static const wxString TAG_PANEL_NAME;
+    static const wxString TAG_PANEL_POP;
+    static const wxString TAG_PANEL_REGION;
+    static const wxString TAG_PANEL_SIZE;
+    static const wxString TAG_PENETRANCE;
+    static const wxString TAG_PHASE;
+    static const wxString TAG_PHENOTYPE;
+    static const wxString TAG_POPULATION;
+    static const wxString TAG_POPULATIONS;
+    static const wxString TAG_POP_MATCHING;
+    static const wxString TAG_POP_NAME;
+    static const wxString TAG_REGION;
+    static const wxString TAG_REGIONS;
+    static const wxString TAG_SAMPLE;
+    static const wxString TAG_SAMPLES_PER_INDIVIDUAL;
+    static const wxString TAG_SCANNED_DATA_POSITIONS;
+    static const wxString TAG_SCANNED_LENGTH;
+    static const wxString TAG_SEGMENT;
+    static const wxString TAG_SEGMENTS;
+    static const wxString TAG_SEGMENTS_MATCHING;
+    static const wxString TAG_SEGMENT_NAME;
+    static const wxString TAG_TRAIT;
+    static const wxString TAG_TRAIT_LOCATION;
+    static const wxString TAG_TRAIT_NAME;
+    static const wxString TAG_TRAITS;
+    static const wxString TAG_TRAIT_INFO;
+    static const wxString TAG_UNRESOLVED_MARKERS;
+
+    static const wxString ERR_BAD_TOP_TAG;
+    static const wxString ERR_BYNAME_POP_MATCHER_NO_VALUE;
+    static const wxString ERR_DATA_LENGTH_REQUIRED;
+    static const wxString ERR_DNA_LOCATIONS;
+    static const wxString ERR_EMPTY_POP_NAME;
+    static const wxString ERR_EXTRA_TAG;
+    static const wxString ERR_HAP_DATA_SIZE_MISMATCH;
+    static const wxString ERR_LENGTH_REQUIRED_WITH_LOCATIONS;
+    static const wxString ERR_LOCATIONS_REQUIRED_WITH_LENGTH;
+    static const wxString ERR_LOCATIONS_REQUIRE_OFFSET;
+    static const wxString ERR_LOCATION_SITE_MISMATCH;
+    static const wxString ERR_MAP_POSITION_REQUIRED;
+    static const wxString ERR_MISSING_FILE;
+    static const wxString ERR_MISSING_TAG;
+    static const wxString ERR_NAME_REPEAT;
+    static const wxString ERR_NOT_DOUBLE;
+    static const wxString ERR_NOT_LONG;
+    static const wxString ERR_NOT_SIZE_T;
+    static const wxString ERR_NO_DATATYPE;
+    static const wxString ERR_NO_FORMAT;
+    static const wxString ERR_NO_INTERLEAVING;
+    static const wxString ERR_NO_NUCLEOTIDES_UNLINKED;
+    static const wxString ERR_NO_SUCH_POP_NAME;
+    static const wxString ERR_NO_UNKNOWN_DATATYPES;
+    static const wxString ERR_OFFSET_REQUIRED;
+    static const wxString ERR_ROW_WRAP;
+    static const wxString ERR_SHORT_DATA_LENGTH;
+    static const wxString ERR_TRAIT_REPEAT;
+    static const wxString ERR_UNKNOWN_SEGMENT_MATCHER;
+    static const wxString ERR_UNKNOWN_POP_MATCHER;
+    static const wxString ERR_UNRECOGNIZED_TAG;
+
+    static const wxString WARN_NO_LENGTH;
+    static const wxString WARN_NO_LOCATIONS;
+};
+
+#endif  // CNV_STR_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings.h b/src/convStrings/gc_strings.h
new file mode 100644
index 0000000..cb51232
--- /dev/null
+++ b/src/convStrings/gc_strings.h
@@ -0,0 +1,550 @@
+// $Id: gc_strings.h,v 1.44 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_H
+#define GC_STRINGS_H
+
+#include "wx/string.h"
+
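+// Translatable error messages (gcerr), user-interface strings (gcstr), and
+// verbose-logging strings (gcverbose) used by the data converter.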
+class gcerr
+{
+  public:
+    static const wxString abandonExport;
+
+    static const wxString badDivergenceId;
+    static const wxString badEffectivePopSize;
+    static const wxString badHapFile;
+    static const wxString badMapFile;
+    static const wxString badName;
+    // static const wxString badSamplesPer;
+    static const wxString badSequentialDataCast;
+
+    static const wxString corruptedDisplayableLociInMapOrder;
+
+    static const wxString duplicateLocusInRegion;
+    static const wxString duplicateMapPosition;
+
+    static const wxString emptyGroupName;
+    static const wxString emptyLocusName;
+    static const wxString emptyName;
+    static const wxString emptyPopulationName;
+    static const wxString emptyTraitName;
+
+    static const wxString fatalError;
+    static const wxString fileWithRow;
+    static const wxString fileWithoutRow;
+
+    static const wxString hapFileIllegalDelimiter;
+    static const wxString hapFilePositionNegative;
+    static const wxString hapFilePositionNotLong;
+    static const wxString hapFilePositionUnordered;
+    static const wxString hapFileReadErrUnknown;
+
+    static const wxString incompatibleLocusLengths;
+    static const wxString incompatibleLocusTypes;
+    static const wxString incompatibleNumHaps;
+    static const wxString inIndividual;
+
+    static const wxString lengthTooShort;
+
+    static const wxString locusOverlap;
+    static const wxString locusWithoutDataType;
+    static const wxString locusWithoutLength;
+    static const wxString locusWithoutMapPosition;
+
+    static const wxString mapFileError;
+
+    static const wxString migrationRateTooSmall;
+
+    static const wxString missingDataTypeForLocus;
+    static const wxString missingHapFileId;
+    static const wxString missingFileId;
+    static const wxString missingLengthForLocus;
+    static const wxString missingParse;
+    static const wxString missingParseId;
+    static const wxString missingRegion;
+    static const wxString missingTrait;
+
+    static const wxString mungeParseDataTypeMismatch;
+    static const wxString mungeParseFormatMismatch;
+    static const wxString mungeParseNeedsTwo;
+    static const wxString mungeParseNull;
+
+    static const wxString nameResolutionPairMissing;
+
+    static const wxString noBlockForPopLocus;
+    static const wxString noDataFound;
+    static const wxString noSuchParse;
+    static const wxString notALocation;
+
+    static const wxString panelBlessedError;
+    static const wxString panelSizeClash;
+
+    static const wxString provideDoDelete;
+
+    static const wxString regionNoData;
+    static const wxString regionNoEffectivePopSize;
+    static const wxString regionNoPositionToGet;
+    // static const wxString regionNoSamplesPerIndividual;
+    static const wxString regionNoSuchLocus;
+    static const wxString regionNoSuchTrait;
+    static const wxString regionTraitAlreadyAdded;
+
+    static const wxString requireCmdFile;
+    static const wxString shortDna;
+
+    static const wxString tooBigDataIndex;
+    static const wxString tooFewLociInSpec;
+    static const wxString tooFewPopsInSpec;
+    static const wxString tooManySites;
+
+    static const wxString unableToExport;
+    static const wxString uncaughtException;
+    static const wxString unsetChild1Id;
+    static const wxString unsetChild2Id;
+    static const wxString unsetFromId;
+    static const wxString unsetLength;
+    static const wxString unsetMapPosition;
+    static const wxString unsetNumSites;
+    static const wxString unsetParentId;
+    static const wxString unsetPopId;
+    static const wxString unsetToId;
+    static const wxString unsetRegionId;
+
+    static const wxString wrongDivergenceCount;
+};
+
+class gcstr
+{
+  public:
+    static const wxString abandonExport;
+    static const wxString addHapFile;
+    static const wxString addLocus;
+    static const wxString addPanel;
+    static const wxString addParent;
+    static const wxString addPop;
+    static const wxString addRegion;
+    static const wxString adjacent;
+    static const wxString all;
+    static const wxString allelic;
+    static const wxString allele;
+    static const wxString allFiles;
+    static const wxString assignTabTitle;
+    static const wxString badLocusLength;
+    static const wxString badLocusLength1;
+    static const wxString badLocusLength2;
+    static const wxString badLocusPosition;
+    static const wxString badName;
+    static const wxString badRegionLength1;
+    static const wxString badRegionLength2;
+    static const wxString batchFileDefault;
+    static const wxString batchOutComment;
+    static const wxString batchSafeFinish;
+    static const wxString blockFromFiles;
+    static const wxString blockInfo1;
+    static const wxString blockInfo2;
+    static const wxString blockFileInfo;
+    static const wxString blockLocusChoice;
+    static const wxString blockLocusIndexInFile;
+    static const wxString blockPloidyInfo;
+    static const wxString blockPloidyTitle;
+    static const wxString blockPopChoice;
+    static const wxString blockPopIndexInFile;
+    static const wxString blocksFromFiles;
+    static const wxString buttonHide;
+    static const wxString buttonSelectAll;
+    static const wxString buttonShow;
+    static const wxString buttonUnselectAll;
+    static const wxString byfile;
+    static const wxString byprog;
+    static const wxString byuser;
+    static const wxString cancelString;
+    static const wxString cannotWrite;
+    static const wxString chooseDataType;
+    static const wxString childPopsInstructions;
+    static const wxString chooseFileType;
+    static const wxString chooseHapResolution;
+    static const wxString chooseOne;
+    static const wxString chooseOneGroup;
+    static const wxString chooseOneLocus;
+    static const wxString chooseOneParse;
+    static const wxString chooseOnePop;
+    static const wxString continueExport;
+    static const wxString continueString;
+    static const wxString converterInfo;
+    static const wxString converterTitle;
+    static const wxString createNewRegion;
+    static const wxString createParent2Child;
+    static const wxString createParentFirst2Children;
+    static const wxString createParentNext2Children;
+    static const wxString dataBlocks;
+    static const wxString dataFileBatchExport;
+    static const wxString dataFileButtonAdd;
+    static const wxString dataFileButtonAllSelect;
+    static const wxString dataFileButtonAllUnselect;
+    static const wxString dataFileButtonRemoveSelected;
+    static const wxString dataFileExport;
+    static const wxString dataFiles;
+    static const wxString dataFilesInstructions;
+    static const wxString dataFilesSelect;
+    static const wxString dataFilesTitle;
+    static const wxString dataType;
+    static const wxString delMapFile;
+    static const wxString divergence;
+    static const wxString divergeInstructions;
+    static const wxString divergenceSelect;
+    static const wxString divMigMatrix;
+    static const wxString dna;
+    static const wxString doneThanks;
+    static const wxString editApply;
+    static const wxString editCancel;
+    static const wxString editDelete;
+    static const wxString editFileSettings;
+    static const wxString editLocus;
+    static const wxString editMigration;
+    static const wxString editOK;
+    static const wxString editParseBlock;
+    static const wxString editPanel;
+    static const wxString editParent;
+    static const wxString editPop;
+    static const wxString editRegion;
+    static const wxString enterNewName;
+    static const wxString error;
+    static const wxString errorWrap;
+    static const wxString errorWrapNoFile;
+    static const wxString exportFileDefault;
+    static const wxString exportFileGlob;
+    static const wxString exportWarning;
+    static const wxString falseVal;
+    static const wxString fileAlreadyAdded;
+    static const wxString fileDelete;
+    static const wxString fileEmpty;
+    static const wxString fileLabelDataType;
+    static const wxString fileLabelFormat;
+    static const wxString fileLabelInterleaving;
+    static const wxString fileLabelName;
+    static const wxString fileLabelRemove;
+    // static const wxString fileLabelUnlinked;
+    static const wxString fileSetting;
+    static const wxString firstPositionScanned;
+    static const wxString fragmentRegion;
+    static const wxString fullPath;
+    static const wxString genoFileDefault;
+    static const wxString globAll;
+    static const wxString hapFileBarf;
+    static const wxString hapFileDefault;
+    static const wxString hapFileEmptyFirstLine;
+    static const wxString hapFileMissing;
+    static const wxString hapFileParseFailed;
+    static const wxString hapFileReuse;
+    static const wxString hapFileSelect;
+    static const wxString hapFileSelectAnother;
+    static const wxString hapFileToken1;
+    static const wxString hapFileToken2;
+    static const wxString hapFileToken3;
+    static const wxString hapFileToken3Missing;
+    static const wxString hapFileUnSelect;
+    static const wxString hapFilesSelect;
+    static const wxString havePatience;
+    static const wxString hiddenContent;
+    static const wxString indent;
+    static const wxString information;
+    static const wxString instructionsMultipleDataTypes;
+    static const wxString interleaved;
+    static const wxString interleavedNoKalleleMsat;
+    static const wxString kallele;
+    static const wxString linkGCreateTitle;
+    static const wxString linkGEnterNewName;
+    static const wxString linkageCaption;
+    static const wxString linkageNo;
+    static const wxString linkageYes;
+    static const wxString locations;
+    static const wxString locationsForRecom;
+    static const wxString lociTabTitle;
+    static const wxString locus;
+    static const wxString locusButtonAdd;
+    static const wxString locusButtonMergeSelected;
+    static const wxString locusButtonRemoveSelected;
+    static const wxString locusCreate;
+    static const wxString locusCreateTitle;
+    static const wxString locusDialogMapPosition;
+    static const wxString locusEditString;
+    static const wxString locusEnterNewName;
+    static const wxString locusExists;
+    static const wxString locusForAll;
+    static const wxString locusLabelDataType;
+    static const wxString locusLabelLength;
+    static const wxString locusLabelLinked;
+    static const wxString locusLabelMapFile;
+    static const wxString locusLabelMapPosition;
+    static const wxString locusLabelName;
+    static const wxString locusLabelOffset;
+    static const wxString locusLabelRegionName;
+    static const wxString locusLabelSites;
+    static const wxString locusLength;
+    static const wxString locusLengthIllegal;
+    static const wxString locusLengthVsMarkers;
+    static const wxString locusNameFromFile;
+    static const wxString locusNewName;
+    static const wxString locusOffsetIllegal;
+    static const wxString locusOwnRegion;
+    static const wxString locusRename;
+    static const wxString locusRenameChoice;
+    static const wxString locusSelect;
+    static const wxString locusMarkerCount;
+    static const wxString locusUnlinked;
+    static const wxString locusUse;
+    static const wxString logWindowHeader;
+    static const wxString mapFileExportFailed;
+    static const wxString mapFileLastPositionTooLate;
+    static const wxString mapFileMarkerPositionMismatch;
+    static const wxString mapFileOffsetAfterFirstPosition;
+    static const wxString mapFileRegionMissing;
+    static const wxString mapFileSelect;
+    static const wxString mapFileSelectAnother;
+    static const wxString mapFileUnSelect;
+    static const wxString mapFilesSelect;
+    static const wxString mapPositionUnset;
+    static const wxString members;
+    static const wxString mergeLinkGInstructions;
+    static const wxString mergeLinkGTitle;
+    static const wxString mergeLociInstructions;
+    static const wxString mergeLociTitle;
+    static const wxString mergePopsInstructions;
+    static const wxString mergePopsTitle;
+    static const wxString microsat;
+    static const wxString migrate;
+    static const wxString migrationMatrix;
+    static const wxString migConstraint;
+    static const wxString migMethod;
+    static const wxString migProfile;
+    static const wxString migLabelRate;
+    static const wxString migLabelConstraint;
+    static const wxString migLabelMethod;
+    static const wxString migLabelProfile;
+    static const wxString migRate;
+    static const wxString moot;
+    static const wxString moveLocus;
+    static const wxString multiPhaseSample;
+    static const wxString nameCandidate;
+    static const wxString nearRow;
+    static const wxString newName;
+    static const wxString no;
+    static const wxString noChoice;
+    static const wxString noChoiceLocus;
+    static const wxString noChoicePopulation;
+    static const wxString noChoiceRegion;
+    static const wxString notebookLabelDataTab;
+    static const wxString notebookLabelLocusTab;
+    static const wxString notebookLabelRegionTab;
+    static const wxString notebookLabelUnitTab;
+    static const wxString noWarningsFound;
+    static const wxString nuc;
+    static const wxString object;
+    static const wxString off;
+    static const wxString on;
+    static const wxString panel;
+    static const wxString panelLabelName;
+    static const wxString panelMemberCount;
+    static const wxString panelRename;
+    static const wxString panelToggle;
+    static const wxString parent;
+    static const wxString parentLabelName;
+    static const wxString parentRename;
+    static const wxString parseAbort;
+    static const wxString parseDataType;
+    static const wxString parseFormat;
+    static const wxString parseGood;
+    static const wxString parseInfo;
+    static const wxString parseInfoNone;
+    static const wxString parseInterleaving;
+    static const wxString parseSettings;
+    static const wxString parseSettingsForFile;
+    static const wxString parseWarning;
+    static const wxString phaseFile;
+    static const wxString phenotype;
+    static const wxString phylip;
+    static const wxString phylipNoKalleleMsat;
+    static const wxString plainLong;
+    static const wxString ploidy;
+    static const wxString ploidy_1;
+    static const wxString ploidy_2;
+    static const wxString ploidy_3;
+    static const wxString ploidy_4;
+    static const wxString popEditButton;
+    static const wxString population;
+    static const wxString populationCreate;
+    static const wxString popButtonAdd;
+    static const wxString popButtonMergeSelected;
+    static const wxString popButtonRemoveSelected;
+    static const wxString popCreateTitle;
+    static const wxString popEnterNewName;
+    static const wxString popLabelName;
+    static const wxString popTabTitle;
+    static const wxString populationExists;
+    static const wxString populationForAll;
+    static const wxString populationNameFromFile;
+    static const wxString populationNewName;
+    static const wxString populationRename;
+    static const wxString populationRenameChoice;
+    static const wxString populationSelect;
+    static const wxString populationUse;
+    static const wxString questionHeader;
+    static const wxString region;
+    static const wxString regionChoice;
+    static const wxString regionCreate;
+    static const wxString regionEditString;
+    static const wxString regionEffPopSize;
+    static const wxString regionEnterNewName;
+    static const wxString regionExists;
+    static const wxString regionForAll;
+    static const wxString regionLabelDataType;
+    static const wxString regionLabelEffPopSize;
+    static const wxString regionLabelLength;
+    static const wxString regionLabelMapFile;
+    static const wxString regionLabelMapPosition;
+    static const wxString regionLabelName;
+    static const wxString regionLabelOffset;
+    static const wxString regionLabelSamples;
+    static const wxString regionLabelSites;
+    static const wxString regionLengthIllegal;
+    static const wxString regionLengthVsMarkers;
+    static const wxString regionNameFromFile;
+    static const wxString regionNameUnlinked;
+    static const wxString regionNewName;
+    static const wxString regionOffsetIllegal;
+    static const wxString regionPositionInfo;
+    static const wxString regionRename;
+    static const wxString regionRenameChoice;
+    // static const wxString regionSamplesPer;
+    static const wxString regionSelect;
+    static const wxString regionUse;
+    static const wxString removeFiles;
+    static const wxString removeGroupsInstructions;
+    static const wxString removeGroupsTitle;
+    static const wxString removeLociInstructions;
+    static const wxString removeLociTitle;
+    static const wxString removePopsInstructions;
+    static const wxString removePopsTitle;
+    static const wxString renameLinkGTitle;
+    static const wxString renameLocusTitle;
+    static const wxString renamePopTitle;
+    static const wxString renamePopTitleFor;
+    static const wxString saveFileInstructionsForMac;
+    static const wxString selectAll;
+    static const wxString sequential;
+    static const wxString setDataTypesAll;
+    static const wxString setFormats;
+    static const wxString setInterleaving;
+    static const wxString snp;
+    static const wxString structureDump;
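+// GCSequentialData is the abstract interface for per-sample marker data
+// attached to a GCParseBlock; the concrete subclasses below store allelic
+// data (one string per marker, GCAllelicData) or nucleotide data (a single
+// sequence string, GCNucData).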
+    // static const wxString tooFewSamplesPerIndividual;
+    // static const wxString tooManySamplesPerIndividual;
+    static const wxString trait;
+    static const wxString traitEnterNewName;
+    static const wxString trueVal;
+    static const wxString typeClashDialog;
+    static const wxString unknown;
+    static const wxString unrecognizedFileFormat;
+    static const wxString unselectAll;
+    static const wxString unsetValueLocations;
+    static const wxString unsetValueLocusLength;
+    static const wxString unsetValueLocusPosition;
+    static const wxString unsetValueOffset;
+    static const wxString unsetValueRegionEffectivePopulationSize;
+    static const wxString unsetValueRegionSamples;
+    static const wxString usageHeader;
+    static const wxString userTypeOverride;
+
+    static const wxString warning;
+    static const wxString warningMissingPopRegPair;
+    static const wxString warningNeedFile;
+    static const wxString warningNeedFileDataType;
+    static const wxString warningNeedFileDataTypes;
+    static const wxString warningNeedFileFormat;
+    static const wxString warningNeedFileFormats;
+    static const wxString warningNeedFilesParsed;
+    static const wxString warningStringsHeader;
+    static const wxString warningUnsetLinkageGroup;
+    static const wxString warningUnsetLocus;
+    static const wxString warningUnsetPopulation;
+    static const wxString warningUnsetRegion;
+    static const wxString xmlFiles;
+    static const wxString yes;
+
+    // stuff for command line parsing
+    static const wxString cmdBatch;
+    static const wxString cmdBatchChar;
+    static const wxString cmdBatchDescription;
+    static const wxString cmdCommand;
+    static const wxString cmdCommandChar;
+    static const wxString cmdCommandDescription;
+    static const wxString cmdDump;
+    static const wxString cmdDumpChar;
+    static const wxString cmdDumpDescription;
+    static const wxString cmdFileFormat;
+    static const wxString cmdFileFormatChar;
+    static const wxString cmdFileFormatDescription;
+    static const wxString cmdInput;
+    static const wxString cmdInputChar;
+    static const wxString cmdInputDescription;
+    static const wxString cmdInterleaved;
+    static const wxString cmdInterleavedChar;
+    static const wxString cmdInterleavedDescription;
+    static const wxString cmdMapFile;
+    static const wxString cmdMapFileChar;
+    static const wxString cmdMapFileDescription;
+    static const wxString cmdOutput;
+    static const wxString cmdOutputChar;
+    static const wxString cmdOutputDescription;
+    static const wxString cmdWriteBatch;
+    static const wxString cmdWriteBatchChar;
+    static const wxString cmdWriteBatchDescription;
+};
+
+class gcverbose
+{
+  public:
+    static const wxString addedFile;
+    static const wxString addedLocus;
+    static const wxString addedPopulation;
+    static const wxString addedRegion;
+    static const wxString addedUnit;
+    static const wxString exportSuccess;
+    static const wxString exportTry;
+    static const wxString firstPositionNotLong;
+    static const wxString locationsNotIntegers;
+    static const wxString locusLengthNotLong;
+    static const wxString locusPositionNotLong;
+    static const wxString noSetDataType;
+    static const wxString noSetFileFormat;
+    static const wxString noSetIsInterleaved;
+    //static const wxString noSetIsUnlinked;
+    static const wxString parseAttemptExiting;
+    static const wxString parseAttemptFailed;
+    static const wxString parseAttemptPossible;
+    static const wxString parseAttemptSettings;
+    static const wxString parseAttemptStarted;
+    static const wxString parseAttemptSuccessful;
+    static const wxString removedFile;
+    static const wxString removedLocus;
+    static const wxString removedPopulation;
+    static const wxString removedRegion;
+    static const wxString removedUnit;
+    static const wxString setDataType;
+    static const wxString setDataTypeAndFileFormat;
+    static const wxString setFileFormat;
+    static const wxString setIsInterleaved;
+    //static const wxString setIsUnlinked;
+};
+
+#endif  // GC_STRINGS_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_cmdfile.cpp b/src/convStrings/gc_strings_cmdfile.cpp
new file mode 100644
index 0000000..84fa635
--- /dev/null
+++ b/src/convStrings/gc_strings_cmdfile.cpp
@@ -0,0 +1,41 @@
+// $Id: gc_strings_cmdfile.cpp,v 1.12 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_cmdfile.h"
+#include "wx/intl.h"
+
+const wxString gcerr_cmdfile::atRow                 =wxTRANSLATE("Near row %d ");
+
+const wxString gcerr_cmdfile::badCmdFile            =wxTRANSLATE("Problem reading command file %s:%s");
+const wxString gcerr_cmdfile::badYesNo              =wxTRANSLATE("Did not recognize \"%s\". Allowed strings are \"yes\" and \"no\"");
+const wxString gcerr_cmdfile::badFileFormat         =wxTRANSLATE("Did not recognize file format \"%s\". Allowed formats are \"phylip\" and \"migrate\"");
+const wxString gcerr_cmdfile::badGeneralDataType    =wxTRANSLATE("Did not recognize general data type \"%s\". Allowed data types are \"nucleotide\" and \"allelic\"");
+const wxString gcerr_cmdfile::badInterleaving       =wxTRANSLATE("Did not recognize sequence alignment type \"%s\". Allowed formats are \"interleaved\" and \"sequential\"");
+const wxString gcerr_cmdfile::badProximity          =wxTRANSLATE("Did not recognize marker proximity of \"%s\". Allowed strings are \"linked\" and \"unlinked\"");
+const wxString gcerr_cmdfile::badSpecificDataType   =wxTRANSLATE("Did not recognize specific data type \"%s\". Allowed data types are \"dna\", \"snp\", \"kallele\" and \"microsat\"");
+
+const wxString gcerr_cmdfile::deprecatedGeneralDataType =wxTRANSLATE("Use of general data type \"%s\" is disallowed for infile parsing. Please update to use one of \"dna\", \"snp\", \"kallele\" or \"microsat\"");
+
+const wxString gcerr_cmdfile::inCmdFile             =wxTRANSLATE("While processing command file \"%s\", the following error occurred:\n\n");
+const wxString gcerr_cmdfile::inFile                =wxTRANSLATE("in file %s" );
+
+const wxString gcerr_cmdfile::locusMatchByNameNotEmpty=wxTRANSLATE("<segments-matching type=\"byName\"> should have no text between tag open and close.");
+const wxString gcerr_cmdfile::locusMatchSingleEmpty =wxTRANSLATE("<segments-matching type=\"single\"> requires segment name between tag open and close.");
+const wxString gcerr_cmdfile::locusMatchUnknown     =wxTRANSLATE("<segments-matching type=\"%s\"> unrecognized.  Allowed types are \"byName\" or \"single\".");
+
+const wxString gcerr_cmdfile::messageIs             =wxTRANSLATE(": %s");
+
+const wxString gcerr_cmdfile::popMatchByNameNotEmpty=wxTRANSLATE("<population-matching type=\"byName\"> should have no text between tag open and close.");
+const wxString gcerr_cmdfile::popMatchSingleEmpty   =wxTRANSLATE("<population-matching type=\"single\"> requires population name between tag open and close.");
+const wxString gcerr_cmdfile::popMatchUnknown       =wxTRANSLATE("population-matching type \"%s\" unrecognized");
+
+const wxString gcstr_cmdfile::cmdFilesSelect        =wxTRANSLATE("Select a converter command file");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_cmdfile.h b/src/convStrings/gc_strings_cmdfile.h
new file mode 100644
index 0000000..03afc2c
--- /dev/null
+++ b/src/convStrings/gc_strings_cmdfile.h
@@ -0,0 +1,53 @@
+// $Id: gc_strings_cmdfile.h,v 1.10 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_CMDFILE_H
+#define GC_STRINGS_CMDFILE_H
+
+#include "wx/string.h"
+
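+// Translatable error and prompt strings specific to reading converter
+// command files; values are defined in gc_strings_cmdfile.cpp.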
+class gcerr_cmdfile
+{
+  public:
+    static const wxString atRow;
+
+    static const wxString badCmdFile;
+    static const wxString badFileFormat;
+    static const wxString badGeneralDataType;
+    static const wxString badInterleaving;
+    static const wxString badProximity;
+    static const wxString badSpecificDataType;
+    static const wxString badYesNo;
+
+    static const wxString deprecatedGeneralDataType;
+
+    static const wxString inCmdFile;
+    static const wxString inFile;
+
+    static const wxString locusMatchByNameNotEmpty;
+    static const wxString locusMatchSingleEmpty;
+    static const wxString locusMatchUnknown;
+
+    static const wxString messageIs;
+
+    static const wxString popMatchByNameNotEmpty;
+    static const wxString popMatchSingleEmpty;
+    static const wxString popMatchUnknown;
+};
+
+class gcstr_cmdfile
+{
+  public:
+    static const wxString cmdFilesSelect;
+};
+
+#endif  // GC_STRINGS_CMDFILE_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_creation.cpp b/src/convStrings/gc_strings_creation.cpp
new file mode 100644
index 0000000..b297fac
--- /dev/null
+++ b/src/convStrings/gc_strings_creation.cpp
@@ -0,0 +1,18 @@
+// $Id: gc_strings_creation.cpp,v 1.3 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_creation.h"
+#include "wx/intl.h"
+
+const wxString gcstr_creation::cmdfile  =wxTRANSLATE("defined near line %ld of command file %s");
+const wxString gcstr_creation::datafile =wxTRANSLATE("induced near line %ld of data file %s");
+const wxString gcstr_creation::gui      =wxTRANSLATE("created in the GUI");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_creation.h b/src/convStrings/gc_strings_creation.h
new file mode 100644
index 0000000..a049ab4
--- /dev/null
+++ b/src/convStrings/gc_strings_creation.h
@@ -0,0 +1,26 @@
+// $Id: gc_strings_creation.h,v 1.4 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_CREATION_H
+#define GC_STRINGS_CREATION_H
+
+#include "wx/string.h"
+
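+// Format strings recording how an object was created (from a command file,
+// a data file, or the GUI); values are defined in gc_strings_creation.cpp.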
+class gcstr_creation
+{
+  public:
+    static const wxString cmdfile;
+    static const wxString datafile;
+    static const wxString gui;
+};
+
+#endif  // GC_STRINGS_CREATION_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_data.cpp b/src/convStrings/gc_strings_data.cpp
new file mode 100644
index 0000000..2073bba
--- /dev/null
+++ b/src/convStrings/gc_strings_data.cpp
@@ -0,0 +1,17 @@
+// $Id: gc_strings_data.cpp,v 1.5 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_data.h"
+#include "wx/intl.h"
+
+const wxString gcerr_data::missingPopLocus =   wxTRANSLATE("Did not find any data for population \"%s\" at segment \"%s\". Please remove or merge populations, segments, and regions as appropriate.");
+const wxString gcerr_data::missingPopRegion=   wxTRANSLATE("Did not find any data for population \"%s\" at region \"%s\". Please remove or merge populations, segments, and regions as appropriate.");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_data.h b/src/convStrings/gc_strings_data.h
new file mode 100644
index 0000000..a744699
--- /dev/null
+++ b/src/convStrings/gc_strings_data.h
@@ -0,0 +1,30 @@
+// $Id: gc_strings_data.h,v 1.4 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_DATA_H
+#define GC_STRINGS_DATA_H
+
+#include "wx/string.h"
+
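+// Error strings reported when no data is found for a population/segment or
+// population/region pair; values are defined in gc_strings_data.cpp.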
+class gcerr_data
+{
+  public:
+    static const wxString missingPopLocus;
+    static const wxString missingPopRegion;
+};
+
+class gcstr_data
+{
+  public:
+};
+
+#endif  // GC_STRINGS_DATA_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_err.cpp b/src/convStrings/gc_strings_err.cpp
new file mode 100644
index 0000000..fd9233a
--- /dev/null
+++ b/src/convStrings/gc_strings_err.cpp
@@ -0,0 +1,99 @@
+// $Id: gc_strings_err.cpp,v 1.8 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings.h"
+#include "wx/intl.h"
+
+const wxString gcerr::abandonExport          = "internal message -- abandoning export. if you're reading this, there's an error.";
+
+const wxString gcerr::badDivergenceId    = wxTRANSLATE("Bad child id in \"%s\". Couldn't set parent id for \"%s\".");
+const wxString gcerr::badEffectivePopSize    = wxTRANSLATE("Couldn't set effective population size for region \"%s\" to %f. The value must be positive.");
+const wxString gcerr::badHapFile    = wxTRANSLATE("Failed to read hap file \"%s\" because of the following error:\n%s\nIgnoring.");
+const wxString gcerr::badMapFile    = wxTRANSLATE("Failed to read map file \"%s\" because of the following error:\n%s\nIgnoring.");
+const wxString gcerr::badName = wxTRANSLATE("Name \"%s\" contains a space which will break the XML parser.");
+
+// const wxString gcerr::badSamplesPer = wxTRANSLATE("Couldn't set samples per individual for %s to %d. The value must be positive");
+const wxString gcerr::badSequentialDataCast = wxTRANSLATE("GCSequentialData wasn't allelic when it should have been guaranteed to be so.");
+const wxString gcerr::corruptedDisplayableLociInMapOrder= wxTRANSLATE("map order of displayable segments incorrectly calculated");
+const wxString gcerr::duplicateLocusInRegion= wxTRANSLATE("Segment \"%s\" added to region \"%s\" twice.");
+const wxString gcerr::duplicateMapPosition= wxTRANSLATE("More than one segment of region \"%s\" has map position %ld");
+const wxString gcerr::emptyGroupName    =   wxTRANSLATE("Empty region name not allowed. Ignoring your command.");
+const wxString gcerr::emptyLocusName    =   wxTRANSLATE("Empty segment name not allowed. Ignoring your command.");
+const wxString gcerr::emptyPopulationName = wxTRANSLATE("Empty population name not allowed. Ignoring your command.");
+const wxString gcerr::emptyName         =   wxTRANSLATE("Empty name for population, region, segment, or trait");
+const wxString gcerr::fatalError        =   wxTRANSLATE("Cannot recover. Exiting.");
+const wxString gcerr::fileWithRow       =   wxTRANSLATE("Near row %d of file %s:\n%s");
+const wxString gcerr::fileWithoutRow    =   wxTRANSLATE("In file %s:\n%s");
+const wxString gcerr::hapFileIllegalDelimiter   =   wxTRANSLATE("Delimiter \"%s\" illegal -- must be a single character.");
+const wxString gcerr::hapFilePositionNegative   =   wxTRANSLATE("In phase information file \"%s\": position %d cannot be negative");
+const wxString gcerr::hapFilePositionNotLong    =   wxTRANSLATE("In phase information file \"%s\": position \"%s\" not a long value");
+const wxString gcerr::hapFilePositionUnordered  =   wxTRANSLATE("In phase information file \"%s\": position %d comes after later position %d");
+const wxString gcerr::hapFileReadErrUnknown     =   wxTRANSLATE("Unknown error occurred while reading phase information file \"%s\"");
+const wxString gcerr::incompatibleLocusLengths=   wxTRANSLATE("Incompatible segment lengths %d and %d");
+const wxString gcerr::incompatibleLocusTypes  =   wxTRANSLATE("Incompatible data types %s and %s");
+const wxString gcerr::incompatibleNumHaps=  wxTRANSLATE("Incompatible hap counts %d and %d");
+const wxString gcerr::inIndividual      =   wxTRANSLATE("Error writing data for individual \"%s\": %s");
+const wxString gcerr::lengthTooShort    =   wxTRANSLATE("Length %d too short for data with %d sites");
+const wxString gcerr::locusOverlap  = wxTRANSLATE("Segment \"%s\", ending at position %ld overlaps segment \"%s\" starting at position %ld.");
+const wxString gcerr::locusWithoutDataType  = wxTRANSLATE("Segment \"%s\" in region \"%s\" needs data type assignment.");
+const wxString gcerr::locusWithoutLength  = wxTRANSLATE("Segment \"%s\" needs data length assignment.");
+const wxString gcerr::locusWithoutMapPosition  = wxTRANSLATE("Segment \"%s\" needs map position assignment.");
+const wxString gcerr::migrationRateTooSmall = wxTRANSLATE("The Migration Rate cannot be less than zero");
+const wxString gcerr::missingDataTypeForLocus   = wxTRANSLATE("Segment \"%s\" needs a data type assignment");
+const wxString gcerr::missingHapFileId  =   wxTRANSLATE("No hap file with id \"%d\" found");
+const wxString gcerr::missingFileId     =   wxTRANSLATE("No file with id \"%d\" found");
+const wxString gcerr::missingLengthForLocus   = wxTRANSLATE("Segment \"%s\" needs a total length assignment");
+const wxString gcerr::missingParse      =   wxTRANSLATE("Could not find any parse data assigned to file \"%s\"");
+const wxString gcerr::missingParseId    =   wxTRANSLATE("No parse with id \"%d\" found");
+
+//const wxString gcerr::missingPhaseDataForLocus=   wxTRANSLATE("Individual \"%s\" is missing phase information for segment \"%s\". Did you write a phase info file?");
+const wxString gcerr::missingRegion     =   wxTRANSLATE("Did not find region \"%s\"");
+
+//const wxString gcerr::missingSampleDataForLocus=   wxTRANSLATE("Sample \"%s\" of individual \"%s\" is missing data for segment \"%s\".");
+const wxString gcerr::missingTrait      =   wxTRANSLATE("Did not find trait \"%s\"");
+const wxString gcerr::mungeParseDataTypeMismatch=   wxTRANSLATE("Tried to munge parses with incompatible data types");
+const wxString gcerr::mungeParseFormatMismatch=   wxTRANSLATE("Tried to munge parses with incompatible formats");
+const wxString gcerr::mungeParseNeedsTwo=   wxTRANSLATE("Tried to munge other than two parses");
+const wxString gcerr::mungeParseNull    =   wxTRANSLATE("Tried to munge null parse");
+const wxString gcerr::nameResolutionPairMissing =   wxTRANSLATE("Did not find (population, region) pair (%s,%s) in name resolution");
+const wxString gcerr::noBlockForPopLocus=   wxTRANSLATE("Did not find a block for pop %d and segment %d");
+const wxString gcerr::noDataFound       =   wxTRANSLATE("Did not find any exportable data. Have you added a data file?");
+const wxString gcerr::noSuchParse       =   wxTRANSLATE("Unable to read %s as a %s file with %s data in the '%s' format.");
+const wxString gcerr::notALocation      =   wxTRANSLATE("\"%s\" does not specify a location. It should be a long integer");
+const wxString gcerr::provideDoDelete   =   wxTRANSLATE("Subclasses of gcUpdatingDialog must override the virtual ::DoDelete method if they include a button with wxID_DELETE");
+const wxString gcerr::regionNoData =   wxTRANSLATE("No data found for region \"%s\".");
+const wxString gcerr::regionNoEffectivePopSize =   wxTRANSLATE("Call to GetEffectivePopSize() for region id %d without HasEffectivePopSize()");
+const wxString gcerr::regionNoPositionToGet =   wxTRANSLATE("Call to GetPosition() without HasPosition()");
+
+// const wxString gcerr::regionNoSamplesPerIndividual =   wxTRANSLATE("Call to GetSamplesPerIndividual() for region id %d without HasSamplesPerIndividual()");
+const wxString gcerr::regionNoSuchLocus =   wxTRANSLATE("Segment with id %d not associated with region id %d");
+const wxString gcerr::regionTraitAlreadyAdded=  wxTRANSLATE("Attempt to re-add trait with id %d");
+const wxString gcerr::regionNoSuchTrait =   wxTRANSLATE("Trait with id %d not associated with region id %d");
+const wxString gcerr::requireCmdFile    =   wxTRANSLATE("Option -c <cmdfile> required for batch (non-gui) converter.");
+const wxString gcerr::shortDna          =   wxTRANSLATE("Tried to add bogus nuclear data \"%s\" to sample \"%s\". Is the sample too short or does it have non-DNA data?");
+const wxString gcerr::tooBigDataIndex   =   wxTRANSLATE("Index %d requests a parse sample beyond the actual hap count of %d");
+const wxString gcerr::tooFewLociInSpec  =   wxTRANSLATE("Spec for segment doesn't allow segment with index %d");
+const wxString gcerr::tooFewPopsInSpec  =   wxTRANSLATE("Spec for populations doesn't allow population with index %d");
+const wxString gcerr::tooManySites      =   wxTRANSLATE("Found more data sites than expected.");
+const wxString gcerr::unableToExport    =   wxTRANSLATE("Unable to export file because: %s");
+const wxString gcerr::uncaughtException =   wxTRANSLATE("Uncaught Exception: %s");
+const wxString gcerr::unsetChild1Id     =   wxTRANSLATE("Call to GetChild1Id() for segment \"%s\" without checking HasChild1Id()");
+const wxString gcerr::unsetChild2Id     =   wxTRANSLATE("Call to GetChild2Id() for segment \"%s\" without checking HasChild2Id()");
+const wxString gcerr::unsetFromId       =   wxTRANSLATE("Call to GetFromId() for segment \"%s\" without checking HasFromId()");
+const wxString gcerr::unsetLength       =   wxTRANSLATE("Call to GetLength() for segment \"%s\" without checking HasLength()");
+const wxString gcerr::unsetMapPosition  =   wxTRANSLATE("Call to GetMapPosition() for segment \"%s\" without checking HasMapPosition()");
+const wxString gcerr::unsetNumSites     =   wxTRANSLATE("Call to GetNumSites() for segment \"%s\" without checking HasNumSites()");
+const wxString gcerr::unsetParentId     =   wxTRANSLATE("Call to GetParentId() for segment \"%s\" without checking HasParentId()");
+const wxString gcerr::unsetPopId        =   wxTRANSLATE("Call to GetPopId() for panel \"%s\" without checking HasPopId()");
+const wxString gcerr::unsetRegionId     =   wxTRANSLATE("Call to GetRegionId() for segment \"%s\" without checking HasRegionId()");
+const wxString gcerr::unsetToId         =   wxTRANSLATE("Call to GetToId() for segment \"%s\" without checking HasToId()");
+const wxString gcerr::wrongDivergenceCount     =   wxTRANSLATE("Only 2 children can be merged in Divergence");
+
+//____________________________________________________________________________________
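The gcerr constants above are printf-style format strings; wxTRANSLATE() only marks each literal for extraction into a message catalog, so lookup and substitution happen wherever the error is actually reported. A minimal sketch of that expansion, assuming gcerr is visible through gc_strings.h and using only the stock wxWidgets calls wxGetTranslation() and wxString::Format() (the helper name below is hypothetical):

    // Illustrative sketch only -- not part of this patch.
    #include "gc_strings.h"   // assumed to declare the gcerr string table
    #include "wx/intl.h"      // wxGetTranslation
    #include "wx/string.h"    // wxString::Format

    // Expand gcerr::fileWithRow ("Near row %d of file %s:\n%s") into a
    // user-visible message: wxGetTranslation() substitutes the localized
    // text at runtime and wxString::Format() fills the placeholders.
    wxString FormatRowError(int row, const wxString& fileName, const wxString& detail)
    {
        return wxString::Format(wxGetTranslation(gcerr::fileWithRow),
                                row, fileName.c_str(), detail.c_str());
    }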
diff --git a/src/convStrings/gc_strings_individual.cpp b/src/convStrings/gc_strings_individual.cpp
new file mode 100644
index 0000000..a1582c8
--- /dev/null
+++ b/src/convStrings/gc_strings_individual.cpp
@@ -0,0 +1,20 @@
+// $Id: gc_strings_individual.cpp,v 1.6 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_individual.h"
+#include "wx/intl.h"
+
+const wxString gcerr_ind::missingPhaseForLocus  = wxTRANSLATE("Individual \"%s\" is missing phase data for segment \"%s\".\n\nDid you write a phase information file?");
+const wxString gcerr_ind::phaseLocusRepeat      = wxTRANSLATE("Attempted to add phase data to individual \"%s\" from segment \"%s\" a second time.\n\nCheck to see if individual name is duplicated.");
+const wxString gcerr_ind::sampleLocusRepeat     = wxTRANSLATE("Attempted to add data to sample \"%s\" from segment \"%s\" a second time.\n\nCheck to see if sample name is duplicated.");
+const wxString gcerr_ind::sampleMissingLocusData= wxTRANSLATE("Cannot find data for sample \"%s\" of segment \"%s\".\n\nYou may need to provide individual/sample resolution information in a phase file.");
+const wxString gcerr_ind::wrongSampleCount      = wxTRANSLATE("Individual \"%s\" has conflicting sample counts for data.");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_individual.h b/src/convStrings/gc_strings_individual.h
new file mode 100644
index 0000000..a7daf8f
--- /dev/null
+++ b/src/convStrings/gc_strings_individual.h
@@ -0,0 +1,35 @@
+// $Id: gc_strings_individual.h,v 1.6 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_INDIVIDUAL_H
+#define GC_STRINGS_INDIVIDUAL_H
+
+#include "wx/string.h"
+
+class gcerr_ind
+{
+  public:
+    static const wxString missingPhaseForLocus;
+    static const wxString phaseLocusRepeat;
+    static const wxString sampleLocusRepeat;
+    static const wxString sampleMissingLocusData;
+    static const wxString wrongSampleCount;
+};
+
+#if 0
+class gcstr_individual
+{
+  public:
+};
+#endif
+
+#endif  // GC_STRINGS_INDIVIDUAL_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_infile.cpp b/src/convStrings/gc_strings_infile.cpp
new file mode 100644
index 0000000..d280123
--- /dev/null
+++ b/src/convStrings/gc_strings_infile.cpp
@@ -0,0 +1,43 @@
+// $Id: gc_strings_infile.cpp,v 1.16 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_infile.h"
+#include "wx/intl.h"
+
+const wxString gcerr_infile::extraFileData   = wxTRANSLATE("File contains extra data");
+const wxString gcerr_infile::fileParseError  = wxTRANSLATE("Near line %d: %s.");
+const wxString gcerr_infile::illegalDna      = wxTRANSLATE("Illegal character \"%c\" at position %d in nucleotide sequence \"%s\".");
+const wxString gcerr_infile::illegalMsat     = wxTRANSLATE("Illegal microsatellite \"%s\".");
+const wxString gcerr_infile::parseDataTypeSpecMismatch=wxTRANSLATE("File specifies data of type %s during attempt to parse as %s data.");
+const wxString gcerr_infile::parseMissingErr =wxTRANSLATE("File \"%s\" did not parse correctly. \n\nIt may be broken or may have conflicted with a command file. \n\nPlease remove and replace it before trying again.");
+const wxString gcerr_infile::prematureEndOfFile=wxTRANSLATE("File \"%s\" ended prematurely.");
+const wxString gcerr_infile::shortSampleName=wxTRANSLATE("File \"%s\" has at least one sample name containing an embedded space.\n\nThis is often a symptom that your input file was incorrectly formatted. (Phylip and Migrate use the first ten characters of a line for the sample name.)\n\nYou may wish to remove this file from the converter, edit it, and re-add it.");
+const wxString gcerr_infile::tokenCountMismatch=wxTRANSLATE("While using delimiter \"%s\" to separate \"%s\" into tokens for sample \"%s\", saw %d tokens when we expected %d.");
+const wxString gcerr_infile::tooFewMarkersInSample=wxTRANSLATE("%s\nIs the sequence on the previous line too short?");
+const wxString gcerr_infile::tooManyMarkersInSample=wxTRANSLATE("Found %d data markers, but expected only %d.");
+const wxString gcerr_infile::unableToParseBecause=wxTRANSLATE("Unable to parse file %s as a \"%s\" file of type \"%s\" and \"%s\" sequences because:\n\t%s");
+
+const wxString gcerr_migrate::badDelimiter   = wxTRANSLATE("Delimiter \"%s\" illegal in migrate files. A better choice would be \".\"");
+const wxString gcerr_migrate::badLocusCount  = wxTRANSLATE("Expected a segment count but got \"%s\" instead. Segment count should be a positive integer.");
+const wxString gcerr_migrate::badPopCount    = wxTRANSLATE("Expected a population count but got \"%s\" instead. Population count should be a positive integer.");
+const wxString gcerr_migrate::badSequenceCount=wxTRANSLATE("Migrate parser was expecting sequence count of 1 or greater but got \"%s\" instead.");
+const wxString gcerr_migrate::firstToken  = wxTRANSLATE("Ignoring first token \"%s\" of migrate file. Expected data type indicator of \"e\", \"m\", \"n\", or \"s\", or population count.");
+const wxString gcerr_migrate::locusLengthNotPositive=   wxTRANSLATE("Token \"%s\" in segment lengths is not a positive number");
+const wxString gcerr_migrate::missingMsatDelimiter=wxTRANSLATE("File %s cannot be microsatellite data -- it is missing an appropriate delimiter in its first line");
+const wxString gcerr_migrate::missingSequenceCount=wxTRANSLATE("Migrate parser was expecting sequence count but got \"%s\" instead.");
+const wxString gcerr_migrate::parseErr      = wxTRANSLATE("Error while parsing near line %d of file %s: %s.");
+const wxString gcerr_migrate::tooFewSequenceLengths = wxTRANSLATE("Found only %d sequence lengths in line \"%s\" of migrate file.");
+
+const char gcstr_migrate::missingData =  '?';
+
+const wxString gcerr_phylip::badFirstToken   =   wxTRANSLATE("Not a phylip file: first token \"%s\" should be a positive integer.");
+const wxString gcerr_phylip::badSecondToken  =   wxTRANSLATE("Not a phylip file: second token \"%s\" should be a positive integer.");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_infile.h b/src/convStrings/gc_strings_infile.h
new file mode 100644
index 0000000..83f12b8
--- /dev/null
+++ b/src/convStrings/gc_strings_infile.h
@@ -0,0 +1,63 @@
+// $Id: gc_strings_infile.h,v 1.14 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_INFILE_H
+#define GC_STRINGS_INFILE_H
+
+#include "wx/string.h"
+
+class gcerr_infile
+{
+  public:
+    static const wxString extraFileData;
+    static const wxString fileParseError;
+    static const wxString illegalDna;
+    static const wxString illegalMsat;
+    static const wxString parseDataTypeSpecMismatch;
+    static const wxString parseMissingErr;
+    static const wxString prematureEndOfFile;
+    static const wxString shortSampleName;
+    static const wxString tokenCountMismatch;
+    static const wxString tooFewMarkersInSample;
+    static const wxString tooManyMarkersInSample;
+    static const wxString unableToParseBecause;
+};
+
+class gcerr_migrate
+{
+  public:
+    static const wxString badDelimiter;
+    static const wxString badLocusCount;
+    static const wxString badPopCount;
+    static const wxString badSequenceCount;
+    static const wxString firstToken;
+    static const wxString locusLengthNotPositive;
+    static const wxString missingMsatDelimiter;
+    static const wxString missingSequenceCount;
+    static const wxString parseErr;
+    static const wxString tooFewSequenceLengths;
+};
+
+class gcstr_migrate
+{
+  public:
+    static const char missingData;
+};
+
+class gcerr_phylip
+{
+  public:
+    static const wxString badFirstToken;
+    static const wxString badSecondToken;
+};
+
+#endif  // GC_STRINGS_INFILE_H
+
+//____________________________________________________________________________________
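Each message above follows the same two-part convention: a static const wxString member declared in one of these header-only "namespace" classes, with a single definition in the matching .cpp wrapped in wxTRANSLATE(). A sketch of adding one more message under that convention (badMarkerCount is hypothetical and does not exist in these sources):

    // Illustrative sketch only -- hypothetical addition, not part of this patch.

    // gc_strings_infile.h, inside class gcerr_infile:
    //     static const wxString badMarkerCount;

    // gc_strings_infile.cpp:
    const wxString gcerr_infile::badMarkerCount =
        wxTRANSLATE("Expected %d markers in file \"%s\" but found %d.");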
diff --git a/src/convStrings/gc_strings_io.cpp b/src/convStrings/gc_strings_io.cpp
new file mode 100644
index 0000000..680894e
--- /dev/null
+++ b/src/convStrings/gc_strings_io.cpp
@@ -0,0 +1,19 @@
+// $Id: gc_strings_io.cpp,v 1.5 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_io.h"
+#include "wx/intl.h"
+
+const wxString gc_io::eof           =   wxTRANSLATE("possible premature end of file");
+const wxString gc_io::fileMissing   =   wxTRANSLATE("File %s missing or unreadable.");
+const wxString gc_io::fileReadError =   wxTRANSLATE("Unknown error reading file.");
+const wxString gc_io::fileReadErrorWithName =   wxTRANSLATE("Unknown error reading file %s.");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_io.h b/src/convStrings/gc_strings_io.h
new file mode 100644
index 0000000..13fa190
--- /dev/null
+++ b/src/convStrings/gc_strings_io.h
@@ -0,0 +1,27 @@
+// $Id: gc_strings_io.h,v 1.5 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_IO_H
+#define GC_STRINGS_IO_H
+
+#include "wx/string.h"
+
+class gc_io
+{
+  public:
+    static const wxString eof;
+    static const wxString fileMissing;
+    static const wxString fileReadError;
+    static const wxString fileReadErrorWithName;
+};
+
+#endif  // GC_STRINGS_IO_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_locus.cpp b/src/convStrings/gc_strings_locus.cpp
new file mode 100644
index 0000000..31cc20f
--- /dev/null
+++ b/src/convStrings/gc_strings_locus.cpp
@@ -0,0 +1,41 @@
+// $Id: gc_strings_locus.cpp,v 1.15 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_locus.h"
+#include "wx/intl.h"
+
+const wxString gcerr_locus::unsetLinkedUserValue =   "Call to GetLinkedUserValue() without HaveLinkedUserValue()";
+const wxString gcerr_locus::unsetOffset =   "Call to GetOffset() without HaveOffset()";
+
+const wxString gcerr_locus::dnaBigLengthNeedsLocations =wxTRANSLATE("Segment \"%s\" of type DNA requires locations since its total length is greater than its number of markers.");
+const wxString gcerr_locus::lengthMismatch =wxTRANSLATE("Unable to merge data from segment \"%s\" with segment \"%s\" because they have lengths of %d and %d.");
+const wxString gcerr_locus::lengthMissing =wxTRANSLATE("Segment \"%s\" needs a total length assignment.");
+const wxString gcerr_locus::locationsOutOfOrder =wxTRANSLATE("In segment \"%s\": location %ld cannot appear after %ld; locations must be strictly increasing.");
+const wxString gcerr_locus::locationTooLarge =wxTRANSLATE("%d cannot be a location for segment \"%s\" since the largest possible value is %d.");
+const wxString gcerr_locus::locationTooSmall =wxTRANSLATE("%d cannot be a location for segment \"%s\" since the smallest possible value is %d.");
+const wxString gcerr_locus::mapPositionMismatch =wxTRANSLATE("Unable to merge data from segment \"%s\" with segment \"%s\" because they have map positions of %ld and %ld.");
+const wxString gcerr_locus::mergeFailure   =wxTRANSLATE("Something unexpected happened while trying to merge segments.\n\nIt is possible your operation did not complete.");
+const wxString gcerr_locus::missing        =wxTRANSLATE("Segment \"%s\" not defined");
+const wxString gcerr_locus::numMarkersZero =wxTRANSLATE("Segment must have one or more markers.");
+const wxString gcerr_locus::offsetMismatch =wxTRANSLATE("Unable to merge data from segment \"%s\" with segment \"%s\" because they have first position scanned of %ld and %ld.");
+const wxString gcerr_locus::offsetMissingMultiSegment =wxTRANSLATE("Segment \"%s\" requires you to set the first position scanned since multiple segments appear in the same region.");
+const wxString gcerr_locus::offsetMissingSnpLocations =wxTRANSLATE("Segment \"%s\" requires you to set the first position scanned because it is SNP data with locations.");
+const wxString gcerr_locus::overlap =wxTRANSLATE("segment %s (%ld:%ld) overlaps segment %s (%ld:%ld)");
+const wxString gcerr_locus::setLocs =wxTRANSLATE("Unable to set locations for segment \"%s\" to \"%s\" because they have different site counts of %ld and %ld.");
+const wxString gcerr_locus::siteCountMismatch =wxTRANSLATE("Unable to merge data from the following segments because they have different site counts of %d and %d:\n\n\t%s\n\n\t%s");
+
+//const wxString gcerr_structures::hapMismatch    =wxTRANSLATE("Unable to merge data from segment \"%s\" with segment \"%s\" because parsed data has ploidy of %d and segment has %d.");
+const wxString gcerr_locus::typeMismatch   =wxTRANSLATE("Unable to merge data from segment \"%s\" with segment \"%s\" because they have incompatible data types of %s and %s.");
+const wxString gcerr_locus::unlinkedNuc =wxTRANSLATE("Cannot set DNA or SNP data to be unlinked");
+const wxString gcerr_locus::unsetNumMarkers=wxTRANSLATE("Num Markers not set for segment \"%s\".");
+const wxString gcerr_locus::userDataTypeMismatch =wxTRANSLATE("Unable to merge data from segment \"%s\" with segment \"%s\" because they have different data types of %s and %s.");
+const wxString gcerr_locus::wrongLocationCount =wxTRANSLATE("Cannot have %ld locations for segment \"%s\".\n\nThe number of locations must be %ld, the number of data markers.");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_locus.h b/src/convStrings/gc_strings_locus.h
new file mode 100644
index 0000000..3c4ebb0
--- /dev/null
+++ b/src/convStrings/gc_strings_locus.h
@@ -0,0 +1,46 @@
+// $Id: gc_strings_locus.h,v 1.14 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_LOCUS_H
+#define GC_STRINGS_LOCUS_H
+
+#include "wx/string.h"
+
+class gcerr_locus
+{
+  public:
+    static const wxString dnaBigLengthNeedsLocations;
+    static const wxString lengthMismatch;
+    static const wxString lengthMissing;
+    static const wxString locationsOutOfOrder;
+    static const wxString locationTooLarge;
+    static const wxString locationTooSmall;
+    static const wxString mapPositionMismatch;
+    static const wxString mergeFailure;
+    static const wxString missing;
+    static const wxString numMarkersZero;
+    static const wxString offsetMismatch;
+    static const wxString offsetMissingMultiSegment;
+    static const wxString offsetMissingSnpLocations;
+    static const wxString overlap;
+    static const wxString setLocs;
+    static const wxString siteCountMismatch;
+    static const wxString typeMismatch;
+    static const wxString unlinkedNuc;
+    static const wxString unsetLinkedUserValue;
+    static const wxString unsetNumMarkers;
+    static const wxString unsetOffset;
+    static const wxString userDataTypeMismatch;
+    static const wxString wrongLocationCount;
+};
+
+#endif  // GC_STRINGS_LOCUS_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_map.cpp b/src/convStrings/gc_strings_map.cpp
new file mode 100644
index 0000000..7970509
--- /dev/null
+++ b/src/convStrings/gc_strings_map.cpp
@@ -0,0 +1,21 @@
+// $Id: gc_strings_map.cpp,v 1.5 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_map.h"
+#include "wx/intl.h"
+
+const wxString gcstr_map::notXmlMapFileTryOldFmt    =wxTRANSLATE("%s. Trying to read %s as old map file format.");
+
+const wxString gcerr_map::ERR_BAD_TOP_TAG   =wxTRANSLATE("Expected top XML tag of <%s> but got <%s> instead.");
+const wxString gcerr_map::fileEmpty         =wxTRANSLATE("File %s empty");
+const wxString gcerr_map::fileMissing       =wxTRANSLATE("File %s missing");
+const wxString gcerr_map::fileReadErr       =wxTRANSLATE("Unknown error while reading file %s");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_map.h b/src/convStrings/gc_strings_map.h
new file mode 100644
index 0000000..c3b6afa
--- /dev/null
+++ b/src/convStrings/gc_strings_map.h
@@ -0,0 +1,108 @@
+// $Id: gc_strings_map.h,v 1.6 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_MAP_H
+#define GC_STRINGS_MAP_H
+
+#include "wx/string.h"
+
+class gcstr_map
+{
+  public:
+
+#if 0
+    static const wxString ATTR_ADJACENCY;
+    static const wxString ATTR_DATATYPE;
+    static const wxString ATTR_FORMAT;
+    static const wxString ATTR_SEQUENCEALIGNMENT;
+    static const wxString ATTR_TYPE;
+
+    static const wxString ATTR_VAL_BYLIST;
+    static const wxString ATTR_VAL_BYNAME;
+    static const wxString ATTR_VAL_LINKED;
+    static const wxString ATTR_VAL_SINGLE;
+    static const wxString ATTR_VAL_UNLINKED;
+
+    static const wxString TAG_ADDCOMMENT;
+    static const wxString TAG_ALLELE;
+    static const wxString TAG_ALLELES;
+    static const wxString TAG_BLOCK;
+    static const wxString TAG_CONVERTER_CMD;
+    static const wxString TAG_EFFECTIVE_POPSIZE;
+    static const wxString TAG_FIRST_POSITION_SCANNED;
+    static const wxString TAG_GENO_RESOLUTIONS;
+    static const wxString TAG_HAPLOTYPES;
+    static const wxString TAG_INDIVIDUAL_MATCHING;
+    static const wxString TAG_INFILE;
+    static const wxString TAG_INFILES;
+    static const wxString TAG_LOCI_MATCHING;
+    static const wxString TAG_LOCUS_NAME;
+    static const wxString TAG_MAP_POSITION;
+    static const wxString TAG_NAME;
+    static const wxString TAG_OUTFILE;
+    static const wxString TAG_PHASE_FILE;
+    static const wxString TAG_POPULATION;
+    static const wxString TAG_POPULATIONS;
+    static const wxString TAG_POP_MATCHING;
+    static const wxString TAG_POP_NAME;
+    static const wxString TAG_REGION;
+    static const wxString TAG_REGIONS;
+    static const wxString TAG_RELATIVE_PROB;
+    static const wxString TAG_SAMPLES_PER_INDIVIDUAL;
+    static const wxString TAG_SCANNED_DATA_POSITIONS;
+    static const wxString TAG_SCANNED_LENGTH;
+    static const wxString TAG_SITES;
+    static const wxString TAG_SPACING;
+    static const wxString TAG_TRAIT;
+    static const wxString TAG_TRAIT_INFO;
+
+    static const wxString ERR_BYNAME_POP_MATCHER_NO_VALUE;
+    static const wxString ERR_DATA_LENGTH_REQUIRED;
+    static const wxString ERR_DNA_LOCATIONS;
+    static const wxString ERR_EMPTY_POP_NAME;
+    static const wxString ERR_EXTRA_TAG;
+    static const wxString ERR_HAP_DATA_SIZE_MISMATCH;
+    static const wxString ERR_LOCATION_SITE_MISMATCH;
+    static const wxString ERR_MISSING_FILE;
+    static const wxString ERR_MISSING_TAG;
+    static const wxString ERR_NAME_REPEAT;
+    static const wxString ERR_NO_DATA_LENGTH_FOR_DNA;
+    static const wxString ERR_NO_DATATYPE;
+    static const wxString ERR_NO_FORMAT;
+    static const wxString ERR_NO_INTERLEAVING;
+    static const wxString ERR_NO_SUCH_POP_NAME;
+    static const wxString ERR_NOT_DOUBLE;
+    static const wxString ERR_NOT_LONG;
+    static const wxString ERR_NOT_SIZE_T;
+    static const wxString ERR_ROW_WRAP;
+    static const wxString ERR_SHORT_DATA_LENGTH;
+    static const wxString ERR_TRAIT_REPEAT;
+    static const wxString ERR_UNKNOWN_LOCUS_MATCHER;
+    static const wxString ERR_UNKNOWN_POP_MATCHER;
+    static const wxString ERR_UNRECOGNIZED_TAG;
+
+    static const wxString WARN_NO_LOCATIONS;
+#endif
+
+    static const wxString notXmlMapFileTryOldFmt;
+};
+
+class gcerr_map
+{
+  public:
+    static const wxString ERR_BAD_TOP_TAG;
+    static const wxString fileEmpty;
+    static const wxString fileMissing;
+    static const wxString fileReadErr;
+};
+
+#endif  // GC_STRINGS_MAP_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_mig.cpp b/src/convStrings/gc_strings_mig.cpp
new file mode 100644
index 0000000..17884cb
--- /dev/null
+++ b/src/convStrings/gc_strings_mig.cpp
@@ -0,0 +1,25 @@
+// $Id: gc_strings_mig.cpp,v 1.2 2011/12/30 22:50:10 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_mig.h"
+#include "wx/intl.h"
+
+const wxString gcstr_mig::internalName = "internalMig_%ld";
+const wxString gcstr_mig::migmethodUser     =   wxTRANSLATE("USER");
+const wxString gcstr_mig::migmethodFST      =   wxTRANSLATE("FST");
+const wxString gcstr_mig::migprofileNone        =   wxTRANSLATE("None");
+const wxString gcstr_mig::migprofileFixed       =   wxTRANSLATE("Fixed");
+const wxString gcstr_mig::migprofilePercentile  =   wxTRANSLATE("Percentile");
+const wxString gcstr_mig::migconstraintInvalid      = wxTRANSLATE("Invalid");
+const wxString gcstr_mig::migconstraintConstant     = wxTRANSLATE("Constant");
+const wxString gcstr_mig::migconstraintSymmetric    = wxTRANSLATE("Symmetric");
+const wxString gcstr_mig::migconstraintUnconstained = wxTRANSLATE("Unconstrained");
+
+//____________________________________________________________________________________
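Note that gcstr_mig::internalName above is a plain literal rather than a wxTRANSLATE() string: generated internal identifiers should stay stable regardless of the user's locale, so they are produced directly with wxString::Format. A minimal sketch of how such a name might be built (the helper name is hypothetical):

    // Illustrative sketch only -- not part of this patch.
    #include "gc_strings_mig.h"   // assumed to declare gcstr_mig
    #include "wx/string.h"

    // Build a locale-independent internal migration name, e.g. "internalMig_7".
    wxString MakeInternalMigName(long id)
    {
        return wxString::Format(gcstr_mig::internalName, id);
    }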
diff --git a/src/convStrings/gc_strings_mig.h b/src/convStrings/gc_strings_mig.h
new file mode 100644
index 0000000..1f13f5d
--- /dev/null
+++ b/src/convStrings/gc_strings_mig.h
@@ -0,0 +1,33 @@
+// $Id: gc_strings_mig.h,v 1.2 2011/12/30 22:50:10 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_MIG_H
+#define GC_STRINGS_MIG_H
+
+#include "wx/string.h"
+
+class gcstr_mig
+{
+  public:
+    static const wxString internalName;
+    static const wxString migmethodUser;
+    static const wxString migmethodFST;
+    static const wxString migprofileNone;
+    static const wxString migprofileFixed;
+    static const wxString migprofilePercentile;
+    static const wxString migconstraintInvalid;
+    static const wxString migconstraintConstant;
+    static const wxString migconstraintSymmetric;
+    static const wxString migconstraintUnconstained;
+};
+
+#endif  //GC_STRINGS_MIG_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_parse.cpp b/src/convStrings/gc_strings_parse.cpp
new file mode 100644
index 0000000..71c3a4c
--- /dev/null
+++ b/src/convStrings/gc_strings_parse.cpp
@@ -0,0 +1,20 @@
+// $Id: gc_strings_parse.cpp,v 1.5 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_parse.h"
+#include "wx/intl.h"
+
+const wxString gcerr_parse::extraFileData   = wxTRANSLATE("File contains extra data");
+const wxString gcerr_parse::ignoringPhylipWeights   = wxTRANSLATE("Ignoring weights information in file %s.");
+
+const wxString gcstr_parse::parsingStarting = wxTRANSLATE("Starting parsing of %s. This can take a while.");
+const wxString gcstr_parse::parsingDone     = wxTRANSLATE("Done parsing %s.");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_parse.h b/src/convStrings/gc_strings_parse.h
new file mode 100644
index 0000000..8cd625e
--- /dev/null
+++ b/src/convStrings/gc_strings_parse.h
@@ -0,0 +1,32 @@
+// $Id: gc_strings_parse.h,v 1.6 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_PARSE_H
+#define GC_STRINGS_PARSE_H
+
+#include "wx/string.h"
+
+class gcerr_parse
+{
+  public:
+    static const wxString extraFileData;
+    static const wxString ignoringPhylipWeights;
+};
+
+class gcstr_parse
+{
+  public:
+    static const wxString parsingDone;
+    static const wxString parsingStarting;
+};
+
+#endif  // GC_STRINGS_PARSE_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_parse_locus.cpp b/src/convStrings/gc_strings_parse_locus.cpp
new file mode 100644
index 0000000..06314e2
--- /dev/null
+++ b/src/convStrings/gc_strings_parse_locus.cpp
@@ -0,0 +1,17 @@
+// $Id: gc_strings_parse_locus.cpp,v 1.3 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_parse_locus.h"
+#include "wx/intl.h"
+
+const wxString gcstr_parselocus::nameLong = wxTRANSLATE("segment number %ld read during parsing of %s");
+const wxString gcstr_parselocus::nameShort= wxTRANSLATE("segment %ld of %s");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_parse_locus.h b/src/convStrings/gc_strings_parse_locus.h
new file mode 100644
index 0000000..3bd14b3
--- /dev/null
+++ b/src/convStrings/gc_strings_parse_locus.h
@@ -0,0 +1,25 @@
+// $Id: gc_strings_parse_locus.h,v 1.4 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_PARSE_LOCUS_H
+#define GC_STRINGS_PARSE_LOCUS_H
+
+#include "wx/string.h"
+
+class gcstr_parselocus
+{
+  public:
+    static const wxString nameLong;
+    static const wxString nameShort;
+};
+
+#endif  // GC_STRINGS_PARSE_LOCUS_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_phase.cpp b/src/convStrings/gc_strings_phase.cpp
new file mode 100644
index 0000000..0f25c64
--- /dev/null
+++ b/src/convStrings/gc_strings_phase.cpp
@@ -0,0 +1,38 @@
+// $Id: gc_strings_phase.cpp,v 1.11 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_phase.h"
+#include "wx/intl.h"
+
+const wxString gcerr_phase::adjacentPhaseForMultiSample = wxTRANSLATE("Cannot group adjacent samples from file %s into single individuals -- each sample is already a multi-phase sample");
+const wxString gcerr_phase::badIndMatchAdjacencyValue = wxTRANSLATE("Cannot group each \"%s\" adjacent samples into single individual. Please use an integer greater than or equal to 2.");
+const wxString gcerr_phase::badIndMatchType  = wxTRANSLATE("Bad value \"%s\" for individual-matching type attribute. Current legal values are: byAdjacency");
+const wxString gcerr_phase::badTopTag = wxTRANSLATE("Expected top XML tag of <%s> but got <%s> instead");
+const wxString gcerr_phase::bothIndividualAndSample = wxTRANSLATE("Name \"%s\" appears as both an individual name and a sample name. This is not legal.");
+const wxString gcerr_phase::individualPhenotypeNameRepeat = wxTRANSLATE("Phenotype \"%s\" appears more than once for individual \"%s\".");
+const wxString gcerr_phase::markerNotLegal =wxTRANSLATE("Unresolved phase position \"%s\" is illegal -- must be an integer.");
+const wxString gcerr_phase::matchingConfusion = wxTRANSLATE("Cannot determine how to match up all data samples in region %s.\n\nPlease write a converter command file specifying individual and sample relationships using the <individuals> tag.\n\nSamples that need information include \"%s\" and \"%s\".");
+const wxString gcerr_phase::mergeMismatch = wxTRANSLATE("Cannot resolve the following individuals\n\n%s\n\n%s\n\nIf present, sample names must match and occur in the same order.");
+const wxString gcerr_phase::noIndividualForSample = wxTRANSLATE("No information found relating sample %s to any individual");
+const wxString gcerr_phase::noSampleForIndividual = wxTRANSLATE("No information found relating individual %s to any samples");
+const wxString gcerr_phase::notLocation = wxTRANSLATE("Cannot assign unresolved phase to marker at position %ld in segment \"%s\" because the segment locations do not include it.");
+const wxString gcerr_phase::tooLarge = wxTRANSLATE("Cannot assign unresolved phase to marker at position %ld for individual \"%s\" because position is too large. Segment \"%s\" has largest marker position of %ld.");
+const wxString gcerr_phase::tooSmall = wxTRANSLATE("Cannot assign unresolved phase to marker at position %ld for individual \"%s\" because position is too small. Segment \"%s\" has smallest marker position of %ld.");
+const wxString gcerr_phase::unevenAdjDivisor      = wxTRANSLATE("Cannot evenly allocate %d samples to individuals with %d samples each.");
+
+const wxString gcstr_phase::adjacentHaps1     =   wxTRANSLATE("Group every ");
+const wxString gcstr_phase::adjacentHaps2     =   wxTRANSLATE(" adjacent samples into one individual");
+const wxString gcstr_phase::descFileAdjacency =   wxTRANSLATE("induced by adjacency near line %s of file %s, and containing samples %s");
+const wxString gcstr_phase::descMultiPhase    =   wxTRANSLATE("induced by multiploid data near line %s of file %s, and containing samples %s");
+const wxString gcstr_phase::descPhaseFile     =   wxTRANSLATE("%s defined near line %s of file %s, and containing samples %s");
+const wxString gcstr_phase::known   =   wxTRANSLATE("known");
+const wxString gcstr_phase::unknown =   wxTRANSLATE("unknown");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_phase.h b/src/convStrings/gc_strings_phase.h
new file mode 100644
index 0000000..675f2ac
--- /dev/null
+++ b/src/convStrings/gc_strings_phase.h
@@ -0,0 +1,50 @@
+// $Id: gc_strings_phase.h,v 1.12 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_PHASE_H
+#define GC_STRINGS_PHASE_H
+
+#include "wx/string.h"
+
+class gcerr_phase
+{
+  public:
+    static const wxString adjacentPhaseForMultiSample;
+    static const wxString badIndMatchAdjacencyValue;
+    static const wxString badIndMatchType;
+    static const wxString badTopTag;
+    static const wxString bothIndividualAndSample;
+    static const wxString individualPhenotypeNameRepeat;
+    static const wxString markerNotLegal;
+    static const wxString matchingConfusion;
+    static const wxString mergeMismatch;
+    static const wxString noIndividualForSample;
+    static const wxString noSampleForIndividual;
+    static const wxString notLocation;
+    static const wxString tooLarge;
+    static const wxString tooSmall;
+    static const wxString unevenAdjDivisor;
+};
+
+class gcstr_phase
+{
+  public:
+    static const wxString adjacentHaps1;
+    static const wxString adjacentHaps2;
+    static const wxString descFileAdjacency;
+    static const wxString descMultiPhase;
+    static const wxString descPhaseFile;
+    static const wxString known;
+    static const wxString unknown;
+};
+
+#endif  // GC_STRINGS_PHASE_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_pop.cpp b/src/convStrings/gc_strings_pop.cpp
new file mode 100644
index 0000000..921ffbe
--- /dev/null
+++ b/src/convStrings/gc_strings_pop.cpp
@@ -0,0 +1,16 @@
+// $Id: gc_strings_pop.cpp,v 1.3 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_pop.h"
+#include "wx/intl.h"
+
+const wxString gcstr_pop::internalName = "internalPop_%ld";
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_pop.h b/src/convStrings/gc_strings_pop.h
new file mode 100644
index 0000000..1b7c74c
--- /dev/null
+++ b/src/convStrings/gc_strings_pop.h
@@ -0,0 +1,24 @@
+// $Id: gc_strings_pop.h,v 1.4 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_POP_H
+#define GC_STRINGS_POP_H
+
+#include "wx/string.h"
+
+class gcstr_pop
+{
+  public:
+    static const wxString internalName;
+};
+
+#endif  //GC_STRINGS_POP_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_region.cpp b/src/convStrings/gc_strings_region.cpp
new file mode 100644
index 0000000..a1964b7
--- /dev/null
+++ b/src/convStrings/gc_strings_region.cpp
@@ -0,0 +1,24 @@
+// $Id: gc_strings_region.cpp,v 1.9 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_region.h"
+#include "wx/intl.h"
+
+const wxString gcstr_region::effPopSize     =wxTRANSLATE("Relative effective population size: %.2f");
+const wxString gcstr_region::internalName   =           "internalGroup_%ld";
+const wxString gcstr_region::locusMapPosition       =   "(%d : %s)";
+const wxString gcstr_region::mapPosition            =   "@ %ld";
+const wxString gcstr_region::numLoci        =wxTRANSLATE("Number of segments: %d");
+
+// const wxString gcstr_region::samplesPer     =wxTRANSLATE("Samples per individual: %d");
+const wxString gcstr_region::tabTitle       =wxTRANSLATE("Properties of %d Regions");
+const wxString gcstr_region::traitIndexListMember   =   "%d ";
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_region.h b/src/convStrings/gc_strings_region.h
new file mode 100644
index 0000000..0ae1929
--- /dev/null
+++ b/src/convStrings/gc_strings_region.h
@@ -0,0 +1,36 @@
+// $Id: gc_strings_region.h,v 1.6 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_REGION_H
+#define GC_STRINGS_REGION_H
+
+#include "wx/string.h"
+
+class gcerr_region
+{
+  public:
+};
+
+class gcstr_region
+{
+  public:
+    static const wxString effPopSize;
+    static const wxString internalName;
+    static const wxString locusMapPosition;
+    static const wxString mapPosition;
+    static const wxString numLoci;
+    // static const wxString samplesPer;
+    static const wxString tabTitle;
+    static const wxString traitIndexListMember;
+};
+
+#endif  // GC_STRINGS_REGION_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_str_a_m.cpp b/src/convStrings/gc_strings_str_a_m.cpp
new file mode 100644
index 0000000..b064360
--- /dev/null
+++ b/src/convStrings/gc_strings_str_a_m.cpp
@@ -0,0 +1,228 @@
+// $Id: gc_strings_str_a_m.cpp,v 1.8 2011/12/01 22:32:42 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings.h"
+#include "wx/intl.h"
+
+const wxString gcstr::abandonExport     =   wxTRANSLATE("Abort Export");
+const wxString gcstr::addHapFile        =   wxTRANSLATE("Select New Haplotype/Phase Resolution File");
+const wxString gcstr::addLocus          =   wxTRANSLATE("Adding New Segment");
+const wxString gcstr::addPanel          =   wxTRANSLATE("Adding New Panel");
+const wxString gcstr::addParent         =   wxTRANSLATE("Adding New Parent");
+const wxString gcstr::addPop            =   wxTRANSLATE("Adding New Population");
+const wxString gcstr::addRegion         =   wxTRANSLATE("Adding New Region");
+const wxString gcstr::adjacent          =   "adjacent";
+const wxString gcstr::all               =   "all";
+const wxString gcstr::allele            =   wxTRANSLATE("allele");
+const wxString gcstr::allelic           =   "Allelic";
+const wxString gcstr::allFiles          =   "*";
+const wxString gcstr::assignTabTitle    =   wxTRANSLATE("Data Partitions");
+
+const wxString gcstr::badLocusLength    =   wxTRANSLATE("Attempted to set length of segment \"%s\" to illegal value \"%s\". All lengths must be positive integers. Ignoring.");
+const wxString gcstr::badLocusLength1   =   wxTRANSLATE("The following segments have suspiciously short lengths:");
+const wxString gcstr::badLocusLength2   =   wxTRANSLATE("You may wish to cancel this operation and edit segment lengths in the Genomic Segments Properties tab");
+const wxString gcstr::badLocusPosition  =   wxTRANSLATE("Attempted to set position of segment \"%s\" to illegal value \"%s\". All map positions must be integers. Ignoring.");
+const wxString gcstr::badRegionLength1  =   wxTRANSLATE("The following regions have suspiciously short lengths:");
+const wxString gcstr::badRegionLength2  =   wxTRANSLATE("You may wish to cancel this operation and edit region lengths in the Genomic Region Properties tab");
+const wxString gcstr::batchFileDefault  =   "exported_batch_commands.xml";
+const wxString gcstr::batchOutComment   =   wxTRANSLATE("Exported from lamarc converter %s");
+const wxString gcstr::batchSafeFinish   =   wxTRANSLATE("batch converter finished successfully");
+const wxString gcstr::blockFromFiles    =   wxTRANSLATE("1 block from %d files");
+const wxString gcstr::blockInfo1        =   wxTRANSLATE("%d samples of type %s");
+const wxString gcstr::blockInfo2        =   wxTRANSLATE("from file %s");
+const wxString gcstr::blockFileInfo     =   wxTRANSLATE("data file:");
+const wxString gcstr::blockLocusChoice  =   wxTRANSLATE("Choose a segment for this block:");
+const wxString gcstr::blockLocusIndexInFile  =   wxTRANSLATE("Locus order within file");
+const wxString gcstr::blockPopChoice    =   wxTRANSLATE("Choose a population for this block:");
+const wxString gcstr::blockPopIndexInFile  =   wxTRANSLATE("Population order within file");
+const wxString gcstr::blockPloidyInfo   =   wxTRANSLATE("%d %s samples\n\nChange sample ploidy in Data File panel.");
+const wxString gcstr::blockPloidyTitle  =   wxTRANSLATE("Samples and Sequences");
+const wxString gcstr::blocksFromFiles   =   wxTRANSLATE("%d blocks from %d files");
+const wxString gcstr::buttonHide        =   wxTRANSLATE("Hide Detail");
+const wxString gcstr::buttonSelectAll   =   wxTRANSLATE("Select All");
+const wxString gcstr::buttonUnselectAll =   wxTRANSLATE("Unselect All");
+const wxString gcstr::buttonShow        =   wxTRANSLATE("Show Detail");
+const wxString gcstr::byfile            =   "by file";
+const wxString gcstr::byprog            =   "by program";
+const wxString gcstr::byuser            =   "by user";
+const wxString gcstr::cancelString      =   wxTRANSLATE("Cancel");
+const wxString gcstr::childPopsInstructions =   wxTRANSLATE("Child Populations:");
+const wxString gcstr::chooseHapResolution = wxTRANSLATE("Choose Haplotype Resolution");
+const wxString gcstr::chooseOne         =   wxTRANSLATE("Choose one");
+const wxString gcstr::chooseOneGroup    =   wxTRANSLATE("Choose one Region");
+const wxString gcstr::chooseOneLocus    =   wxTRANSLATE("Choose one Segment");
+const wxString gcstr::chooseOneParse    =   wxTRANSLATE("Choose one Parse");
+const wxString gcstr::chooseOnePop      =   wxTRANSLATE("Choose one Population");
+const wxString gcstr::continueExport    =   wxTRANSLATE("Export Without Locations");
+const wxString gcstr::continueString    =   wxTRANSLATE("Export");
+const wxString gcstr::cannotWrite       =   wxTRANSLATE("Unable to write file \"%s\"");
+const wxString gcstr::chooseDataType    =   wxTRANSLATE("Choose");
+const wxString gcstr::chooseFileType    =   wxTRANSLATE("Choose");
+const wxString gcstr::converterInfo     =   wxTRANSLATE("A GUI version of the LAMARC converter.\n\nlamarc@u.washington.edu\n\nhttp://evolution.gs.washington.edu/lamarc/\n\nCopyright 2009 Mary K. Kuhner, Peter Beerli, Joe Felsenstein, Bob Giansiracusa, James R. McGill, Eric Rynes, Lucian Smith, Elizabeth A. Walkup, Jon Yamato");
+const wxString gcstr::converterTitle    =   wxTRANSLATE("LAMARC File Converter");
+const wxString gcstr::createNewRegion   =   wxTRANSLATE("place in its own region");
+const wxString gcstr::createParent2Child =   wxTRANSLATE("Pick the 2 children of this parent");
+const wxString gcstr::createParentFirst2Children = wxTRANSLATE("Create parent of first 2 children");
+const wxString gcstr::createParentNext2Children = wxTRANSLATE("Create parent of next 2 children");
+const wxString gcstr::dataBlocks        =   wxTRANSLATE("parsed data");
+const wxString gcstr::dataFileBatchExport=  wxTRANSLATE("Select data file to write batch commands to");
+const wxString gcstr::dataFileButtonAdd =   wxTRANSLATE("Add");
+const wxString gcstr::dataFileButtonAllSelect =   wxTRANSLATE("Select All");
+const wxString gcstr::dataFileButtonAllUnselect =   wxTRANSLATE("Unselect All");
+const wxString gcstr::dataFileButtonRemoveSelected =   wxTRANSLATE("Remove Selected");
+const wxString gcstr::dataFileExport    =   wxTRANSLATE("Select data file to write");
+const wxString gcstr::dataFiles         =   "Phylip and Migrate files (*.phy;*.mig)|*.phy;*.mig|All Files (*)|*";
+const wxString gcstr::dataFilesInstructions   =   wxTRANSLATE("Add a file using the button above.");
+const wxString gcstr::dataFilesSelect   =   wxTRANSLATE("Select data files to read");
+const wxString gcstr::dataFilesTitle    =   wxTRANSLATE("Information on %d Data Files");
+const wxString gcstr::dataType          =   wxTRANSLATE("Data Type");
+const wxString gcstr::divergence        =   wxTRANSLATE("Divergence");
+const wxString gcstr::divergeInstructions =   wxTRANSLATE("Pick Children:");
+const wxString gcstr::divergenceSelect  =   wxTRANSLATE("Select 2 child Populations");
+const wxString gcstr::divMigMatrix      =   wxTRANSLATE("Diverge_Mig");
+const wxString gcstr::dna               =   "DNA";
+const wxString gcstr::doneThanks        =   wxTRANSLATE("Operation completed.");
+const wxString gcstr::editApply         =   wxTRANSLATE("Apply");
+const wxString gcstr::editCancel        =   wxTRANSLATE("Cancel");
+const wxString gcstr::editDelete        =   wxTRANSLATE("Delete");
+const wxString gcstr::editFileSettings  =   wxTRANSLATE("Edit/Review Settings for file \"%s\"");
+const wxString gcstr::editLocus         =   wxTRANSLATE("Edit segment \"%s\"");
+const wxString gcstr::editMigration     =   wxTRANSLATE("Edit mig from \"%s\" to \"%s\"");
+const wxString gcstr::editOK            =   wxTRANSLATE("OK");
+const wxString gcstr::editParseBlock    =   wxTRANSLATE("Edit parsed data properties");
+const wxString gcstr::editPanel         =   wxTRANSLATE("Edit panel \"%s\"");
+const wxString gcstr::editParent        =   wxTRANSLATE("Edit parent \"%s\"");
+const wxString gcstr::editPop           =   wxTRANSLATE("Edit population \"%s\"");
+const wxString gcstr::editRegion        =   wxTRANSLATE("Edit Region \"%s\"");
+const wxString gcstr::enterNewName      =   wxTRANSLATE("Enter new name");
+const wxString gcstr::error             =   wxTRANSLATE("ERROR");
+const wxString gcstr::errorWrap         =   wxTRANSLATE("Error Type \"%s\" for file \"%s\":\n%s");
+const wxString gcstr::errorWrapNoFile   =   wxTRANSLATE("Error Type \"%s\":\n%s");
+const wxString gcstr::exportFileDefault =   "infile.xml";
+const wxString gcstr::exportFileGlob    =   "Lamarc files (*.xml)|*.xml|All files (*)|*";
+const wxString gcstr::exportWarning     =   wxTRANSLATE("Warning: export may not be correct");
+const wxString gcstr::falseVal          =   wxTRANSLATE("false");
+const wxString gcstr::fileAlreadyAdded      =   wxTRANSLATE("Cannot add file \"%s\". It is already added.");
+const wxString gcstr::fileDelete            =   wxTRANSLATE("Delete");
+const wxString gcstr::fileEmpty             =   wxTRANSLATE("File \"%s\" appears to be empty.");
+const wxString gcstr::fileLabelDataType     =   wxTRANSLATE("Data Type");
+const wxString gcstr::fileLabelFormat       =   wxTRANSLATE("File Format");
+const wxString gcstr::fileLabelInterleaving =   wxTRANSLATE("Sequence Alignment");
+const wxString gcstr::fileLabelName         =   wxTRANSLATE("File Name");
+const wxString gcstr::fileLabelRemove       =   wxTRANSLATE("Remove File");
+
+//const wxString gcstr::fileLabelUnlinked     =   wxTRANSLATE("Unlink Microsats");
+const wxString gcstr::fileSetting           =   wxTRANSLATE("phase grouping set at file level");
+const wxString gcstr::firstPositionScanned  =   wxTRANSLATE("First Position Scanned");
+const wxString gcstr::fragmentRegion        =   wxTRANSLATE("Unlink all Segments in this Region");
+const wxString gcstr::fullPath              =   wxTRANSLATE("Full Path to File:");
+const wxString gcstr::genoFileDefault   =   wxTRANSLATE("No trait resolution set for this file");
+const wxString gcstr::globAll               =   "*";
+const wxString gcstr::hapFileEmptyFirstLine =   wxTRANSLATE("No data found in first line of phase information file \"%s\"");
+const wxString gcstr::hapFileBarf       =   wxTRANSLATE("Implementation error reading phase information file \"%s\"");
+const wxString gcstr::hapFileDefault    =   wxTRANSLATE("Use default haplotype and phase resolution");
+const wxString gcstr::hapFileMissing    =   wxTRANSLATE("Phase information file \"%s\" missing or unreadable");
+const wxString gcstr::hapFileParseFailed=   wxTRANSLATE("Parse of phase information file \"%s\" failed:%s");
+const wxString gcstr::hapFileToken1     =   wxTRANSLATE("First token of phase information file \"%s\" was \"%s\" but expected an integer");
+const wxString gcstr::hapFileToken2     =   wxTRANSLATE("Second token of phase information file \"%s\" was \"%s\" but expected \"adjacent\" or end of line");
+const wxString gcstr::hapFileToken3     =   wxTRANSLATE("Third token of phase information file \"%s\" was \"%s\" but expected an integer");
+const wxString gcstr::hapFileToken3Missing =   wxTRANSLATE("Third token of phase information file \"%s\" missing");
+const wxString gcstr::hapFileReuse      =   wxTRANSLATE("Attempt to add phase information file \"%s\" a second time");
+const wxString gcstr::hapFileSelect     =   wxTRANSLATE("select phase information file");
+const wxString gcstr::hapFileSelectAnother  =   wxTRANSLATE("file not listed here");
+const wxString gcstr::hapFileUnSelect   =   wxTRANSLATE("unselect current file");
+const wxString gcstr::hapFilesSelect    =   wxTRANSLATE("Select and apply file(s) with haplotype correspondence information.");
+const wxString gcstr::havePatience      =   wxTRANSLATE("About to start a lengthy operation. Please wait.");
+const wxString gcstr::hiddenContent     =   wxTRANSLATE("IMPLEMENTATION ERROR -- this should be a hidden object");
+const wxString gcstr::indent            =   "  ";
+const wxString gcstr::information       =   wxTRANSLATE("Information");
+const wxString gcstr::instructionsMultipleDataTypes=wxTRANSLATE("The \"datatype\" attribute for the above infile tag should be edited to list only one datatype.");
+const wxString gcstr::interleaved       =   wxTRANSLATE("Interleaved");
+const wxString gcstr::interleavedNoKalleleMsat  =   wxTRANSLATE("Interleaved format not compatible with Kallele or Microsat data (file \"%s\")");
+const wxString gcstr::kallele           =   "Kallele";
+const wxString gcstr::linkGCreateTitle  =   wxTRANSLATE("Create a New Region");
+const wxString gcstr::linkGEnterNewName=   wxTRANSLATE("Enter the name of the new region");
+const wxString gcstr::linkageCaption   =   wxTRANSLATE("Linkage:");
+const wxString gcstr::linkageNo        =   wxTRANSLATE("unlinked");
+const wxString gcstr::linkageYes       =   wxTRANSLATE("linked");
+const wxString gcstr::locations         =   wxTRANSLATE("Locations of sampled markers");
+const wxString gcstr::locationsForRecom =wxTRANSLATE("Segment \"%s\" will need its locations item set if you wish to estimate recombination in Lamarc.\n\nDo you wish to continue your file export?");
+
+const wxString gcstr::lociTabTitle      =   wxTRANSLATE("Properties of %d Segments");
+const wxString gcstr::locus             =   wxTRANSLATE("contiguous segment");
+const wxString gcstr::locusButtonAdd    =   wxTRANSLATE("Add");
+const wxString gcstr::locusButtonMergeSelected    =   wxTRANSLATE("Merge Selected");
+const wxString gcstr::locusButtonRemoveSelected    =   wxTRANSLATE("Remove Selected");
+const wxString gcstr::locusCreate       =   wxTRANSLATE("Create new Segment");
+const wxString gcstr::locusCreateTitle  =   wxTRANSLATE("Create new Segment");
+const wxString gcstr::locusDialogMapPosition=   wxTRANSLATE("Map Position in Region:");
+const wxString gcstr::locusEditString  =   wxTRANSLATE("Edit Segments");
+const wxString gcstr::locusEnterNewName=   wxTRANSLATE("Enter the name of the new segment");
+const wxString gcstr::locusExists       =   wxTRANSLATE("Segment \"%s\" already exists.");
+const wxString gcstr::locusForAll      =   wxTRANSLATE("Set All Units to One Segment");
+const wxString gcstr::locusLabelDataType   =   wxTRANSLATE("data type:%s");
+const wxString gcstr::locusLabelLength     =   wxTRANSLATE("total length:%s");
+const wxString gcstr::locusLabelLinked     =   wxTRANSLATE("sites linked:%s");
+const wxString gcstr::locusLabelMapFile    =   wxTRANSLATE("Map Position File (optional)");
+const wxString gcstr::locusLabelMapPosition=   wxTRANSLATE("position in region:%s");
+const wxString gcstr::locusLabelName       =   wxTRANSLATE("name:%s");
+const wxString gcstr::locusLabelOffset     =   wxTRANSLATE("Offset");
+const wxString gcstr::locusLabelRegionName =   wxTRANSLATE("region:%s");
+const wxString gcstr::locusLabelSites      =   wxTRANSLATE("# sites:%s");
+const wxString gcstr::locusLength          =   wxTRANSLATE("Total Length");
+const wxString gcstr::locusLengthIllegal   =   wxTRANSLATE("Illegal value \"%s\" for length of segment \"%s\". Ignoring.");
+const wxString gcstr::locusLengthVsMarkers =   wxTRANSLATE("Segment length (%ld) must be > number of markers (%ld). Ignoring.");
+const wxString gcstr::locusNameFromFile    =   wxTRANSLATE("segment %d of %s");
+const wxString gcstr::locusNewName =    wxTRANSLATE("Name for New Segment");
+const wxString gcstr::locusOffsetIllegal   =   wxTRANSLATE("Illegal value \"%s\" for offset of segment \"%s\". Ignoring.");
+const wxString gcstr::locusOwnRegion    =   wxTRANSLATE("Place this segment in its own region");
+const wxString gcstr::locusRename       =   wxTRANSLATE("Rename Segment");
+const wxString gcstr::locusRenameChoice =   wxTRANSLATE("Segment to Rename");
+const wxString gcstr::locusSelect       =   wxTRANSLATE("Select a Segment");
+const wxString gcstr::locusMarkerCount  =   wxTRANSLATE("Number of Markers");
+const wxString gcstr::locusUnlinked     =   wxTRANSLATE("unlinked markers");
+const wxString gcstr::locusUse          =   wxTRANSLATE("Use this Segment");
+const wxString gcstr::logWindowHeader   =   wxTRANSLATE("Log Window for Lamarc Converter. Use menu item \"View > Log Verbosely\" to change verbosity.");
+const wxString gcstr::mapFileExportFailed   =   wxTRANSLATE("File export failed while reading data for region \"%s\". \n\nThe error was \"%s:%s\"\n\nPlease check your settings for that region and please report the error.");
+const wxString gcstr::mapFileLastPositionTooLate =
+    wxTRANSLATE("Map position file \"%s\" has last position of %ld, but region \"%s\" length (%ld) and first position sequenced (%ld) allow last position of %ld or less only");
+const wxString gcstr::mapFileMarkerPositionMismatch =
+    wxTRANSLATE("Map position file \"%s\" has %ld distinct positions, but region \"%s\" requires at least %ld");
+const wxString gcstr::mapFileOffsetAfterFirstPosition =
+    wxTRANSLATE("Map position file \"%s\" has first position %ld, which is before %ld, the start of sequencing in region \"%s\"");
+const wxString gcstr::mapFileRegionMissing  =   wxTRANSLATE("Cannot export file because map position file \"%s\" has no data for region \"%s\"");
+const wxString gcstr::mapFileSelect     =   wxTRANSLATE("select map position file");
+const wxString gcstr::mapFileSelectAnother  =   wxTRANSLATE("file not listed here");
+const wxString gcstr::mapFileUnSelect   =   wxTRANSLATE("un-select current file");
+const wxString gcstr::mapFilesSelect    =   wxTRANSLATE("Select and apply file(s) with data sample spacing information.");
+const wxString gcstr::mapPositionUnset  =   wxTRANSLATE("<unset map position>");
+const wxString gcstr::members           =   wxTRANSLATE("members: %s");
+const wxString gcstr::mergeLinkGTitle        =   wxTRANSLATE("Combine Regions");
+const wxString gcstr::mergeLinkGInstructions =   wxTRANSLATE("Merge with selected Regions:");
+const wxString gcstr::mergeLociTitle        =   wxTRANSLATE("Combine Segments");
+const wxString gcstr::mergeLociInstructions =   wxTRANSLATE("Merge with selected Segments:");
+const wxString gcstr::mergePopsTitle        =   wxTRANSLATE("Combine Populations");
+const wxString gcstr::mergePopsInstructions =   wxTRANSLATE("Merge with selected Populations:");
+const wxString gcstr::microsat          =   "Microsat";
+const wxString gcstr::migrate           =   "Migrate";
+const wxString gcstr::migrationMatrix   =   wxTRANSLATE("Migration Matrix");
+const wxString gcstr::migConstraint     =   wxTRANSLATE("Constraint: %s");
+const wxString gcstr::migMethod         =   wxTRANSLATE("Method: %s");
+const wxString gcstr::migProfile        =   wxTRANSLATE("Profile: %s");
+const wxString gcstr::migRate           =   wxTRANSLATE("Migration Rate: %s");
+const wxString gcstr::migLabelConstraint =  wxTRANSLATE("Constraint");
+const wxString gcstr::migLabelMethod    =  wxTRANSLATE("Method");
+const wxString gcstr::migLabelProfile   =  wxTRANSLATE("Profile");
+const wxString gcstr::migLabelRate      =  wxTRANSLATE("Migration Rate");
+const wxString gcstr::moot              =   wxTRANSLATE("Moot");
+const wxString gcstr::moveLocus         =   wxTRANSLATE("Moving segment %s out of region %s -- segments with unlinked data do not belong in multi-segment regions.");
+const wxString gcstr::multiPhaseSample  =   wxTRANSLATE("induced by microsat/kallele file");
+
+//____________________________________________________________________________________
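A minimal sketch (not part of the upstream sources) of the pattern these constants are built for: wxTRANSLATE() only marks the literals for extraction, so call sites are expected to resolve the translation with wxGetTranslation() and then fill the printf-style placeholders with wxString::Format(), as the converter itself does elsewhere. The helper name ReportDuplicateFile is invented here for illustration.

#include "gc_strings.h"
#include "wx/intl.h"

// Build the user-visible message for a file that was added a second time.
wxString ReportDuplicateFile(const wxString & fileName)
{
    // wxGetTranslation() performs the lookup deferred by wxTRANSLATE().
    return wxString::Format(wxGetTranslation(gcstr::fileAlreadyAdded),
                            fileName.c_str());
}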
diff --git a/src/convStrings/gc_strings_str_n_z.cpp b/src/convStrings/gc_strings_str_n_z.cpp
new file mode 100644
index 0000000..3e8f9ff
--- /dev/null
+++ b/src/convStrings/gc_strings_str_n_z.cpp
@@ -0,0 +1,210 @@
+// $Id: gc_strings_str_n_z.cpp,v 1.7 2012/02/15 18:13:41 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings.h"
+#include "wx/intl.h"
+
+const wxString gcstr::nameCandidate     =   "%s_%d";
+const wxString gcstr::nearRow           =   wxTRANSLATE("Near row %d: %s");
+const wxString gcstr::newName           =   wxTRANSLATE("New name");
+const wxString gcstr::no                =   "no";
+const wxString gcstr::noChoice          =   wxTRANSLATE("No other objects to select");
+const wxString gcstr::noChoiceLocus     =   wxTRANSLATE("No other segments to select");
+const wxString gcstr::noChoicePopulation=   wxTRANSLATE("No other populations to select");
+const wxString gcstr::noChoiceRegion    =   wxTRANSLATE("No other regions to select");
+const wxString gcstr::noWarningsFound   =   wxTRANSLATE("No warning messages were found. You should be able to export a Lamarc file now.");
+const wxString gcstr::nuc               =   "Nucleotide";
+const wxString gcstr::object            =   "object";
+const wxString gcstr::off               =   wxTRANSLATE("Off");
+const wxString gcstr::on                =   wxTRANSLATE("On");
+const wxString gcstr::panel             =   "panel";
+const wxString gcstr::panelLabelName    =   wxTRANSLATE("name: %s");
+const wxString gcstr::panelMemberCount  =   wxTRANSLATE("Number of Members");
+const wxString gcstr::panelRename       =   wxTRANSLATE("Rename Panel");
+const wxString gcstr::panelToggle       =   wxTRANSLATE("Use Panels");
+const wxString gcstr::parseAbort        =   wxTRANSLATE("Aborted    parse of %s(%s:%s,%s):%s");
+const wxString gcstr::parseDataType     =   wxTRANSLATE("data type:%s");
+const wxString gcstr::parseFormat       =   wxTRANSLATE("format:%s");
+const wxString gcstr::parseGood         =   wxTRANSLATE("Successful parse of %s(%s:%s,%s)");
+const wxString gcstr::parseInfo         =   wxTRANSLATE("Parsing Information from File:");
+const wxString gcstr::parseInfoNone     =   wxTRANSLATE("Unable to parse file.\n\nPlease check you have a file in correct Phylip or Migrate format");
+const wxString gcstr::parent            =   "parent";
+const wxString gcstr::parentLabelName   =   wxTRANSLATE("name: %s");
+const wxString gcstr::parentRename      =   wxTRANSLATE("Rename Parent");
+const wxString gcstr::parseInterleaving =   wxTRANSLATE("interleaving:%s");
+const wxString gcstr::parseSettings     =   wxTRANSLATE("%s;%s;%s");
+const wxString gcstr::parseSettingsForFile= wxTRANSLATE("%s parsed with %s");
+const wxString gcstr::parseWarning      =   wxTRANSLATE("Conflict while parsing file");
+const wxString gcstr::phaseFile         =   wxTRANSLATE("phase grouping set with phase resolution file");
+const wxString gcstr::phenotype         =   wxTRANSLATE("phenotype");
+const wxString gcstr::phylip            =   "Phylip";
+const wxString gcstr::phylipNoKalleleMsat       =   wxTRANSLATE("Phylip format not compatible with Kallele or Microsat data (file \"%s\")");
+const wxString gcstr::plainLong         =   "%ld";
+const wxString gcstr::ploidy            =   "%ld-ploid";
+const wxString gcstr::ploidy_1          =   wxTRANSLATE("haploid");
+const wxString gcstr::ploidy_2          =   wxTRANSLATE("diploid");
+const wxString gcstr::ploidy_3          =   wxTRANSLATE("triploid");
+const wxString gcstr::ploidy_4          =   wxTRANSLATE("tetraploid");
+const wxString gcstr::popEditButton     =   wxTRANSLATE("Edit");
+const wxString gcstr::population        =   wxTRANSLATE("population");
+const wxString gcstr::populationExists  =   wxTRANSLATE("Population \"%s\" already exists.");
+const wxString gcstr::populationCreate  =   wxTRANSLATE("Create new Population");
+const wxString gcstr::popButtonAdd    =   wxTRANSLATE("Add");
+const wxString gcstr::popButtonMergeSelected    =   wxTRANSLATE("Merge Selected");
+const wxString gcstr::popButtonRemoveSelected    =   wxTRANSLATE("Remove Selected");
+const wxString gcstr::popCreateTitle    =   wxTRANSLATE("Create a New Population");
+const wxString gcstr::popEnterNewName   =   wxTRANSLATE("New name:");
+const wxString gcstr::popLabelName      =   wxTRANSLATE("name: %s");
+const wxString gcstr::popTabTitle       =   wxTRANSLATE("Properties of %d Populations");
+const wxString gcstr::populationNameFromFile    =   wxTRANSLATE("pop %d of %s");
+const wxString gcstr::populationNewName =   wxTRANSLATE("Name for New Population");
+const wxString gcstr::populationRename          =   wxTRANSLATE("Rename Population");
+const wxString gcstr::populationRenameChoice    =   wxTRANSLATE("Population to Rename");
+const wxString gcstr::populationSelect  =   wxTRANSLATE("Select a Population");
+const wxString gcstr::populationUse     =   wxTRANSLATE("Use this Population");
+const wxString gcstr::populationForAll  =   wxTRANSLATE("Set All Units to One Population");
+const wxString gcstr::questionHeader    =   wxTRANSLATE("Warning: possible problem detected");
+const wxString gcstr::region            =   wxTRANSLATE("region");
+const wxString gcstr::regionChoice      =   wxTRANSLATE("Assign to Region:");
+const wxString gcstr::regionCreate      =   wxTRANSLATE("Create new Region");
+const wxString gcstr::regionEditString  =   wxTRANSLATE("Edit Genomic Regions");
+const wxString gcstr::regionEffPopSize  =   wxTRANSLATE("Effective Population Size:");
+const wxString gcstr::regionEnterNewName=   wxTRANSLATE("Enter the name of the new region");
+const wxString gcstr::regionExists      =   wxTRANSLATE("Region \"%s\" already exists.");
+const wxString gcstr::regionForAll      =   wxTRANSLATE("Set All Units to One Genomic Region");
+const wxString gcstr::regionLabelDataType   =   wxTRANSLATE("Data Type");
+const wxString gcstr::regionLabelEffPopSize =   wxTRANSLATE("effective population size:");
+const wxString gcstr::regionLabelLength     =   wxTRANSLATE("Length");
+const wxString gcstr::regionLabelMapFile    =   wxTRANSLATE("Map Position File (optional)");
+const wxString gcstr::regionLabelMapPosition=   wxTRANSLATE("Map Position");
+const wxString gcstr::regionLabelName       =   wxTRANSLATE("name: %s");
+const wxString gcstr::regionLabelOffset     =   wxTRANSLATE("First Position Sequenced");
+const wxString gcstr::regionLabelSamples    =   wxTRANSLATE("samples per individual:");
+const wxString gcstr::regionLabelSites      =   wxTRANSLATE("Sites");
+const wxString gcstr::regionLengthIllegal   =   wxTRANSLATE("Illegal value \"%s\" for length of region \"%s\". Ignoring.");
+const wxString gcstr::regionLengthVsMarkers =   wxTRANSLATE("Region length (%ld) must be > number of markers (%ld). Ignoring.");
+const wxString gcstr::regionNameFromFile    =   wxTRANSLATE("from %s");
+const wxString gcstr::regionNameUnlinked    =   wxTRANSLATE("%s_%08ld");
+const wxString gcstr::regionNewName     =   wxTRANSLATE("Name for New Region");
+const wxString gcstr::regionOffsetIllegal   =   wxTRANSLATE("Illegal value \"%s\" for offset of region \"%s\". Ignoring.");
+const wxString gcstr::regionPositionInfo    =   wxTRANSLATE("Position info for segments:");
+const wxString gcstr::regionRename      =   wxTRANSLATE("Rename Region");
+const wxString gcstr::regionRenameChoice=   wxTRANSLATE("Region to Rename");
+const wxString gcstr::regionSelect      =   wxTRANSLATE("Select a Region");
+const wxString gcstr::regionUse         =   wxTRANSLATE("Use this Region");
+const wxString gcstr::removeFiles       =   wxTRANSLATE("Remove Files");
+const wxString gcstr::removeGroupsTitle        =   wxTRANSLATE("Remove Regions");
+const wxString gcstr::removeGroupsInstructions =   wxTRANSLATE("Choose one or more");
+const wxString gcstr::removeLociTitle        =   wxTRANSLATE("Remove Segments");
+const wxString gcstr::removeLociInstructions =   wxTRANSLATE("Choose one or more");
+const wxString gcstr::removePopsTitle        =   wxTRANSLATE("Remove Populations");
+const wxString gcstr::removePopsInstructions =   wxTRANSLATE("Choose one or more");
+const wxString gcstr::renameLinkGTitle  =   wxTRANSLATE("Rename a Region");
+const wxString gcstr::renameLocusTitle  =   wxTRANSLATE("Rename Segment");
+const wxString gcstr::renamePopTitle    =   wxTRANSLATE("Rename Population");
+const wxString gcstr::renamePopTitleFor =   wxTRANSLATE("Rename Population %s");
+const wxString gcstr::saveFileInstructionsForMac    =   wxTRANSLATE(" (button to right of file name shows/hides directory information)");
+const wxString gcstr::selectAll         =   wxTRANSLATE("Select All");
+const wxString gcstr::sequential        =   wxTRANSLATE("Sequential");
+const wxString gcstr::setDataTypesAll   =   wxTRANSLATE("Set Data Type for all Files");
+const wxString gcstr::setFormats        =   wxTRANSLATE("Set Formats for Files");
+const wxString gcstr::setInterleaving   =   wxTRANSLATE("Set Interleaving for Files");
+const wxString gcstr::structureDump     =   wxTRANSLATE("%sStructures dump:");
+const wxString gcstr::trait             =   wxTRANSLATE("trait");
+const wxString gcstr::traitEnterNewName=   wxTRANSLATE("Enter the name of the new trait class");
+const wxString gcstr::trueVal           =   wxTRANSLATE("true");
+const wxString gcstr::typeClashDialog   =   wxTRANSLATE("File \"%s\" specifies data type \"%s\", but you have specified \"%s\".\n\nPlease specify which one you want.");
+const wxString gcstr::unselectAll       =   wxTRANSLATE("Unselect All");
+const wxString gcstr::unsetValueLocations       =   wxTRANSLATE("<enter an ordered list of integers>");
+const wxString gcstr::unsetValueLocusLength     =   wxTRANSLATE("<enter a positive integer>");
+const wxString gcstr::unsetValueLocusPosition   =   wxTRANSLATE("<enter an integer>");
+const wxString gcstr::unsetValueOffset          =   wxTRANSLATE("<enter an integer>");
+const wxString gcstr::unsetValueRegionEffectivePopulationSize   =   wxTRANSLATE("<enter a positive number>");
+const wxString gcstr::unsetValueRegionSamples   =   wxTRANSLATE("<enter a positive integer>");
+const wxString gcstr::snp                       =   "SNP";
+const wxString gcstr::warning                   =   wxTRANSLATE("Warning");
+const wxString gcstr::warningMissingPopRegPair  =   wxTRANSLATE("Missing data sample covering (%s,%s) pair");
+const wxString gcstr::warningNeedFile           =   wxTRANSLATE("Add one or more files");
+const wxString gcstr::warningNeedFileDataType   =   wxTRANSLATE("File \"%s\" needs its data type set");
+const wxString gcstr::warningNeedFileDataTypes  =   wxTRANSLATE("At least one file needs its data type set");
+const wxString gcstr::warningNeedFileFormat     =   wxTRANSLATE("File \"%s\" needs its file format set");
+const wxString gcstr::warningNeedFileFormats    =   wxTRANSLATE("At least one file needs its file format set");
+const wxString gcstr::warningNeedFilesParsed    =   wxTRANSLATE("At least one file has not been parsed.");
+const wxString gcstr::warningStringsHeader      =   wxTRANSLATE("You will need to fix the following things before you can create a Lamarc output file:");
+const wxString gcstr::warningUnsetLinkageGroup  =   wxTRANSLATE("At least one data unit has no region assigned");
+const wxString gcstr::warningUnsetLocus         =   wxTRANSLATE("At least one data unit has no segment assigned");
+const wxString gcstr::warningUnsetPopulation    =   wxTRANSLATE("At least one data unit has no population assigned");
+const wxString gcstr::warningUnsetRegion        =   wxTRANSLATE("At least one data unit has no region assigned");
+const wxString gcstr::unknown                   =      "???";
+const wxString gcstr::unrecognizedFileFormat    =   wxTRANSLATE("Did not recognize file format \"%s\". Setting to \"none\"");
+const wxString gcstr::usageHeader               =   wxTRANSLATE("**********************************************************************\nInput File Conversion Program for LAMARC Version %s\n**********************************************************************");
+const wxString gcstr::userTypeOverride          =   wxTRANSLATE("Overriding type %s in file %s to user-specified choice of %s");
+const wxString gcstr::xmlFiles                  =   "XML Files (*.xml)|*.xml|All Files (*)|*";
+const wxString gcstr::yes                       =   "yes";
+
+const wxString gcstr::cmdBatch                   =   "batchonly";
+const wxString gcstr::cmdBatchChar               =   "b";
+const wxString gcstr::cmdBatchDescription        =   wxTRANSLATE("run in batch mode and exit immediately.");
+const wxString gcstr::cmdCommand                 =   "commandfile";
+const wxString gcstr::cmdCommandChar             =   "c";
+const wxString gcstr::cmdCommandDescription      =   wxTRANSLATE("command file for batch run; see documentation");
+const wxString gcstr::cmdDump                    =   "dump";
+const wxString gcstr::cmdDumpChar                =   "d";
+const wxString gcstr::cmdDumpDescription         =   wxTRANSLATE("do debug dump just before exiting");
+const wxString gcstr::cmdFileFormat              =   "format";
+const wxString gcstr::cmdFileFormatChar          =   "f";
+const wxString gcstr::cmdFileFormatDescription   =   wxTRANSLATE("input file format (MIGRATE or PHYLIP)");
+const wxString gcstr::cmdInput                   =   "inputfile";
+const wxString gcstr::cmdInputChar               =   "i";
+const wxString gcstr::cmdInputDescription        =   wxTRANSLATE("input file");
+const wxString gcstr::cmdInterleaved             =   "interleaved";
+const wxString gcstr::cmdInterleavedChar         =   "n";
+const wxString gcstr::cmdInterleavedDescription  =   wxTRANSLATE("input file is in interleaved format");
+const wxString gcstr::cmdMapFile                 =   "mapfile";
+const wxString gcstr::cmdMapFileChar             =   "m";
+const wxString gcstr::cmdMapFileDescription      =   wxTRANSLATE("map file");
+const wxString gcstr::cmdOutput                  =   "outputfile";
+const wxString gcstr::cmdOutputChar              =   "o";
+const wxString gcstr::cmdOutputDescription       =   wxTRANSLATE("output file; default is \"%s\"");
+const wxString gcstr::cmdWriteBatch              =   "writebatch";
+const wxString gcstr::cmdWriteBatchChar          =   "w";
+const wxString gcstr::cmdWriteBatchDescription   =   wxTRANSLATE("write batch file for data state at end of run");
+
+const wxString gcverbose::addedFile             = wxTRANSLATE("Added file \"%s\"");
+const wxString gcverbose::addedLocus            = wxTRANSLATE("Added segment \"%s\"");
+const wxString gcverbose::addedPopulation       = wxTRANSLATE("Added population \"%s\"");
+const wxString gcverbose::addedRegion           = wxTRANSLATE("Added region \"%s\"");
+const wxString gcverbose::addedUnit             = wxTRANSLATE("Added unit \"%s\"");
+const wxString gcverbose::exportSuccess         = wxTRANSLATE("Successful export to file \"%s\"");
+const wxString gcverbose::exportTry             = wxTRANSLATE("Attempting export to file \"%s\"");
+const wxString gcverbose::firstPositionNotLong  = wxTRANSLATE("Ignoring first scanned position of \"%s\" for segment \"%s\". It should have been an integer.");
+const wxString gcverbose::locationsNotIntegers  = wxTRANSLATE("Ignoring locations of \"%s\" for segment \"%s\". It should have been an ordered list of integers.");
+const wxString gcverbose::locusLengthNotLong    = wxTRANSLATE("Ignoring length of \"%s\" for segment \"%s\". It should have been a positive integer.");
+const wxString gcverbose::locusPositionNotLong  = wxTRANSLATE("Ignoring starting position of \"%s\" for segment \"%s\".  It should have been an integer.");
+const wxString gcverbose::noSetDataType         = wxTRANSLATE("Failed to set data type of file \"%s\" to \"%s\"");
+const wxString gcverbose::noSetFileFormat       = wxTRANSLATE("Failed to set format of file \"%s\" to \"%s\"");
+const wxString gcverbose::noSetIsInterleaved    = wxTRANSLATE("Failed to set interleaving of file \"%s\" to \"%s\"");
+const wxString gcverbose::parseAttemptExiting   = wxTRANSLATE("Exiting call to parse file \"%s\"");
+const wxString gcverbose::parseAttemptFailed    = wxTRANSLATE("Failed to parse file \"%s\"");
+const wxString gcverbose::parseAttemptPossible  = wxTRANSLATE("Entering call to parse file \"%s\"");
+const wxString gcverbose::parseAttemptSettings  = wxTRANSLATE("Setting values following parse file \"%s\"");
+const wxString gcverbose::parseAttemptStarted   = wxTRANSLATE("Starting to parse file \"%s\"");
+const wxString gcverbose::parseAttemptSuccessful= wxTRANSLATE("Succeeded parsing file \"%s\"");
+const wxString gcverbose::removedFile           = wxTRANSLATE("Removed file \"%s\"");
+const wxString gcverbose::removedLocus          = wxTRANSLATE("Removed segment \"%s\"");
+const wxString gcverbose::removedPopulation     = wxTRANSLATE("Removed population \"%s\"");
+const wxString gcverbose::removedRegion         = wxTRANSLATE("Removed region \"%s\"");
+const wxString gcverbose::removedUnit           = wxTRANSLATE("Removed unit \"%s\"");
+const wxString gcverbose::setDataType           = wxTRANSLATE("Set data type of file \"%s\" to \"%s\"");
+const wxString gcverbose::setDataTypeAndFileFormat  = wxTRANSLATE("Set data type of file \"%s\" to \"%s\" and therefore file format to \"%s\"");
+const wxString gcverbose::setFileFormat         = wxTRANSLATE("Set format of file \"%s\" to \"%s\"");
+const wxString gcverbose::setIsInterleaved      = wxTRANSLATE("Set interleaving of file \"%s\" to \"%s\"");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_structures.cpp b/src/convStrings/gc_strings_structures.cpp
new file mode 100644
index 0000000..7f3e3e8
--- /dev/null
+++ b/src/convStrings/gc_strings_structures.cpp
@@ -0,0 +1,56 @@
+// $Id: gc_strings_structures.cpp,v 1.14 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_structures.h"
+#include "wx/intl.h"
+
+const wxString gcstr_structures::objDict            = wxTRANSLATE("population, segment, or region");
+const wxString gcstr_structures::traitDict          = wxTRANSLATE("trait name, allele name, or phenotype");
+
+const wxString gcerr_structures::duplicateFileBaseName = wxTRANSLATE("Attempt to add a second file with base name \"%s\". Ignoring.");
+const wxString gcerr_structures::duplicateFileName  = wxTRANSLATE("Attempt to add file \"%s\" a second time. Ignoring.");
+const wxString gcerr_structures::duplicateName      =   wxTRANSLATE("Re-use of name \"%s\" for %s.");
+const wxString gcerr_structures::mismatchAlleleTrait=   wxTRANSLATE("Allele \"%s\" of trait \"%s\" is illegal. Allele already assigned to trait \"%s\".");
+const wxString gcerr_structures::migrationNotDefined=   wxTRANSLATE("Migration from \"%s\" to \"%s\" does not exist");
+const wxString gcerr_structures::mismatchLocusRegion=   wxTRANSLATE("Segment \"%s\" of region \"%s\" is illegal. Segment already assigned to region \"%s\".");
+const wxString gcerr_structures::missingFile        =   wxTRANSLATE("File \"%s\" missing or unreadable. Unable to complete your operation.");
+const wxString gcerr_structures::missingMigration   =   wxTRANSLATE("Migration \"%s\" not defined");
+const wxString gcerr_structures::missingMigrationId =   wxTRANSLATE("Migration Id \"%s\" not defined");
+const wxString gcerr_structures::missingName        =   wxTRANSLATE("Did not find name \"%s\" for population, group, segment, or trait");
+const wxString gcerr_structures::missingPanel       =   wxTRANSLATE("Panel \"%s\" not defined");
+const wxString gcerr_structures::missingPanelId     =   wxTRANSLATE("Panel Id \"%s\" not defined");
+const wxString gcerr_structures::missingParent      =   wxTRANSLATE("Parent \"%s\" not defined");
+const wxString gcerr_structures::missingParentId    =   wxTRANSLATE("Parent Id \"%s\" not defined");
+const wxString gcerr_structures::missingPopulation  =   wxTRANSLATE("Population \"%s\" not defined");
+const wxString gcerr_structures::missingRegion      =   wxTRANSLATE("Region \"%s\" not defined");
+const wxString gcerr_structures::missingTrait       =   wxTRANSLATE("Trait \"%s\" not defined");
+const wxString gcerr_structures::panelBlessedError  =   wxTRANSLATE("Panel for region \"%s\" and population \"%s\" has a >0 value but is not blessed");
+const wxString gcerr_structures::panelNotDefined    =   wxTRANSLATE("Panel for region \"%s\" and population \"%s\" does not exist");
+const wxString gcerr_structures::panelSizeClash     =   wxTRANSLATE("Panels for population \"%s\" in regions \"%s\" and \"%s\" have different sizes");
+
+#if 0
+const wxString gcerr_structures::locusMergeFailure  =   wxTRANSLATE("Something unexpected happened while trying to merge segments.\n\nIt is possible your operation did not complete.");
+const wxString gcerr_structures::missingLocus       =   wxTRANSLATE("Segment \"%s\" not defined");
+const wxString gcerr_structures::plocusHapMismatch  =   wxTRANSLATE("Unable to assign parsed data from %s to segment \"%s\" because parsed data has ploidy of %d and segment has %d.");
+const wxString gcerr_structures::plocusLengthMismatch=  wxTRANSLATE("Unable to assign parsed data from %s to segment \"%s\" because parsed data has length %d and segment has length %d.");
+const wxString gcerr_structures::plocusTypeMismatch =   wxTRANSLATE("Unable to assign parsed data from %s to segment \"%s\" because they have incompatible data types.\n\nParsed data is type %s and segment has type %s.");
+#endif
+
+const wxString gcerr_structures::nameRepeatAllele=   wxTRANSLATE("Allele name \"%s\" occurs previously at line %d.");
+const wxString gcerr_structures::nameRepeatLocus=   wxTRANSLATE("Segment name \"%s\" occurs previously at line %d.");
+const wxString gcerr_structures::nameRepeatPop  =   wxTRANSLATE("Population name \"%s\" occurs previously at line %d.");
+const wxString gcerr_structures::nameRepeatRegion=  wxTRANSLATE("Region name \"%s\" occurs previously at line %d.");
+const wxString gcerr_structures::nameRepeatTrait=  wxTRANSLATE("Trait name \"%s\" occurs previously at line %d.");
+const wxString gcerr_structures::regionEffPopSizeClash  =   wxTRANSLATE("Unable to merge regions because relative effective population sizes %f and %f are different");
+
+// const wxString gcerr_structures::regionSamplesPerClash  =   wxTRANSLATE("Unable to merge regions because samples per individual of %ld and %ld are different");
+const wxString gcerr_structures::unparsableFile     =   wxTRANSLATE("File \"%s\" unparsable. Unable to complete your operation. \n\nTry running program with \"--verbose\" option for more information.");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_structures.h b/src/convStrings/gc_strings_structures.h
new file mode 100644
index 0000000..d5aefc8
--- /dev/null
+++ b/src/convStrings/gc_strings_structures.h
@@ -0,0 +1,69 @@
+// $Id: gc_strings_structures.h,v 1.11 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_STRUCTURES_H
+#define GC_STRINGS_STRUCTURES_H
+
+#include "wx/string.h"
+
+class gcstr_structures
+{
+  public:
+    static const wxString objDict;
+    static const wxString traitDict;
+};
+
+class gcerr_structures
+{
+  public:
+
+    static const wxString duplicateFileBaseName;
+    static const wxString duplicateFileName;
+    static const wxString duplicateName;
+
+#if 0
+    static const wxString locusMergeFailure;
+    static const wxString locusHapMismatch;
+    static const wxString locusLengthMismatch;
+    static const wxString locusTypeMismatch;
+    static const wxString missingLocus;
+#endif
+
+    static const wxString migrationNotDefined;
+    static const wxString mismatchAlleleTrait;
+    static const wxString mismatchLocusRegion;
+
+    static const wxString missingFile;
+    static const wxString missingMigration;
+    static const wxString missingMigrationId;
+    static const wxString missingName;
+    static const wxString missingPopulation;
+    static const wxString missingPanel;
+    static const wxString missingPanelId;
+    static const wxString missingParent;
+    static const wxString missingParentId;
+    static const wxString missingRegion;
+    static const wxString missingTrait;
+    static const wxString nameRepeatAllele;
+    static const wxString nameRepeatLocus;
+    static const wxString nameRepeatPop;
+    static const wxString nameRepeatRegion;
+    static const wxString nameRepeatTrait;
+    static const wxString panelBlessedError;
+    static const wxString panelNotDefined;
+    static const wxString panelSizeClash;
+    static const wxString regionEffPopSizeClash;
+    // static const wxString regionSamplesPerClash;
+    static const wxString unparsableFile;
+};
+
+#endif  // GC_STRINGS_STRUCTURES_H
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_trait.cpp b/src/convStrings/gc_strings_trait.cpp
new file mode 100644
index 0000000..06eccd1
--- /dev/null
+++ b/src/convStrings/gc_strings_trait.cpp
@@ -0,0 +1,27 @@
+// $Id: gc_strings_trait.cpp,v 1.6 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings_trait.h"
+#include "wx/intl.h"
+
+const wxString gcerr_trait::alleleMissing   =   wxTRANSLATE("cannot find trait allele with name %s.");
+const wxString gcerr_trait::alleleNameReuse =   wxTRANSLATE("re-use of allele name %s not allowed");
+const wxString gcerr_trait::alleleNameSpaces=   wxTRANSLATE("Allele name \"%s\" not allowed because it contains one or more spaces");
+const wxString gcerr_trait::alleleTraitMismatch= wxTRANSLATE("Allele %s in phenotype %s does not belong to trait %s.");
+const wxString gcerr_trait::hapProbabilityNegative = wxTRANSLATE("Relative penetrance of %f illegal -- must be a non-negative number");
+const wxString gcerr_trait::phenoTraitMismatch= wxTRANSLATE("Phenotype %s trait %s differs from enclosing trait %s.");
+const wxString gcerr_trait::phenotypeMissing   =   wxTRANSLATE("cannot find phenotype with name %s.");
+const wxString gcerr_trait::phenotypeNameReuse =   wxTRANSLATE("re-use of phenotype name %s for trait %s not allowed");
+
+const wxString gcstr_trait::alleleListMember=              ("%s ");
+const wxString gcstr_trait::generatedName   =              ("trait_%ld");
+const wxString gcstr_trait::internalName    =              ("internalTrait_%ld");
+
+//____________________________________________________________________________________
diff --git a/src/convStrings/gc_strings_trait.h b/src/convStrings/gc_strings_trait.h
new file mode 100644
index 0000000..a67e1ea
--- /dev/null
+++ b/src/convStrings/gc_strings_trait.h
@@ -0,0 +1,39 @@
+// $Id: gc_strings_trait.h,v 1.7 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_STRINGS_TRAIT_H
+#define GC_STRINGS_TRAIT_H
+
+#include "wx/string.h"
+
+class gcerr_trait
+{
+  public:
+    static const wxString alleleMissing;
+    static const wxString alleleNameReuse;
+    static const wxString alleleNameSpaces;
+    static const wxString alleleTraitMismatch;
+    static const wxString hapProbabilityNegative;
+    static const wxString phenoTraitMismatch;
+    static const wxString phenotypeMissing;
+    static const wxString phenotypeNameReuse;
+};
+
+class gcstr_trait
+{
+  public:
+    static const wxString alleleListMember;
+    static const wxString generatedName;
+    static const wxString internalName;
+};
+
+#endif  // GC_STRINGS_TRAIT_H
+
+//____________________________________________________________________________________
diff --git a/src/convUtil/gc_file_util.cpp b/src/convUtil/gc_file_util.cpp
new file mode 100644
index 0000000..3c8a6b0
--- /dev/null
+++ b/src/convUtil/gc_file_util.cpp
@@ -0,0 +1,111 @@
+// $Id: gc_file_util.cpp,v 1.6 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_errhandling.h"
+#include "gc_file_util.h"
+#include "gc_strings_io.h"
+#include "wx/log.h"
+#include "wx/txtstrm.h"
+#include "wx/wfstream.h"
+
+//------------------------------------------------------------------------------------
+
+gc_file_error::gc_file_error(const wxString & msg) throw()
+    : gc_ex(msg)
+{
+}
+
+gc_file_error::~gc_file_error() throw()
+{}
+
+//------------------------------------------------------------------------------------
+
+gc_eof::gc_eof() throw()
+    : gc_file_error(gc_io::eof)
+{
+}
+
+gc_eof::~gc_eof() throw()
+{}
+
+//------------------------------------------------------------------------------------
+
+gc_file_missing_error::gc_file_missing_error(const wxString & fileName) throw()
+    : gc_file_error(wxString::Format(gc_io::fileMissing,fileName.c_str()))
+{
+}
+
+gc_file_missing_error::~gc_file_missing_error() throw()
+{}
+
+//------------------------------------------------------------------------------------
+
+gc_file_read_error::gc_file_read_error() throw()
+    : gc_file_error(gc_io::fileReadError)
+{
+}
+
+gc_file_read_error::gc_file_read_error(const wxString & fileName) throw()
+    : gc_file_error(wxString::Format(gc_io::fileReadErrorWithName,fileName.c_str()))
+{
+}
+
+gc_file_read_error::~gc_file_read_error() throw()
+{}
+
+//------------------------------------------------------------------------------------
+
+wxString ReadLineSafely(wxFileInputStream * fStream,
+                        wxTextInputStream * tStream)
+{
+    assert(fStream != NULL);
+    assert(tStream != NULL);
+    wxString line  = tStream->ReadLine();
+    wxStreamError lastError = fStream->GetLastError();
+    if(lastError == wxSTREAM_EOF)
+        // EWFIX.BUG.698 -- ok to get eof if this is end of file
+    {
+        if(!line.IsEmpty())
+        {
+            return line;
+        }
+        else
+        {
+            throw gc_eof();
+        }
+    }
+    if(lastError != wxSTREAM_NO_ERROR)
+    {
+        throw gc_file_read_error();
+    }
+    return line;
+}
+
+//------------------------------------------------------------------------------------
+
+wxString ReadWordSafely(wxFileInputStream & fStream,
+                        wxTextInputStream & tStream)
+{
+    wxString line  = tStream.ReadWord();
+    wxStreamError lastError = fStream.GetLastError();
+    if(lastError == wxSTREAM_EOF)
+    {
+        throw gc_eof();
+    }
+    if(lastError != wxSTREAM_NO_ERROR)
+    {
+        throw gc_file_read_error();
+    }
+    return line;
+}
+
+//____________________________________________________________________________________
diff --git a/src/convUtil/gc_file_util.h b/src/convUtil/gc_file_util.h
new file mode 100644
index 0000000..851dfbf
--- /dev/null
+++ b/src/convUtil/gc_file_util.h
@@ -0,0 +1,57 @@
+// $Id: gc_file_util.h,v 1.6 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_FILE_UTIL_H
+#define GC_FILE_UTIL_H
+
+#include "gc_errhandling.h"
+#include "wx/string.h"
+
+class wxFileInputStream;
+class wxTextInputStream;
+
+class gc_file_error : public gc_ex
+{
+  public:
+    gc_file_error(const wxString &) throw();
+    virtual ~gc_file_error() throw();
+};
+
+class gc_eof : public gc_file_error
+{
+  public:
+    gc_eof() throw();
+    virtual ~gc_eof() throw();
+};
+
+class gc_file_missing_error : public gc_file_error
+{
+  public:
+    gc_file_missing_error(const wxString & fileName) throw();
+    virtual ~gc_file_missing_error() throw();
+};
+
+class gc_file_read_error : public gc_file_error
+{
+  public:
+    gc_file_read_error() throw();
+    gc_file_read_error(const wxString & fileName) throw();
+    virtual ~gc_file_read_error() throw();
+};
+
+wxString
+ReadLineSafely(wxFileInputStream * fStream, wxTextInputStream * tStream);
+
+wxString
+ReadWordSafely(wxFileInputStream & fStream, wxTextInputStream & tStream);
+
+#endif  // GC_FILE_UTIL_H
+
+//____________________________________________________________________________________
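A minimal usage sketch (not part of the upstream sources) showing how a caller might pair the wx stream classes with ReadLineSafely(): the helper throws gc_eof at end of file and gc_file_read_error on other stream failures, so callers can funnel I/O problems into the converter's own exception types. The function name FirstLineOf is invented for illustration.

#include "gc_file_util.h"
#include "wx/txtstrm.h"
#include "wx/wfstream.h"

// Return the first line of a file, using the converter's error classes.
wxString FirstLineOf(const wxString & fileName)
{
    wxFileInputStream fStream(fileName);
    if (!fStream.IsOk())
    {
        // Mirror the converter's own error type for a missing file.
        throw gc_file_missing_error(fileName);
    }
    wxTextInputStream tStream(fStream);
    // Throws gc_eof if the file is empty, gc_file_read_error on other errors.
    return ReadLineSafely(&fStream, &tStream);
}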
diff --git a/src/conversion/ConverterIf.cpp b/src/conversion/ConverterIf.cpp
new file mode 100644
index 0000000..5ba0b47
--- /dev/null
+++ b/src/conversion/ConverterIf.cpp
@@ -0,0 +1,61 @@
+// $Id: ConverterIf.cpp,v 1.17 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <iostream>
+#include <sstream>
+#include <ctype.h>
+#include <string>
+
+#include "Converter_ConverterIf.h"
+#include "Converter_DataSourceException.h"
+#include "constants.h"
+#include "stringx.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+const long DEFAULTLONG = 0;
+
+//----------------------------------------------------------------------
+
+ConverterIf::ConverterIf()
+    : ParserUtil()
+{
+}
+
+//----------------------------------------------------------------------
+
+ConverterIf::~ConverterIf()
+{
+}
+
+//----------------------------------------------------------------------
+
+long ConverterIf::FlagCheck(const string& origstring,
+                            const string& msg) const
+{
+    long number;
+
+    if (!FromString(origstring,number))
+    {
+        string emsg = "\nEncountered an illegal value for " + msg;
+        emsg += ", " + origstring + ".\n";
+        throw FileFormatError(emsg);
+    }
+
+    return number;
+
+} // ConverterIf::FlagCheck
+
+//____________________________________________________________________________________
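A minimal sketch (not part of the upstream sources) of how a converter built on ConverterIf might lean on FlagCheck() to validate numeric tokens read from an input file. The class ExampleConverter and its method are invented here, and the sketch assumes FlagCheck() is accessible to derived classes.

#include <string>
#include "Converter_ConverterIf.h"

class ExampleConverter : public ConverterIf
{
  public:
    // FlagCheck() returns the parsed long, or throws FileFormatError with a
    // message built around the "number of populations" label.
    long ReadPopulationCount(const std::string & token) const
    {
        return FlagCheck(token, "number of populations");
    }
};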
diff --git a/src/conversion/ConverterUI.cpp b/src/conversion/ConverterUI.cpp
new file mode 100644
index 0000000..92a2778
--- /dev/null
+++ b/src/conversion/ConverterUI.cpp
@@ -0,0 +1,831 @@
+// $Id: ConverterUI.cpp,v 1.80 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// This file contains the UI and the main() function for the converter program
+
+// extensive revisions to allow UI and filereading to interact.
+// Jon 2002/03/21 - 2002/?
+
+#include <cassert>
+#include <iostream>
+#include <fstream>
+#include <map>
+#include <algorithm>
+#include <iterator>   // for ostream_iterator
+
+#include "Converter_ConverterUI.h"
+#include "Converter_PhylipConverter.h"
+#include "Converter_MigrateConverter.h"
+#include "Converter_LamarcDS.h"
+#include "Converter_DataSourceException.h"
+#include "Converter_SpacingDS.h"
+#include "Converter_HapConverter.h"
+#include "Converter_UserFileUtil.h"
+#include "Converter_SpaceConverter.h"
+#include "random.h"
+#include "constants.h" // for #define MENU
+                       //     for datatype const strings (DNA, SNP, etc)
+#include "stringx.h"   // for FromString() and IsInteger()
+#include "Converter_types.h" // for map typedefs
+
+#include "nomenuglobals.h" // to handle nomenu case
+
+//------------------------------------------------------------------------------------
+
+const long DEFAULTLONG = 0;
+const long CONVUI_LINELENGTH = 70;
+const long CONVUI_ERRORINDENT = 6;
+
+//------------------------------------------------------------------------------------
+
+bool isInterleaved()
+{
+    string msg = "Is this file in the interleaved format?";
+
+    return GetYesOrNo(msg,false);
+
+}
+
+//------------------------------------------------------------------------------------
+
+string getFileName(const UserFileUtil& fileutil)
+{
+    string buffer;
+    long tries = 0;
+    while (tries < 3)
+    {
+        buffer.erase();
+        cout << "Enter the file name: ";
+        getline(cin, buffer);
+
+        // check to see if file exists
+        if (fileutil.IsFilePresent(buffer)) break;
+        else
+        {
+            cout << "\nCouldn't find file: " << buffer << endl;
+            ++tries;
+        }
+    }
+
+    return buffer;
+}
+
+//------------------------------------------------------------------------------------
+
+string getHapFileName(const string& regionname,
+                      const UserFileUtil& fileutil)
+{
+#ifndef JSIM
+    string fname;
+    string msg = "\nHave you prepared a file containing phase information";
+    msg += " for the region " + regionname + "?";
+
+    if (GetYesOrNo(msg,false))
+    {
+        fname = getFileName(fileutil);
+    }
+
+    return fname;
+#else
+    return convstr::HAPFILENAME;
+#endif
+}
+
+//------------------------------------------------------------------------------------
+
+string getFormat()
+{
+    bool success = false;
+    string buffer;
+
+    while (!success)
+    {
+        cout << "Please enter:" << endl <<  "(1) if this is a Phylip format file" << endl
+             << "(2) if this is a Migrate/Recombine format file" << endl;
+        getline(cin, buffer);
+
+        if ((buffer == "1"))
+        {
+            return "Phylip";
+        }
+        else if (buffer == "2")
+        {
+            return "Migrate";
+        }
+        else
+        {
+            cout << "Enter either '1' or '2'." << endl;
+        }
+    }
+
+    return buffer; // shouldn't be able to get here
+}
+
+//------------------------------------------------------------------------------------
+
+string getDataType()
+{
+#ifndef JSIM
+    bool success = false;
+    string buffer;
+    cout << "What is the datatype of your data set?" << endl;
+    while (!success)
+    {
+        cout << "Please enter:" << endl << "(1) if it is DNA" << endl;
+        cout << "(2) if it is SNP\n" << "(3) if it is MICROSAT" << endl ;
+        cout << "(4) if it is K-allele (including electrophoretic)" << endl;
+        getline(cin, buffer);
+        if ((buffer == "1"))
+        {
+            return lamarcstrings::DNA;
+        }
+        else if (buffer == "2")
+        {
+            return lamarcstrings::SNP;
+        }
+        else if (buffer == "3")
+        {
+            return lamarcstrings::MICROSAT;
+        }
+        else if (buffer == "4")
+        {
+            return lamarcstrings::KALLELE;
+        }
+        else
+        {
+            cout << "Enter a number between 1 and 4." << endl;
+        }
+    }
+
+#else
+    return convstr::GENETICDATATYPE;
+#endif
+
+    return string("Can't be here\n"); // shouldn't be able to get here
+
+}
+
+//------------------------------------------------------------------------------------
+
+long getLong(const string& name, bool mustbepositive)
+{
+    long value;
+    bool success = false;
+    while (!success)
+    {
+        string buffer;
+        getline(cin,buffer);
+        if (IsInteger(buffer))
+        {
+            success = true;
+            FromString(buffer,value);
+            if (mustbepositive && value < 0)
+            {
+                success = false;
+                cout << "The "  + name + " must be a positive integer";
+                cout << endl;
+            }
+        }
+        else cout << "The " + name + " must be an integer" << endl;
+    }
+
+    return value;
+}
+
+//------------------------------------------------------------------------------------
+
+long getRegionalMapInfo(const string& regionName, long& length,
+                        long& offset)
+{
+#ifndef JSIM
+    bool mustbepositive = true;
+    cout << "What is the total length of the region, " + regionName + "?";
+    cout << endl;
+    length = getLong(string("length"),mustbepositive);
+
+    mustbepositive = false;
+    cout << "What position number does the region start at?" << endl;
+    offset = getLong(string("distance"), mustbepositive);
+
+    // this question is incorrectly placed, it should be asking about
+    // loci, not regions.  In any case, it is only relevant once loci
+    // come into use.
+    // cout << "What is the map position of the beginning of the region?";
+    // cout << endl;
+
+    // return getLong(string("position"));
+    return 0L;
+
+#else
+    length = convstr::REGIONLENGTH;
+    offset = convstr::REGIONOFFSET;
+    return convstr::LOCUSMAPPOS;
+
+#endif
+}
+
+//------------------------------------------------------------------------------------
+
+string GetMapFileName()
+{
+#ifndef JSIM
+    string fname;
+
+    string msg = "\nHave you prepared a file mapping your genetic ";
+    msg += "markers to positions\non the sequence?";
+
+    if (GetYesOrNo(msg,false))
+    {
+        UserFileUtil fileutil;
+        fname = getFileName(fileutil);
+    }
+
+    return fname;
+#else
+    return convstr::MAPFILENAME;
+#endif
+
+} // GetMapFileName
+
+//------------------------------------------------------------------------------------
+
+void SetRegionLength(RegionMap::iterator region)
+{
+    long retries = 0;
+    while(retries < 3)
+    {
+        cout << "What is the total length (including ";
+        cout << "non-polymorphic positions) of the\nsequenced ";
+        cout << "region " + region->first + "?" << endl;
+        string buffer;
+        getline(cin,buffer);
+        long length;
+        FromString(buffer,length);
+        long nmarkers = region->second.getNmarkers();
+        if (length >= nmarkers)
+        {
+            SpacingDS spacing(length, nmarkers);
+            region->second.setSpacing(spacing);
+            break;
+        }
+        else
+        {
+            cout << "Total length of region must be at least as ";
+            cout << "large as the number of SNPs.\n\n";
+            ++retries;
+        }
+    }
+} // SetRegionLength
+
+//------------------------------------------------------------------------------------
+
+void SetMapInfo(LamarcDS& dataStore, const string& fname)
+{
+    if (fname.empty())
+    {
+        RegionMap::iterator region;
+        for(region = dataStore.getFirstRegion();
+            region != dataStore.getLastRegion(); // this is really foo.end()
+            ++region)
+        {
+            if (region->second.HasSNPs())
+                SetRegionLength(region);
+        }
+        return;
+    }
+
+    SpaceConverter converter(dataStore);
+
+    SpaceMap spaces = converter.ReadSpacingInfo(fname);
+
+    // for each region, interact with user to get rest of info, then build
+    // and add the appropriate SpacingDS.
+    RegionMap::iterator region;
+    for(region = dataStore.getFirstRegion();
+        region != dataStore.getLastRegion(); // this is really foo.end()
+        ++region)
+    {
+        // if spacing info was provided by the user
+        if (spaces.find(region->second.getName()) != spaces.end())
+        {
+            long length, offset, mapposition, retries = 0;
+            while(retries < 3)
+            {
+                mapposition = getRegionalMapInfo(region->first,length,offset);
+                try
+                {
+                    SpacingDS spacing(spaces[region->first], length, offset,
+                                      mapposition, region->second.getNmarkers());
+                    region->second.setSpacing(spacing);
+                    break;
+                }
+                catch (InconsistentDataError& e)
+                {
+                    cout << e.type() << ": " << e.what() << endl;
+                    ++retries;
+                }
+            }
+        }
+        else
+        {
+            // else if the region has SNPs then query/set the region's length
+            if (region->second.HasSNPs())
+                SetRegionLength(region);
+        }
+    }
+} // SetMapInfo
+
+//------------------------------------------------------------------------------------
+
+void SetHapInfo(LamarcDS& dataStore)
+{
+    RegionMap::iterator regionIt;
+    for (regionIt = dataStore.getFirstRegion();
+         regionIt != dataStore.getLastRegion(); ++regionIt)
+    {
+        long attempt, maxtries = 3;
+        for(attempt = 0; attempt < maxtries; ++attempt)
+        {
+            try
+            {
+                UserFileUtil fileutil;
+                string hapfilename =
+                    getHapFileName(regionIt->first,fileutil);
+
+                if (hapfilename.empty()) break;
+
+                Random randomgenerator; // uses system clock
+                HapConverter hapconverter(regionIt->second,randomgenerator);
+
+                vector<IndividualDS> individuals =
+                    hapconverter.ReadHapInfo(hapfilename);
+
+                hapconverter.ReplaceIndividualsWith(individuals);
+#ifndef JSIM
+                cout << "Set phase information for region ";
+                cout << regionIt->first << endl << endl;
+#endif
+                break;
+            }
+            catch(FileFormatError& e)
+            {
+                cout << e.type() << ": " << e.what() << endl;
+                if (attempt == maxtries-1)
+                {
+                    cout << "\n\nExceeded retry threshold\n";
+                    cout << "Attempt at conversion failed\n\n";
+                    exit(0);
+                }
+                else
+                    continue;
+            }
+        }
+    }
+} // SetHapInfo
+
+//------------------------------------------------------------------------------------
+
+bool AreMicrosRegions()
+{
+#ifndef JSIM
+    string query = "Should each microsat marker be treated as an ";
+    query += "independent region?";
+
+    return GetYesOrNo(query,true);
+#else
+    return convstr::MICROREGIONS;
+#endif
+
+} // AreMicrosRegions
+
+//------------------------------------------------------------------------------------
+
+bool GetYesOrNo(const string& query, bool defaultyes)
+{
+    cout << query;
+    if (defaultyes)
+        cout << "  (y)/n." << endl;
+    else
+        cout << "  y/(n)." << endl;
+
+    while(true)
+    {
+        string buffer;
+        getline(cin, buffer);
+
+        if (buffer == "" && defaultyes)
+            return true;
+        if (buffer == "" && !defaultyes)
+            return false;
+
+        if (ciStringEqual(buffer, string("y")) || ciStringEqual(buffer, string("yes")))
+        {
+            return true;
+        }
+
+        if (ciStringEqual(buffer, string("n")) ||
+            ciStringEqual(buffer, string("no")))
+        {
+            return false;
+        }
+
+        cout << "You entered: " << buffer << endl;
+        cout << "Please enter 'y' or 'n'." << endl;
+    }
+
+    assert(false);
+    return false;
+} // GetYesOrNo
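+
+// Hypothetical usage sketch, added for illustration only and not part of the
+// upstream file: GetYesOrNo() wraps a console prompt with a default answer,
+// so interactive yes/no questions reduce to a single call.  The function
+// name AskAboutRecombination is invented here.
+bool AskAboutRecombination()
+{
+    // An empty answer accepts the default, which is "no" here.
+    return GetYesOrNo(string("Do you want to model recombination?"), false);
+}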
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+void Unit::DisplayUnit() const
+{
+    if (region != "") cout << MakeCentered(region,16);
+    else
+    {
+        string unknownpop = string("Unknown") + ToString(regionno);
+        cout << MakeCentered(unknownpop,16);
+    }
+    if (population != "") cout << MakeCentered(population,16);
+    else cout << MakeCentered("Unknown",16);
+    cout << MakeCentered(ToString(tips),6) <<
+        MakeCentered(ToString(markers),10) <<
+        MakeCentered(datatype,10) <<
+        MakeCentered(filename,20) << endl;
+
+} // DisplayUnit
+
+//------------------------------------------------------------------------------------
+
+bool Unit::operator<(const Unit& other) const
+{
+    if (region < other.region) return true;
+    if (region > other.region) return false;
+
+    if (population < other.population) return true;
+    return false;  // identical Units compare false by flowthrough here
+
+} // operator<
+
+//------------------------------------------------------------------------------------
+
+bool Unit::IsIn(const string& regionname, const string& popname) const
+{
+    return (ciStringEqual(regionname, region) && ciStringEqual(popname, population));
+} // Unit::IsIn
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+set<string> PopRegionRelation::GetRegionNames() const
+{
+    set<string> regionnames;
+    vector<Unit>::const_iterator unit;
+    for(unit = units.begin(); unit != units.end(); ++unit)
+        regionnames.insert(unit->region);
+
+    return regionnames;
+
+} // PopRegionRelation::GetRegionNames()
+
+//------------------------------------------------------------------------------------
+
+set<string> PopRegionRelation::GetPopulationNames() const
+{
+    set<string> popnames;
+    vector<Unit>::const_iterator unit;
+    for(unit = units.begin(); unit != units.end(); ++unit)
+        popnames.insert(unit->population);
+
+    return popnames;
+
+} // PopRegionRelation::GetPopulationNames()
+
+//------------------------------------------------------------------------------------
+
+void PopRegionRelation::DisplayPopRegionRelation()
+{
+    unsigned long i;
+    cout << MakeCentered("Unit",4) << "  " <<
+        MakeCentered("Region",16)  <<
+        MakeCentered("Population",16) <<
+        MakeCentered("Tips",6) <<
+        MakeCentered("Markers",10) <<
+        MakeCentered("Datatype",10) <<
+        MakeCentered("Filename",20) << endl;
+    for (i = 0; i < units.size(); ++i)
+    {
+        cout << MakeJustified(indexToKey(i),4) << "  ";
+        units[i].DisplayUnit();
+    }
+    cout << endl;
+
+} // DisplayPopRegionRelation
+
+//------------------------------------------------------------------------------------
+
+void PopRegionRelation::ChangeRegions()
+{
+    cout << "***Grouping units into regions" << endl;
+    while (true)
+    {
+        while (true)
+        {
+            cout << "List one or more units that belong to the same region" << endl;
+            cout << "Return/Enter on an empty line to assign populations" << endl;
+            string regionlist;
+            getline(cin, regionlist);
+            if (regionlist == "") break;
+            // get region numbers out of regionlist
+            vector<long> regions;
+            FromString(regionlist, regions);
+            // make sure that the user typed legal values 1..number of regions
+            if(vec_greater(regions,static_cast<long>(units.size())) ||
+               (vec_leq(regions, 0L)))
+            {
+                cout << "Please enter a set of valid region numbers" << endl;
+                continue;
+            }
+            cout << "Name of this region?" << endl;
+            string region;
+            getline(cin, region);
+            unsigned long i;
+            for (i = 0; i < regions.size(); ++i)
+            {
+                if (regions[i] < 1 || regions[i] > static_cast<long>(units.size()))
+                    throw FileFormatError(string("Bad region!"));
+                else
+                {
+                    units[regions[i]-1].region = region;
+                }
+            }
+            DisplayPopRegionRelation();
+        }
+        if (RegionsValid()) return;
+        else
+        {
+            string msg("Not all regions are valid; try again.\n\n");
+            cout << MakeIndent(msg,CONVUI_ERRORINDENT) << endl;
+        }
+    }
+
+} // ChangeRegions
+
+//------------------------------------------------------------------------------------
+
+void PopRegionRelation::ChangePopulations()
+{
+    cout << "\n***Grouping units into populations" << endl;
+    while (true)
+    {
+        while (true)
+        {
+            cout << "List one or more units that belong to the same ";
+            cout << "population" << endl << "Return/Enter on an empty line ";
+            cout << "to finish all assignments" << endl;
+            string populationlist;
+            getline(cin, populationlist);
+            if (populationlist == "") break;
+            // get population numbers out of populationlist
+            vector<long> populations;
+            FromString(populationlist, populations);
+            // make sure that the user typed legal values 1..number of populations
+            if(vec_greater(populations,static_cast<long>(units.size())) ||
+               (vec_leq(populations, 0L)))
+            {
+                cout << "Please enter a set of valid population numbers" << endl;
+                continue;
+            }
+            cout << "Name of this population?" << endl;
+            string population;
+            getline(cin, population);
+            unsigned long i;
+            for (i = 0; i < populations.size(); ++i)
+            {
+                if (populations[i] < 1 || populations[i] > static_cast<long>(units.size()))
+                    throw FileFormatError(string("Bad population!"));
+                else
+                {
+                    units[populations[i]-1].population = population;
+                }
+            }
+            DisplayPopRegionRelation();
+        }
+        if (PopsValid()) return;
+        else
+        {
+            string msg("Not all populations are valid; try again.\n\n");
+            cout << MakeIndent(msg,CONVUI_ERRORINDENT) << endl;
+        }
+    }
+} // ChangePopulations
+
+//------------------------------------------------------------------------------------
+
+bool PopRegionRelation::RegionsValid() const
+{
+    unsigned long i;
+    MarkerMap markermap;
+    MarkerMap::iterator markit;
+    TypeMap typemap;
+    TypeMap::iterator typeit;
+
+    for (i = 0; i < units.size(); ++i)
+    {
+        const Unit& unit = units[i];
+        if (unit.region == "")
+        {
+            cout << "Empty region name" << endl;
+            return false;
+        }
+
+        // check if each region has a consistent number of markers, using a map
+        markit = markermap.find(unit.region);
+        if (markit == markermap.end())
+            markermap.insert(make_pair(unit.region,unit.markers));
+        else
+        {
+            if ((*markit).second != unit.markers)
+            {
+                string msg("\nCurrently, all the samples in a chromosomal ");
+                msg += "region must contain the same number of markers, the region ";
+                msg += ToString(unit.region) + " has samples with ";
+                msg += ToString(markit->second) + " markers and ";
+                msg += ToString(unit.markers) + " markers.\n\n";
+                WrapAndPrintToCout(msg);
+                return false;
+            }
+        }
+        // check if each region has a consistent datatype, using a map
+        typeit = typemap.find(unit.region);
+        if (typeit == typemap.end())
+            typemap.insert(make_pair(unit.region,unit.datatype));
+        else
+        {
+            if (!ciStringEqual((*typeit).second, unit.datatype))
+            {
+                string msg("\nCurrently, Lamarc cannot handle diverse datatypes ");
+                msg += "within a single chromosomal region.  The region ";
+                msg += ToString(unit.region) + " has both ";
+                msg += ToString(typeit->second) + " and " + ToString(unit.datatype);
+                msg += " markers within it.\n\n";
+                WrapAndPrintToCout(msg);
+                return false;
+            }
+        }
+    }
+    return true;
+} // RegionsValid
+
+//------------------------------------------------------------------------------------
+
+bool PopRegionRelation::PopsValid() const
+{
+    unsigned long i;
+
+    for (i = 0; i < units.size(); ++i)
+    {
+        const Unit& unit = units[i];
+        if (unit.population == "")
+        {
+            string msg("\nEmpty population name\n\n",CONVUI_ERRORINDENT);
+            cout << MakeIndent(msg,CONVUI_ERRORINDENT) << endl;
+            return false;
+        }
+    }
+    return true;
+} // PopsValid
+
+//------------------------------------------------------------------------------------
+
+void PopRegionRelation::WrapAndPrintToCout(const string& msg) const
+{
+    StringVec1d wrapped_msg(Linewrap(msg,CONVUI_LINELENGTH,
+                                     CONVUI_ERRORINDENT));
+    std::copy(wrapped_msg.begin(),wrapped_msg.end(),
+              ostream_iterator<string>(cout, "\n"));
+
+} // WrapAndPrintToCout
+
+//------------------------------------------------------------------------------------
+
+bool PopRegionRelation::OverallValid() const
+{
+    if (!RegionsValid()) return false;
+    if (!PopsValid()) return false;
+
+    // check if every region has all populations
+
+    // list all populations for each region
+    unsigned long i;
+    RegPopNameMap regionpops;
+    for (i = 0; i < units.size(); ++i)
+    {
+        const Unit& unit = units[i];
+        RegPopNameMap::iterator it = regionpops.find(unit.region);
+        if (it == regionpops.end())
+        {
+            PopNameSet newpop;
+            newpop.insert(unit.population);
+            regionpops.insert(make_pair(unit.region, newpop));
+            continue;
+        }
+        else
+        {
+            PopNameSet::iterator sit = (*it).second.find(unit.population);
+            if (sit == (*it).second.end())
+            {
+                (*it).second.insert(unit.population);
+            }
+            else
+            {
+                string msg("\nThe chromosomal region ");
+                msg += ToString(unit.region) + " has at least two populations named ";
+                msg += ToString(unit.population) + ".\n\n";
+                WrapAndPrintToCout(msg);
+                return false;
+            }
+        }
+    }
+
+    // check that all lists are identical
+    RegPopNameMap :: iterator firstregion = regionpops.begin();
+    RegPopNameMap :: iterator iter = regionpops.begin();
+    for (++iter; iter != regionpops.end(); ++iter)
+    {
+        if ((*iter).second != (*firstregion).second)
+        {
+            string msg("\nLamarc requires that all chromosomal regions ");
+            msg += "contain samples from all of the same populations.  Region ";
+            msg += ToString(iter->first) + " has samples from: ";
+            PopNameSet::iterator pop(iter->second.begin());
+            msg += *pop;
+            for(++pop; pop != iter->second.end(); ++pop)
+            {
+                //JDEBUG--this doesn't work so...
+                //if (std::find_if(pop->begin(),pop->end(),
+                //                static_cast<int(*)(int)>(isalnum)))
+                if (getFirstInterestingChar(*pop) != '\0')
+                    msg += ", " + *pop;
+            }
+            msg += "; while region " + ToString(firstregion->first) + " has: ";
+            pop = firstregion->second.begin();
+            msg += *pop;
+            for(++pop; pop != firstregion->second.end(); ++pop)
+            {
+                if (getFirstInterestingChar(*pop) != '\0')
+                    msg += ", " + *pop;
+            }
+            msg += ".\n\n";
+            WrapAndPrintToCout(msg);
+            return false;
+        }
+    }
+
+    return true;
+} // OverallValid
+
+//------------------------------------------------------------------------------------
+
+RegByPopMap PopRegionRelation::GetRegionalByPopMap() const
+{
+    RegByPopMap htable;
+
+    set<string> regionnames = GetRegionNames();
+    set<string> popnames = GetPopulationNames();
+    set<string>::iterator regname, popname;
+    for(regname = regionnames.begin(); regname != regionnames.end();
+        ++regname)
+    {
+        PopIterMap region;
+
+        for(popname = popnames.begin(); popname != popnames.end(); ++popname)
+        {
+            PopIterVec datasets;
+            vector<Unit>::const_iterator unit;
+            for(unit = units.begin(); unit != units.end(); ++unit)
+            {
+                if (unit->IsIn(*regname,*popname))
+                    datasets.push_back(unit->m_pop);
+            }
+            region.insert(make_pair(*popname,datasets));
+        }
+
+        htable.insert(make_pair(*regname,region));
+    }
+
+    return htable;
+
+} // GetRegionalByPopMap
+
+//____________________________________________________________________________________
diff --git a/src/conversion/ConverterUIMain.cpp b/src/conversion/ConverterUIMain.cpp
new file mode 100644
index 0000000..72964f5
--- /dev/null
+++ b/src/conversion/ConverterUIMain.cpp
@@ -0,0 +1,441 @@
+// $Id: ConverterUIMain.cpp,v 1.12 2012/06/30 01:32:40 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// This file contains the UI and the main() function for the converter program
+
+// extensive revisions to allow UI and filereading to interact.
+// Jon 2002/03/21 - 2002/?
+
+#include <cassert>
+#include <iostream>
+#include <fstream>
+#include <map>
+#include <algorithm>
+
+#include "Converter_ConverterUI.h"
+#include "Converter_PhylipConverter.h"
+#include "Converter_MigrateConverter.h"
+#include "Converter_LamarcDS.h"
+#include "Converter_DataSourceException.h"
+#include "Converter_SpacingDS.h"
+#include "Converter_HapConverter.h"
+#include "Converter_UserFileUtil.h"
+#include "Converter_SpaceConverter.h"
+#include "random.h"
+#include "constants.h" // for #define MENU
+                       //     for datatype const strings (DNA, SNP, etc)
+#include "stringx.h"   // for FromString() and IsInteger()
+#include "Converter_types.h" // for map typedefs
+
+#include "nomenuglobals.h" // to handle nomenu case
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// THE MAIN FUNCTION
+int main()
+{
+    try                                 // begin a big try-catch block
+    {
+#ifndef JSIM
+        cout << "****************************************************************" << endl;
+        cout << "****************************************************************" << endl;
+        cout << " WARNING: PROGRAM DEPRECATED" << endl;
+        cout << "****************************************************************" << endl;
+        cout << "****************************************************************" << endl;
+        cout << " This program, old_lam_conv, is no longer being maintained." << endl;
+        cout << endl;
+        cout << " To convert your Phylip and Migrate format files to the" << endl;
+        cout << " lamarc XML format you should now be using lam_conv" << endl;
+        cout << " or batch_lam_conv." << endl;
+        cout << endl;
+        cout << " If these programs do not work for you please send email to " << endl;
+        cout << " lamarc at gs.washington.edu and describe the problem. " << endl;
+        cout << "****************************************************************" << endl;
+        cout << "****************************************************************" << endl;
+#else
+        // read the nomenu file if it exists
+        std::ifstream nomenuinfile;
+        nomenuinfile.open(convstr::JSIMPARMFILE.c_str());
+        nomenuinfile >> convstr::GENETICDATAFILENAME;
+        nomenuinfile >> convstr::OUTFILENAME;
+        nomenuinfile >> convstr::HAPFILENAME;
+        nomenuinfile >> convstr::MAPFILENAME;
+        nomenuinfile >> convstr::GENETICDATATYPE;
+        nomenuinfile >> convstr::GENETICDATAFORMAT;
+        nomenuinfile >> convstr::GENETICDATAINTERLEAVED;
+        nomenuinfile >> convstr::REGIONLENGTH;
+        nomenuinfile >> convstr::REGIONOFFSET;
+        nomenuinfile >> convstr::LOCUSMAPPOS;
+        nomenuinfile >> convstr::MICROREGIONS;
+        nomenuinfile.close();
+#endif
+
+        LamarcDS *dataStore = new LamarcDS;
+        bool keepGoing = true;
+        long numblocks = 0;
+
+        //  UsePopRegionRelation();
+
+        string mapfilename = GetMapFileName();
+        cout << endl << "Begin processing of your genetic data files" << endl;
+
+        while(keepGoing)
+        {
+            string fileName;
+            bool interleaved;
+            string buffer;
+            string format;
+            bool success = false;
+
+            // Get the file type.
+#ifndef JSIM
+            format = getFormat();
+#else
+            format = convstr::GENETICDATAFORMAT;
+            interleaved = convstr::GENETICDATAINTERLEAVED;
+#endif
+
+            if ( format == "Migrate" )
+            {
+                LamarcDS* emptyDS = new LamarcDS;  // For merging correctly...
+                bool isError = false;
+
+#ifndef JSIM
+                interleaved = isInterleaved();
+                UserFileUtil fileutil;
+                fileName = getFileName(fileutil);
+
+                cout << "FileName: " << fileName << endl;
+                cout << endl;
+#else
+                fileName = convstr::GENETICDATAFILENAME;
+#endif
+
+                // Now create the converter
+                try
+                {
+                    string datatype;
+                    MigrateConverter converter(fileName,
+                                               interleaved);
+
+                    datatype = converter.GetDataType();
+                    if (datatype == "")
+                    {
+                        datatype = getDataType();
+                        converter.SetDataType(datatype);
+                    }
+                    if (datatype == lamarcstrings::MICROSAT || datatype == lamarcstrings::KALLELE)
+                    {
+                        converter.SetMarkersToRegions(AreMicrosRegions());
+                    }
+                    converter.ProcessData();
+
+                    // Get the info out of it.
+                    (*emptyDS).mergeTo(*dataStore);
+                    converter.addConvertedLamarcDS(*emptyDS);
+
+                }
+                catch (ConverterBaseError& e)
+                {
+                    cout << endl << "An error was found while processing your file.  Here it is:" << endl;
+                    cout << e.type() << ": " << e.what() << endl;
+                    cout << "Your file has not been added." << endl << endl;
+                    isError = true;
+                    delete emptyDS;
+                }
+
+                if (!isError)
+                {
+                    // If everything merged okay, set the datastore to the previously empty DS.
+                    LamarcDS *temp = dataStore;
+                    dataStore = emptyDS;
+                    delete temp;
+                }
+            }
+
+            else if ( format == "Phylip")
+            {
+#ifndef JSIM
+                //  Find if it's interleaved
+                interleaved = isInterleaved();
+
+                // finally, get the file name
+                UserFileUtil fileutil;
+                fileName = getFileName(fileutil);
+
+#else
+                fileName = convstr::GENETICDATAFILENAME;
+#endif
+
+                success = false;
+
+#ifndef JSIM
+                cout << "FileName: " << fileName << endl;
+                cout << endl;
+#endif
+
+                LamarcDS *emptyDS = new LamarcDS;  // for correct merging.
+                bool isError = false;
+
+                // Now create the converter
+                try
+                {
+                    PhylipConverter converter(fileName,
+                                              interleaved);
+
+                    string datatype = getDataType();
+                    converter.SetDataType(datatype);
+                    converter.ProcessData();
+
+                    // Get the info out of it.
+                    (*emptyDS).mergeTo(*dataStore);
+                    converter.addConvertedLamarcDS(*emptyDS);
+                    //      cin.clear();
+                    //      cin.ignore(INT_MAX, '\n');
+
+                }
+                catch (ConverterBaseError& e)
+                {
+                    cout << endl << "An error was found while processing your file.  Here it is:" << endl;
+                    cout << e.type() << ": " << e.what() << endl;
+                    cout << "Your file has not been added." << endl << endl;
+                    isError = true;
+                    delete emptyDS;
+                }
+                if (!isError)
+                {
+                    // If everything merged okay, set the datastore to the previously empty DS.
+                    LamarcDS *temp = dataStore;
+                    dataStore = emptyDS;
+                    delete temp;
+                }
+            }
+
+            // Show the number of datablocks (units by regions) the datastore
+            // contains.
+            numblocks = dataStore->GetNUnits();
+#ifndef JSIM
+            if (numblocks == 1)
+                cout << "\nFile now contains " << numblocks << " block\n" << endl;
+            else
+                cout << "\nFile now contains " << numblocks << " blocks\n" << endl;
+#endif
+
+#ifndef JSIM
+            //  Another File
+            while (!success)
+            {
+                cout << "Would you like to add another file? y/(n) :";
+                getline(cin, buffer);
+
+                if (ciStringEqual(buffer, string("y")))
+                {
+                    success = true;
+                }
+                else if (buffer == "" || ciStringEqual(buffer, string("n")))
+                {
+                    keepGoing = false;
+                    success = true;
+                }
+                else
+                {
+                    cout << "You Entered: " << buffer << endl;
+                    cout << "Enter either 'y' or 'n'." << endl;
+                    buffer = "";
+                }
+            }
+#else
+            keepGoing = false;
+            success = true;
+#endif
+
+            //      cout << "Here it is: " << endl << dataStore.getXML(0) << endl;
+        }
+
+        // Display the data from the file(s) and allow the user to (re-)group
+        // them using the "Unit" paradigm.
+
+#ifndef JSIM
+        // first we need to convert the datastore to units and then add them
+        // to the PopRegionRelation.
+
+        if (numblocks < 1)
+        {
+            cout << "Since no data blocks have been successfully entered," << endl;
+            cout << "the file converter gives up; it can't create an XML file without data." << endl;
+            return 0;
+        }
+
+        PopRegionRelation table;
+        long nregions = 0;
+        RegionMap::iterator region;
+        for(region = dataStore->getFirstRegion();
+            region != dataStore->getLastRegion(); ++region)
+        {
+            string filename(region->first);
+            unsigned long index = filename.rfind(string("&&---&&"));
+            filename  = filename.substr(0,index);
+            index = filename.find_last_of(string("/\\"));
+            if (index != string::npos)
+            {
+                filename = filename.substr(index+1,filename.length());
+            }
+
+            ++nregions;
+            PopMap::iterator pop;
+            for(pop = region->second.getFirstPopulation();
+                pop != region->second.getLastPopulation(); ++pop)
+            {
+                if (pop->second.IsGhost()) continue;
+                string datatype = pop->second.getFirstIndividual()->
+                    getDataType();
+                // EWFIX.P5 REFACTOR -- same code appears in GCRegion.cpp
+                char regionNameBuffer[13];
+                if(sprintf(regionNameBuffer,"region_%05ld",nregions)!=12)
+                {
+                    assert(false);
+                }
+                string regionName(regionNameBuffer);
+                // EWFIX.P5  -- note that using regionName is overwriting a previous name
+                // for the region, which was set as the file unit name
+                // but I DON'T KNOW WHY. Argh!
+                Unit unit(nregions,regionName,
+                          pop->first,pop->second.GetNumberOfOTUs(),
+                          pop->second.getSequenceLength(),
+                          filename, datatype, pop);
+                table.AddUnit(unit);
+            }
+        }
+
+        // now allow the user to interact with data
+        do {
+            table.DisplayPopRegionRelation();
+            table.ChangeRegions();
+            table.ChangePopulations();
+        } while (!table.OverallValid());
+        cout << "\nFinal result\n";
+        table.DisplayPopRegionRelation();
+
+        // now take the table results and write them back out to the datastore.
+        try
+        {
+            dataStore->ReorderUsing(table.GetRegionalByPopMap());
+        }
+        catch(InvalidSequenceLengthError& e)
+        {
+            cout << endl << "An error was found while processing your";
+            cout << " assignment of data to regions and populations.  Here";
+            cout << " it is:" << endl << e.type() << ": " << e.what() << endl;
+            exit(0);
+        }
+#endif
+
+        try
+        {
+            // Now do haplotype processing, region by region, incorporating
+            // user individual information
+            SetHapInfo(*dataStore);
+
+            // Now do the spacing parsing
+            SetMapInfo(*dataStore,mapfilename);
+        }
+        catch (ConverterBaseError& e)
+        {
+            cout << endl << "An error was found while processing your";
+            cout << " haplotype and/or mapping assignments:\n";
+            cout << e.type() << ": " << e.what() << endl;
+            exit(0);
+        }
+
+        string outFileName;
+        keepGoing = true;
+        bool needFileName = true;
+
+#ifdef JSIM
+        needFileName = false;
+        outFileName = "outfile";
+#endif
+        int count=0;
+        while (needFileName && count < 5)
+        {
+            count++;
+            cout << "Enter the desired output file name: ";
+            getline(cin, outFileName);
+
+            //  Check to see if the file currently exists
+            ifstream inFile( outFileName.c_str(), ios::in );
+            if (inFile)
+            {
+                cout << "The file \"" << outFileName << "\" already ";
+                cout << "exists." << endl << "Writing to this file will ";
+                cout << "destroy its current contents." << endl;
+
+                string query =  "Are you sure you want to overwrite this file?";
+
+                needFileName = !(GetYesOrNo(query,false));
+                inFile.close();
+            }
+            else
+            {
+                needFileName = false;
+            }
+            //  Okay, we have the file name
+            //  Write the file.
+            if (!needFileName)
+            {
+                ofstream outFile( outFileName.c_str(), ios::out );
+                if (!outFile)
+                {
+                    cout << "Cannot open \"" << outFileName << "\" for output." << endl;
+                    needFileName = true;
+                }
+                else
+                {
+                    outFile << (*dataStore).getXML(0) << endl;
+                    cout << "Data Written." << endl;
+                }
+            }
+            if (count >= 5 && needFileName)
+                cout << "Giving up.  Check your rights for file writing in this "
+                     << "directory and/or for the files you wish to write, and "
+                     << "re-run the converter." << endl;
+        }
+
+#ifdef JSIM
+        //The file name was read in earlier.
+        ofstream outFile( outFileName.c_str(), ios::out );
+        if (!outFile)
+        {
+            cerr << "Cannot open \"" << outFileName << "\" for output." << endl;
+        }
+        else
+            outFile << (*dataStore).getXML(0) << endl;
+#endif
+
+        delete dataStore;
+
+        return 0;
+    } // end the big try-catch block
+    catch (ConverterBaseError& e)
+    {
+        cout << endl << "Here is an uncaught error: " << e.type() << ": ";
+        cout << e.what() << endl;
+        exit(0);
+    }
+    catch (...)
+    {
+        cout << endl << "A system/compiler/linker error occured!" << endl;
+        exit(0);
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_ConverterIf.h b/src/conversion/Converter_ConverterIf.h
new file mode 100644
index 0000000..473606e
--- /dev/null
+++ b/src/conversion/Converter_ConverterIf.h
@@ -0,0 +1,55 @@
+// $Id: Converter_ConverterIf.h,v 1.16 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef Converter_ConverterIf_H
+#define Converter_ConverterIf_H
+
+#include <string>
+
+#include "Converter_LamarcDS.h"
+#include "Converter_ParserUtil.h"
+#include "stringx.h"
+
+using std::string;
+
+class Random;
+
+//  This file is the base class for all the Converter (DS) classes.
+//  It's a pure virtual class ensuring that each DS class will know how to
+//  write itself as XML.
+//  Added ability to deal with phase unknown data, Jon 2002/02/12
+//  Removed phase unknown to new HapConverter class, Jon 2002/03/28
+
+class ConverterIf : public ParserUtil
+{
+  private:
+
+  protected:
+
+  public:
+    ConverterIf();
+    virtual ~ConverterIf();
+    virtual void addConvertedLamarcDS(LamarcDS& lamarc) = 0;
+
+    // A wrapper around the string to long converter, FromString(),
+    // to check that the number was parsed correctly.  Throws a
+    // a FileFormatError upon failure.
+    long FlagCheck(const string& origstring, const string& msg) const;
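+    // e.g. (hypothetical token and message; a sketch of the intended use):
+    //   long npops = FlagCheck(token, "number of populations");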
+
+    virtual string GetDataType() { return ""; };
+    virtual void SetDataType(const string&) {};
+    virtual void ProcessData() {};
+
+};
+
+#endif // Converter_ConverterIf_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_ConverterUI.h b/src/conversion/Converter_ConverterUI.h
new file mode 100644
index 0000000..0dd5fdd
--- /dev/null
+++ b/src/conversion/Converter_ConverterUI.h
@@ -0,0 +1,118 @@
+// $Id: Converter_ConverterUI.h,v 1.24 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef CONVERTER_UI_H
+#define CONVERTER_UI_H
+
+#include <string>
+#include <vector>
+#include <map>
+#include <set>
+
+#include "Converter_RegionDS.h"     // added 2003/11/28 by erynes to compile on Windows
+#include "Converter_PopulationDS.h" // added 2003/11/28 by erynes to compile on Windows
+
+#include "Converter_types.h"
+
+using std::string;
+
+class LamarcDS;
+class PopulationDS;
+class ConverterIf;
+class UserFileUtil;
+
+bool isInterleaved();
+string getFileName(const UserFileUtil& fileutil);
+string getHapFileName(const string& regionname, const UserFileUtil& fileutil);
+string getFormat();
+string getDataType();
+string GetMapFileName();
+long getLong(const string& name, bool mustbepositive = false);
+long getRegionalMapInfo (const string& regionName, long& length, long& offset);
+// SetRegionLength() is a helper function for SetMapInfo()
+void SetRegionLength(RegionMap::iterator region);
+void SetMapInfo(LamarcDS& dataStore, const string& mapfilename);
+void SetHapInfo(LamarcDS& dataStore);
+
+// used in Migrate file conversion, how to read microsats?
+bool AreMicrosRegions();
+
+// return true if answer is "yes" and false if answer is "no"
+// defaultyes is true if the default, user just carriage returns,
+// answer is "yes", false if "no"
+bool GetYesOrNo(const string& query, bool defaultyes);
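+//
+// Usage sketch (mirrors the overwrite check in ConverterUIMain.cpp; the
+// prompt text is only illustrative):
+//
+//   string query = "Are you sure you want to overwrite this file?";
+//   bool overwrite = GetYesOrNo(query, false);
+//   // prints:  Are you sure you want to overwrite this file?  y/(n).
+//   // an empty line (just Return) yields the default, here false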
+
+int main();
+
+//------------------------------------------------------------------------------------
+
+// This class stores information on one "unit" (a block of
+// sequences from the same region and population).
+
+class Unit
+{
+  public:
+    long regionno;
+    string region;
+    string population;
+    long tips;
+    long markers;
+    string filename;
+    string datatype;
+    PopMap::iterator m_pop;
+
+    Unit(long no, const string& reg, const string& pop, long ti,
+         long mar, const string& filen, const string& dtype,
+         PopMap::iterator mypop)
+        : regionno(no), region(reg), population(pop), tips(ti), markers(mar),
+          filename(filen), datatype(dtype), m_pop(mypop) {}
+
+    void DisplayUnit() const;
+
+    // this operator allows sorting by region, then population
+    bool operator<(const Unit& other) const;
+    bool IsIn(const string& regionname, const string& popname) const;
+};
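+
+// Construction sketch (mirrors the loop over populations in
+// ConverterUIMain.cpp, where 'pop' is a PopMap::iterator):
+//
+//   Unit unit(nregions, regionName,
+//             pop->first, pop->second.GetNumberOfOTUs(),
+//             pop->second.getSequenceLength(),
+//             filename, datatype, pop);
+//   table.AddUnit(unit);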
+
+//------------------------------------------------------------------------------------
+
+// This class stores and manages a list of Units, allowing the
+// user to assign and validate their population and region
+// relationships
+
+//------------------------------------------------------------------------------------
+
+class PopRegionRelation
+{
+  private:
+    std::vector<Unit> units;
+    std::set<string> GetRegionNames() const;
+    std::set<string> GetPopulationNames() const;
+
+    // a pair of helper functions for OverallValid()
+    bool RegionsValid() const;
+    bool PopsValid() const;
+
+    void WrapAndPrintToCout(const string& msg) const;
+
+  public:
+    void AddUnit(Unit unit) { units.push_back(unit); };
+    // the following is non-const because it sorts the Units
+    void DisplayPopRegionRelation();
+    void ChangeRegions();
+    void ChangePopulations();
+    bool OverallValid() const;
+    RegByPopMap GetRegionalByPopMap() const;
+};
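+
+// Interaction sketch (mirrors main() in ConverterUIMain.cpp): the table is
+// displayed and re-edited until the region/population assignment is coherent,
+// then written back to the datastore.
+//
+//   PopRegionRelation table;
+//   // ... AddUnit() once per data block ...
+//   do {
+//       table.DisplayPopRegionRelation();
+//       table.ChangeRegions();
+//       table.ChangePopulations();
+//   } while (!table.OverallValid());
+//   dataStore->ReorderUsing(table.GetRegionalByPopMap());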
+
+#endif // CONVERTER_UI_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_DataSourceException.h b/src/conversion/Converter_DataSourceException.h
new file mode 100644
index 0000000..58b5b22
--- /dev/null
+++ b/src/conversion/Converter_DataSourceException.h
@@ -0,0 +1,149 @@
+// $Id: Converter_DataSourceException.h,v 1.11 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef Converter_DataSourceException_H
+#define Converter_DataSourceException_H
+
+#include <exception>
+#include <string>
+
+using std::string;
+
+//  This file contains the exceptions thrown by datasource classes
+//  in the converter directory.  This holds exceptions relating to bad
+//  data formats or constraint violations.
+//  what() can be called to get a string describing the error.
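+//
+//  Typical handling, as in ConverterUIMain.cpp:
+//
+//    catch (ConverterBaseError& e)
+//    {
+//        cout << e.type() << ": " << e.what() << endl;
+//    }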
+
+class ConverterBaseError : public std::exception
+{
+  public:
+    virtual const char* type () const = 0;
+};
+
+class ConverterTestError : public ConverterBaseError
+{
+  private:
+    string _what;
+  public:
+    ConverterTestError(const string& wh): _what (wh) { };
+    virtual ~ConverterTestError() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const char* type () const { return "ConverterTestError"; };
+};
+
+class DataTypeNotFoundError : public ConverterBaseError
+{
+  private:
+    string _what;
+  public:
+    DataTypeNotFoundError(const string& wh): _what (wh) { };
+    virtual ~DataTypeNotFoundError() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const char* type () const { return "DataTypeNotFoundError"; };
+};
+
+class InvalidNucleotideError : public ConverterBaseError
+{
+  private:
+    string _what;
+  public:
+    InvalidNucleotideError(const string& wh): _what (wh) { };
+    virtual ~InvalidNucleotideError() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const char* type () const { return "InvalidNucleotideError"; };
+};
+
+class InvalidSequenceLengthError : public ConverterBaseError
+{
+  private:
+    string _what;
+  public:
+    InvalidSequenceLengthError(const string& wh): _what (wh) { };
+    virtual ~InvalidSequenceLengthError() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const char* type () const { return "InvalidSequenceLengthError"; };
+};
+
+class InvalidFrequenciesError : public ConverterBaseError
+{
+  private:
+    string _what;
+  public:
+    InvalidFrequenciesError(const string& wh): _what (wh) { };
+    virtual ~InvalidFrequenciesError() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const char* type () const { return "InvalidFrequenciesError"; };
+};
+
+class FileNotFoundError : public ConverterBaseError
+{
+  private:
+    string _what;
+  public:
+    FileNotFoundError(const string& wh): _what (wh) { };
+    virtual ~FileNotFoundError() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const char* type () const { return "FileNotFoundError"; };
+};
+
+class FileFormatError : public ConverterBaseError
+{
+  private:
+    string _what;
+  public:
+    FileFormatError(const string& wh): _what (wh) { };
+    virtual ~FileFormatError() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const char* type () const { return "FileFormatError"; };
+};
+
+class InconsistentDataError : public ConverterBaseError
+{
+  private:
+    string _what;
+  public:
+    InconsistentDataError(const string& wh): _what (wh) { };
+    virtual ~InconsistentDataError() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+    virtual const char* type () const { return "InconsistentDataError"; };
+};
+
+class MarkerLengthMismatchDataError : public InconsistentDataError
+{
+  public:
+    MarkerLengthMismatchDataError(long front, long back, long length);
+    virtual ~MarkerLengthMismatchDataError() throw() {};
+};
+
+class OffsetAfterFirstPositionDataError : public InconsistentDataError
+{
+  public:
+    OffsetAfterFirstPositionDataError(long offset, long firstPosition);
+    virtual ~OffsetAfterFirstPositionDataError() throw() {};
+};
+
+class RegionEndBeforeLastPositionDataError : public InconsistentDataError
+{
+  public:
+    RegionEndBeforeLastPositionDataError(long offset, long length, long lastPosition);
+    virtual ~RegionEndBeforeLastPositionDataError() throw() {};
+};
+
+class MarkerPositionMismatchDataError : public InconsistentDataError
+{
+  public:
+    MarkerPositionMismatchDataError(long nmarkers, long npositions);
+    virtual ~MarkerPositionMismatchDataError() throw() {};
+};
+
+#endif // Converter_DataSourceException_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_DataSourceIf.h b/src/conversion/Converter_DataSourceIf.h
new file mode 100644
index 0000000..fd2f3f1
--- /dev/null
+++ b/src/conversion/Converter_DataSourceIf.h
@@ -0,0 +1,33 @@
+// $Id: Converter_DataSourceIf.h,v 1.8 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef Converter_DataSourceIf_H
+#define Converter_DataSourceIf_H
+
+#include <string>
+
+using std::string;
+
+//  This file is the base class for all the Datasource (DS) classes.
+//  It's a pure virtual class ensuring that each DS class will know how to write itself as XML.
+
+class DataSourceIf
+{
+  protected:
+    virtual void addTabs (int numTabs, string& str) const;
+  public:
+    virtual ~DataSourceIf();
+    virtual string getXML(unsigned int numTabs) const = 0;
+};
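+
+// Usage sketch: any DS object can serialize itself; for example, the
+// converter's main() writes the whole datastore with
+//
+//   outFile << dataStore->getXML(0) << endl;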
+
+#endif // Converter_DataSourceIf_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_HapConverter.h b/src/conversion/Converter_HapConverter.h
new file mode 100644
index 0000000..96061bd
--- /dev/null
+++ b/src/conversion/Converter_HapConverter.h
@@ -0,0 +1,68 @@
+// $Id: Converter_HapConverter.h,v 1.12 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// HapConverter reads the info from a prepared Converter-Haplotype
+// format file and uses it to rearrange the innards of a RegionDS.
+// Specifically, may consolidate multiple former individuals into a
+// single individual as indicated by the read file.
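+//
+// Usage sketch (mirrors SetHapInfo() in the converter UI code):
+//
+//   Random randomgenerator;                      // seeded from the system clock
+//   HapConverter hapconverter(region, randomgenerator);
+//   vector<IndividualDS> individuals = hapconverter.ReadHapInfo(hapfilename);
+//   hapconverter.ReplaceIndividualsWith(individuals);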
+
+#ifndef CONVERTER_HAPCONVERTER_H
+#define CONVERTER_HAPCONVERTER_H
+
+#include <fstream>
+
+#include "Converter_ConverterIf.h"
+#include "Converter_RegionDS.h"
+
+using std::string;
+using std::vector;
+
+typedef vector<IndividualDS> IndDSVec;
+
+class HapConverter : public ConverterIf
+{
+  private:
+    RegionDS& m_region;
+    Random& m_random;
+    long m_nindividuals;
+    vector<string> m_hapnames;
+
+    HapConverter();                               // deliberately undefined
+    HapConverter(const HapConverter&);            // deliberately undefined
+    HapConverter& operator=(const HapConverter&); // deliberately undefined
+
+    vector<long> ParsePhaseInfo(ifstream& input) const;
+    vector<string> PopHapNames(long ntopop);
+    string PopTilDelimiter(istream& input, const char& dlm) const;
+
+    // helper function for ReadHapInfo. The returned container
+    // will be empty if further processing is necessary.  May
+    // throw a FileFormatError.
+    IndDSVec ParseFirstLine(istringstream& firstline,
+                            const string& filename,
+                            ifstream& filestr);
+
+  public:
+    HapConverter(RegionDS& region, Random& m_random);
+    virtual ~HapConverter();
+
+    // This function can throw a FileFormatError.
+    IndDSVec ReadHapInfo(const string& filename);
+
+    void ReplaceIndividualsWith(IndDSVec& individuals);
+
+    void addConvertedLamarcDS (LamarcDS& lamarc);
+
+};
+
+#endif // CONVERTER_HAPCONVERTER_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_IndividualDS.h b/src/conversion/Converter_IndividualDS.h
new file mode 100644
index 0000000..bfdaf41
--- /dev/null
+++ b/src/conversion/Converter_IndividualDS.h
@@ -0,0 +1,125 @@
+// $Id: Converter_IndividualDS.h,v 1.14 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// IndividualDS is a representation of the data for any individual.
+// Each individual owns a name and a sequence.
+// There's no validation on the individual; however, there is on the sequence, so if you
+// use the constructor that takes (string name, string sequence) you may
+// get a ConverterBaseError of some type.
+
+// TODO  Find out if no name is okay.
+
+// Methods
+// string getSequence() returns the sequence as a string.
+// string getName() returns the individual's name as a string.  (may be "")
+
+// IndividualDS(const string& name, const Sequence& seq)
+// IndividualDS(const string& name, const string& seq)
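+//
+// Usage sketch (the sample name, sequence, and datatype string below are
+// purely illustrative):
+//
+//   IndividualDS ind("sample_01", "ACGTACGT", "DNA");
+//   string name = ind.getName();            // "sample_01"
+//   int length  = ind.getSequenceLength();  // length of the stored sequence (8 here)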
+
+#ifndef CONVERTER_INDIVIDUALDS_H
+#define CONVERTER_INDIVIDUALDS_H
+
+#include <string>
+#include <vector>
+#include "Converter_DataSourceIf.h"
+#include "Converter_Sequence.h"
+
+using std::string;
+using std::vector;
+
+class IndividualDS : public DataSourceIf
+{
+  private:
+    string m_name;
+    vector<Sequence> m_seq;
+    vector<string> m_seqnames;
+    vector<long> m_unknownphase;
+
+    IndividualDS();  // undefined
+
+  public:
+    IndividualDS(const string& name) : m_name(name) {};
+    IndividualDS(const string& name, const Sequence& seq);
+    // TODO. Take this out once operator ::string is a part of Sequence
+    IndividualDS(const string& name, const string& seq);
+    IndividualDS(const string& name, const string& seq, const string& dataType);
+
+    virtual ~IndividualDS();
+
+    // Use the Default Copy Constructor.
+    // Use the Default operator=
+
+    //  get the name of the individual
+    string getName() const;
+
+    //  set the name of the individual
+    void setName(const string& name);
+
+    // Two helper functions.  Get the guts of the Sequence
+    //  get the sequence (as a string).
+    string getSequence() const;
+
+    //  get the sequenceLength (as a int).
+    int getSequenceLength() const;
+
+    //  get the datatype of the first sequence
+    string getDataType() const { return m_seq.front().getDataType(); };
+
+    //  get all the sequence names in the individual
+    vector<string> GetAllSeqNames() const;
+
+    //  get the sequence names (supporter function for haplotyping, where
+    //  the sequence names stand in for haplotype names)
+    vector<string> GetHapNames() const;
+
+    //  a supporter function for error reporter, returns a comma delimited
+    //  list of the haplotype names in a single string
+    string GetHapNamesForPrint() const;
+
+    //  get the number of haplotypes
+    long GetNHaps() const { return static_cast<long>(m_seqnames.size()); };
+
+    // add a haplotype name or set of names to the individual
+    void AddHap(const string& name) { m_seqnames.push_back(name); };
+    void AddHap(const vector<string>& names) { m_seqnames.insert(
+            m_seqnames.end(),
+            names.begin(),
+            names.end()); };
+    // add a sequence
+    void AddSequence(const Sequence& seq) { m_seq.push_back(seq); };
+
+    //  get the number of sequences
+    unsigned long GetNumberOfSequences() const { return m_seq.size(); };
+
+    // is a given sequence present?
+    bool HasSequence(const string& seqname) const;
+
+    // are any sequences present?
+    bool HasNoSequences() const;
+
+    // is any non-contiguous data present?
+    bool HasNonContiguousData() const;
+
+    // are any SNPs present?
+    bool HasSNPs() const;
+
+    // remove a single sequence
+    Sequence PopSequence(const string& seqname);
+
+    // add unknown phase information
+    void SetUnknownPhase(const vector<long>& sites) { m_unknownphase = sites; };
+
+    string getXML(unsigned int numTabs) const;   //  From DataSourceIf
+};
+
+#endif  // CONVERTER_INDIVIDUALDS_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_LamarcDS.h b/src/conversion/Converter_LamarcDS.h
new file mode 100644
index 0000000..7f56640
--- /dev/null
+++ b/src/conversion/Converter_LamarcDS.h
@@ -0,0 +1,206 @@
+// $Id: Converter_LamarcDS.h,v 1.22 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// LamarcDS is a representation of the data found in a Lamarc Datafile.
+// Each lamarcDS has the following information.
+// Most of it is currently default.
+// double coalescenceStartValues;
+// method_type coalescenceMethod;   // changed from string to method_type by ewalkup
+
+// MARY ADDS:
+// double migrationStartValues;
+// method_type migrationMethod; // changed from string to method_type by ewalkup
+// long migrationMaxEvents;
+
+// long coalescenceMaxEvents;
+// long replicates;
+// double temperatures;
+// double swapInterval;  // I have no idea what this is.
+// double resimulating;
+// long initialNumber;
+// long initialSamples;
+// long initialDiscard;
+// long initialInterval;
+// long finalNumber;
+// long finalSamples;
+// long finalDiscard;
+// long finalInterval;
+// string verbosity;
+// string echo;
+// string profile;
+// string posterior;
+// long seed;
+// string outputFile;
+// string summaryFile;
+
+// Oh yeah.  It also has some regions containing population data.
+// No two regions will have the same name.  If no name is given, give it a
+// unique name. The unique name will be in the format 'Region XXXX' where 'X'
+// is a capital letter. Every region in a single 'LamarcDS' must have the same
+// populations.  Even if those  populations are empty.  If a new region is
+// added, every region in the Datastore will then have the union of the old
+// populations and the new populations.  In most cases, these newly created
+// populations will be empty, but apparently it helps the program just knowing
+// that  these populations exist.
+
+// Some notes on some methods.
+
+// validateRegionName(const string&) will make sure the region name is not
+// just whitespace or empty string.  If it is, a unique name is provided for
+// the region.
+
+// string getUniqueName() will provide unique names for regions that are
+// provided without a name.  The name will look like 'Region XXXX' where X
+// is a capital letter between A and Z.
+
+// doesRegionNameExist(const string&) will return true if the region name
+// exists within this datasource, and false otherwise.
+
+// validateNewRegion(RegionDS&) maintains constraint integrity by ensuring
+// that every region in a datasource always has the same set of
+// populations.
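+
+// Merge sketch (mirrors the per-file loop in ConverterUIMain.cpp): a fresh
+// LamarcDS absorbs the existing datastore, then receives the newly converted
+// data, so every region ends up with the same population set.
+//
+//   LamarcDS* emptyDS = new LamarcDS;
+//   emptyDS->mergeTo(*dataStore);             // pull in what we already have
+//   converter.addConvertedLamarcDS(*emptyDS);
+//   delete dataStore;                         // adopt the merged datastore
+//   dataStore = emptyDS;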
+
+#ifndef CONVERTER_LAMARCDS_H
+#define CONVERTER_LAMARCDS_H
+
+#include <map>
+#include <string>
+
+#include "Converter_DataSourceIf.h"
+#include "Converter_RegionDS.h"
+#include "Converter_types.h" // for map typedefs
+
+using std::string;
+
+class LamarcDS : public DataSourceIf
+{
+  private:
+    double m_coalescenceStartValues;
+    method_type m_coalescenceMethod;
+    long m_coalescenceMaxEvents;
+
+    // MARY
+    double m_migrationStartValues;
+    method_type m_migrationMethod;
+    long m_migrationMaxEvents;
+
+    long m_replicates;
+    double m_temperatures;
+    double m_swapInterval;  // I have no idea what this is.
+    double m_resimulating;
+    long m_initialNumber;
+    long m_initialSamples;
+    long m_initialDiscard;
+    long m_initialInterval;
+    long m_finalNumber;
+    long m_finalSamples;
+    long m_finalDiscard;
+    long m_finalInterval;
+    string m_verbosity;
+    string m_progverbosity;
+    string m_echo;
+    string m_profile;
+    string m_posterior;
+    long m_seed;
+    string m_outputFile;
+    string m_inSummaryFile;
+    string m_outSummaryFile;
+    bool   m_useInSummaryFile;
+    bool   m_useOutSummaryFile;
+
+    RegionMap m_regions;
+
+    // destroy the stored genetic data, used by ReorderUsing()
+    void EraseRegions();
+
+    // Validation for LamarcDS
+    void validateRegionName(RegionDS& region) const;
+    string getUniqueName() const;
+    void validateNewRegion(RegionDS& region);
+
+  public:
+    //  Note.  Constructors may throw ConverterBaseError's
+    LamarcDS();                                // starts with no regions.  huh.
+    LamarcDS(RegionDS& region);
+    virtual ~LamarcDS();
+
+    //  get an iterator to the regions contained by this lamarc datasource.
+    RegionMap::iterator getFirstRegion();
+    RegionMap::const_iterator getFirstRegion() const;
+
+    //  get an iterator to the end of the region list.
+    RegionMap::iterator getLastRegion();
+    RegionMap::const_iterator getLastRegion() const;
+
+    //  add an region
+    void addRegion (RegionDS& region);
+
+    //  Merge an existing LamarcDS to this LamarcDS
+    //  TODO:  Figure whether the lamarc ref passed should be const.
+    void mergeTo(LamarcDS& lamarc);
+
+    //  Reorder the internal data maps according to the given map
+    //  Provided for use in the UI.  An InvalidSequenceLengthError
+    //  can be thrown, potentially aborting the whole process, no
+    //  internal cleanup is performed in this case.
+    void ReorderUsing(RegByPopMap newmap);
+
+    //  get the number of regions contained by the LamarcDS
+    long numRegions() const;
+
+    //  get the total number of population units contained by
+    //  the LamarcDS, used by the UI
+    long GetNUnits() const;
+
+    //  originally part of private validation, pulled out for use by
+    //  the SpaceConverter to validate the spacing info
+    bool doesRegionNameExist(const string& name) const;
+
+    //  is a non-contiguous type of genetic data present?
+    bool HasNonContiguousData() const;
+
+    //  are SNPs present in the stored genetic data?
+    bool HasSNPs() const;
+
+    //  A whole bunch of methods that allow our dear user to
+    //  set the <forces> information.
+
+    void setCoalescenceStartValues(const double);
+    void setCoalescenceMethod(method_type);
+    void setCoalescenceMaxEvents(const long);
+    void setReplicates(const long);  // unsigned?
+    void setTemperatures(const double);
+    void setSwapInterval(const long);  // unsigned
+    void setResimulating(const double);
+    void setInitialNumber(const long);  // unsigned?
+    void setInitialSamples(const long);  // unsigned?
+    void setInitialDiscard(const long);  // unsigned?
+    void setInitialInterval(const long);  // unsigned?
+    void setFinalNumber(const long);  // unsigned?
+    void setFinalSamples(const long);  // unsigned?
+    void setFinalDiscard(const long);  // unsigned?
+    void setFinalInterval(const long);  // unsigned?
+    void setVerbosity(const string&);  // restrictions?
+    void setProgVerbosity(const string&);  // restrictions?
+    void setEcho(const string&);  // boolean?
+    void setProfile(const string&);
+    void setPosterior(const string&);
+    void setSeed(const long);
+    void setOutputFile(const string&);
+    void setInSummaryFile(const string&);
+    void setOutSummaryFile(const string&);
+
+    string getXML(unsigned int numTabs) const;   //  From DataSourceIf
+};
+
+#endif // CONVERTER_LAMARCDS_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_MigrateConverter.h b/src/conversion/Converter_MigrateConverter.h
new file mode 100644
index 0000000..0124b96
--- /dev/null
+++ b/src/conversion/Converter_MigrateConverter.h
@@ -0,0 +1,88 @@
+// $Id: Converter_MigrateConverter.h,v 1.16 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+//  Migrate Converter will take an old style migrate file, and create a lamarcDS out of it.
+//  Note that this can throw pretty much any ConverterBaseError.
+//  Anytime this is used, one should catch and handle these errors.
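+//
+//  Usage sketch (mirrors the Migrate branch of main() in ConverterUIMain.cpp):
+//
+//    MigrateConverter converter(fileName, interleaved);
+//    string datatype = converter.GetDataType();
+//    if (datatype == "")                       // the file did not name a datatype
+//    {
+//        datatype = getDataType();             // ask the user instead
+//        converter.SetDataType(datatype);
+//    }
+//    converter.ProcessData();
+//    converter.addConvertedLamarcDS(lamarcDS);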
+
+#ifndef CONVERTER_MIGRATECONVERTER_H
+#define CONVERTER_MIGRATECONVERTER_H
+
+#include <fstream>
+
+#include "Converter_ConverterIf.h"
+#include "Converter_LamarcDS.h"
+
+using std::string;
+
+class Random;
+
+//  Namespace here?
+
+class MigrateConverter : public ConverterIf
+{
+  private:
+    LamarcDS m_lamarc;
+    string m_fileName;
+    string m_datatype;
+    bool m_interleaved;
+    string m_firstLine;  // saves the first line for re-parsing
+    ifstream m_inFile;
+    bool m_markerstoregions;
+
+    void getInterleavedSequences (ifstream& infile,
+                                  const long numSequences,
+                                  const long sequenceLength,
+                                  const string& popName,
+                                  const string& regionName,
+                                  const string& datatype);
+
+    void getNonInterleavedSequences (ifstream& infile,
+                                     const long numSequences,
+                                     const long sequenceLength,
+                                     const string& popName,
+                                     const string& regionName,
+                                     const string& datatype);
+
+    string getNewName(Random&) const;
+
+    long GetNumPops(istringstream& linestream) const;
+    long GetNumLoci(istringstream& linestream) const;
+    std::vector<long> GetSeqLengths(istringstream& linestream, long numLoci) const;
+    long GetNumSeqs(istringstream& linestream) const;
+    string ReadDataType(istringstream& linestream) const;
+
+    void GetMicroSatLoci (ifstream& infile,
+                          const long numSequences,
+                          const long numLoci,
+                          const string& popName,
+                          const string& regionName,
+                          const string& datatype,
+                          const string& delimiter);
+
+  public:
+    //  Note.  Constructors may throw ConverterBaseError's
+    MigrateConverter(const string& fileName, bool interleaved);
+
+    virtual ~MigrateConverter();
+
+    void addConvertedLamarcDS(LamarcDS&);   //  From ConverterIf
+
+    virtual void SetDataType(const string& dtype) { m_datatype = dtype; };
+    virtual string GetDataType() { return m_datatype; };
+    virtual void ProcessData();
+
+    void SetMarkersToRegions(bool val);
+};
+
+#endif // CONVERTER_MIGRATECONVERTER_H
+
+//____________________________________________________________________________________
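As the comment at the top of this header warns, almost anything here can throw a ConverterBaseError. A hypothetical driver sketch of the catch-and-handle pattern (the same pattern applies to PhylipConverter below); it assumes ConverterBaseError is declared in Converter_DataSourceException.h and deliberately calls no accessor on the exception, since its interface is not shown in this diff:

    #include <iostream>
    #include "Converter_MigrateConverter.h"
    #include "Converter_DataSourceException.h"   // assumed home of ConverterBaseError

    // Hypothetical driver, not part of the upstream sources.
    bool convertMigrateFile(const std::string& fileName, bool interleaved, LamarcDS& target)
    {
        try
        {
            MigrateConverter converter(fileName, interleaved);
            converter.ProcessData();
            converter.addConvertedLamarcDS(target);
            return true;
        }
        catch (const ConverterBaseError&)
        {
            std::cerr << "Conversion of " << fileName << " failed." << std::endl;
            return false;
        }
    }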
diff --git a/src/conversion/Converter_ModelDS.h b/src/conversion/Converter_ModelDS.h
new file mode 100644
index 0000000..643c260
--- /dev/null
+++ b/src/conversion/Converter_ModelDS.h
@@ -0,0 +1,116 @@
+// $Id: Converter_ModelDS.h,v 1.7 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// ModelDS is a representation of the data on any one model.
+// Each model owns the following.
+// A set of base frequencies.  These are four two-digit frequencies that appear to total 100.
+// A ttratio
+// A rate
+// A probabilities value
+
+// To be perfectly honest, I've no idea what some of these things are for.  But that
+// shouldn't matter.
+
+// Validation is done whenever new freqs are added.
+// The default constructor defaults to model F84.  I've no idea if this is a good thing
+// or not, but it seems okay.
+
+#ifndef CONVERTER_MODELDS_H
+#define CONVERTER_MODELDS_H
+
+#include <map>
+
+#include "Converter_DataSourceIf.h"
+
+using std::string;
+
+class ModelDS : public DataSourceIf
+{
+  private:
+    string m_modelName;
+    std::map<string, double> m_freqs;    // does not need case-insensitivity
+    double m_ttRatio;
+    unsigned int m_numCategories;
+    double m_rates;
+    double m_probabilities;
+
+    // Validation for ModelDS
+    void validateFreqs(double a, double c, double g, double t);
+
+  public:
+    //  Note.  Constructors may throw ConverterBaseError's
+    ModelDS();
+    ModelDS(const string& modelName,
+            const double freqA,
+            const double freqC,
+            const double freqG,
+            const double freqT,
+            const double ttRatio);
+    ModelDS(const string& modelName,
+            const double freqA,
+            const double freqC,
+            const double freqG,
+            const double freqT,
+            const double ttRatio,
+            const unsigned int numCategories,
+            const double rates,
+            const double probabilities);
+
+    virtual ~ModelDS();
+
+    // Use the Default Copy Constructor.
+
+    //  get the Model name
+    string getName() const;
+
+    //  Allow setting of the model name
+    void setName(const string& modelName);
+
+    //  get the Model ttRatio
+    double getTTRatio() const;
+
+    //  Allow setting of the model ttRatio
+    void setTTRatio(const double modelTTRatio);
+
+    //  get the Model numCategories
+    unsigned int getNumCategories() const;
+
+    //  Allow setting of the model numCategories
+    void setNumCategories(const unsigned int modelNumCategories);
+
+    //  get the Model rates
+    double getRates() const;
+
+    //  Allow setting of the model rates
+    void setRates(const double modelRates);
+
+    //  get the Model probabilities
+    double getProbabilities() const;
+
+    //  Allow setting of the model probabilities
+    void setProbabilities(const double modelProbabilities);
+
+    //  Allow getting of the Freqs.
+    double getAFreq();
+    double getCFreq();
+    double getGFreq();
+    double getTFreq();
+
+    //  Allow setting of the freqs.
+    //  This may throw a ConverterBaseError
+    void setFreqs(double a, double c, double g, double t);
+
+    string getXML(unsigned int numTabs) const;   //  From DataSourceIf
+};
+
+#endif // CONVERTER_MODELDS_H
+
+//____________________________________________________________________________________
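A small construction sketch using the constructors and setters declared above. The frequency convention (fractions summing to 1 versus the percentages-summing-to-100 reading suggested by the comment at the top) is not pinned down here, so the numbers are illustrative only:

    #include "Converter_ModelDS.h"

    // Hypothetical example, not part of the upstream sources; all values are placeholders.
    ModelDS makeExampleF84Model()
    {
        ModelDS model("F84", 0.25, 0.25, 0.25, 0.25, 2.0);
        model.setNumCategories(1);
        model.setRates(1.0);
        model.setProbabilities(1.0);
        return model;
    }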
diff --git a/src/conversion/Converter_ParserUtil.h b/src/conversion/Converter_ParserUtil.h
new file mode 100644
index 0000000..3bacda0
--- /dev/null
+++ b/src/conversion/Converter_ParserUtil.h
@@ -0,0 +1,61 @@
+// $Id: Converter_ParserUtil.h,v 1.9 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef Converter_ParserUtil_H
+#define Converter_ParserUtil_H
+
+#include <string>
+
+using std::string;
+
+//  This is the base class for all the parser-utility classes.
+
+class ParserUtil
+{
+  private:
+
+  protected:
+    // No creation of just this class.
+    ParserUtil();
+    virtual ~ParserUtil();
+
+    // Pulls chars until whitespace, a newline, or a digit is found.
+    bool getWord (istream& is, string& buffer) const;
+    // Pulls chars until whitespace or a newline is found.
+    bool getName (istream& is, string& buffer) const;
+    // Pulls chars until a non-digit is found.
+    bool getNumber (istream& is, string& buffer) const;
+    // Pulls chars until whitespace, a newline, or a tab is found.
+    bool getToken (istream& is, string& buffer) const;
+    // Pulls chars until a newline is found.
+    bool getLine (istream& is, string& buffer) const;
+    // Pulls all spaces and newlines until the next non-whitespace character is found.
+    bool skipWhiteSpace (istream& is) const;
+    // Pulls the next N characters (newlines and tabs skipped).
+    bool getNextNChars (istream& is, string& buffer, const long& n) const;
+    // Pulls the next N non-whitespace characters (tab, newline, space).
+    bool getNextNNonWhiteSpace (istream& is, string& buffer, const long& n) const;
+    // Pulls chars until the given character is found.
+    bool skipToChar (istream& is, int searchChar) const;
+
+    // returns true if the first non-whitespace character is in the
+    // search string, false otherwise
+    bool isFirstChar(istream& is, const string& searchString) const;
+
+  public:
+};
+
+// Checks for both DOS and Unix newline characters
+bool isnewline (int ch);
+
+#endif // Converter_ParserUtil_H
+
+//____________________________________________________________________________________
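The helpers above are declared only; here is a small standalone sketch (not the upstream implementation) of the sort of behavior isnewline and getWord describe, using nothing but the standard library:

    #include <cctype>
    #include <istream>
    #include <string>

    // Treats both the Unix ('\n') and DOS ('\r') line-ending characters as newlines.
    bool isNewlineSketch(int ch)
    {
        return ch == '\n' || ch == '\r';
    }

    // Pulls characters until whitespace, a newline, or a digit is found,
    // mirroring the comment on getWord above.
    bool getWordSketch(std::istream& is, std::string& buffer)
    {
        int ch;
        while ((ch = is.peek()) != EOF)
        {
            if (std::isspace(ch) || isNewlineSketch(ch) || std::isdigit(ch))
                break;
            buffer += static_cast<char>(is.get());
        }
        return !buffer.empty();
    }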
diff --git a/src/conversion/Converter_PhylipConverter.h b/src/conversion/Converter_PhylipConverter.h
new file mode 100644
index 0000000..3db9b18
--- /dev/null
+++ b/src/conversion/Converter_PhylipConverter.h
@@ -0,0 +1,56 @@
+// $Id: Converter_PhylipConverter.h,v 1.14 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+//  PhylipConverter takes an old-style PHYLIP file and creates a LamarcDS out of it.
+//  Note that this can throw pretty much any ConverterBaseError.
+//  Any time this is used, one should catch and handle these errors.
+
+#ifndef CONVERTER_PHYLIPCONVERTER_H
+#define CONVERTER_PHYLIPCONVERTER_H
+
+#include <fstream>
+
+#include "Converter_ConverterIf.h"
+#include "Converter_LamarcDS.h"
+
+using std::string;
+
+class Random;
+
+//  Namespace here?
+
+class PhylipConverter : public ConverterIf
+{
+  private:
+    LamarcDS m_lamarc;
+    string m_fileName;
+    string m_datatype;
+    bool m_interleaved;
+    ifstream m_inFile;
+
+  public:
+    //  Note.  Constructors may throw ConverterBaseError's
+    PhylipConverter(const string& fileName,
+                    const bool interleaved);
+
+    virtual ~PhylipConverter();
+
+    void addConvertedLamarcDS(LamarcDS&);   //  From ConverterIf
+
+    virtual void SetDataType(const string& dtype) { m_datatype = dtype; };
+    virtual string GetDataType() { return m_datatype; };
+    virtual void ProcessData();
+
+};
+
+#endif // CONVERTER_PHYLIPCONVERTER_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_PopulationDS.h b/src/conversion/Converter_PopulationDS.h
new file mode 100644
index 0000000..4ed5a9b
--- /dev/null
+++ b/src/conversion/Converter_PopulationDS.h
@@ -0,0 +1,149 @@
+// $Id: Converter_PopulationDS.h,v 1.14 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// PopulationDS is a representation of the data on any one population.
+// Each population owns a name and a number of IndividualDS objects.
+// Validation is done whenever a new Individual is added.  Population makes sure that
+// Individual Sequences are always the same length.
+// If they're different lengths, an InvalidSequenceLengthError is thrown.
+
+// One may construct from an individual, or from an individual and a name.
+// TODO: decide whether to allow construction from a vector of individuals.
+
+// The GetAllIndividuals() method returns a copy of the list of individuals.
+// The method string getName() returns the population name as a string (may be "").
+// The method addIndividual() is present in a couple of forms:
+// addIndividual(IndividualDS individual)
+// addIndividual(const string& name, const Sequence& seq)
+// addIndividual(const Sequence& seq)  (probably won't do this right away)
+
+#ifndef CONVERTER_POPULATION_H
+#define CONVERTER_POPULATION_H
+
+#include <list>
+#include <vector>
+#include "Converter_DataSourceIf.h"
+#include "Converter_IndividualDS.h"
+
+using std::string;
+using std::list;
+using std::vector;
+
+class PopulationDS : public DataSourceIf
+{
+  private:
+    string m_popName;
+    list<IndividualDS> m_individuals;
+    int m_sequenceLength;      //  This is a convenience.  Every sequence length in a population
+    //  must be the same.
+    string m_comment;          //  A comment for the XML
+
+    void validateNewIndividual(IndividualDS& individual);     // does the actual validation
+    string getUniqueName() const;   // Returns a Unique name for an individual in this population
+
+  public:
+    //  Note.  Constructors may throw ConverterBaseError's
+    PopulationDS(const string& popName,
+                 const string& individualName,
+                 const string& sequence);
+    PopulationDS(const string& popName,
+                 const IndividualDS& individual);
+    PopulationDS(const string& popName);
+    //  PopulationDS() : m_popName("default"), m_sequenceLength(0) {};
+
+    virtual ~PopulationDS();
+
+    // Use the Default Copy Constructor.  Will this be a bug?
+
+    //  get an iterator to the beginning of the individual list.
+    list<IndividualDS>::const_iterator getFirstIndividual() const;
+
+    //  get an iterator to the end of the individual list.
+    list<IndividualDS>::const_iterator getLastIndividual() const;
+
+    //  return a copy of all our individuals
+    list<IndividualDS> GetAllIndividuals() const {return m_individuals;};
+
+    //  get the size of the individuals list
+    int getNumberOfIndividuals() const;
+
+    //  get the total number of OTUs present in all the individuals
+    long GetNumberOfOTUs() const;
+
+    //  get all the sequence names in the population
+    vector<string> GetAllSeqNames() const;
+
+    //  get the name of the population
+    string getName() const;
+
+    //  Allow setting of the population name
+    //  TODO: Consider making this private and a friend of RegionDS
+    void setName(const string& popName);
+
+    //  Allow the user to set a comment for this population.
+    //  This will be reflected in the output of the XML.
+    void setComment(const string& comment);
+
+    //  Allow setting of the FIRST sequence Length.
+    //  So, if this population already has a non-zero sequence length, this function does NOTHING.
+    void setFirstSequenceLength(const int sequenceLength);
+
+    //  get the length of the sequences within.  If there are no sequences, this is zero.
+    int getSequenceLength() const;
+
+    //  add an individual (this may throw an InvalidSequenceLengthError)
+    //  Also note that duplicate individuals (individuals with non-unique names) will not be added.
+    void addIndividual (IndividualDS individual);
+
+    //  add an individual (this may throw an InvalidSequenceLengthError, or a DuplicateIndividualError)
+    //  Also note that duplicate individuals (individuals with non-unique names) will not be added.
+    void addIndividual (const string& name, const Sequence& seq);
+
+    // add a list of individuals; duplicate individuals and individuals
+    // with the wrong sequence length won't be added.  On encountering
+    // wrong sequence lengths, an InvalidSequenceLengthError will be
+    // thrown.
+    void AddIndividuals(list<IndividualDS> individuals);
+
+    //  returns true if a given individual exists, false otherwise
+    bool doesIndividualNameExist(const string& name) const;
+
+    // extract the given sequence from the given individual, destroying
+    // the individual if there are no more sequences in it; and returning
+    // the sequence
+    Sequence PopSequence(const string& seqname,
+                         list<IndividualDS>::iterator individual);
+
+    // given a container of sequence names, return a container of pairs
+    // consisting of one of the given sequence names along with which
+    // individual contains that sequence.  Return an empty container if
+    // any of the sequence names couldn't be found
+    vector< std::pair<string, list<IndividualDS>::iterator> > FindAllSequences(const vector<string>& seqnames);
+
+    // destroy the individuals
+    void EraseIndividuals();
+
+    // Am I a ghost population?
+    bool IsGhost() const;
+
+    // Do I contain any non-contiguous data?
+    bool HasNonContiguousData() const;
+
+    // Do I contain any SNPs?
+    bool HasSNPs() const;
+
+    string getXML(unsigned int numTabs) const;   //  From DataSourceIf
+};
+
+#endif // CONVERTER_POPULATION_H
+
+//____________________________________________________________________________________
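A tiny usage sketch of the constructor and addIndividual overloads declared above, reusing names and sequences from the Documentation file later in this directory. It assumes InvalidSequenceLengthError lives in Converter_DataSourceException.h and touches no member of the exception, since its interface is not shown in this diff:

    #include "Converter_PopulationDS.h"
    #include "Converter_Sequence.h"
    #include "Converter_DataSourceException.h"   // assumed home of InvalidSequenceLengthError

    // Hypothetical example, not part of the upstream sources.
    void buildExamplePopulation()
    {
        PopulationDS seattle("Seattle");
        seattle.addIndividual("Bizarro", Sequence("aggcttcagg"));   // establishes length 10
        try
        {
            // one base short of the established length, so this should throw
            seattle.addIndividual("Dr. Evil", Sequence("agccttaag"));
        }
        catch (const InvalidSequenceLengthError&)
        {
            // the mismatched individual was not added
        }
    }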
diff --git a/src/conversion/Converter_RegionDS.h b/src/conversion/Converter_RegionDS.h
new file mode 100644
index 0000000..d628091
--- /dev/null
+++ b/src/conversion/Converter_RegionDS.h
@@ -0,0 +1,164 @@
+// $Id: Converter_RegionDS.h,v 1.19 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// RegionDS is a representation of the data on any one region.
+// Each region owns the following.
+// A Model
+// A number of Uniquely Named Populations.
+
+// Validation is done whenever a new population is added.  Region makes sure that
+// population Sequences are always the same length.
+// If they're different lengths, an InvalidSequenceLengthError is thrown.
+// Region also validates that all Population Names differ.
+
+// The getPopulationNames() method returns a vector of population names.
+// The method getPopulation(const string& name) will return a particular population.
+// The method string getName() returns the region name as a string (may be "").
+
+// The method addNewPopulation(const Population& pop) is present.
+// This method will add the new population if it doesn't already exist.  If it does exist,
+// it will merge individual information.  (For example, if the new population has an individual
+// named Jerry and the current population in the DS doesn't, that individual will be added.)
+
+// Again, this can throw if the sequences within don't match the sequences of existing populations
+// in the region.
+
+// Please note that I'm not putting in functions that do things like remove a population.
+// That would be possible, even easy; it's just that I have no need for that sort of thing right now.
+// Feel free to add these types of functions if you like.
+
+#ifndef CONVERTER_REGIONDS_H
+#define CONVERTER_REGIONDS_H
+
+#include <vector>
+#include <map>
+
+#include "Converter_DataSourceIf.h"
+#include "Converter_PopulationDS.h"
+#include "Converter_ModelDS.h"
+#include "Converter_SpacingDS.h"
+#include "Converter_types.h" // for map typedefs
+
+using std::string;
+
+//  Include a structure that will allow ordering of the Population Map.
+//  Should this be inside the class?
+
+struct ltstr
+{
+    // A strict weak ordering, as std::map requires of its comparator.
+    bool operator()(const string& s1, const string& s2) const
+    {
+        return s1 < s2;
+    }
+};
+
+class RegionDS : public DataSourceIf
+{
+  private:
+    string m_regionName;
+    ModelDS m_model;
+    SpacingDS m_spacing;
+    PopMap m_pops;
+
+    RegionDS();                                // deliberately undefined; a region always starts with some data
+    //  getUniqueName will provide unique names for populations that are provided without a name.
+    //  The name will look like 'Population XXXX' where X is a capital letter between A and Z.
+    string getUniqueName() const;
+
+    bool doesPopulationNameExist(const string& name) const;
+
+    // Validation for RegionDS
+    void validateNewPopulation(PopulationDS& population);
+    void validateFreqs(double a, double g, double c, double t);
+
+    // validatePopulationName will make sure the population name is not just whitespace or empty
+    // string.  If it is, a unique name is provided for the population.
+    void validatePopulationName(PopulationDS& pop) const;
+
+    // Validation for RegionDS
+  public:
+    //  Note.  Constructors may throw ConverterBaseError's
+    RegionDS(const string& regionName,
+             const ModelDS& model);
+    RegionDS(const string& regionName,
+             const ModelDS& model,
+             PopulationDS& population);
+
+    virtual ~RegionDS();
+
+    // Use the Default Copy Constructor.  Will this be a bug?
+
+    //  get an iterator to the populations contained by this region.
+    popmapiterator getFirstPopulation();
+    PopMap::const_iterator getFirstPopulation() const;
+
+    //  get an iterator to the end of the population list.
+    popmapiterator getLastPopulation();
+    PopMap::const_iterator getLastPopulation() const;
+
+    //  get the name of the region
+    string getName() const;
+
+    //  get the number of markers for the region
+    //  when loci show up, this function will take an argument
+    //  identifying which locus is being asked for
+    long getNmarkers() const;
+
+    //  get the number of populations for the region
+    long GetNPopulations() const;
+
+    //  get the number of non-ghost populations for the region
+    long GetNRealPopulations() const;
+
+    //  get all the sequence names from the region, in the order they
+    //  "appear" within the populations
+    std::vector<string> GetAllSeqNames() const;
+
+    //  get the data model for the region
+    const ModelDS& getDataModel() const { return m_model; };
+
+    //  set the name of the region
+    //  TODO, make this private and a friend of LamarcDS.
+    void setName(const string& name);
+
+    //  set the model.  This will allow the user to edit the
+    //  region's basefreqs, ttratio, and categories parameters.
+    void setModel(const ModelDS& model);
+
+    //  set the spacing information
+    void setSpacing (const SpacingDS& spacing);
+
+    //  Get the number of populations held by this region
+    unsigned int getNumPopulations() const;
+
+    //  add an population (this may throw an InvalidSequenceLengthError)
+    void addPopulation (PopulationDS& population);
+
+    //  tries to add the argument to all the populations; returns true on failure.
+    //  also sets the argument popname to the population name we'd like to
+    //  add the individual to.
+    bool FailToAdd(IndividualDS& individual, string& popname);
+
+    //  is non-contiguous data present?
+    bool HasNonContiguousData() const;
+
+    //  are there SNPs present?
+    bool HasSNPs() const;
+
+    //  add an individual to the named population
+    void AddIndividual(IndividualDS& ind, const string& popname);
+
+    string getXML(unsigned int numTabs) const;   //  From DataSourceIf
+};
+
+#endif // CONVERTER_REGIONDS_H
+
+//____________________________________________________________________________________
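getUniqueName's comment above describes names of the form 'Population XXXX', with the X's drawn from the capital letters A through Z. A standalone sketch of that naming scheme (the upstream code uses the package's own Random class, not <random>):

    #include <random>
    #include <string>

    // Illustrative only; uniqueness within the region would still have to be checked.
    std::string sketchPopulationName(std::mt19937& rng)
    {
        std::uniform_int_distribution<int> letter(0, 25);
        std::string name = "Population ";
        for (int i = 0; i < 4; ++i)
            name += static_cast<char>('A' + letter(rng));
        return name;   // e.g. "Population QRZT"
    }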
diff --git a/src/conversion/Converter_Sequence.h b/src/conversion/Converter_Sequence.h
new file mode 100644
index 0000000..c6ffdd6
--- /dev/null
+++ b/src/conversion/Converter_Sequence.h
@@ -0,0 +1,79 @@
+// $Id: Converter_Sequence.h,v 1.17 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// Sequence is a representation of a Nucleotide sequence of any length.
+// Each Sequence owns a string that represents the DNA sequence.
+// Validation is done on the DNA sequence upon construction.  If any
+// invalid nucleotides or symbols are found, an InvalidNucleotide exception
+// is thrown.
+
+// The only constructor I'm giving initially is from a string.
+// The getSequenceLength() method returns the length of the sequence as a long.
+// The operator string is not defined, but I'm giving the asString() method.
+// This method returns the sequence as a string.
+
+// Note:  This IS a DataSource.  The naming is unfortunate.  I plan on changing that.
+
+#ifndef CONVERTER_SEQUENCE_H
+#define CONVERTER_SEQUENCE_H
+
+#include <string>
+#include <vector>
+
+#include "Converter_DataSourceIf.h"
+#include "constants.h" // for definition of DNA literal.
+
+using std::string;
+using std::vector;
+
+class Sequence : public DataSourceIf
+{
+  private:
+    vector<string> m_sequence;
+    string m_dataType;
+    string m_name;
+
+    Sequence();                                // undefined
+    void validate(const string &src) const;    // does the actual validation
+    void trimSequence();          // Trims the leading and trailing
+    // whitespace from the sequence
+
+    vector<string> AsStringVec(const string& src) const;
+
+  public:
+    Sequence(const string& sequenceString);
+    Sequence(const string& sequenceString,
+             const string& dataType,
+             const string& name);
+    Sequence(const vector<string>& sequenceString,
+             const string& dataType,
+             const string& name);
+
+    virtual ~Sequence();
+
+    // Use the Default Copy Constructor and operator=
+
+    long getSequenceLength() const;
+    string getDataType() const { return m_dataType; };
+    string GetName() const { return m_name; };
+    string asString() const;
+    void setDataType( const string& );
+    void setName( const string& name ) { m_name = name; };
+    bool IsNamed( const string& name ) const { return (m_name == name); };
+    bool HasNonContiguousData() const { return m_dataType != lamarcstrings::DNA; };
+    bool HasSNPs() const { return m_dataType == lamarcstrings::SNP; };
+
+    string getXML(unsigned int numTabs) const;   //  From DataSourceIf
+};
+
+#endif // CONVERTER_SEQUENCE_H
+
+//____________________________________________________________________________________
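The comment above says the constructor validates every symbol and throws InvalidNucleotide on anything unrecognized. A standalone sketch of such a check; the accepted alphabet below (the four bases plus a gap and an unknown marker) is an assumption, since the real validator's symbol set is defined elsewhere in the package:

    #include <string>

    // Assumed alphabet, for illustration only; the upstream validator may accept more
    // (ambiguity codes and so on).
    bool looksLikeValidDna(const std::string& seq)
    {
        const std::string accepted = "acgtACGT-?";
        for (std::string::size_type i = 0; i < seq.size(); ++i)
        {
            if (accepted.find(seq[i]) == std::string::npos)
                return false;
        }
        return true;
    }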
diff --git a/src/conversion/Converter_SpaceConverter.h b/src/conversion/Converter_SpaceConverter.h
new file mode 100644
index 0000000..59ef363
--- /dev/null
+++ b/src/conversion/Converter_SpaceConverter.h
@@ -0,0 +1,54 @@
+// $Id: Converter_SpaceConverter.h,v 1.9 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// SpaceConverter reads the info from a prepared Converter-Haplotype
+// format file and modifies the regions of a LamarcDS.
+
+#ifndef CONVERTER_SPACECONVERTER_H
+#define CONVERTER_SPACECONVERTER_H
+
+#include <fstream>
+#include <map>
+
+#include "Converter_ConverterIf.h"
+#include "Converter_types.h"  // for map typedefs
+
+using std::string;
+
+class LamarcDS;
+
+class SpaceConverter : public ConverterIf
+{
+  private:
+    LamarcDS& m_lamarc;
+
+    SpaceConverter();                                 // deliberately undefined
+    SpaceConverter(const SpaceConverter&);            // deliberately undefined
+    SpaceConverter& operator=(const SpaceConverter&); // deliberately undefined
+
+    // May throw a FileFormatError.
+    void ValidateSpaceInfo(const SpaceMap& spaces,
+                           const string& filename) const;
+
+  public:
+    SpaceConverter(LamarcDS& lamarc);
+    virtual ~SpaceConverter();
+
+    // May throw a FileFormatError.
+    SpaceMap ReadSpacingInfo(const string& filename) const;
+
+    void addConvertedLamarcDS(LamarcDS& lamarc);
+
+};
+
+#endif // CONVERTER_SPACECONVERTER_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_SpacingDS.h b/src/conversion/Converter_SpacingDS.h
new file mode 100644
index 0000000..92672ac
--- /dev/null
+++ b/src/conversion/Converter_SpacingDS.h
@@ -0,0 +1,75 @@
+// $Id: Converter_SpacingDS.h,v 1.11 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// SpacingDS is a representation of the spacing information for a
+// region's genetic data.
+//
+// Each representation owns the following.
+//   vector<long> positions;  // a sorted list of marker positions
+//   long offset;             // the position of a region's start in the
+//                            // user's numbering scheme
+//   long map_position;       // the position of the region with respect
+//                            // to other regions
+//   long length;             // the region's length
+
+// Limited validation is done on construction.  The member function
+// IsConsistentWith(const string& geneticdata) is provided for more
+// rigorous validation.
+
+// Note: when expanding to loci, either add a vector/map layer to
+// positions and offset and length, or this becomes a per locus
+// describer and the regional map_position will need to move to
+// the RegionDS.
+
+#ifndef CONVERTER_SPACINGDS_H
+#define CONVERTER_SPACINGDS_H
+
+#include <vector>
+#include "Converter_DataSourceIf.h"
+
+using std::vector;
+
+class SpacingDS : public DataSourceIf
+{
+  private:
+    vector<long> m_positions;
+    long m_length;
+    long m_offset;
+    long m_map_position;
+
+    // this is a validator for SpacingDS
+    void CheckConsistency(long nmarkers) const;
+
+  public:
+    //  Note.  Constructors may throw ConverterBaseError's
+    SpacingDS();
+    SpacingDS(long length,
+              long nmarkers); // this ctor doesn't CheckConsistency as there
+    // is no consistency to check!
+    SpacingDS(const vector<long>& positions,
+              long length,
+              long nmarkers);
+    SpacingDS(const vector<long>& positions,
+              long length,
+              long offset,
+              long map_position,
+              long nmarkers);
+
+    // We'll accept the default copy ctor and operator=.
+
+    virtual ~SpacingDS();
+
+    std::string getXML(unsigned int numTabs) const;   //  From DataSourceIf
+};
+
+#endif // CONVERTER_SPACINGDS_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_UserFileUtil.h b/src/conversion/Converter_UserFileUtil.h
new file mode 100644
index 0000000..143207a
--- /dev/null
+++ b/src/conversion/Converter_UserFileUtil.h
@@ -0,0 +1,36 @@
+// $Id: Converter_UserFileUtil.h,v 1.6 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// This file contains some useful file i/o functions for use with
+// the user interface.
+
+#ifndef Converter_UserFileUtil_H
+#define Converter_UserFileUtil_H
+
+#include <fstream>
+#include <string>
+
+#include "stringx.h"
+
+//using namespace std;
+
+class UserFileUtil
+{
+  public:
+    // we accept default ctor, operator=, and copy ctor
+    virtual ~UserFileUtil() {};
+
+    bool IsFilePresent(const std::string& filename) const;
+};
+
+#endif // Converter_UserFileUtil_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_XmlParserUtil.h b/src/conversion/Converter_XmlParserUtil.h
new file mode 100644
index 0000000..c3a349e
--- /dev/null
+++ b/src/conversion/Converter_XmlParserUtil.h
@@ -0,0 +1,57 @@
+// $Id: Converter_XmlParserUtil.h,v 1.7 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef Converter_XmlParserUtil_H
+#define Converter_XmlParserUtil_H
+
+#include <string>
+#include <vector>
+#include <map>
+
+#include "Converter_ParserUtil.h"
+
+using std::string;
+using std::map;
+
+//  XmlParserUtil extends ParserUtil with XML-specific parsing helpers.
+
+class XmlParserUtil : public ParserUtil
+{
+  private:
+    std::vector<string> m_tagStack;
+
+    // Returns true for a start tag, false for an end tag.
+    // Throws if there's an error.
+    bool stripTag(string& tagString, map<string, string>& tagInfo) const;
+
+  protected:
+    // No creation of just this class.
+    XmlParserUtil();
+
+    // finds the next xml tag.  Returns the text of that tag.  Updates the tagStack.
+    // The tagName will be in 'TagName'.  This is a dumb way to do it, but it should be okay.
+    string getNextTag(istream& is, map<string, string>& tagInfo);
+    // Gets the value of the current tag (the text between this and the current tags endtag)
+    bool getTagValue(istream& is, string& buffer);
+    // Returns the particular current dictionary in the xml tree.
+    // Format of the string is dict <space> dict <space> etc...
+    string getLocation() const;
+
+    // Returns the top level TagName
+    string getTopNodeName() const;
+
+  public:
+    virtual ~XmlParserUtil();
+};
+
+#endif // Converter_XmlParserUtil_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Converter_types.h b/src/conversion/Converter_types.h
new file mode 100644
index 0000000..1f2da49
--- /dev/null
+++ b/src/conversion/Converter_types.h
@@ -0,0 +1,48 @@
+// $Id: Converter_types.h,v 1.6 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef CONVERTER_TYPES_H
+#define CONVERTER_TYPES_H
+
+#include <string>
+#include <map>
+#include <set>
+
+#include "stringx.h"
+
+using std::string;
+using std::map;
+using std::vector;
+
+class PopulationDS;
+class RegionDS;
+class IndividualDS;
+
+typedef map<string, PopulationDS, CIStringCompare> PopMap;
+typedef map<string, RegionDS, CIStringCompare> RegionMap;
+typedef map<IndividualDS, string> IndividualMap;
+
+typedef PopMap::iterator popmapiterator;
+typedef vector<popmapiterator> PopIterVec;
+typedef map<string, PopIterVec, CIStringCompare> PopIterMap;
+typedef map<string, PopIterMap, CIStringCompare> RegByPopMap;
+
+typedef map<string, vector<long>, CIStringCompare> SpaceMap;
+
+typedef map<string, long, CIStringCompare> MarkerMap;
+typedef map<string, string, CIStringCompare> TypeMap;
+
+typedef std::set<string, CIStringCompare> PopNameSet;
+typedef map<string, PopNameSet, CIStringCompare> RegPopNameMap;
+
+#endif // CONVERTER_TYPES_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/DataSourceException.cpp b/src/conversion/DataSourceException.cpp
new file mode 100644
index 0000000..9974c6f
--- /dev/null
+++ b/src/conversion/DataSourceException.cpp
@@ -0,0 +1,59 @@
+// $Id: DataSourceException.cpp,v 1.3 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "Converter_DataSourceException.h"
+#include "stringx.h"
+
+MarkerLengthMismatchDataError::MarkerLengthMismatchDataError(long front, long back, long length)
+    : InconsistentDataError("The input length ("
+                            +   ToString(length)
+                            +   ") is less than the length covered by the markers ("
+                            +   ToString(back)
+                            +   " - "
+                            +   ToString(front)
+                            +   " + 1 = "
+                            +   ToString(back-front+1)
+                            +   ")")
+{
+}
+
+OffsetAfterFirstPositionDataError::OffsetAfterFirstPositionDataError(long offset, long firstPosition)
+    :   InconsistentDataError("First position of sequence ("
+                              +   ToString(firstPosition)
+                              +   ") is before the sequence offset ("
+                              +   ToString(offset)
+                              +   ")")
+{
+}
+
+RegionEndBeforeLastPositionDataError::RegionEndBeforeLastPositionDataError(long offset, long length, long lastPosition)
+    :   InconsistentDataError("End of region (position "
+                              +   ToString(offset+length-1)
+                              +   " = offset ("
+                              +   ToString(offset)
+                              +   ") + length ("
+                              +   ToString(length)
+                              +   ") - 1) is before last position ("
+                              +   ToString(lastPosition)
+                              +   ")"
+        )
+{
+}
+
+MarkerPositionMismatchDataError::MarkerPositionMismatchDataError(long nmarkers, long npositions)
+    : InconsistentDataError("There are "
+                            +   ToString(nmarkers)
+                            +   " markers, but only "
+                            +   ToString(npositions)
+                            +   " positions for them to occupy")
+{
+}
+
+//____________________________________________________________________________________
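Taken together, the four error classes above spell out the consistency conditions the spacing data has to satisfy. A standalone sketch of those checks, using std::runtime_error in place of the package's exception hierarchy; this is an inferred restatement, not the upstream CheckConsistency:

    #include <stdexcept>
    #include <vector>

    void checkSpacingSketch(const std::vector<long>& positions,
                            long offset, long length, long nmarkers)
    {
        if (nmarkers != static_cast<long>(positions.size()))
            throw std::runtime_error("marker count does not match position count");
        if (positions.empty())
            return;
        const long front = positions.front();   // positions are kept sorted
        const long back  = positions.back();
        if (back - front + 1 > length)
            throw std::runtime_error("markers cover more than the stated length");
        if (front < offset)
            throw std::runtime_error("first position is before the region offset");
        if (offset + length - 1 < back)
            throw std::runtime_error("end of region is before the last position");
    }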
diff --git a/src/conversion/DataSourceIf.cpp b/src/conversion/DataSourceIf.cpp
new file mode 100644
index 0000000..becf501
--- /dev/null
+++ b/src/conversion/DataSourceIf.cpp
@@ -0,0 +1,31 @@
+// $Id: DataSourceIf.cpp,v 1.6 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "Converter_DataSourceIf.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+DataSourceIf::~DataSourceIf()
+{
+}
+
+void DataSourceIf::addTabs (int numTabs, string& str) const
+{
+    for (int i = 0; i < numTabs; i++)
+        str = str + "\t";
+}
+
+//____________________________________________________________________________________
diff --git a/src/conversion/Documentation b/src/conversion/Documentation
new file mode 100644
index 0000000..9855726
--- /dev/null
+++ b/src/conversion/Documentation
@@ -0,0 +1,133 @@
+It is possible that someone will have a file that adds individuals to an
+existing population. This is fine, although it is important to note that
+if no individual names are provided (i.e. an empty string or just
+whitespace), these individuals will be given unique names within the
+population and added to the population.  So, if someone adds the same
+anonymous individual twice, that individual will go into the population twice, with
+two different unique names.  If this is the wrong way to deal with this,
+please let me know and I will fix it.  Rah!
+
+--------------------------------------------------------
+In regionDS.cpp, addPopulation.
+        Here is how it works.  If we try adding a population to a region,
+first, its individuals must have the same sequence lengths as the other
+populations.  As always, 0 is an okay sequence length with any other
+length. All populations are guaranteed to have unique names.  If a user
+provides a new population that already exists, the two populations are
+merged.  So, if the user provides a population named "Seattle" and in this
+region, there already exists a population named "Seattle", any individuals
+that exist in the new population that do not exist in the current
+datastore are added to the datastore.
+
+An example.
+There exists a region that contains the following data.
+
+<region name="TheSupervillainGene">
+        <population name="Cleveland">
+        </population>
+        <population name="Portland">
+                <individual name="Lex Luthor">
+                        <sample>
+                                <datablock type="DNA">
+                                        aggcttcagg
+                                </datablock>
+                        </sample>
+                </individual>
+        </population>
+        <population name="Seattle">
+                <individual name="Bizarro">
+                        <sample>
+                                <datablock type="DNA">
+                                        aggcttcagg
+                                </datablock>
+                        </sample>
+                </individual>
+        </population>
+</region>
+
+Our user adds a population containing the following data.
+<population name="Cleveland">
+        <individual name="Dr. Evil">
+                <sample>
+                        <datablock type="DNA">
+                                agccttaagg
+                        </datablock>
+                </sample>
+        </individual>
+</population>
+
+
+The resulting RegionDS will look like this
+
+<region name="TheSupervillainGene">
+        <population name="Cleveland">
+                <individual name="Dr. Evil">
+                        <sample>
+                                <datablock type="DNA">
+                                        agccttaagg
+                                </datablock>
+                        </sample>
+                </individual>
+        </population>
+        <population name="Portland">
+                <individual name="Lex Luthor">
+                        <sample>
+                                <datablock type="DNA">
+                                        aggcttcagg
+                                </datablock>
+                        </sample>
+                </individual>
+        </population>
+        <population name="Seattle">
+                <individual name="Bizarro">
+                        <sample>
+                                <datablock type="DNA">
+                                        aggcttcagg
+                                </datablock>
+                        </sample>
+                </individual>
+        </population>
+</region>
+
+Note that the Cleveland population has not been duplicated, but added to.
+
+If a user provides a population with no name to a region, a unique name
+within the region will be assigned to the population.  This name will look
+like this: 'Population XXXX' where 'X' is an uppercase letter. This name
+is guaranteed to be unique within the region.
+
+-----------------------------------------------------------------------------
+
+There was a potential bug in the adding of new populations relating to
+sequence lengths. Here is how it's been handled.
+
+Suppose we start with a population with zero individuals (m_sequenceLength ==
+0) and then add a new population with a sequence length > 0.  This is a
+valid situation.  Now, we can access this first population and add an
+individual with a DIFFERENT sequence length, breaking our constraint that,
+within a population, all sequence lengths must be either 0 or identical.
+
+I'm handling this as follows: whenever we add a new population with a
+sequenceLength > 0 to a region, we set the m_sequenceLength of every
+population with a zero-length sequence to that new sequenceLength. This
+doesn't really make sense as a stand-alone thing for a population, but it
+does if you look at populations as being part of a region.
+
+This means that we may have an empty population, but with an
+m_sequenceLength > 0.  This feels hackish.  If anyone has any ideas of how
+to do this better, please let me know.
+
+This is the reason getFirstPopulation() and getLastPopulation() provide
+const iterators: so someone can't go changing the sequence lengths on the
+sly.
+
+---------------------------------------------------------
+
+No two regions will have the same name.  If no name is given, the region is given a
+number. Every region in a single 'LamarcDS' must have the same
+populations, even if those populations are empty.
+
+---------------------- PopulationDS --------------------
+
+If no name is provided for a population, a random, unique name of four
+uppercase letters will be generated for it.
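The merge rule above can be illustrated with plain standard-library containers standing in for the real PopulationDS; this is a hypothetical illustration of the rule, not the upstream code:

    #include <map>
    #include <string>

    typedef std::map<std::string, std::string> NamedSequences;   // individual name -> sequence

    // Individuals not already present are added; existing names are left untouched,
    // matching the "Cleveland" example above.
    void mergeSketch(NamedSequences& existing, const NamedSequences& incoming)
    {
        NamedSequences::const_iterator it;
        for (it = incoming.begin(); it != incoming.end(); ++it)
            existing.insert(*it);   // insert() is a no-op for a duplicate key
    }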
diff --git a/src/conversion/HapConverter.cpp b/src/conversion/HapConverter.cpp
new file mode 100644
index 0000000..6f02007
--- /dev/null
+++ b/src/conversion/HapConverter.cpp
@@ -0,0 +1,314 @@
+// $Id: HapConverter.cpp,v 1.24 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "Converter_HapConverter.h"
+#include "Converter_DataSourceException.h"
+#include "Converter_ParserUtil.h"
+#include "random.h"
+#include "constants.h"
+
+//------------------------------------------------------------------------------------
+
+HapConverter::HapConverter(RegionDS& region, Random& random) :
+    ConverterIf(), m_region(region), m_random(random),
+    m_hapnames(region.GetAllSeqNames())
+{
+    // m_hapnames should end up with the same order of sequences as
+    // they existed in the user file, don't know if this works!
+
+} // HapConverter::ctor
+
+//------------------------------------------------------------------------------------
+
+HapConverter::~HapConverter()
+{
+} // HapConverter::dtor
+
+//------------------------------------------------------------------------------------
+
+vector<long> HapConverter::ParsePhaseInfo(ifstream& input) const
+// EWFIX.P3.BUG.351 -- this doesn't handle the case where you have no
+// phase known/unknown info, but you're just trying to assign
+// samples to individuals
+{
+    skipWhiteSpace(input);
+    string theline;
+    getLine(input, theline);
+
+    istringstream linestream(theline);
+    string somestring;
+    vector<long> sites;
+    getNumber(linestream,somestring);
+
+    while(!somestring.empty())
+    {
+        long position = FlagCheck(somestring,"marker position");
+        sites.push_back(position);
+        skipWhiteSpace(linestream);
+        somestring.erase();
+        getNumber(linestream,somestring);
+    }
+
+    return sites;
+
+} // HapConverter::ParsePhaseInfo
+
+//------------------------------------------------------------------------------------
+
+vector<string> HapConverter::PopHapNames(long ntopop)
+{
+    vector<string>::iterator start = m_hapnames.begin(),
+        stop = m_hapnames.begin()+ntopop;
+    vector<string> hnames(start,stop);
+    m_hapnames.erase(start,stop);
+
+    return hnames;
+
+} // HapConverter::PopHapNames
+
+//------------------------------------------------------------------------------------
+
+string HapConverter::PopTilDelimiter(istream& input, const char& dlm) const
+{
+    int ch(input.get());   // int so that EOF can be distinguished from character data
+    string buffer;
+
+    while((ch != EOF) && (ch != dlm))
+    {
+        buffer += static_cast<char>(ch);
+        ch = input.get();
+    }
+
+    if (ch == EOF) input.putback(ch);
+
+    return buffer;
+
+} // HapConverter::PopTilDelimiter
+
+//------------------------------------------------------------------------------------
+
+IndDSVec HapConverter::ParseFirstLine(istringstream& firstline, const string& filename, ifstream& filestr)
+{
+    string somestring;
+    IndDSVec individuals;
+
+    if (getNumber(firstline,somestring))
+    {
+        m_nindividuals = FlagCheck(somestring, string("number of individuals"));
+    }
+    else
+    {
+        string errormsg = "The file " + filename + " is incorrectedly ";
+        errormsg += "formated.\nThe first token must be an integer ";
+        errormsg += "equal to the number of individuals in the file.";
+        throw FileFormatError(errormsg);
+    }
+
+    //  It's possible that there will be an adjacency keyword.
+    somestring.erase();
+    if (getWord(firstline, somestring))
+    {
+        if (CaselessStrCmp(somestring,"adjacent"))
+        {
+            long nhaps;
+            // adjacent sequences are haplotypes
+            somestring.erase();
+            if (getNumber(firstline,somestring))
+                nhaps = FlagCheck(somestring,string("number of haplotypes"));
+            else
+            {
+                string errormsg = "The file " + filename + " is ";
+                errormsg += "incorrectly formated.\n";
+                errormsg += "The converter expected to find the";
+                errormsg += " global number of haplotypes for each";
+                errormsg += " individual.";
+                throw FileFormatError(errormsg);
+            }
+
+            // now get phase information for all individuals
+            long nsites = m_region.getNmarkers();
+            vector<long> psites(nsites);
+            somestring.erase();
+            if (getWord(firstline,somestring))
+            {
+                if (CaselessStrCmp(somestring,"all"))
+                {
+                    for(long site=0; site < nsites; ++site)
+                        psites[site] = site;
+                }
+                else
+                {
+                    string errormsg = "The file " + filename + " is ";
+                    errormsg += "incorrectly formated.\nThe unknown ";
+                    errormsg += "keyword, " + somestring + ", was ";
+                    errormsg += "encountered.";
+                    throw FileFormatError(errormsg);
+                }
+            }
+            else
+            {
+                psites = ParsePhaseInfo(filestr);
+            }
+
+            unsigned long totalhaps = m_nindividuals*nhaps;
+            if (m_hapnames.size() != totalhaps)
+            {
+                string errormsg = "The first line of " + filename;
+                errormsg += " specifies " + ToString(m_nindividuals);
+                errormsg += " individuals\nwith " + ToString(nhaps);
+                errormsg += " haplotypes each (requiring a total of ";
+                errormsg += ToString(totalhaps) + " haplotypes).\nThe";
+                errormsg += " genetic data provides only ";
+                errormsg += ToString(m_hapnames.size()) + " haplotypes.";
+                throw FileFormatError(errormsg);
+            }
+
+            // now setup the individuals
+            for(int ind = 0; ind < m_nindividuals; ++ind)
+            {
+                string randomname = m_random.Name();
+                IndividualDS individual(randomname);
+                vector<string> hnames(PopHapNames(nhaps));
+                individual.AddHap(hnames);
+                individual.SetUnknownPhase(psites);
+                individuals.push_back(individual);
+            }
+        }
+        else
+        {
+            string errormsg = "The file " + filename + " is ";
+            errormsg += "incorrectly formated.\nThe unknown ";
+            errormsg += "keyword, " + somestring + ", was ";
+            errormsg += "encountered.";
+            throw FileFormatError(errormsg);
+        }
+    }
+
+    return individuals;
+
+} // HapConverter::ParseFirstLine
+
+//------------------------------------------------------------------------------------
+
+IndDSVec HapConverter::ReadHapInfo(const string& filename)
+{
+    ifstream haplotypefile(filename.c_str(),ios::in);
+
+    if (!haplotypefile)
+    {
+        string errormsg = "Couldn't find the file: " + filename;
+        throw FileFormatError (errormsg);
+    }
+
+    string line;
+
+    if (!getLine(haplotypefile,line))
+    {
+        string errormsg = "The file " + filename + " appears to";
+        errormsg += " be empty.";
+        throw FileFormatError (errormsg);
+    }
+
+    istringstream linestream(line);
+
+    IndDSVec individuals(ParseFirstLine(linestream,filename,haplotypefile));
+
+    if (!individuals.empty()) return individuals;
+
+    // read in the haplotype name delimiter character;
+    line.erase();
+    getLine(haplotypefile,line);
+    const char hapname_dlm(line[0]);
+
+    // EWFIX.P3 -- need to insist that hapname_dlm is not a digit
+
+    // Now start parsing the individuals
+    for(int ind = 0; ind < m_nindividuals; ++ind)
+    {
+        line.erase();
+        if (getLine(haplotypefile,line))
+        {
+            istringstream linestr(line);
+            string somestring;
+            getName(linestr,somestring);
+            IndividualDS individual(somestring);
+
+            somestring.erase();
+            getNumber(linestr,somestring);
+            long int nhapnames = FlagCheck(somestring,string("number of haplotypes"));
+
+            for(int hname = 0; hname < nhapnames; ++hname)
+            {
+                somestring = PopTilDelimiter(linestr,hapname_dlm);
+                StripLeadingSpaces(somestring);
+                StripTrailingSpaces(somestring);
+                individual.AddHap(somestring);
+            }
+
+            vector<long> sites(ParsePhaseInfo(haplotypefile));
+            individual.SetUnknownPhase(sites);
+
+            individuals.push_back(individual);
+        }
+        else
+        {
+            string errormsg = "The haplotypefile is incorrectedly formated.\n";
+            errormsg += "The converter expected to find ";
+            errormsg += ToString(m_nindividuals) + " individuals ";
+            errormsg += "but only found " + indexToKey(ind);
+            throw FileFormatError(errormsg);
+        }
+    }
+
+    return individuals;
+
+} // HapConverter::ReadHapInfo
+
+//------------------------------------------------------------------------------------
+
+void HapConverter::ReplaceIndividualsWith(IndDSVec& individuals)
+{
+
+    vector<string> popnames(individuals.size());
+    unsigned long whichind;
+    for(whichind = 0; whichind < individuals.size(); ++whichind)
+    {
+        // can't do population assignment in FailToAdd due to identical
+        // sequence names possibly present
+        if (m_region.FailToAdd(individuals[whichind],popnames[whichind]))
+        {
+            string errormsg = "The haplotypes, ";
+            errormsg += individuals[whichind].GetHapNamesForPrint();
+            errormsg += " were not part of the data set for the";
+            errormsg += " region, " + m_region.getName();
+            throw FileFormatError(errormsg);
+        }
+    }
+
+    for(whichind = 0; whichind < individuals.size(); ++whichind)
+    {
+        m_region.AddIndividual(individuals[whichind],popnames[whichind]);
+    }
+
+} // HapConverter::ReplaceIndividualsWith
+
+//------------------------------------------------------------------------------------
+
+void HapConverter::addConvertedLamarcDS(LamarcDS&)
+{
+    // DEBUG debug -- currently a no-op function!
+    // lamarc.mergeTo(m_lamarc);
+    assert(false);  // this code never ought to be called
+} // HapConverter::addConvertedLamarcDS
+
+//____________________________________________________________________________________
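Reading ParseFirstLine and ReadHapInfo together, the haplotype file appears to start with a line giving the number of individuals, optionally followed by the keyword 'adjacent', the per-individual haplotype count, and either the keyword 'all' or a following line of marker positions with unknown phase. A first line consistent with that parsing (purely illustrative, not taken from any real data file) would be:

    30 adjacent 2 all

Here 30 would be the number of individuals and 2 the number of haplotypes per individual, and 'all' appears to mark every site as phase-unknown.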
diff --git a/src/conversion/IndividualDS.cpp b/src/conversion/IndividualDS.cpp
new file mode 100644
index 0000000..4059962
--- /dev/null
+++ b/src/conversion/IndividualDS.cpp
@@ -0,0 +1,234 @@
+// $Id: IndividualDS.cpp,v 1.17 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include "Converter_IndividualDS.h"
+#include "Converter_DataSourceException.h"
+#include "stringx.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+IndividualDS::IndividualDS (const string& name,
+                            const Sequence& seq)
+    : m_name(name)
+{
+    m_seq.push_back(seq);
+}                               // IndividualDS::IndividualDS
+
+//------------------------------------------------------------------------------------
+
+IndividualDS::IndividualDS (const string& name,
+                            const string& seq)
+    : m_name(name)
+{
+    m_seq.push_back(Sequence(seq));
+}                               // IndividualDS::IndividualDS
+
+//------------------------------------------------------------------------------------
+
+IndividualDS::IndividualDS (const string& name,
+                            const string& seq,
+                            const string& dataType)
+    : m_name(name)
+{
+    m_seq.push_back(Sequence(seq, dataType, name));
+}                               // IndividualDS::IndividualDS
+
+//------------------------------------------------------------------------------------
+
+IndividualDS::~IndividualDS ()
+{
+}                               // IndividualDS::~IndividualDS
+
+//------------------------------------------------------------------------------------
+
+string
+IndividualDS::getSequence() const
+{
+    return m_seq[0].asString();
+}
+
+//------------------------------------------------------------------------------------
+
+int
+IndividualDS::getSequenceLength() const
+{
+    return m_seq[0].getSequenceLength();
+}
+
+//------------------------------------------------------------------------------------
+
+vector<string> IndividualDS::GetAllSeqNames() const
+{
+    vector<string> seqnames;
+
+    vector<Sequence>::const_iterator seq;
+    for(seq = m_seq.begin(); seq != m_seq.end(); ++seq)
+    {
+        seqnames.push_back(seq->GetName());
+    }
+
+    return seqnames;
+
+} // IndividualDS::GetAllSeqNames
+
+//------------------------------------------------------------------------------------
+
+vector<string> IndividualDS::GetHapNames() const
+{
+    return m_seqnames;
+} // IndividualDS::GetHapNames
+
+//------------------------------------------------------------------------------------
+
+string IndividualDS::GetHapNamesForPrint() const
+{
+    string names;
+    vector<string>::const_iterator name;
+    for(name = m_seqnames.begin(); name != m_seqnames.end(); ++name)
+        names += *name + ",";
+
+    return names;
+} // IndividualDS::GetHapNamesForPrint
+
+//------------------------------------------------------------------------------------
+
+bool IndividualDS::HasSequence(const string& seqname) const
+{
+    vector<Sequence>::const_iterator seq;
+    for(seq = m_seq.begin(); seq != m_seq.end(); ++seq)
+    {
+        if (seq->IsNamed(seqname)) return true;
+    }
+
+    return false;
+
+} // IndividualDS::HasSequence
+
+//------------------------------------------------------------------------------------
+
+bool IndividualDS::HasNoSequences() const
+{
+    return m_seq.empty();
+
+} // IndividualDS::HasNoSequences
+
+//------------------------------------------------------------------------------------
+
+bool IndividualDS::HasNonContiguousData() const
+{
+    vector<Sequence>::const_iterator seq;
+    for(seq = m_seq.begin(); seq != m_seq.end(); ++seq)
+    {
+        if (seq->HasNonContiguousData())
+            return true;
+    }
+
+    return false;
+
+} // IndividualDS::HasNonContiguousData
+
+//------------------------------------------------------------------------------------
+
+bool IndividualDS::HasSNPs() const
+{
+    vector<Sequence>::const_iterator seq;
+    for(seq = m_seq.begin(); seq != m_seq.end(); ++seq)
+    {
+        if (seq->HasSNPs())
+            return true;
+    }
+
+    return false;
+
+} // IndividualDS::HasSNPs
+
+//------------------------------------------------------------------------------------
+
+Sequence IndividualDS::PopSequence(const string& seqname)
+{
+    vector<Sequence>::iterator seq;
+    for(seq = m_seq.begin(); seq != m_seq.end(); ++seq)
+    {
+        if (seq->IsNamed(seqname))
+        {
+            Sequence sequence(*seq);
+            m_seq.erase(seq);
+            return sequence;
+        }
+    }
+
+    assert(false); // Shouldn't be able to get here
+
+    Sequence sequence(m_seq.front());
+    return sequence;
+
+} // IndividualDS::PopSequence
+
+//------------------------------------------------------------------------------------
+
+string
+IndividualDS::getName() const
+{
+    return m_name;
+}
+
+//------------------------------------------------------------------------------------
+
+void
+IndividualDS::setName(const string& name)
+{
+    m_name = name;
+}
+
+//------------------------------------------------------------------------------------
+
+string
+IndividualDS::getXML(unsigned int numTabs) const
+{
+    string individualXML;
+
+    addTabs(numTabs, individualXML);
+    individualXML += "<individual name=\"" + m_name + "\">\n";
+
+    ++numTabs;
+    if (!m_unknownphase.empty())        // if we have phase unknown data
+    {
+        addTabs(numTabs, individualXML);
+        individualXML += "<phase type=\"unknown\">";
+        vector<long>::const_iterator marker;
+        for(marker = m_unknownphase.begin();
+            marker != m_unknownphase.end();
+            ++marker)
+        {
+            individualXML += " " + ToString(*marker);
+        }
+        individualXML += " </phase>\n";
+    }
+
+    vector<Sequence>::const_iterator seqit;
+    for(seqit = m_seq.begin(); seqit != m_seq.end(); ++seqit)
+        individualXML += seqit->getXML(numTabs);
+
+    --numTabs;
+    addTabs(numTabs, individualXML);
+    individualXML += "</individual>\n";
+
+    return individualXML;
+}
+
+//____________________________________________________________________________________
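For orientation, IndividualDS is the converter's per-sample container: the constructors above take a name plus either a prebuilt Sequence or a raw string (optionally with a datatype), and getXML() wraps the stored sequences, plus any phase-unknown markers, in an <individual> element. A minimal usage sketch, assuming the converter headers are on the include path (the name and bases are borrowed from the Migrate1.txt sample later in this commit):

    #include <iostream>
    #include "Converter_IndividualDS.h"

    int main()
    {
        // Build one individual from a raw DNA string, as the Migrate converter does.
        IndividualDS ind("PERU77C", "CCAAGAGAAGAACCTGATGA", "DNA");

        // Emits an <individual name="PERU77C"> ... </individual> block,
        // indented by one tab level.
        std::cout << ind.getXML(1);
        return 0;
    }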
diff --git a/src/conversion/LamarcDS.cpp b/src/conversion/LamarcDS.cpp
new file mode 100644
index 0000000..75d672b
--- /dev/null
+++ b/src/conversion/LamarcDS.cpp
@@ -0,0 +1,646 @@
+// $Id: LamarcDS.cpp,v 1.40 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <stdio.h>
+#include <vector>
+
+#include "Converter_LamarcDS.h"
+#include "Converter_PopulationDS.h"
+#include "Converter_DataSourceException.h"
+#include "stringx.h"
+#include "random.h"
+#include "constants.h"
+#include "defaults.h"
+#include "xml_strings.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+LamarcDS::LamarcDS (RegionDS& region)
+    :  m_coalescenceStartValues(defaults::theta),
+       m_coalescenceMethod(defaults::thetaMethod),
+       m_coalescenceMaxEvents(defaults::coalEvents),
+       m_migrationStartValues(defaults::migration),
+       m_migrationMethod(defaults::migMethod),
+       m_migrationMaxEvents(defaults::migEvents),
+       m_replicates(defaults::replicates),
+       m_temperatures(1.0),
+       m_swapInterval(1),
+       m_resimulating(1.0),
+       m_initialNumber(defaults::initNChains),
+       m_initialSamples(defaults::initNSamples),
+       m_initialDiscard(defaults::initDiscard),
+       m_initialInterval(defaults::initInterval),
+       m_finalNumber(defaults::finalNChains),
+       m_finalSamples(defaults::finalNSamples),
+       m_finalDiscard(defaults::finalDiscard),
+       m_finalInterval(defaults::finalInterval),
+       m_verbosity("verbose"),
+       m_progverbosity("normal"),
+       m_echo("true"),
+       m_profile("false"),
+       m_posterior("false"),
+       m_seed(1005),
+       m_outputFile("outfile"),
+       m_inSummaryFile("insumfile"),
+       m_outSummaryFile("outsumfile"),
+       m_useInSummaryFile(0),
+       m_useOutSummaryFile(0)
+{
+    validateRegionName(region);
+
+    // Push it onto the map.
+    string regionName = region.getName();
+    m_regions.insert(make_pair(regionName, region));
+}                               // LamarcDS::LamarcDS
+
+//------------------------------------------------------------------------------------
+// TODO:  Test this.  It's a last-minute addition.
+LamarcDS::LamarcDS ()
+    :  m_coalescenceStartValues(defaults::theta),
+       m_coalescenceMethod(defaults::thetaMethod),
+       m_coalescenceMaxEvents(1000),
+       m_migrationStartValues(defaults::migration),
+       m_migrationMethod(defaults::migMethod),
+       m_migrationMaxEvents(defaults::migEvents),
+       m_replicates(defaults::replicates),
+       m_temperatures(1.0),
+       m_swapInterval(1),
+       m_resimulating(1.0),
+       m_initialNumber(defaults::initNChains),
+       m_initialSamples(defaults::initNSamples),
+       m_initialDiscard(defaults::initDiscard),
+       m_initialInterval(defaults::initInterval),
+       m_finalNumber(defaults::finalNChains),
+       m_finalSamples(defaults::finalNSamples),
+       m_finalDiscard(defaults::finalDiscard),
+       m_finalInterval(defaults::finalInterval),
+       m_verbosity("verbose"),
+       m_progverbosity("normal"),
+       m_echo("true"),
+       m_profile("false"),
+       m_posterior("false"),
+       m_seed(1005),
+       m_outputFile("outfile"),
+       m_inSummaryFile("insumfile"),
+       m_outSummaryFile("outsumfile"),
+       m_useInSummaryFile(0),
+       m_useOutSummaryFile(0)
+{
+}                               // LamarcDS::LamarcDS
+
+//------------------------------------------------------------------------------------
+
+LamarcDS::~LamarcDS ()
+{
+}                               // LamarcDS::~LamarcDS
+
+//------------------------------------------------------------------------------------
+
+void LamarcDS::EraseRegions()
+{
+    m_regions.erase(m_regions.begin(),m_regions.end());
+} // LamarcDS::EraseRegions
+
+//------------------------------------------------------------------------------------
+
+RegionMap::const_iterator
+LamarcDS::getFirstRegion() const
+{
+    return m_regions.begin();
+}
+
+//------------------------------------------------------------------------------------
+
+RegionMap::const_iterator
+LamarcDS::getLastRegion() const
+{
+    return m_regions.end();
+}
+
+//------------------------------------------------------------------------------------
+
+RegionMap::iterator
+LamarcDS::getFirstRegion()
+{
+    return m_regions.begin();
+}
+
+//------------------------------------------------------------------------------------
+
+RegionMap::iterator
+LamarcDS::getLastRegion()
+{
+    return m_regions.end();
+}
+
+//------------------------------------------------------------------------------------
+
+void
+LamarcDS::addRegion(RegionDS& region)
+{
+    validateRegionName(region);
+
+    //  This method may add populations to the new region, to all of m_regions, or to both.
+    validateNewRegion(region);
+
+    string regionName = region.getName();
+
+    if (m_regions.find(regionName) == m_regions.end())
+    {
+        //  The region is a new one, so stuff it in.
+        m_regions.insert(make_pair(region.getName(), region));
+    }
+
+    else
+    {
+        // Since we already have a region of this name, we only add the
+        // populations that differ (if any) within the region.
+        // Remember: if a population was anonymous, it's considered unique;
+        // anonymous populations were given unique names when they were entered into the region.
+        RegionMap::iterator regionIt = m_regions.find(regionName);
+        RegionDS& existingRegion = (regionIt->second);
+
+        PopMap::iterator i;
+        for (i = region.getFirstPopulation(); i != region.getLastPopulation(); i++)
+        {
+            // Add the populations.  If the population already exists, it won't be added.
+            // Individuals within that population may be added, however.
+            // SEE THE PopulationDS DOCUMENTATION ON THIS
+
+            existingRegion.addPopulation(i->second);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+LamarcDS::validateNewRegion(RegionDS& region)
+{
+    //  Let's go through the populations in each region.
+    //  If any population found in one doesn't exist in the other, add it.
+    //  NOTE:  This could be changed to throw if the populations don't match,
+    //  but adding seemed better.
+
+    RegionMap::iterator i = m_regions.begin();
+    //  This should never happen, but let's be safe.
+    if (i == m_regions.end())
+    {
+        // It's an empty set, so no validation is necessary.
+        return;
+    }
+
+    // Store the populations to be added.
+    // Don't add them right away because of iterator invalidation.
+    vector<string> popsToAddToNewRegion;
+    vector<string> popsToAddToOldRegions;
+
+    // Use the populations of the first region in the map.  (Remember: they should all be the same.)
+    RegionDS& existingRegion = (i->second);
+
+    PopMap::iterator iNewPops = region.getFirstPopulation();
+    PopMap::iterator iOldPops = existingRegion.getFirstPopulation();
+
+    while ((iNewPops != region.getLastPopulation()) && (iOldPops != existingRegion.getLastPopulation()))
+    {
+        // Compare the string names of the populations.
+        // The map keeps them in alphabetical order, so if one name sorts before
+        // the other, the other region doesn't have that population: record it
+        // for addition, then advance the iterator that is behind.
+
+        if ((iNewPops->first) < (iOldPops->first))
+        {
+            //      cout << (iNewPops->first) << " < " << (iOldPops->first) << endl;
+            popsToAddToOldRegions.push_back(iNewPops->first);
+            iNewPops++;
+        }
+        else if ((iNewPops->first) > (iOldPops->first))
+        {
+            //      cout << (iNewPops->first) << " > " << (iOldPops->first) << endl;
+            popsToAddToNewRegion.push_back(iOldPops->first);
+            iOldPops++;
+        }
+        else
+        {
+            //      cout << (iNewPops->first) << " = " << (iOldPops->first) << endl;
+            // If they're Identical, increment both counters
+            iOldPops++;
+            iNewPops++;
+        }
+    }
+    // By now, one (or both) of the iterators has reached the end of its map.
+    // Check them both.  If one isn't at the end, add its remaining populations
+    // to the other side's list.
+    while (iNewPops != region.getLastPopulation())
+    {
+        popsToAddToOldRegions.push_back(iNewPops->first);
+        iNewPops++;
+    }
+
+    while (iOldPops != existingRegion.getLastPopulation())
+    {
+        popsToAddToNewRegion.push_back(iOldPops->first);
+        iOldPops++;
+    }
+
+    // Now our lists are complete, so add those populations to the regions.
+    // Remember that if these populations already exist in a region, they'll be
+    // ignored (see the RegionDS documentation).
+    // That's why we only have to compare the new region with the FIRST existing one.
+
+    vector<string>::const_iterator iPopName;
+
+    for (iPopName = popsToAddToNewRegion.begin(); iPopName != popsToAddToNewRegion.end(); iPopName++)
+    {
+        PopulationDS pop(*iPopName);
+        region.addPopulation(pop);
+    }
+
+    for (iPopName = popsToAddToOldRegions.begin(); iPopName != popsToAddToOldRegions.end(); iPopName++)
+    {
+        RegionMap::iterator j;
+
+        for (j = m_regions.begin(); j != m_regions.end(); j++)
+        {
+            PopulationDS pop(*iPopName);
+
+            RegionDS& curRegion = j->second;
+            curRegion.addPopulation(pop);
+        }
+    }
+
+    //  And we're done.  *whew*
+    return;
+}
+
+void
+LamarcDS::validateRegionName(RegionDS& region) const
+{
+    //  First, if the name of the region is whitespace or an empty string, give it a new name.
+    string regionName = region.getName();
+    int strPosition = regionName.find_first_not_of (" ");
+
+    if ((strPosition >= 0) && (strPosition < (int)regionName.length()))
+    {
+        // There is a name that's not just whitespace
+        return;
+    }
+    else
+    {
+        // It's an anonymous region, so get a unique name for it and set the name.
+        string newName = getUniqueName();
+        region.setName(newName);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+string
+LamarcDS::getUniqueName() const
+{
+    // Use the random in lamarc/lib/
+    Random random;  // uses system clock
+
+    int char1 = abs(random.Long() % 26) + 65;
+    int char2 = abs(random.Long() % 26) + 65;
+    int char3 = abs(random.Long() % 26) + 65;
+    int char4 = abs(random.Long() % 26) + 65;
+    char name[6];
+
+    sprintf(name, "%c%c%c%c", char1, char2, char3, char4);
+
+    string regionName(name);
+    regionName = "Region " + regionName;
+
+    if (doesRegionNameExist(regionName))
+    {
+        return getUniqueName();
+    }
+
+    return regionName;
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+LamarcDS::doesRegionNameExist(const string& name) const
+{
+    if (m_regions.find(name) == m_regions.end())
+    {
+        //  The region name doesn't exist, so return false
+        return false;
+    }
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+bool LamarcDS::HasNonContiguousData() const
+{
+    RegionMap::const_iterator region;
+    for(region = getFirstRegion(); region != getLastRegion(); ++region)
+    {
+        if (region->second.HasNonContiguousData())
+            return true;
+    }
+
+    return false;
+
+} // LamarcDS::HasNonContiguousData
+
+//------------------------------------------------------------------------------------
+
+bool LamarcDS::HasSNPs() const
+{
+    RegionMap::const_iterator region;
+    for(region = getFirstRegion(); region != getLastRegion(); ++region)
+    {
+        if (region->second.HasSNPs())
+            return true;
+    }
+
+    return false;
+
+} // LamarcDS::HasSNPs
+
+//------------------------------------------------------------------------------------
+
+void
+LamarcDS::mergeTo(LamarcDS& lamarc)
+{
+    RegionMap::iterator i;
+
+    for (i = lamarc.getFirstRegion(); i != lamarc.getLastRegion(); i++)
+    {
+        addRegion(i->second);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void LamarcDS::ReorderUsing(RegByPopMap newmap)
+{
+    LamarcDS newlamarcds;
+
+    RegByPopMap::iterator region;
+    for(region = newmap.begin(); region != newmap.end(); ++region)
+    {
+        // WARNING warning -- put in a temporary model right now,
+        // this will be phased out once the xml no longer needs
+        // to have a datamodel in it to be read by Lamarc.
+        ModelDS newmodel(getFirstRegion()->second.getDataModel());
+
+        RegionDS newregion(region->first,newmodel);
+
+        PopIterMap::iterator pop;
+        for(pop = region->second.begin();
+            pop != region->second.end(); ++pop)
+        {
+            PopulationDS newpop(pop->first);
+            vector<popmapiterator>::iterator dataset;
+            for(dataset = pop->second.begin();
+                dataset != pop->second.end(); ++dataset)
+            {
+                newpop.AddIndividuals((*dataset)->second.GetAllIndividuals());
+            }
+            newregion.addPopulation(newpop);
+        }
+
+        newlamarcds.addRegion(newregion);
+    }
+
+    EraseRegions();
+    mergeTo(newlamarcds);
+
+} // LamarcDS::ReorderUsing
+
+//------------------------------------------------------------------------------------
+
+long
+LamarcDS::numRegions() const
+{
+    return m_regions.size();
+}
+
+//------------------------------------------------------------------------------------
+
+long LamarcDS::GetNUnits() const
+{
+    long npops = 0;
+    RegionMap::const_iterator region;
+    for (region = getFirstRegion(); region != getLastRegion(); ++region)
+    {
+        npops += region->second.GetNRealPopulations();
+    }
+
+    return npops;
+
+} // LamarcDS::GetNUnits
+
+//
+//------------------------------------------------------------------------------------
+//  Parameter setters
+
+void
+LamarcDS::setCoalescenceStartValues(const double coalescenceStartValues)
+{
+    m_coalescenceStartValues = coalescenceStartValues;
+}
+
+void
+LamarcDS::setCoalescenceMethod(method_type coalescenceMethod)
+{
+    m_coalescenceMethod = coalescenceMethod;
+}
+
+void
+LamarcDS::setCoalescenceMaxEvents(const long coalescenceMaxEvents)
+{
+    m_coalescenceMaxEvents = coalescenceMaxEvents;
+}
+
+void
+LamarcDS::setReplicates(const long replicates)
+{
+    m_replicates = replicates;
+}
+
+void
+LamarcDS::setTemperatures(const double temperatures)
+{
+    m_temperatures = temperatures;
+}
+
+void
+LamarcDS::setSwapInterval(const long swapInterval)
+{
+    m_swapInterval = swapInterval;
+}
+
+void
+LamarcDS::setResimulating(const double resimulating)
+{
+    m_resimulating = resimulating;
+}
+
+void
+LamarcDS::setInitialNumber(const long initialNumber)
+{
+    m_initialNumber = initialNumber;
+}
+
+void
+LamarcDS::setInitialSamples(const long initialSamples)
+{
+    m_initialSamples = initialSamples;
+}
+
+void
+LamarcDS::setInitialDiscard(const long initialDiscard)
+{
+    m_initialDiscard = initialDiscard;
+}
+
+void
+LamarcDS::setInitialInterval(const long initialInterval)
+{
+    m_initialInterval = initialInterval;
+}
+
+void
+LamarcDS::setFinalNumber(const long finalNumber)
+{
+    m_finalNumber = finalNumber;
+}
+
+void
+LamarcDS::setFinalSamples(const long finalSamples)
+{
+    m_finalSamples = finalSamples;
+}
+
+void
+LamarcDS::setFinalDiscard(const long finalDiscard)
+{
+    m_finalDiscard = finalDiscard;
+}
+
+void
+LamarcDS::setFinalInterval(const long finalInterval)
+{
+    m_finalInterval = finalInterval;
+}
+
+void
+LamarcDS::setVerbosity(const string& verbosity)
+{
+    m_verbosity = verbosity;
+}
+
+void
+LamarcDS::setProgVerbosity(const string& progverbosity)
+{
+    m_progverbosity = progverbosity;
+}
+
+void
+LamarcDS::setEcho(const string& echo)
+{
+    m_echo = echo;
+}
+
+void
+LamarcDS::setProfile(const string& profile)
+{
+    m_profile = profile;
+}
+
+void
+LamarcDS::setPosterior(const string& posterior)
+{
+    m_posterior = posterior;
+}
+
+void
+LamarcDS::setSeed(const long seed)
+{
+    m_seed = seed;
+}
+
+void
+LamarcDS::setOutputFile(const string& outputFile)
+{
+    m_outputFile = outputFile;
+}
+
+void
+LamarcDS::setInSummaryFile(const string& inSummaryFile)
+{
+    m_inSummaryFile = inSummaryFile;
+}
+
+void
+LamarcDS::setOutSummaryFile(const string& outSummaryFile)
+{
+    m_outSummaryFile = outSummaryFile;
+}
+
+//------------------------------------------------------------------------------------
+
+string
+LamarcDS::getXML(unsigned int numTabs) const
+{
+    string lamarcXML;
+
+    addTabs(numTabs, lamarcXML);
+    lamarcXML = lamarcXML + "<lamarc version=\"" + VERSION + "\">\n";
+
+    addTabs(numTabs, lamarcXML);
+    lamarcXML = lamarcXML + "<!-- Created from the LamarcDS DataStore -->\n";
+
+    addTabs(numTabs, lamarcXML);
+    lamarcXML = lamarcXML + "<!-- -->\n";
+
+    addTabs(numTabs, lamarcXML);
+    lamarcXML = lamarcXML + MakeTag(xmlstr::XML_TAG_DATA) + "\n";
+
+    ++numTabs;
+
+    // Now write out the regions
+    RegionMap::const_iterator i;
+    for (i = m_regions.begin(); i != m_regions.end(); i++)
+    {
+        lamarcXML+= (i->second).getXML(numTabs);
+    }
+
+    --numTabs;
+    addTabs(numTabs, lamarcXML);
+    lamarcXML += MakeCloseTag(xmlstr::XML_TAG_DATA) + "\n";
+
+#ifndef JSIM
+    --numTabs;
+    addTabs(numTabs, lamarcXML);
+    lamarcXML += "</lamarc>\n";
+#endif
+
+    return lamarcXML;
+
+}
+
+//____________________________________________________________________________________
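The population reconciliation in LamarcDS::validateNewRegion above is a parallel walk over two alphabetically ordered maps: whichever side is missing a name gets that name queued for addition. A standalone sketch of the same pattern, written in C++11 style for brevity and detached from the converter classes ("Cusco" is a hypothetical population name; the others come from the Migrate1.txt sample below):

    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    int main()
    {
        // Keys stand in for population names; the mapped values are irrelevant here.
        std::map<std::string, int> newRegion = { {"Ayacucho", 0}, {"Cusco", 0} };
        std::map<std::string, int> oldRegion = { {"Ayacucho", 0}, {"Paxco", 0} };

        std::vector<std::string> addToOldRegions, addToNewRegion;
        auto n = newRegion.begin();
        auto o = oldRegion.begin();
        while (n != newRegion.end() && o != oldRegion.end())
        {
            if (n->first < o->first)      addToOldRegions.push_back((n++)->first); // only in the new region
            else if (o->first < n->first) addToNewRegion.push_back((o++)->first);  // only in the old regions
            else { ++n; ++o; }                                                     // present in both
        }
        for (; n != newRegion.end(); ++n) addToOldRegions.push_back(n->first);
        for (; o != oldRegion.end(); ++o) addToNewRegion.push_back(o->first);

        for (const auto& name : addToOldRegions) std::cout << "add to existing regions: " << name << "\n";
        for (const auto& name : addToNewRegion)  std::cout << "add to new region: " << name << "\n";
        return 0;
    }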
diff --git a/src/conversion/Migrate1.txt b/src/conversion/Migrate1.txt
new file mode 100644
index 0000000..2d4a2b0
--- /dev/null
+++ b/src/conversion/Migrate1.txt
@@ -0,0 +1,88 @@
+S 2 1
+670
+4 Ayacucho
+PERU77C    CCAAGAGAAG AACCTGATGA CATAGACTGT TGGTGTTATG GAGTGGAAAA 
+PERU78     CCAAGAGAAG AACCTGATGA CATAGACTGT TGGTGTTATG GAGTGGAAAA 
+PERU77A    CCAAGAGAAG ACCTGGATGA CATAGACTGT TGGTGTTATG GAGTGGAAAA 
+PERU79     CCAAGAGAGG AGCCAGATGA CATAGACTGT TGGTGTTATG GAGTGGAAAA 
+           CGTTAGGGTT GCTTATGGAA AGTGTGATTC AGCGGGTAGG TCAAGAAGGT 
+           TGTTAGGGTT GCTTATGGAA AGTGTGATTC AGCGGGTAGG TCAAGAAGGT 
+           TGTTAGGGTT GCTTATGGAA AGTGTGATTC AGCGGGTAGG TCAAGAAGGT 
+           TGTTAGGGTT GCTTATGGAA AGTGTGATTC AGCGGGTAGG TCAAGAAGGT
+           CTAGAAGGGC CATTGATTTA CCCACACATG AAAACCATGG TTTGAAGACT 
+           CTAGAAGGGC CATTGATTTA CCCACACATG AAAACCATGG TTTGAAGACT 
+           CTAGAAGGGC CATTGATTTA CCCACACATG AAAACCATGG TTTGAAGACT 
+           CTAGAAGGGC CATTGATTTA CCCACACATG AAAACCATGG TTTGAAGACT 
+           CGGCAAGAAA AGTGGATGAC TGGAAGAATG GGCGAGAGAC AATTGCAGAA 
+           CGGCAAGAAA AGTGGATGAC TGGAAGAATG GGCGAGAGAC AATTGCAGAA 
+           CGGCAAGAAA AGTGGATGAC TGGAAGAATG GGCGAGAGAC AATTGCAGAA 
+           CGGCAAGAAA AGTGGATGAC TGGAAGAATG GGCGAGAGAC AATTGCAGAA 
+           AATTGAAAGA TGGCTAGTGA GGAACCCCTT TTTTGCGGTG ACAGCCTTAG 
+           AATTGAAAGA TGGCTAGTGA GGAACCCCTT TTTTGCGGTG ACAGCCTTAG 
+           AATTGAAAGA TGGCTAGTGA GGAACCCCTT TTTTGCGGTG ACAGCCTTAG 
+           AATTGAAAGA TGGCTAGTGA GGAACCCCTT TTTTGCGGTG ACAGCCTTAG 
+           CTATTGCTTA TTTAGTGGGG AGCAACATGA CGCAACGAGT TGTGATTGCT 
+           CTATTGCTTA TTTAGTGGGG AGCAACATGA CGCAACGAGT TGTGATTGCT 
+           CTATTGCTTA TTTAGTGGGG AGCAACATGA CGCAACGAGT TGTGATTGCT 
+           CTATTGCTTA TTTAGTGGGG AGCAACATGA CGCAACGAGT TGTGATTGCT 
+           TTGCTAGTTT TGGCTGTTGG CCCAGCTTAT TCGGCTCACT GCATAGGAAT 
+           TTGCTAGTTT TGGCTGTTGG CCCAGCTTAT TCGGCTCACT GCATAGGAAT 
+           TTGCTAGTTT TGGCTGTTGG CCCAGCTTAT TCGGCTCACT GCATAGGAAT 
+           TTGCTAGTTT TGGCTGTTGG CCCAGCTTAT TCGGCTCACT GCATAGGAAT 
+           AACTGACAGG GATTTCATTG AGGGGGTGCA TGGAGGAACT TGGGTCTCAG 
+           AACTGACAGG GATTTCATTG AGGGGGTGCA TGGAGGAACT TGGGTCTCAG 
+           AACTGACAGG GATTTCATTG AGGGGGTGCA TGGAGGAACT TGGGTCTCAG 
+           AACTGACAGG GATTTCATTG AGGGGGTGCA TGGAGGAACT TGGGTCTCAG 
+           CCACTTTGGA ACAGGACAAG TGTGTTACTG TGATGGCCCC TGACAAGCCC 
+           CCACTTTGGA ACAGGACAAG TGTGTTACTG TGATGGCCCC TGACAAGCCC 
+           CCACTTTGGA ACAGGACAAG TGTGTTACTG TGATGGCCCC TGACAAGCCC 
+           CCACTTTGGA ACAGGACAAG TGTGTTACTG TGATGGCCCC TGACAAGCCC 
+           TCATTGGACA TATCACTGGA AACAGTTGCC ATTGATGGAC CTGCTGAAGC 
+           TCATTGGACA TATCACTGGA AACAGTTGCC ATTGATGGAC CTGCTGAAGC 
+           TCATTGGACA TATCACTGGA AACAGTTGCC ATTGATGGAC CTGCTGAAGC 
+           TCATTGGACA TATCACTGGA AACAGTTGCC ATTGATGGAC CTGCTGAAGC 
+           AAGGAAAGTG TGTTACAGTG CAGTCTTGAC TCATGTGAAG ATCAATGATA 
+           AAGGAAAGTG TGTTACAGTG CAGTCTTGAC TCATGTGAAG ATCAATGATA 
+           AAGGAAAGTG TGTTACAGTG CAGTCTTGAC TCATGTGAAG ATCAATGATA 
+           AAGGAAAGTG TGTTACAGTG CAGTCTTGAC TCATGTGAAG ATCAATGATA 
+           AGTGTCCCAG CACCGGTGAA GCCCATTTGG CCGAAGAAAA CGAGGGAGAT 
+           AGTGTCCCAG CACCGGTGAA GCCCATTTGG CCGAAGAAAA CGAGGGAGAT 
+           AGTGTCCCAG CACCGGTGAA GCCCATTTGG CCGAAGAAAA CGAGGGAGAT 
+           AGTGTCCCAG CACCGGTGAA GCCCATTTGG CCGAAGAAAA CGAGGGAGAT 
+           CACGCCTGTA AACGAACTTA CTCTGACAGA AGCTGGGGGA ACGGTTGTGG 
+           CACGCCTGTA AACGAACTTA CTCTGACAGA GGCTGGGGGA ACGGTTGTGG 
+           CACGCCTGTA AACGAACTTA CTCTGACAGA AGCTGGGGGA ACGGCTGTGG 
+           CACGCCTGTA AACGAACTTA CTCTGACAGA GGCTGGGGGA ACGATTGTGG 
+           CCTATTTGGG AAAGGAAGCA
+           CCTATTTGGG AAAGGAAGCA
+           CCTATTTGGG AAAGGGAGCA
+           CCTATTTGGG AAAGGGAGCA
+2 Paxco
+PERU95C    CCAAGAGAAG AGCCTGATGA CATAGACTGT TGGTGTTATG GAGTGGAAAA 
+PERU95D    CCAAGAGAAG AGCCTGATGA CATAGACTGT TGGTGTTATG GAGTGGAAAA 
+           TGTTAGGGTT GCTTATGGCA AGTGTGATTC AGCGGGTAGG TCAAGAAGGT 
+           TGTTAGGGTT GCTTATGGCA AGTGTGATTC AGCGGGTAGG TCAAGAAGGT
+           CTAGAAGGGC CATTGATTTA CCCACACATG AAAACCATGG TTTGAAGACT 
+           CTAGAAGGGC CATTGATTTA CCCACACATG AAAACCATGG TTTGAAGACT
+           CGGCAAGAAA AGTGGATGAC TGGAAGAATG GGTGAGAGAC AATTGCAGAA 
+           CGGCAAGAAA AGTGGATGAC TGGAAGAATG GGTGAGAGAC AATTGCAGAA
+           AATTGAAAGA TGGCTGGTGA GGAACCCCTT TTTTGCGGTG ACAGCCTTGA 
+           AATTGAAAGA TGGCTGGTGA GGAACCCCTT TTTTGCGGTG ACAGCCTTGA 
+           CTATTGTCTA TTTTGTGGGG AGCAACATGA CGCAACGAGT TGTGATTGCT 
+           CTATTGTTTA TTTTGTGGGG AGCAACATGA CGCAACGAGT TGTGATTGCT 
+           TTACTAGTCT TGGCTGTTGG CCCAGCTTAT TCGGCTCACT GCATAGGAAT 
+           TTACTAGTCT TGGCTGTTGG CCCAGCTTAT TCGGCTCACT GCATAGGAAT 
+           AACTGACAGG GATTTCATTG AGGGGGTGCA TGGAGGAACT TGGGTTTCAG 
+           AACTGACAGG GATTTCATTG AGGGGGTGCA TGGAGGAACT TGGGTTTCAG 
+           CTACCCTGGA GCAAGACAAG TGTGTTACTG TGATGGCCCC TGACAAGCCC 
+           CTACCCTGGA GCAAGACAAG TGTGTTACTG TGATGGCCCC TGACAAGCCC 
+           TCATTGGACA TCTCACTAGA GACAGTAGCC ATTGATGGAC CTGCTGAGGC 
+           TCATTGGACA TCTCACTAGA GACAGTAGCC ATTGATGGAC CTGCTGAGGC 
+           GAGGAAAGTG TGTTACAGTG CAGTCTTGAC TCATGTGAAG ATTAATGATA 
+           GAGGAAAGTG TGTTACAGTG CAGTCTTGAC TCATGTGAAG ATTAATGATA
+           AGTGTCCCAG CACCGGTGAA GCCCATTTGG CTGAAGAAAA CGAGGGAGAT 
+           AGTGTCCCAG CACCGGTGAA GCCCATTTGG CTGAAGAAAA CGAGGGAGAT 
+           CACGCCTGTA AACGAACTTA CTCTGACAGA GGCTGGGGGA ACGGTTGTGG 
+           CACGCCTGTA AACGAACTTA CTCTGACAGA GGCTGGGGGA ACGGTTGTGG 
+           CCTATTTGGG AAAGGGAGCA
+           CCTATTTGGG AAAGGGAGCA
diff --git a/src/conversion/MigrateConverter.cpp b/src/conversion/MigrateConverter.cpp
new file mode 100644
index 0000000..325a40f
--- /dev/null
+++ b/src/conversion/MigrateConverter.cpp
@@ -0,0 +1,679 @@
+// $Id: MigrateConverter.cpp,v 1.37 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// refactored by Mary April 30 2002
+
+#include <cassert>
+#include <iostream>
+#include <sstream>
+#include <cctype>
+#include <cstring>
+#include <stdio.h> // for sprintf in MigrateConverter::getNewName
+#include <vector>
+#include <algorithm>
+
+#include "Converter_MigrateConverter.h"
+#include "Converter_DataSourceException.h"
+#include "Converter_LamarcDS.h"
+#include "Converter_RegionDS.h"
+#include "Converter_PopulationDS.h"
+#include "Converter_IndividualDS.h"
+#include "Converter_Sequence.h"
+#include "stringx.h"
+#include "random.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+MigrateConverter::MigrateConverter(const string& fileName, bool interleaved)
+    : ConverterIf(),
+      m_fileName(fileName),
+      m_interleaved(interleaved),
+      m_inFile(fileName.c_str(), ios::in),  // open the file
+      m_markerstoregions(false) // default to not treat each marker as
+      // a region
+{
+    // Open the file.
+    if (!m_inFile)
+    {
+        // Can't find the infile.
+        // Bail
+        throw FileNotFoundError ("Sorry.  Can't find the file: " + fileName + ".  Please check your path.");
+    }
+
+    // Read the first line and parse for datatype.
+    m_firstLine.erase();
+
+    if (!getLine(m_inFile, m_firstLine))
+    {
+        throw FileFormatError ("Your file appears to be empty.");
+    }
+    istringstream firstLineStream(m_firstLine);
+
+    m_datatype = ReadDataType(firstLineStream);
+
+} // MigrateConverter ctor
+
+//------------------------------------------------------------------------------------
+
+void MigrateConverter::ProcessData()
+{
+    istringstream firstLineStream(m_firstLine);
+
+    // We've already read the datatype, but need to get it out of the
+    // stream, so we will read it again and throw it away.
+    ReadDataType(firstLineStream);
+
+    if (m_interleaved && m_datatype == lamarcstrings::MICROSAT)
+    {
+        string errormsg = "Don't know how to convert an interleaved ";
+        errormsg += "Migrate/Recombine file containing microsatellite ";
+        errormsg += "data";
+        throw FileFormatError(errormsg);
+    }
+    if (m_interleaved && m_datatype == lamarcstrings::KALLELE)
+    {
+        string errormsg = "Don't know how to convert an interleaved ";
+        errormsg += "Migrate/Recombine file containing allelic ";
+        errormsg += "data";
+        throw FileFormatError(errormsg);
+    }
+
+    //  Next, let's get the number of populations and the number of loci.
+    skipWhiteSpace(firstLineStream);
+
+    //  Get the number of populations.
+    long numPops = GetNumPops(firstLineStream);
+
+    //  Get the number of loci.
+    long numLoci = GetNumLoci(firstLineStream);
+
+    string delim;
+    if (m_datatype == lamarcstrings::MICROSAT)
+    {
+        delim = ".";   // EWFIX.P3 -- default
+    }
+    if (m_datatype == lamarcstrings::MICROSAT || m_datatype == lamarcstrings::KALLELE)
+    {
+        // Get the delimiter character
+        string mightBeDelim;
+        if (getToken(firstLineStream,mightBeDelim))
+        {
+            if(mightBeDelim.length() == 1)
+            {
+                delim = mightBeDelim;
+            }
+        }
+    }
+
+    //  Process the second line, in the case of DNA or SNP data.
+    vector<long> sequenceLengths;
+    if (m_datatype == lamarcstrings::DNA || m_datatype == lamarcstrings::SNP)
+    {
+        string secondLine;
+        //  Let's get the second line and put it into its own stream.
+        if (!getLine(m_inFile, secondLine))
+        {
+            throw FileFormatError ("Your file appears to be empty.");
+        }
+
+        istringstream secondLineStream(secondLine);
+
+        //  Next, let's get the length of the sequences.
+        sequenceLengths = GetSeqLengths(secondLineStream, numLoci);
+    }
+
+    //  For each population, we want to process the top line, and then process the sequence
+    //  for EACH locus.
+
+    for (int i = 0; i < numPops; i++)
+    {
+        //  Process the third line
+        string thirdLine;
+        //  Let's get the third line and put it into its own stream.
+        if (!getLine(m_inFile, thirdLine))
+        {
+            throw FileFormatError ("Your file appears to be missing a line specifing the number of sequences in a population.");
+        }
+
+        istringstream thirdLineStream(thirdLine);
+        long numSequences = 0;
+
+        //  Next, let's get the number of individuals.
+        numSequences = GetNumSeqs(thirdLineStream);
+
+        // Grab the population name if it's there.
+        string popName = "";
+        getLine(thirdLineStream, popName);
+        if (popName.empty())
+            popName = string("Population") + ToString(i);
+
+        //  Now grab the sequence and individual names for each locus.
+        if (m_datatype == lamarcstrings::MICROSAT || m_datatype == lamarcstrings::KALLELE)
+        {
+            string regionName = buildName(m_fileName,"--",3,99); // EWFIX.CONSTANT
+
+            GetMicroSatLoci(m_inFile,
+                            numSequences,
+                            numLoci,
+                            popName,
+                            regionName,
+                            m_datatype,
+                            delim);
+        }
+        else
+        {
+            for (int j = 0; j < numLoci; j++)
+            {
+                string regionName = buildName(m_fileName,"--",3,j); // EWFIX.CONSTANT
+
+                //  Get the sequence lengths in order.
+                long sequenceLength = sequenceLengths[j];
+                //  TODO  Error catching required here?
+
+                skipWhiteSpace(m_inFile);
+                if (m_interleaved)
+                {
+                    getInterleavedSequences(m_inFile,
+                                            numSequences,
+                                            sequenceLength,
+                                            popName,
+                                            regionName,
+                                            m_datatype);
+                }
+                else
+                {
+                    getNonInterleavedSequences(m_inFile,
+                                               numSequences,
+                                               sequenceLength,
+                                               popName,
+                                               regionName,
+                                               m_datatype);
+                }
+            }
+        }
+    }
+}                               // MigrateConverter::ProcessData
+
+//------------------------------------------------------------------------------------
+
+void MigrateConverter::SetMarkersToRegions(bool val)
+{
+    m_markerstoregions = val;
+} // SetMarkersToRegions
+
+//------------------------------------------------------------------------------------
+
+MigrateConverter::~MigrateConverter ()
+{
+}                               // MigrateConverter::~MigrateConverter
+
+void MigrateConverter::getInterleavedSequences(ifstream& inFile,
+                                               const long numSequences,
+                                               const long sequenceLength,
+                                               const string& popName,
+                                               const string& regionName,
+                                               const string& datatype)
+{
+    vector<string> names;
+    vector<string> sequences;
+
+    for ( int i = 0; i < numSequences; i++)
+    {
+        string lineString;
+        string sequenceStr;
+
+        // get the first line
+        if (getLine (inFile, lineString))
+        {
+            istringstream indNameStream(lineString);
+            string indName;
+
+            // Get the first 10 characters.
+            if (!getNextNChars(indNameStream, indName, 10))
+                throw FileFormatError ("Your file is not in the correct Migrate format.  Please check your file and retry.");
+
+            //  Get the rest of the line.  (We don't know whether there will be spaces, so get it as tokens.)
+            while (getToken(indNameStream, sequenceStr))
+            {};
+
+            //  After getting the name and the sequence
+            //  1) reset the seqName string
+            //  2) stuff both the name and the sequence into the maps
+            //  3) skip whitespace so the next line is ready to go
+
+            names.push_back(indName);
+            sequences.push_back(sequenceStr);
+            skipWhiteSpace(inFile);
+        }
+    }
+
+    //  Now, get the rest of the interleaved sequences;
+    bool continuePullingSequence = true;
+
+    //  If there are no sequences, don't pull.
+    if (numSequences == 0)
+        continuePullingSequence = false;
+
+    int i = 0;
+
+    while (continuePullingSequence)
+    {
+        string lineString;
+        if (getLine (inFile, lineString))
+        {
+            istringstream indNameStream(lineString);
+            string sequenceStr;
+
+            //  Get the rest of the line.  (We don't know whether there will be spaces, so get it as tokens.)
+            while (getToken(indNameStream, sequenceStr)) {};
+
+            sequences[i%numSequences] = sequences[i%numSequences] + sequenceStr;
+
+            skipWhiteSpace(inFile);
+        }
+        else
+        {
+            throw FileFormatError ("Your file is not in the correct Migrate format.  It appears that one of the sequences is too short.  The total sequence is: " + (sequences[i%numSequences]));
+        }
+
+        if( (i%numSequences) == (numSequences - 1))
+        {
+            //          cout << names[i%numSequences] << " length " << sequences[i%numSequences].length() << ":"
+            //               << sequences[i%numSequences] << endl;
+
+            if( (long)(sequences[i%numSequences].length()) >= sequenceLength)
+            {
+                continuePullingSequence = false;
+            }
+        }
+        i++;
+    }
+
+    //  Now, Put together the LamarcDS.
+    //  Put together the individuals and their sequences.
+    PopulationDS pop (popName);
+
+    //  Add a comment for this population.
+    string comment = "Population origin file for " + regionName + "/";
+    comment += popName + ": " + m_fileName;
+    pop.setComment(comment);
+
+    long seq, nseqs = sequences.size();
+    for(seq = 0; seq < nseqs; ++seq)
+    {
+        IndividualDS individual(names[seq],sequences[seq],datatype);
+        pop.addIndividual(individual);
+    }
+
+    //  Add the region.
+    ModelDS model;
+    RegionDS region (regionName, model, pop);
+
+    m_lamarc.addRegion(region);
+}  //getInterleavedSequences
+
+void MigrateConverter::getNonInterleavedSequences(ifstream& inFile,
+                                                  const long numSequences,
+                                                  const long sequenceLength,
+                                                  const string& popName,
+                                                  const string& regionName,
+                                                  const string& datatype)
+{
+    vector<string> names;
+    vector<string> sequences;
+
+    //  If it's not interleaved, it's a bit easier.
+    PopulationDS pop (popName);
+    skipWhiteSpace(inFile);
+
+    for ( int i = 0; i < numSequences; i++)
+    {
+        string indName;
+        string sequenceStr;
+
+        // Get the first 10 characters.
+        if (!getNextNChars(inFile, indName, 10))
+            throw FileFormatError ("Your file is not in the correct Migrate format.  \nPlease check your file and retry.");
+
+        //  Get characters until we've gone through the sequence length.
+        if (!getNextNNonWhiteSpace(inFile, sequenceStr, sequenceLength))
+        {
+            throw FileFormatError ("Your file is not in the correct Migrate format. \nProbably one of the sequences is too short.");
+        }
+
+        // Got the name, got the sequence, but can't stuff them into
+        // individuals yet if haplotyping is not known.
+        sequences.push_back(sequenceStr);
+        names.push_back(indName);
+
+        //  Clean up for the next sequence
+        skipWhiteSpace(inFile);
+    }
+
+    long seq, nseqs = sequences.size();
+    for(seq = 0; seq < nseqs; ++seq)
+    {
+        IndividualDS individual(names[seq],sequences[seq],datatype);
+        pop.addIndividual(individual);
+    }
+
+    //  Now, Put together the LamarcDS
+    //  Add the region
+    ModelDS model;
+    RegionDS region (regionName, model, pop);
+
+    m_lamarc.addRegion(region);
+} //getNonInterleavedSequences
+
+//------------------------------------------------------------------------------------
+
+void MigrateConverter::GetMicroSatLoci(ifstream& inFile,
+                                       const long numSequences,
+                                       const long numMarkers,
+                                       const string& popName,
+                                       const string& regionName,
+                                       const string& datatype,
+                                       const string& delimiter)
+{
+    vector<string> names;
+    vector< pair<vector<string>,vector<string> > > microsats;
+
+    PopulationDS pop (popName);
+    skipWhiteSpace(inFile);
+
+    for ( int i = 0; i < numSequences; i++)
+    {
+        // Read an individual.
+        string lineString;
+        if (getLine (inFile, lineString))
+        {
+            istringstream linestream(lineString);
+            // Get the first 10 characters.
+            string indname;
+            if (!getNextNChars(linestream,indname,10L))
+            {
+                string errormsg = "Not a legal Migrate/Recombine file. ";
+                errormsg += "Error in trying to add individual ";
+                errormsg += ToString(i) + " of population " + popName;
+                throw FileFormatError(errormsg);
+            }
+
+            // Read the microsats themselves.
+            vector<string> micro1;
+            vector<string> micro2;
+            long marker;
+            for(marker = 0; marker < numMarkers; ++marker)
+            {
+                string haplotypes;
+                if (!getToken(linestream,haplotypes))
+                {
+                    string errormsg = "Unable to find any haplotypes ";
+                    errormsg += "of marker " + indexToKey(marker) + " ";
+                    errormsg += "of individual " + indname + " of ";
+                    errormsg += "population " + popName;
+                    throw FileFormatError(errormsg);
+                }
+
+                // Make sure the correct delimiter is present.
+                string::size_type delimiterpos = haplotypes.find(delimiter);
+                if (delimiterpos == string::npos)
+                {
+                    string errormsg = "Unable to find the delimiter ";
+                    errormsg += delimiter + " in marker ";
+                    errormsg += ToString(marker) + " of individual ";
+                    errormsg += indname + " of population " + popName;
+                    throw FileFormatError(errormsg);
+                }
+
+                string number1(haplotypes.substr(0,delimiterpos));
+                if (number1.empty())
+                {
+                    string errormsg = "Unable to find 1st haplotype ";
+                    errormsg += "of marker " + ToString(marker) + " ";
+                    errormsg += "of individual " + indname + " of ";
+                    errormsg += "population " + popName;
+                    throw FileFormatError(errormsg);
+                }
+                micro1.push_back(number1);
+
+                string number2(haplotypes.substr(delimiterpos+1,
+                                                 haplotypes.length()));
+                if (number2.empty())
+                {
+                    string errormsg = "Unable to find 2nd haplotype ";
+                    errormsg += "of marker " + ToString(marker) + " ";
+                    errormsg += "of individual " + indname + " of ";
+                    errormsg += "population " + popName;
+                    throw FileFormatError(errormsg);
+                }
+                micro2.push_back(number2);
+            }
+
+            StripLeadingSpaces(indname);
+            StripTrailingSpaces(indname);
+            names.push_back(indname);
+            microsats.push_back(make_pair(micro1,micro2));
+        }
+        else
+        {
+            string errormsg = "Expected to find " + ToString(numSequences);
+            errormsg += " individuals, but only found " + ToString(i-1);
+            errormsg += " in population " + popName;
+            throw FileFormatError(errormsg);
+        }
+    }
+
+    if (m_markerstoregions)
+    {
+        long reg, nregs = microsats[0].first.size();
+        for(reg = 0; reg < nregs; ++reg)
+        {
+            pop.EraseIndividuals();
+
+            long ind, nind = microsats.size();
+            for(ind = 0; ind < nind; ++ind)
+            {
+                Sequence seq1(microsats[ind].first[reg],datatype,
+                              names[ind]+string("-1"));
+                Sequence seq2(microsats[ind].second[reg],datatype,
+                              names[ind]+string("-2"));
+                IndividualDS individual(names[ind],seq1);
+                individual.AddSequence(seq2);
+                pop.addIndividual(individual);
+            }
+            //  Now, put together the LamarcDS.
+            //  Add the region.
+            ModelDS model;
+            string rname = buildName(m_fileName,"--",3,reg); // EWFIX.CONSTANT
+            RegionDS region (rname, model, pop);
+
+            m_lamarc.addRegion(region);
+        }
+    }
+    else
+    {
+        long ind, nind = microsats.size();
+        assert(nind == numSequences);
+        for(ind = 0; ind < nind; ++ind)
+        {
+            Sequence seq1(microsats[ind].first,datatype,
+                          names[ind]+string("-1"));
+            Sequence seq2(microsats[ind].second,datatype,
+                          names[ind]+string("-2"));
+            IndividualDS individual(names[ind],seq1);
+            individual.AddSequence(seq2);
+            pop.addIndividual(individual);
+        }
+
+        //  Now, put together the LamarcDS.
+        //  Add the region.
+        ModelDS model;
+        RegionDS region (regionName, model, pop);
+
+        m_lamarc.addRegion(region);
+    }
+} // GetMicroSatLoci
+
+//------------------------------------------------------------------------------------
+
+string
+MigrateConverter::getNewName(Random &random) const
+{
+    int char1 = abs(random.Long() % 26) + 65;
+    int char2 = abs(random.Long() % 26) + 65;
+    int char3 = abs(random.Long() % 26) + 65;
+    int char4 = abs(random.Long() % 26) + 65;
+    char name[6];
+
+    sprintf(name, "%c%c%c%c", char1, char2, char3, char4);
+
+    string newName(name);
+
+    return newName;
+}
+
+//  Pass in a LamarcDS and this converter's LamarcDS will be merged into it.
+//  If you want a LamarcDS of your own, pass in an empty one.  We can't return a
+//  LamarcDS or assign one directly because of template issues.
+void
+MigrateConverter::addConvertedLamarcDS (LamarcDS& lamarc)
+{
+    lamarc.mergeTo(m_lamarc);
+}
+
+long MigrateConverter::GetNumPops(istringstream& linestream) const
+{
+    //  Get the number of populations
+    //  (relies on short-circuit evaluation)
+    string someString;
+    long value;
+    if (getNumber(linestream, someString) && IsInteger(someString))
+    {
+        if (FromString(someString,value)) return value;
+        else return 0L;
+    }
+    else
+    {
+        // Bail.
+        throw FileFormatError ("Your file is not in the correct Migrate format.\nThe first number must be an integer signaling the number of Populations in the file.");
+    }
+
+    assert(false); // can't get here
+    return 0L;
+} // GetNumPops
+
+long MigrateConverter::GetNumLoci(istringstream& linestream) const
+{
+
+    //  Get the number of loci.
+    //  (Relies on short-circuit evaluation.)
+    string someString;
+    long value;
+    if (getNumber(linestream, someString) && IsInteger(someString))
+    {
+        if (FromString(someString,value)) return value;
+        else return 0L;
+    }
+    else
+    {
+        // Bail.
+        throw FileFormatError ("Your file is not in the correct Migrate format.\nThe second number must be an integer signaling the number of Loci in the file.");
+    }
+
+    assert(false); // can't get here
+    return 0L;
+} // GetNumLoci
+
+vector<long> MigrateConverter::GetSeqLengths(istringstream& linestream, long numLoci) const
+{
+    //  Next, let's get the length of the sequences.
+    //  (Relies on short-circuit evaluation.)
+    string someString;
+    vector<long> sequenceLengths;
+    long length;
+
+    for (int i = 0; i < numLoci; i++)
+    {
+        if (getNumber(linestream, someString) && IsInteger(someString))
+        {
+            if (!FromString(someString,length)) length = 0;
+            sequenceLengths.push_back(length);
+            someString = "";
+        }
+        else
+        {
+            // Bail.
+            string errormsg = "Your file is not in the correct Migrate format.\n";
+            errormsg += "The second line must contain one integer per locus specifying \n";
+            errormsg += "the length of the sequences in the file.";
+            throw FileFormatError (errormsg);
+        }
+    }
+
+    return sequenceLengths;
+
+} // GetSeqLengths
+
+string MigrateConverter::ReadDataType(istringstream& linestream) const
+{
+    if (isFirstChar(linestream,string("sSnNmMeE")))
+    {
+        string datatype;
+        getWord(linestream, datatype);
+        if (strpbrk(datatype.c_str(),string("sS").c_str()))
+            return string("DNA");
+        if (strpbrk(datatype.c_str(),string("nN").c_str()))
+            return string("SNP");
+        if (strpbrk(datatype.c_str(),string("mM").c_str()))
+            return string("MICROSAT");
+        if (strpbrk(datatype.c_str(),string("eE").c_str()))
+            return string("KALLELE");
+        return string("");   // cannot be reached
+    }
+    else
+    {
+        return string("");
+    }
+
+} // ReadDataType
+
+long MigrateConverter::GetNumSeqs(istringstream& linestream) const
+{
+    //  Next, let's get the number of individuals.
+    // Relies on short-circuit evaluation.
+    string someString;
+    long value;
+
+    if (getNumber(linestream, someString) && IsInteger(someString))
+    {
+        if (!FromString(someString,value)) value = 0;
+        return value;
+    }
+    else
+    {
+        // Bail.
+        string errormsg = "Your file is not in the correct Migrate format.  At the \n";
+        errormsg += "beginning of each population, there must be a line specifying the number\n";
+        errormsg += "of sequences in that population.";
+        throw FileFormatError(errormsg);
+    }
+    assert(false); // can't get here
+    return 0L;
+
+} // GetNumSeqs
+
+//____________________________________________________________________________________
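Tying this back to the Migrate1.txt sample earlier in the commit: under the parsing above, its first line "S 2 1" decodes as sequence (DNA) data, 2 populations and 1 locus; the second line "670" is that locus's sequence length; and each population block then opens with a count and a name ("4 Ayacucho", "2 Paxco"). A minimal header-parsing sketch in the same spirit, using plain stream extraction rather than the converter's own helpers (getLine, getNumber, etc.):

    #include <iostream>
    #include <sstream>

    int main()
    {
        // First line of a Migrate file: datatype letter, #populations, #loci.
        // 's'/'S' = DNA, 'n'/'N' = SNP, 'm'/'M' = microsat, 'e'/'E' = k-allele.
        std::istringstream firstLine("S 2 1");
        char typeChar;
        long numPops, numLoci;
        firstLine >> typeChar >> numPops >> numLoci;

        // Second line (DNA/SNP only): one sequence length per locus.
        std::istringstream secondLine("670");
        long seqLength;
        secondLine >> seqLength;

        std::cout << "datatype=" << typeChar
                  << " populations=" << numPops
                  << " loci=" << numLoci
                  << " length=" << seqLength << "\n";
        return 0;
    }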
diff --git a/src/conversion/ModelDS.cpp b/src/conversion/ModelDS.cpp
new file mode 100644
index 0000000..140c250
--- /dev/null
+++ b/src/conversion/ModelDS.cpp
@@ -0,0 +1,264 @@
+// $Id: ModelDS.cpp,v 1.4 2010/03/17 17:25:58 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "Converter_ModelDS.h"
+#include "Converter_DataSourceException.h"
+#include "stringx.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+ModelDS::ModelDS()
+    : m_modelName("F84"),
+      m_ttRatio(2.0),
+      m_numCategories(1),
+      m_rates(1.0),
+      m_probabilities(1.0)
+{
+    setFreqs(.25, .25, .25, .25);
+}                               // ModelDS::ModelDS
+
+ModelDS::ModelDS(const string& modelName,
+                 const double freqA,
+                 const double freqC,
+                 const double freqG,
+                 const double freqT,
+                 const double ttRatio)
+    : m_modelName(modelName),
+      m_ttRatio(ttRatio),
+      m_numCategories(1),
+      m_rates(1.0),
+      m_probabilities(1.0)
+{
+    setFreqs(freqA, freqC, freqG, freqT);
+}                               // ModelDS::ModelDS
+
+ModelDS::ModelDS(const string& modelName,
+                 const double freqA,
+                 const double freqC,
+                 const double freqG,
+                 const double freqT,
+                 const double ttRatio,
+                 const unsigned int numCategories,
+                 const double rates,
+                 const double probabilities)
+    : m_modelName(modelName),
+      m_ttRatio(ttRatio),
+      m_numCategories(numCategories),
+      m_rates(rates),
+      m_probabilities(probabilities)
+{
+    setFreqs(freqA, freqC, freqG, freqT);
+}                               // ModelDS::ModelDS
+
+//------------------------------------------------------------------------------------
+
+ModelDS::~ModelDS ()
+{
+}                               // ModelDS::~ModelDS
+
+//------------------------------------------------------------------------------------
+
+void
+ModelDS::setFreqs(double freqA, double freqC, double freqG, double freqT)
+{
+    validateFreqs(freqA, freqC, freqG, freqT);
+
+    // Push them onto the map
+    m_freqs["A"] =  freqA;
+    m_freqs["C"] =  freqC;
+    m_freqs["G"] =  freqG;
+    m_freqs["T"] =  freqT;
+}
+
+//------------------------------------------------------------------------------------
+
+string
+ModelDS::getName() const
+{
+    return m_modelName;
+}
+
+void
+ModelDS::setName(const string& name)
+{
+    m_modelName = name;
+}
+
+//------------------------------------------------------------------------------------
+
+double
+ModelDS::getTTRatio() const
+{
+    return m_ttRatio;
+}
+
+void
+ModelDS::setTTRatio(const double ttRatio)
+{
+    m_ttRatio = ttRatio;
+}
+
+//------------------------------------------------------------------------------------
+
+unsigned int
+ModelDS::getNumCategories() const
+{
+    return m_numCategories;
+}
+
+void
+ModelDS::setNumCategories(const unsigned int numCategories)
+{
+    m_numCategories = numCategories;
+}
+
+//------------------------------------------------------------------------------------
+
+double
+ModelDS::getRates() const
+{
+    return m_rates;
+}
+
+void
+ModelDS::setRates(const double rates)
+{
+    m_rates = rates;
+}
+
+//------------------------------------------------------------------------------------
+
+double
+ModelDS::getProbabilities() const
+{
+    return m_probabilities;
+}
+
+void
+ModelDS::setProbabilities(const double probabilities)
+{
+    m_probabilities = probabilities;
+}
+
+//------------------------------------------------------------------------------------
+
+double
+ModelDS::getAFreq()
+{
+    return m_freqs["A"];
+}
+
+double
+ModelDS::getCFreq()
+{
+    return m_freqs["C"];
+}
+
+double
+ModelDS::getGFreq()
+{
+    return m_freqs["G"];
+}
+
+double
+ModelDS::getTFreq()
+{
+    return m_freqs["T"];
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ModelDS::validateFreqs(const double a,
+                       const double g,
+                       const double c,
+                       const double t)
+{
+    //  map<string,double>::iterator pos;
+    double sum = 0;
+
+    //  for (pos = m_freqs.begin(); pos != m_freqs.end(); ++pos())
+    //    {
+    //      sum += pos->second;
+    //    }
+    sum = a + g + c + t;
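+    // Worked example of the tolerance check below (illustrative): 0.25 + 0.25
+    // + 0.25 + 0.26 = 1.01, and |1.01 - 1.0| = 0.01 > 0.001, so the frequencies
+    // are rejected, whereas 0.2501 + 0.2499 + 0.25 + 0.25 = 1.0 passes.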
+
+    if (fabs(sum-1.0) > 0.001)  // Epsilon taken from /tree/interface.cpp
+    {
+        throw InvalidFrequenciesError("The frequencies don't add up to 1.");
+    }
+    else
+        return;
+}
+
+//------------------------------------------------------------------------------------
+
+string
+ModelDS::getXML(unsigned int numTabs) const
+{
+    string modelXML;
+
+    addTabs(numTabs, modelXML);
+    modelXML = modelXML + "<model name=\"" + m_modelName + "\">\n";
+
+    //  Now, let's write out all of this model's parameters.
+
+    ++numTabs;
+
+    addTabs(numTabs, modelXML);
+    map<string, double>::const_iterator i;
+    modelXML += "<base-freqs> ";
+
+    // std::map iterates its keys alphabetically, so the frequencies come out in A, C, G, T order
+    for (i = m_freqs.begin(); i != m_freqs.end(); i++)
+    {
+        modelXML += ToString(i->second);
+        modelXML += " ";
+    }
+
+    modelXML += "</base-freqs>\n";
+
+    addTabs(numTabs, modelXML);
+
+    modelXML += "<ttratio> " + ToString(m_ttRatio) + " </ttratio>\n";
+
+    addTabs(numTabs, modelXML);
+    modelXML = modelXML + "<categories>\n";
+
+    ++numTabs;
+    addTabs(numTabs, modelXML);
+    modelXML = modelXML + "<num-categories> " + ToString((int)m_numCategories) + " </num-categories>\n";
+
+    addTabs(numTabs, modelXML);
+
+    modelXML = modelXML + "<rates> " + ToString(m_rates) + " </rates>\n";
+
+    addTabs(numTabs, modelXML);
+    modelXML = modelXML + "<probabilities> " + ToString(m_probabilities) + " </probabilities>\n";
+
+    --numTabs;
+    addTabs(numTabs, modelXML);
+    modelXML = modelXML + "</categories>\n";
+
+    --numTabs;
+    addTabs(numTabs, modelXML);
+    modelXML += "</model>\n";
+
+    return modelXML;
+}
+
+//____________________________________________________________________________________
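A minimal usage sketch of the ModelDS XML writer above (not part of the upstream
sources; the include path and linking are assumed):

    #include <iostream>
    #include "Converter_ModelDS.h"

    int main()
    {
        ModelDS model;                 // default: F84, equal base frequencies, ttratio 2.0
        model.setTTRatio(2.5);
        std::cout << model.getXML(1);  // emits the <model>...</model> block, indented one tab
        return 0;
    }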
diff --git a/src/conversion/ParserUtil.cpp b/src/conversion/ParserUtil.cpp
new file mode 100644
index 0000000..c306e87
--- /dev/null
+++ b/src/conversion/ParserUtil.cpp
@@ -0,0 +1,353 @@
+// $Id: ParserUtil.cpp,v 1.16 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include <cstring>
+#include <cctype>
+#include <iostream>
+
+#include "Converter_ParserUtil.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+ParserUtil::ParserUtil() {}
+ParserUtil::~ParserUtil() {}
+
+// Treat both '\n' and '\r' as newlines (covers Unix, DOS and classic Mac line endings).
+bool
+isnewline (int ch)
+{
+    return (ch == '\n' || ch == '\r');
+}
+
+bool
+ParserUtil::getWord (istream& is, string& buffer) const
+{
+    int i = 0;
+    int ch;
+
+    while ((ch=is.get()) != EOF )
+    {
+        //if ( (ch != ' ') && (ch != '\n') && (!isdigit(ch)) )
+        if ( (ch != ' ') && !isnewline(ch) && (!isdigit(ch)) )
+        {
+            i = 1;
+            //buffer += (char*)&ch;
+            buffer += ch;
+        }
+        else if ( i == 1 )
+            break;
+    }
+    if (ch == EOF)
+    {
+        if (i == 0)
+            return false;
+        else
+        {
+            // putback the EOF
+            is.putback(ch);
+            return true;
+        }
+    }
+    if(i == 1)
+    {
+        is.putback(ch);
+        return true;
+    }
+    else
+        return false;
+}                               // ParserUtil::getWord
+
+bool
+ParserUtil::getName (istream& is, string& buffer) const
+{
+    int i = 0;
+    int readalphabetic = 0;
+    int ch;
+
+    while ((ch=is.get()) != EOF )
+    {
+        //     if ( (ch != ' ') && (ch != '\n') )
+        if ( (ch != ' ') && !isnewline(ch) )
+        {
+            i = 1;
+            if (isalpha(ch)) readalphabetic = 1;
+            //  buffer += (char*)&ch;
+            buffer += ch;
+        }
+        else if ( i == 1 )
+            break;
+    }
+    if (ch == EOF)
+    {
+        if (i == 0 || readalphabetic == 0)
+            return false;
+        else
+        {
+            // putback the EOF
+            is.putback(ch);
+            return true;
+        }
+    }
+    if(i == 1 && readalphabetic == 1)
+    {
+        is.putback(ch);
+        return true;
+    }
+    else
+        return false;
+}                               // ParserUtil::getName
+
+bool
+ParserUtil::getNumber (istream& is, string& buffer) const
+{
+    bool seenADigit = false;
+    int ch;
+
+    while ((ch=is.get()) != EOF )
+    {
+        if ( (ch != ' ') && !isnewline(ch) )
+            // we've got something that's not whitespace
+        {
+            if(isdigit(ch))
+                // hooray! it's a digit.
+            {
+                seenADigit = true;
+                buffer += ch;
+            }
+            else
+                // oops! not what we expected
+            {
+                break;
+            }
+        }
+        else
+            // we're seeing whitespace now, so we can
+            // bail if we've seen digits already.
+        {
+            if (seenADigit)
+            {
+                break;
+            }
+        }
+    }
+    is.putback(ch);
+    return seenADigit;
+}                               // ParserUtil::getNumber
+
+bool
+ParserUtil::getToken (istream& is, string& buffer) const
+{
+    int i = 0;
+    int ch;
+
+    while ((ch=is.get()) != EOF )
+    {
+        //      if ( (ch != ' ') && (ch!= '>') && (ch != '\n') && (ch != '\t') )
+        if ( (ch != ' ') && (ch!= '>') && !isnewline(ch) && (ch != '\t') )
+        {
+            i = 1;
+            //buffer += (char*)&ch;
+            buffer += ch;
+        }
+        else if ( i == 1 )
+            break;
+    }
+    if (ch == EOF)
+    {
+        if (i == 0)
+            return false;
+        else
+        {
+            // putback the EOF
+            is.putback(ch);
+            return true;
+        }
+    }
+    if(i == 1)
+    {
+        // put back the last character (the delimiter that ended the token)
+        is.putback(ch);
+        return true;
+    }
+    else
+        return false;
+}                               // ParserUtil::getToken
+
+bool
+ParserUtil::getLine (istream& is, string& buffer) const
+{
+    int i = 0;
+    int ch;
+
+    while ((ch=is.get()) != EOF )
+    {
+        //if (ch != '\n')
+        if (ch != '\n' && ch != '\r') // '\r' (13): carriage return, as found in DOS and classic Mac line endings
+        {
+            i = 1;
+            //        buffer += (char*)&ch;
+            buffer += ch;
+        }
+        else if ( i == 1 )
+            break;
+    }
+    if (ch == EOF)
+    {
+        if (i == 0)
+            return false;
+        else
+        {
+            // putback the EOF
+            is.putback(ch);
+            return true;
+        }
+    }
+    if(i == 1)
+        return true;
+    else
+        return false;
+}                               // ParserUtil::getLine
+
+bool
+ParserUtil::skipWhiteSpace (istream& is) const
+{
+    int ch;
+
+    //  Get the first non whitespace character
+    while ((ch=is.get()) != EOF )
+    {
+        if ( (ch != ' ') && !isnewline(ch) )
+        {
+            // it's not whitespace, so stop; the character is put back below
+            break;
+        }
+    }
+
+    // put back the last char grabbed
+    is.putback(ch);
+    return true;
+}                               // ParserUtil::skipWhiteSpace
+
+bool
+ParserUtil::getNextNNonWhiteSpace (istream& is, string& buffer,const long& n) const
+{
+    long i = 0;
+    int ch;
+
+    for ( i = 0; i < n; i++ )
+    {
+        if ((ch=is.get()) != EOF)
+        {
+            // Skip tabs, newlines and spaces
+            if ( (ch!='\t') && (ch!='\n') && (ch!=' ') )
+            {
+                buffer += ch;
+            }
+            else
+            {
+                --i;
+            }
+        }
+        else
+        {
+            is.putback(ch);
+            return false;
+        }
+    }
+    // MARYDEBUG commented in the following two lines
+    // cout << "The sequence length: " << buffer.length() << endl;
+    // cout << "The sequence: \"" << buffer << "\"" << endl;
+    return true;
+}                               // ParserUtil::getNextNNonWhiteSpace
+
+bool
+ParserUtil::getNextNChars (istream& is, string& buffer,const long& n) const
+{
+    long i = 0;
+    int ch;
+
+    for ( i = 0; i < n; i++ )
+    {
+        if ((ch=is.get()) != EOF)
+        {
+            // Skip the newlines and tabs
+            if ( (ch!='\n') && (ch!='\t') )
+            {
+                buffer += ch;
+            }
+            else
+            {
+                --i;
+            }
+        }
+        else
+        {
+            is.putback(ch);
+            return false;
+        }
+    }
+    //  cout << "Got the following buffer: \"" << buffer << "\"" << endl;
+    return true;
+}                               // ParserUtil::getNextNChars
+
+bool
+ParserUtil::skipToChar (istream& is, int searchChar) const
+{
+    int ch;
+
+    //  Read characters until searchChar (or EOF) is found
+    while ((ch=is.get()) != EOF )
+    {
+        if (ch == searchChar)
+        {
+            // found the search character; stop so it can be put back below
+            break;
+        }
+    }
+
+    // put back the last char grabbed
+    is.putback(ch);
+    return true;
+}                               // ParserUtil::skipToChar
+
+bool
+ParserUtil::isFirstChar(istream& is, const string& searchString) const
+{
+    bool found = false;
+    char* ch = new char[2];
+
+    skipWhiteSpace(is);
+
+    is.get(ch,2);
+
+    if (ch[0] != EOF)
+        if (strpbrk(ch,searchString.c_str()))
+            found = true;
+
+    is.putback(ch[0]);
+
+    delete [] ch;
+    return found;
+
+} // ParserUtil::isFirstChar
+
+//____________________________________________________________________________________
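A short sketch of how the stream helpers above tokenize a Phylip-style header
line (illustrative only; it assumes ParserUtil's methods are accessible to the
caller, as they are to the converters that inherit them):

    #include <iostream>
    #include <sstream>
    #include "Converter_ParserUtil.h"

    int main()
    {
        ParserUtil parser;
        std::istringstream header("15 662 W");
        std::string count, length, flag;

        parser.getNumber(header, count);   // "15"  (digits only)
        parser.getNumber(header, length);  // "662"
        parser.getWord(header, flag);      // "W"   (non-digit, non-whitespace run)
        std::cout << count << " sequences of length " << length << "\n";
        return 0;
    }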
diff --git a/src/conversion/PhylipConverter.cpp b/src/conversion/PhylipConverter.cpp
new file mode 100644
index 0000000..3956f93
--- /dev/null
+++ b/src/conversion/PhylipConverter.cpp
@@ -0,0 +1,315 @@
+// $Id: PhylipConverter.cpp,v 1.31 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <iostream>
+#include <fstream>
+#include <sstream>
+#include <cctype>
+
+#include "Converter_PhylipConverter.h"
+#include "Converter_DataSourceException.h"
+#include "Converter_LamarcDS.h"
+#include "Converter_RegionDS.h"
+#include "Converter_PopulationDS.h"
+#include "Converter_IndividualDS.h"
+#include "Converter_Sequence.h"
+#include "stringx.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+PhylipConverter::PhylipConverter(const string& fileName,
+                                 const bool interleaved)
+    //  :  m_lamarc(lamarc),
+    :  ConverterIf(),
+       m_fileName(fileName),
+       m_interleaved(interleaved),
+       m_inFile(fileName.c_str(), ios::in)
+{
+
+    // Read the input file
+
+    if (!m_inFile)
+    {
+        // Cant find the infile.
+        // Bail
+        throw FileNotFoundError ("Sorry.  Can't find the file: " + fileName + ".  \nPlease check your path.");
+    }
+
+    SetDataType("");  // Phylip files don't declare a data type, so leave it unset for now
+
+} // PhylipConverter ctor
+
+//------------------------------------------------------------------------------------
+
+void PhylipConverter::ProcessData()
+{
+    string numSeqsString;
+    string seqLengthString;
+    long numSequences = 0;
+    long sequenceLength = 0;
+    bool usesWeights = false;
+
+    string someString;
+    string firstLine;
+    //  Let's get the first line and put it into its own stream
+    if (!getLine(m_inFile, firstLine))
+    {
+        throw FileFormatError ("Your file appears to be empty.");
+    }
+
+    istringstream firstLineStream(firstLine);
+
+    //  First, let's get the number of sequences.
+    if (getNumber(firstLineStream, someString))
+    {
+        if (!IsInteger(someString))
+        {
+            // Bail.
+            throw FileFormatError ("Your file is not in the correct Phylip format.  \nThe first token must be an integer signaling the number of sequences in the file.");
+        }
+        if (!FromString(someString, numSequences)) numSequences = 0L;
+        someString = "";
+    }
+    else
+    {
+        // Bail.
+        throw FileFormatError ("Your file is not in the correct Phylip format.  \nThe first token must be an integer signaling the number of sequences in the file.");
+    }
+
+    //  Next, let's get the length of the sequences.
+    if (getNumber(firstLineStream, someString))
+    {
+        if (!IsInteger(someString))
+        {
+            // Bail.
+            throw FileFormatError ("Your file is not in the correct Phylip format.  \nThe second token must be an integer signaling the length of the sequences in the file.");
+        }
+        if (!FromString(someString, sequenceLength)) sequenceLength = 0L;
+        someString = "";
+    }
+    else
+    {
+        // Bail.
+        throw FileFormatError ("Your file is not in the correct Phylip format.  \nThe second token must be an integer signaling the length of the sequences in the file.");
+    }
+
+    //  It's possible that there will be a weights key.
+    if (getWord(firstLineStream, someString))
+    {
+        if (CaselessStrCmp(someString,"W"))
+        {
+            //  This file will use weights
+            usesWeights = true;
+            someString = "";
+        }
+    }
+
+    //  Now, if there are weights, let's skip them.
+    //  First, get the 'weights' word.
+    if (usesWeights)
+    {
+        if (!getToken(m_inFile, someString))
+        {
+            throw FileFormatError ("Your file is not in the correct Phylip format.  \nPlease check your file for formatting errors.");
+        }
+
+        //  Get the weights.
+        //  This should be the length of the sequence, skipping newlines and whitespace.
+        if (!getNextNNonWhiteSpace (m_inFile, someString, sequenceLength))
+        {
+            throw FileFormatError ("Your file is not in the correct Phylip format.  \nThe converter was expecting to find a list of Weights.");
+        }
+    }
+
+    //  Great.  Now we should be ready to grab the individuals
+    string indSeq;
+
+    skipWhiteSpace(m_inFile);
+
+    vector<string> names;
+    vector<string> sequences;
+
+    if (m_interleaved)
+    {
+        long basesread = 0;
+
+        for ( int i = 0; i < numSequences; i++)
+        {
+            string lineString;
+            string sequenceStr;
+
+            // get the first line
+            if (getLine (m_inFile, lineString))
+            {
+                istringstream indNameStream(lineString);
+                string indName;
+
+                // Get the first 10 characters.
+                if (!getNextNChars(indNameStream, indName, 10))
+                    throw FileFormatError ("Your file is not in the correct Phylip format.  \nPlease check your file and retry.");
+
+                //  Get the rest of the line.  (don't know whether there will be spaces, so get as tokens)
+                while (getToken(indNameStream, sequenceStr)) {};
+
+                //  After getting the name and the sequence
+                //  1) reset the seqName string
+                //  2) stuff both the name and the sequence into the containers
+                //  3) skip whitespace so the next line is ready to go
+
+                StripTrailingSpaces(indName);
+                names.push_back(indName);
+                sequences.push_back(sequenceStr);
+                basesread = sequenceStr.length();
+                skipWhiteSpace(m_inFile);
+            }
+        }
+
+        //  Now, get the rest of the interleaved sequences;
+        bool continuePullingSequence = true;
+
+        //  If there are no sequences or all the bases are read, don't pull.
+        if (numSequences == 0 || basesread >= sequenceLength)
+            continuePullingSequence = false;
+
+        int i = 0;
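+        // Subsequent interleaved blocks are dealt out round-robin: with, say,
+        // 3 sequences, the next 3 lines append to sequences 0, 1 and 2, and the
+        // line after that starts over at sequence 0 (the i % numSequences below).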
+
+        while (continuePullingSequence)
+        {
+            string lineString;
+            if (getLine (m_inFile, lineString))
+            {
+                istringstream indNameStream(lineString);
+                string sequenceStr;
+
+                //  Get the rest of the line.  (don't know whether there will be spaces, so get as tokens)
+                while (getToken(indNameStream, sequenceStr)) {};
+
+                sequences[i%numSequences] = sequences[i%numSequences] + sequenceStr;
+
+                skipWhiteSpace(m_inFile);
+            }
+            else
+            {
+                throw FileFormatError ("Your file is not in the correct Phylip format.  \nIt appears that one of the sequences is too short.");
+            }
+
+            if( (i%numSequences) == (numSequences - 1))
+            {
+                //              cout << names[i%numSequences] << " length " << sequences[i%numSequences].length() << ":"
+                //                   << sequences[i%numSequences] << endl;
+
+                if( (long)(sequences[i%numSequences].length()) >= sequenceLength)
+                {
+                    continuePullingSequence = false;
+                }
+            }
+            i++;
+        }
+
+    }
+
+    //  If it's not interleaved, it's a bit easier.
+    else
+    {
+        skipWhiteSpace(m_inFile);
+
+        for ( int i = 0; i < numSequences; i++)
+        {
+            string indName;
+            string sequenceStr;
+
+            // Get the first 10 characters.
+            if (!getNextNChars(m_inFile, indName, 10))
+                throw FileFormatError ("Your file is not in the correct Phylip format.  \nPlease check your file and retry.");
+
+            //  Get characters until we've gone through the sequence length
+            if (!getNextNNonWhiteSpace(m_inFile, sequenceStr, sequenceLength))
+            {
+                throw FileFormatError ("Your file is not in the correct Phylip format.  \nIt appears that one of the sequences is too short.");
+            }
+
+            // got the name, got the sequence.  Stuff them into an individual, and put
+            // that in our population
+
+            //    cout << "Name: " << indName << ": " << sequenceStr << endl;
+            names.push_back(indName);
+            sequences.push_back(sequenceStr);
+
+            //  Clean up for the next sequence
+            skipWhiteSpace(m_inFile);
+        }
+    }
+
+    //  Now, Put together the LamarcDS
+    PopulationDS pop ("");
+    //  Add a comment for this population
+    string comment = "Origin file: " + m_fileName;
+    pop.setComment(comment);
+
+    long seq, nseqs = sequences.size();
+    for(seq = 0; seq < nseqs; ++seq)
+    {
+        IndividualDS individual(names[seq],sequences[seq],m_datatype);
+        pop.addIndividual(individual);
+    }
+
+    //  Add the region
+    ModelDS model;
+#ifndef JSIM
+    // test file name to make sure not too long
+    // sprintf into name string
+    RegionDS region (buildName(m_fileName,"--",3,0), model, pop);
+#else
+    RegionDS region (string("Ocharinka"), model, pop);
+#endif
+
+    m_lamarc.addRegion(region);
+
+#if 0
+    while ( getToken(m_inFile, someString))
+    {
+        cout << someString << endl;
+        someString = "";
+    }
+#endif
+}                               // PhylipConverter::ProcessData
+
+//------------------------------------------------------------------------------------
+
+PhylipConverter::~PhylipConverter ()
+{
+}                               // PhylipConverter::~PhylipConverter
+
+//------------------------------------------------------------------------------------
+
+//  Pass in a LamarcDS, and this converter's LamarcDS will be merged into it.
+//  If you want a LamarcDS of your own, pass in an empty one.  I can't return a LamarcDS
+//  or do assignment on one because of really, really weird template issues.
+void
+PhylipConverter::addConvertedLamarcDS (LamarcDS& lamarc)
+{
+    lamarc.mergeTo(m_lamarc);
+}
+
+//____________________________________________________________________________________
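A hedged driver sketch for the converter above (not from the upstream sources;
it assumes the LamarcDS target is supplied by the caller, as the real front ends do):

    #include <string>
    #include "Converter_PhylipConverter.h"
    #include "Converter_LamarcDS.h"

    void convertPhylip(const std::string& path, LamarcDS& target)
    {
        PhylipConverter converter(path, true);   // true = interleaved layout
        converter.ProcessData();                 // parses header, optional weights, sequences
        converter.addConvertedLamarcDS(target);  // merges this file's data into target
    }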
diff --git a/src/conversion/PopulationDS.cpp b/src/conversion/PopulationDS.cpp
new file mode 100644
index 0000000..bee21fc
--- /dev/null
+++ b/src/conversion/PopulationDS.cpp
@@ -0,0 +1,427 @@
+// $Id: PopulationDS.cpp,v 1.22 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <stdio.h>
+
+#include "Converter_PopulationDS.h"
+#include "Converter_DataSourceException.h"
+#include "random.h"
+#include "stringx.h" // for CaselessStrCmp() in ValidateNewIndividual()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+PopulationDS::PopulationDS (const string& popName)
+    : m_popName(popName),
+      m_sequenceLength(0)
+{
+    // Initially, an empty population.
+    // No individuals in the population
+}                               // PopulationDS::PopulationDS
+
+//------------------------------------------------------------------------------------
+
+PopulationDS::PopulationDS (const string& popName,
+                            const string& individualName,
+                            const string& sequence)
+    : m_popName(popName),
+      m_sequenceLength (sequence.length())
+{
+    // Now initialize the Individual.
+    IndividualDS thisIndividual(individualName, sequence);
+
+    // Push it onto the vector.
+    m_individuals.push_back(thisIndividual);
+}                               // PopulationDS::PopulationDS
+
+//------------------------------------------------------------------------------------
+
+PopulationDS::PopulationDS (const string& popName,
+                            const IndividualDS& ind)
+    : m_popName(popName),
+      m_sequenceLength(ind.getSequenceLength())
+{
+    // Push it onto the vector.
+    m_individuals.push_back(ind);
+
+}                               // PopulationDS::PopulationDS
+
+//------------------------------------------------------------------------------------
+
+PopulationDS::~PopulationDS ()
+{
+}                               // PopulationDS::~PopulationDS
+
+//------------------------------------------------------------------------------------
+
+list<IndividualDS>::const_iterator
+PopulationDS::getFirstIndividual() const
+{
+    return m_individuals.begin();
+}
+
+//------------------------------------------------------------------------------------
+
+list<IndividualDS>::const_iterator
+PopulationDS::getLastIndividual() const
+{
+    return m_individuals.end();
+}
+
+//------------------------------------------------------------------------------------
+
+string
+PopulationDS::getName() const
+{
+    return m_popName;
+}
+
+//------------------------------------------------------------------------------------
+
+void
+PopulationDS::setName(const string& name)
+{
+    m_popName = name;
+}
+
+//------------------------------------------------------------------------------------
+
+void
+PopulationDS::setFirstSequenceLength(const int sequenceLength)
+{
+    if (m_sequenceLength == 0)
+    {
+        m_sequenceLength = sequenceLength;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+int
+PopulationDS::getSequenceLength() const
+{
+    return m_sequenceLength;
+}
+
+//------------------------------------------------------------------------------------
+
+int
+PopulationDS::getNumberOfIndividuals() const
+{
+    return m_individuals.size();
+}
+
+//------------------------------------------------------------------------------------
+
+long PopulationDS::GetNumberOfOTUs() const
+{
+    long count = 0;
+    list<IndividualDS>::const_iterator ind;
+    for(ind = getFirstIndividual(); ind != getLastIndividual(); ++ind)
+    {
+        count += ind->GetNumberOfSequences();
+    }
+
+    return count;
+
+} // PopulationDS::GetNumberOfOTUs
+
+//------------------------------------------------------------------------------------
+
+vector<string> PopulationDS::GetAllSeqNames() const
+{
+    vector<string> seqnames;
+
+    list<IndividualDS>::const_iterator ind;
+    for(ind = getFirstIndividual(); ind != getLastIndividual(); ++ind)
+    {
+        vector<string> isnames = ind->GetAllSeqNames();
+        seqnames.insert(seqnames.end(),isnames.begin(),isnames.end());
+    }
+
+    return seqnames;
+
+} // PopulationDS::GetAllSeqNames
+
+//------------------------------------------------------------------------------------
+
+void
+PopulationDS::setComment(const string& comment)
+{
+    m_comment = comment;
+}
+
+//------------------------------------------------------------------------------------
+
+void
+PopulationDS::addIndividual(IndividualDS individual)
+{
+    // First validate the individual
+    validateNewIndividual(individual);
+
+    // Next, check to see if this individual is already present
+    if (doesIndividualNameExist(individual.getName()))
+    {
+        std::string msg = "Two or more samples with name \""
+            + individual.getName()
+            + "\" were found for Population \""
+            + getName()
+            + "\".  Either you have added the same file twice "
+            + "(not legal) or you need to change your sample names.";
+#ifdef GUI
+        throw InconsistentDataError(msg);
+#else
+        cout << endl << msg << endl;
+        // Duplicate sample names are fatal in command-line mode
+        exit(1);
+#endif
+    }
+
+    // Otherwise, shove it into the vector
+    m_individuals.push_back(individual);
+}
+
+//------------------------------------------------------------------------------------
+
+void
+PopulationDS::addIndividual(const string& name,
+                            const Sequence& seq)
+{
+    IndividualDS individual(name, seq);
+
+    // First validate the individual
+    validateNewIndividual(individual);
+
+    // Next, check to see if this individual is already present
+    if (doesIndividualNameExist(individual.getName()))
+    {
+        // We've already got this individual, so return
+        return;
+    }
+
+    // Otherwise, shove it into the vector
+    m_individuals.push_back(individual);
+}
+
+//------------------------------------------------------------------------------------
+
+void PopulationDS::AddIndividuals(list<IndividualDS> individuals)
+{
+    list<IndividualDS>::iterator indiv;
+    for(indiv = individuals.begin(); indiv != individuals.end(); ++indiv)
+    {
+        addIndividual(*indiv);
+    }
+
+} // PopulationDS::AddIndividuals
+
+//------------------------------------------------------------------------------------
+
+string
+PopulationDS::getUniqueName() const
+{
+    // Use the random in lamarc/lib/
+    Random random; // uses system clock
+
+    int char1 = abs(random.Long() % 26) + 65;
+    int char2 = abs(random.Long() % 26) + 65;
+    int char3 = abs(random.Long() % 26) + 65;
+    int char4 = abs(random.Long() % 26) + 65;
+    char name[6];
+
+    sprintf(name, "%c%c%c%c", char1, char2, char3, char4);
+
+    if (doesIndividualNameExist((string)name))
+    {
+        return getUniqueName();
+    }
+
+    return string(name);
+}
+
+//------------------------------------------------------------------------------------
+
+void
+PopulationDS::validateNewIndividual(IndividualDS& ind)
+{
+    //  First, if the name of the individual is whitespace or an empty string, get it a new name
+    string indName = ind.getName();
+    int strPosition = indName.find_first_not_of (" ");
+
+    if ((strPosition >= 0) && (strPosition < (int)indName.length()))
+    {
+        // Do nothing
+    }
+    else
+    {
+        // It's an anonymous individual, so get a unique name for it and set the name
+        string newName = getUniqueName();
+        ind.setName(newName);
+    }
+
+    // Now, do the validation stuff.
+    if (m_sequenceLength == 0)
+    {
+        m_sequenceLength = ind.getSequenceLength();
+        return;
+    }
+    else if (ind.getSequenceLength() == 0)
+    {
+        return;
+    }
+    else if (ind.getSequenceLength() != m_sequenceLength)
+    {
+        throw InvalidSequenceLengthError("Tried to add a new sequence to a population with a different \
+length from the other sequences.");
+    }
+
+    return;
+}
+
+//  TODO.  Ripe for optimization
+bool
+PopulationDS::doesIndividualNameExist(const string& name) const
+{
+    list<IndividualDS>::const_iterator i;
+
+    for (i = getFirstIndividual(); i != getLastIndividual(); i++)
+    {
+        if(CaselessStrCmp(i->getName(),name))
+            return true;
+    }
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+Sequence PopulationDS::PopSequence(const string& seqname,
+                                   list<IndividualDS>::iterator individual)
+{
+    Sequence sequence = individual->PopSequence(seqname);
+
+    if (individual->HasNoSequences())
+        m_individuals.erase(individual);
+
+    return sequence;
+
+} // PopulationDS::PopSequence
+
+//------------------------------------------------------------------------------------
+
+vector< pair<string,list<IndividualDS>::iterator> >
+PopulationDS::FindAllSequences(const vector<string>& seqnames)
+{
+    vector< pair<string,list<IndividualDS>::iterator> > seqpairs;
+
+    vector<string>::const_iterator seqname;
+    for(seqname = seqnames.begin();
+        seqname != seqnames.end(); ++seqname)
+    {
+        list<IndividualDS>::iterator individual;
+        for(individual = m_individuals.begin();
+            individual != m_individuals.end(); ++individual)
+        {
+            if (individual->HasSequence(*seqname))
+            {
+                seqpairs.push_back(make_pair(*seqname,individual));
+                break;
+            }
+        }
+    }
+
+    if (seqnames.size() != seqpairs.size()) seqpairs.clear();
+
+    return seqpairs;
+
+} // PopulationDS::FindAllSequences
+
+//------------------------------------------------------------------------------------
+
+void PopulationDS::EraseIndividuals()
+{
+    m_individuals.clear();
+} // PopulationDS::EraseIndividuals
+
+//------------------------------------------------------------------------------------
+
+bool PopulationDS::IsGhost() const
+{
+    return m_individuals.empty();
+} // PopulationDS::IsGhost
+
+//------------------------------------------------------------------------------------
+
+bool PopulationDS::HasNonContiguousData() const
+{
+    list<IndividualDS>::const_iterator ind;
+    for(ind = getFirstIndividual(); ind != getLastIndividual(); ++ind)
+    {
+        if (ind->HasNonContiguousData())
+            return true;
+    }
+
+    return false;
+
+} // PopulationDS::HasNonContiguousData
+
+//------------------------------------------------------------------------------------
+
+bool PopulationDS::HasSNPs() const
+{
+    list<IndividualDS>::const_iterator ind;
+    for(ind = getFirstIndividual(); ind != getLastIndividual(); ++ind)
+    {
+        if (ind->HasSNPs())
+            return true;
+    }
+
+    return false;
+
+} // PopulationDS::HasSNPs
+
+//------------------------------------------------------------------------------------
+
+string
+PopulationDS::getXML(unsigned int numTabs) const
+{
+    string populationXML;
+
+    addTabs(numTabs, populationXML);
+    populationXML = populationXML + "<population name=\"" + m_popName + "\">\n";
+
+    //  If there's a comment, lets write it out.
+    if (m_comment != "")
+    {
+        addTabs(numTabs, populationXML);
+        populationXML += "<!--  " + m_comment + " -->\n";
+    }
+
+    //  Now, let's write out all of this population's individuals.
+    ++numTabs;
+    list<IndividualDS>::const_iterator i;
+    for (i = getFirstIndividual(); i != getLastIndividual(); i++)
+    {
+        populationXML+= i->getXML(numTabs);
+    }
+
+    --numTabs;
+    addTabs(numTabs, populationXML);
+    populationXML += "</population>\n";
+
+    return populationXML;
+}
+
+//____________________________________________________________________________________
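A minimal sketch of building a population with the interface above (illustrative
only; it relies on the two-argument IndividualDS constructor used earlier in this
file, and on same-length sequences so validateNewIndividual() accepts them):

    #include "Converter_PopulationDS.h"
    #include "Converter_IndividualDS.h"

    void buildExamplePopulation()
    {
        PopulationDS pop("coastal");
        pop.setComment("Origin file: example.phy");

        pop.addIndividual(IndividualDS("sample_1", "ACGTACGT"));
        pop.addIndividual(IndividualDS("sample_2", "ACGAACGT"));
        // Adding another individual named "sample_1" would trigger the
        // duplicate-name handling in addIndividual().
    }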
diff --git a/src/conversion/RegionDS.cpp b/src/conversion/RegionDS.cpp
new file mode 100644
index 0000000..777a971
--- /dev/null
+++ b/src/conversion/RegionDS.cpp
@@ -0,0 +1,473 @@
+// $Id: RegionDS.cpp,v 1.26 2011/04/23 02:02:48 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cmath>
+#include <stdio.h>
+
+#include "Converter_RegionDS.h"
+#include "Converter_DataSourceException.h"
+#include "random.h"
+#include "stringx.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+RegionDS::RegionDS (const string& regionName,
+                    const ModelDS& model)
+    : m_regionName(regionName),
+      m_model(model)
+{
+    // No populations, so no initialization necessary.
+}                               // RegionDS::RegionDS
+
+//------------------------------------------------------------------------------------
+
+RegionDS::RegionDS (const string& regionName,
+                    const ModelDS& model,
+                    PopulationDS& population)
+    : m_regionName(regionName),
+      m_model(model)
+{
+    // Validate the population name.
+    validatePopulationName(population);
+
+    // Push it onto the map.
+    string popName = population.getName();
+    m_pops.insert(make_pair(popName, population));
+}                               // RegionDS::RegionDS
+
+//------------------------------------------------------------------------------------
+
+RegionDS::~RegionDS ()
+{
+}                               // RegionDS::~RegionDS
+
+//------------------------------------------------------------------------------------
+
+PopMap::const_iterator
+RegionDS::getFirstPopulation() const
+{
+    return m_pops.begin();
+}
+
+//------------------------------------------------------------------------------------
+
+PopMap::const_iterator
+RegionDS::getLastPopulation() const
+{
+    return m_pops.end();
+}
+
+//------------------------------------------------------------------------------------
+
+PopMap::iterator
+RegionDS::getFirstPopulation()
+{
+    return m_pops.begin();
+}
+
+//------------------------------------------------------------------------------------
+
+PopMap::iterator
+RegionDS::getLastPopulation()
+{
+    return m_pops.end();
+}
+
+//------------------------------------------------------------------------------------
+
+string
+RegionDS::getName() const
+{
+    return m_regionName;
+}
+
+//------------------------------------------------------------------------------------
+
+long
+RegionDS::getNmarkers() const
+{
+    return m_pops.begin()->second.getSequenceLength();
+}
+
+//------------------------------------------------------------------------------------
+
+long RegionDS::GetNPopulations() const
+{
+    return m_pops.size();
+} // RegionDS::GetNPopulations
+
+//------------------------------------------------------------------------------------
+
+long RegionDS::GetNRealPopulations() const
+{
+    long count = 0;
+    PopMap::const_iterator pop;
+    for(pop = getFirstPopulation(); pop != getLastPopulation(); ++pop)
+    {
+        if (!pop->second.IsGhost()) ++count;
+    }
+
+    return count;
+
+} // RegionDS::GetNRealPopulations
+
+//------------------------------------------------------------------------------------
+
+vector<string> RegionDS::GetAllSeqNames() const
+{
+    vector<string> seqnames;
+
+    PopMap::const_iterator pop;
+    for(pop = getFirstPopulation(); pop != getLastPopulation(); ++pop)
+    {
+        vector<string> psnames = pop->second.GetAllSeqNames();
+        seqnames.insert(seqnames.end(),psnames.begin(),psnames.end());
+    }
+
+    return seqnames;
+
+} // RegionDS::GetAllSeqNames
+
+//------------------------------------------------------------------------------------
+
+void
+RegionDS::setName(const string& name)
+{
+    m_regionName = name;
+}
+
+//------------------------------------------------------------------------------------
+
+void
+RegionDS::setModel(const ModelDS& model)
+{
+    m_model = model;
+}
+
+//------------------------------------------------------------------------------------
+
+void
+RegionDS::setSpacing(const SpacingDS& spacing)
+{
+    m_spacing = spacing;
+}
+
+//------------------------------------------------------------------------------------
+
+unsigned int
+RegionDS::getNumPopulations() const
+{
+    return m_pops.size();
+}
+
+//------------------------------------------------------------------------------------
+
+void
+RegionDS::addPopulation(PopulationDS& pop)
+{
+    validatePopulationName(pop);
+    validateNewPopulation(pop);
+    string popName = pop.getName();
+
+    if (m_pops.find(popName) == m_pops.end())
+    {
+        //  The population is a new one, so stuff it in.
+        m_pops.insert(make_pair(pop.getName(), pop));
+    }
+
+    else
+    {
+        // since we already have a population of this name, we're going to have to add only the
+        // individuals that differ (if any) within the population.
+        // remember, if the individual is anonymous, it's considered to be unique.
+        // We can have as many anonymous individuals as we want.  The anonymous individuals are
+        // given names when they are entered into the population.
+
+        PopMap::iterator popIt = m_pops.find(popName);
+        PopulationDS& existingPopulation = (popIt->second);
+
+        list<IndividualDS>::const_iterator i;
+        for (i = pop.getFirstIndividual(); i != pop.getLastIndividual(); i++)
+        {
+            // Add the individuals.  If the individual already exists, it won't be added.
+            // If it's an anonymous individual, it will be given a new, unique name within
+            // this population and added.
+            // I'm defining an anonymous individual as having a name that consists only of whitespace
+            // SEE THE DOCUMENTATION ON THIS
+
+            existingPopulation.addIndividual(*i);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+typedef pair<string,list<IndividualDS>::iterator> indpair;
+
+// FailToAdd returns population name rather than adding to the population
+// directly because of the probable presence of identical sequence names
+// with resulting unpredictable add/removal consequences
+bool RegionDS::FailToAdd(IndividualDS& individual, string& popname)
+{
+    bool failure = true;
+
+    vector<string> hapnames = individual.GetHapNames();
+
+    PopMap::iterator popit;
+    for(popit = m_pops.begin(); popit != m_pops.end(); ++popit)
+    {
+        // can't just do removals here as you might only have a subset
+        // of the desired sequences (usually this means a user input error)
+        // in this population
+        vector<indpair> oldinds = popit->second.FindAllSequences(hapnames);
+
+        if (oldinds.empty()) continue;
+
+        failure = false;
+        popname = popit->second.getName();
+        vector<indpair>::iterator oldind;
+        for(oldind = oldinds.begin(); oldind != oldinds.end(); ++oldind)
+        {
+            // ask the population to remove the sequence from the old individual,
+            // then add the returned sequence to the new individual.
+            individual.AddSequence(popit->second.
+                                   PopSequence(oldind->first,
+                                               oldind->second));
+        }
+        break;
+
+    }
+
+    return failure;
+
+} // RegionDS::FailToAdd
+
+//------------------------------------------------------------------------------------
+
+bool RegionDS::HasNonContiguousData() const
+{
+    PopMap::const_iterator pop;
+    for(pop = getFirstPopulation(); pop != getLastPopulation(); ++pop)
+    {
+        if (pop->second.HasNonContiguousData())
+            return true;
+    }
+
+    return false;
+
+} // RegionDS::HasNonContiguousData
+
+//------------------------------------------------------------------------------------
+
+bool RegionDS::HasSNPs() const
+{
+    PopMap::const_iterator pop;
+    for(pop = getFirstPopulation(); pop != getLastPopulation(); ++pop)
+    {
+        if (pop->second.HasSNPs())
+            return true;
+    }
+
+    return false;
+
+} // RegionDS::HasSNPs
+
+//------------------------------------------------------------------------------------
+
+void RegionDS::AddIndividual(IndividualDS& ind, const string& popname)
+{
+    // can't use this because there is no default ctor for PopulationDS
+    // m_pops[popname].addIndividual(ind);
+    m_pops.find(popname)->second.addIndividual(ind);
+
+} // RegionDS::AddIndividual
+
+//------------------------------------------------------------------------------------
+
+void
+RegionDS::validateNewPopulation(PopulationDS& pop)
+{
+    PopMap::iterator i = m_pops.begin();
+    int regionSequenceLength;
+
+    if (i == m_pops.end())
+    {
+        // It's an empty set, so no validation is necessary.
+        return;
+    }
+    else
+    {
+        regionSequenceLength = (i->second).getSequenceLength();
+    }
+
+    if (pop.getSequenceLength() == 0)
+    {
+        // If the new population has a sequence length of 0, set its sequenceLength to the
+        // regionSequenceLength and be on your way.
+        pop.setFirstSequenceLength(regionSequenceLength);
+        return;
+    }
+
+    //  If the region's sequence length is 0 (all of its populations have a sequence length of zero)
+    //  and the inserting population has a length > 0, set those populationSequence lengths to the
+    //  new population's sequence Length.  SEE DOCUMENTATION.
+    if (regionSequenceLength == 0)
+    {
+        for (i = m_pops.begin(); i != m_pops.end(); i++)
+        {
+            (i->second).setFirstSequenceLength(pop.getSequenceLength());
+        }
+    }
+
+    // finally, if both have sequence lengths > 0 and they aren't the same, throw
+    else if (regionSequenceLength != pop.getSequenceLength())
+    {
+        //      string errString = "CurrentRegion: " + m_regionName + " Current PopLength: "
+        //        + ToString(pop.getSequenceLength()) + " RegionSequenceLength: "
+        //        + ToString(regionSequenceLength);
+        //      cout << errString << endl;
+
+        throw InvalidSequenceLengthError("Tried to add a new population to a region with \n a different \
+length from the other sequences.");
+    }
+
+    return;
+}
+
+//------------------------------------------------------------------------------------
+
+void
+RegionDS::validatePopulationName(PopulationDS& pop) const
+{
+    //  First, if the name of the population is whitespace or an empty string, get it a new name
+    string popName = pop.getName();
+    int strPosition = popName.find_first_not_of (" ");
+
+    if ((strPosition >= 0) && (strPosition < (int)popName.length()))
+    {
+        // There is a name that's not just whitespace
+        return;
+    }
+    else
+    {
+        // It's an anonymous population, so get a unique name for it and set the name
+        string newName = getUniqueName();
+        pop.setName(newName);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+RegionDS::validateFreqs(double a,
+                        double g,
+                        double c,
+                        double t)
+{
+    double sum = 0;
+
+    sum = a + g + c + t;
+
+    if (fabs(sum-1.0) > 0.001)  // Epsilon taken from /tree/interface.cpp
+    {
+        throw InvalidFrequenciesError("The frequencies don't add up to 1.");
+    }
+    else
+        return;
+}
+
+//------------------------------------------------------------------------------------
+
+string
+RegionDS::getUniqueName() const
+{
+    // If running in the GUI, we don't yet want a unique name. If
+    // we create it now, we won't be able to distinguish between
+    // names that were in the files and ones we created.
+#ifdef GUI
+    string empty = "";
+    return empty;
+#endif
+    // Use the random in lamarc/lib/
+    Random random; // uses system clock
+
+    int char1 = abs(random.Long() % 26) + 65;
+    int char2 = abs(random.Long() % 26) + 65;
+    int char3 = abs(random.Long() % 26) + 65;
+    int char4 = abs(random.Long() % 26) + 65;
+    char name[6];
+
+    sprintf(name, "%c%c%c%c", char1, char2, char3, char4);
+
+    string popName(name);
+    popName = "Population " + popName;
+
+    if (doesPopulationNameExist(popName))
+    {
+        return getUniqueName();
+    }
+
+    return popName;
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+RegionDS::doesPopulationNameExist(const string& name) const
+{
+    if (m_pops.find(name) == m_pops.end())
+    {
+        //  The population name doesn't exist, so return false
+        return false;
+    }
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+string
+RegionDS::getXML(unsigned int numTabs) const
+{
+    string regionXML;
+
+    addTabs(numTabs, regionXML);
+    regionXML = regionXML + "<region name=\"" + m_regionName + "\">\n";
+
+    //  Now, let's write out the spacing information.
+
+    ++numTabs; // need to keep this even in non-menu mode to keep the
+    // unsigned numTabs at the expected value--otherwise it
+    // may go negative
+
+    regionXML += m_spacing.getXML(numTabs);
+
+    // Now write out the populations
+    PopMap::const_iterator i;
+    for (i = m_pops.begin(); i != m_pops.end(); i++)
+    {
+        regionXML+= (i->second).getXML(numTabs);
+    }
+
+    --numTabs;
+    addTabs(numTabs, regionXML);
+    regionXML += "</region>\n";
+
+    return regionXML;
+}
+
+//____________________________________________________________________________________
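A sketch of the population-merging behaviour documented in addPopulation() above
(names and sequences are invented for the example):

    #include "Converter_RegionDS.h"
    #include "Converter_ModelDS.h"
    #include "Converter_PopulationDS.h"
    #include "Converter_IndividualDS.h"

    void buildExampleRegion()
    {
        ModelDS model;                                    // default F84 model
        PopulationDS north("north");
        north.addIndividual(IndividualDS("n1", "ACGT"));

        RegionDS region("mito", model, north);

        // A second population with the same name is merged, not duplicated:
        // only individuals with new names join the existing "north".
        PopulationDS northAgain("north");
        northAgain.addIndividual(IndividualDS("n2", "ACGA"));
        region.addPopulation(northAgain);
    }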
diff --git a/src/conversion/Sample7.txt b/src/conversion/Sample7.txt
new file mode 100644
index 0000000..0d91eb9
--- /dev/null
+++ b/src/conversion/Sample7.txt
@@ -0,0 +1,144 @@
+15 662
+Locusta   GATTAATCCGAACATTACACGCAAATGGGGCTTCAATATTCTTCATCTGCATATATTTACATGTAGGACGAGGAATTTAT
+TrachyrhacGAATTATCCGAACACTACACGCAAATGGAGCTTCAATATTCTTCATTTGTATTTATCTTCATGTAGGGCGAGGAATCTAC
+Camnula   GAATTATCCGAACTCTACATGCAAATGGAGCGTCAATACTCTCATTTTGCATTTATCTACATGTAGGACGAGGAATTTAT
+Metator   GAATTATCCGAACATTACATGCAAATGGAGCATCAATATTCTTTATCTGTATCTACCTACATGTAGGACGAGGAATCTAT
+Sph_co.   GAATTATTCGAACATTACATGCAAATGGAGCATCAATATTCTTCATTTGCATTTATTTACATGTAGGACGAGGAATCTAT
+TrimerotroGAAGTATCCGAACATTACATGCATATGGAGCATCAATATTCTTTATTTGTATTTATTTACATGTAGGACGAGGAATTTAT
+Sph_ca.   GAATTATTCGAACATTACATGCAAATGGAGCCTGATTATTCTTTATTTGTATCTACTTACATGTAGGGCGAGGAATTTAT
+DissosteirGAATTATCCGAACATTACATGCATATGGAGCATCTATATTCTTTATTTGTATATACTTACATGTAGGACGAGGAATTTAT
+CircotettiGAATCATTCGAACATTTCATGCAAATGGAGCATCAATATTCTTTATCTGTATTTATTTACATGTAGGACGAGGAATTTAC
+Arphia_c. GAATTATCCGAACACTACATGCAAATGGAGCATCAATATTTTTCATTTGCATTTATCTACATGTAGGACGAGGAATTTAT
+Arphia_p. GAATTATCCGAACACTACATGCAAATGGAGCATCAATATTTTTCATTTGTATTTATCTACATGTAGGACGAGGAATTTAT
+EncoptolopGCGTTATTCGAACATTACATGCAAATGGGGCATCTATATTTTTTATTTGTATTTATTTACATGTAGGACGAGGAATTTAT
+ChortophagGAATCATCCGAACATTACATGCAAATGGAGCATCTATATTTTTTATTTGTATTTATTTGCATGTAGGACGAGGAATTTAT
+MelanoplusGAATTATTCGAACTCTTCATGCCAATGGAGCATCAATATTTTTCATTTGTATTTATCTTCATGTCGGTCGAGGAATTTAC
+Acheta    GACTATTACGAACTATACATGCTAATGGAGCGTCATTCTTCTTCATCTGCTTATATATACACGTAGGACGTGGAATTTAT
+
+TATGGATCATACATGTATATAAATACATGAATAACAGGAACAATTATTTTATTCTTAGTAATAGCAACAGCATTTATAGG
+TATGGATCTTATATATTTATGAATACTTGAATAACAGGAACAATTATCTTGTTTTTAGTAATAGCAACAGCATTTATAGG
+TATGGATCATATATATACATAAATACTTGAATAACAGGTACATTAATTTTATTCTTAGTTATAGCAACAGCATTTATAGG
+TATGGATCCTTTATATTCATAAATACTTGAATAACAGGGACAATTATCCTATTCTTAGTAATAGCAACAGCATTTATAGG
+TATGGATCATACATATATATAAATACTTGAATAACAGGAACAATAATTTTATTCTTAGTAATAGCAACAGCATTTATAGG
+TATGGATCATATATATATATAAATACTTGAATAACAGGAACAATAATTCTATTCTTAGTAATAGCAACAGCATTCATAGG
+TATGGATCATATATATATATAAATACTTGAATAACAGGAACAATAATACTATTCTTAGTAATAGCAACAGCATTTATAGG
+TATGGATCATATATATATATAAATACTTGAATAACAGGAACAATAATTATATTTTTAGTTATAGCAACAGCATTTATAGG
+TATGGATCATACATATATATAAATACTTGAATAACAGGAACAGTAATTTTATTCTTAGTAATAGCAACAGCATTTATAGG
+TATGGATCATATATATACATAAATACATGAATAACAGGTACATTAATTTTATTCCTAGTAATAGCAACAGCATTTATGGG
+TATGGATCATATATATACATAAATACATGAATAACAGGTACATTAATTTTATTCCTAGTAATAGCAACAGCATTTATAGG
+TATGGATCATATATATATACAAGAACATGAATAATTGGAACAATTATTTTATTTCTTGTAATAGCAACCGCATTCATAGG
+TATGGATCATATATATATACAAGAACATGAATAATTGGAACAATTATTTTATTCCTTGTTATAGCAACTGCATTTATAGG
+TATGGATCATATATATATACTCATACTTGAATAATTGGAACAATTATTTTATTCTTAGTTATAGCAACTGCATTTATAGG
+TATGGATCCTACAATTTAATACACACATGAATAGTAGGTATTCTAATCTTATTCCTAGTAATAGCTGCCGCTTTCATAGG
+
+ATACGTTTTACCATGAGGTCAAATATCATTTTGAGGAGCAACAGTAATTACAAATTTACTATCAGCAATTCCTTATATTG
+ATACGTTTTACCATGAGGGCAAATATCATTTTGGGGAGCAACAGTAATTACAAACCTACTATCAGCCATCCCTTATATTG
+ATATGTATTACCATGAGGACAAATATCATTCTGAGGAGCAACAGTAATTACAAATCTACTATCAGCAATTCCATATATCG
+TTATGTTCTACCATGAGGACAAATATCCTTTTGAGGAGCAACAGTAATTACAAATCTATTATCAGCCATTCCATATATTG
+ATACGTATTACCATGAGGACAAATATCATTCTGAGGAGCAACAGTAATTACTAATCTATTATCAGCCATTCCATATATAG
+ATATGTATTACCATGAGGACAAATATCATTCTGAGGAGCAACAGTAATTACCAATCTATTATCAGCTATCCCGTATATAG
+ATATGTATTACCATGAGGACAAATATCATTCTGAGGAGCAACAGTAATTACTAATCTATTATCAGCTATCCCATATATAG
+ATATGTATTACCGTGAGGACAAATATCATTTTGAGGAGCAACAGTAATCACAAACTTATTATCAGCTATCCCTTATATTG
+ATATGTATTACCATGAGGACAAATATCATTCTGAGGGGCAACAGTTATTACTAATTTATTATCAGCCATCCCATACATTG
+ATATGTATTACCATGAGGACAAATATCATTTTGAGGAGCAACCGTAATTACAAACCTACTATCAGCAATCCCATATATCG
+ATATGTATTACCATGAGGACAAATATCATTTTGAGGAGCAACCGTAATTACAAATCTACTATCAGCAATTCCATATATCG
+ATATGTATTACCATGAGGACAAATATCATTTTGAGGAGCAACAGTAATCACAAATTTATTATCAGCAATTCCATATTTAG
+ATACGTATTACCATGAGGACAAATATCATTTTGAGGAGCAACAGTAATTACAAATTTATTATCAGCAATTCCATATTTAG
+TTATGTATTACCATGAGGACAAATATCATTTTGGGGTGCAACAGTAATTACTAATTTATTATCAGCAATTCCATATTTAG
+TTACGTCTTACCATGAGGACAAATATCATTTTGAGGAGCCACAGTTATTACTAACCTCTTATCAGCAATTCCTTATCTAG
+
+GAACAGACATTGTTCAATTCTTAATCCAACATCGAGGTCGCAATCCATTTTACCGATAAGAACTCTAAAAAATGATTACG
+GAACCGACATTGTTCAATTCTTAATCCAACATCGAGGTCGCAACCCATTTTGTCGATATGAACTCTCAAAAATGATTACG
+GAACAGATATTGTTCAATTCTTAATCCAACATCGAGGTCGCAACCCATTTTGTCGATATGAACTCTCAAAAATGATTACG
+GAACAGATATTGTTCAATTCTTAATCCAACATCGAGGTCGCAACCCATTTTGTCGATATGAACTCTTAAAAATGATTACG
+GTACAGATATTGTTCAATTCTTAATCCAACATCGAGGTCGCAAACCATTTTGTCGATATGAACTCTCAAAAATGATTACG
+GGACAGAAATTGTACAATTCTTAATCCAACATCGAGGTCGCAACCCATTTTGTCGATATGAACTCTCAAAAATGATTACG
+GCACAGAAATTGTACAATTCTTAATCCAACATCGAGGTCGCAACCCATTTCGTCGATATGAACTCTCAAAAATGATTACG
+GAACAGATATTGTTCAGTTCTTAATCCAACATCGAGGTCGCAACCCATTTTGTCGATAGGAACTCTCAAAAATGATTACG
+GAACAGACATTGTTCAATTCTTAATCCAACATCGAGGTCGCAACCCATTTTGTCGATATGAACTCTCAAAAATGATTACG
+GAACAGATATTGTTCAATTCTTAATCCAACATCGAGGTCGCAATCCATTTTGTCGATATGAACTCTCAAAAATGATTACG
+GAACAGATATTGTTCAATTCTTAATCCAACATCGAGGTCGCAACCCATTTTGTCGATATGAACTCTCAAAAATGATTACG
+GAAATGAATTAGTACAATTCTTAATCCAACATCGAGGTCGCAATCTGCTTTGTCGATATGAGCTCTCAAAAACAATTACG
+GAAATGAATTAGTACAATTCTTAATCCAACATCGAGGTCGCAATCTGTTTCGTCGATATGAGCTCTCAAAAACAATTACG
+GAACAGACTTAGTTCAATTCTTAATCCAACATCGAGGTCGCAATCTGCTTTGTCAATATGAGCTCTCAAAAACAATTACG
+GAACTGATCTAGTACAATTCTTAATCCAACATCGAGGTCGCAATCTTTATTATCAATATGAACTCTCCAATAACATTACG
+
+CTGTTATCCC-TAAGGTAACTTAATCTTATAATCACAAATTATGGATCAAA-TAAACATAAATTAATGATTT-ATAATGA
+CTGTTATCCC-TAAGGTAACTTAATCTTTTGATCATAAATTATGGATCAAA-TAAACATAAATCAATGATTTAATAATGA
+CTGTTATCCC-TAAGGTAACTTTATCTTATGATCATAAATTATGGATCAAA-TAAACATAAATCAATGATTTAATCATGA
+CTGTTATCCC-TAACCTAACTTAATCTTTTGATCATAAATTATGGATCAAA-TGAACACAAATTAATGATTTTATAATGA
+CTGTTATCCC-TAAGGTAACTTAATCTTATGATCATAAATTATGGATCAAA-TAAACATAAATCAATGATTTTATAATGA
+CTGTTATCCC-TAAGGTAACTTAATCTTATGATCATAAATTATGGATCAAA-TAAACATAAATTAATGATTTTATAATGA
+CTGTTATCCC-TAAGGTAACTTAATCTTATGATCATAAATTATGGATCAAA-TAAACATAAATTAATGAATTTATAATGA
+CTGTTATCCC-TAAGGTAACTTAATCTTATGATCATAAATTATGGATCAAA-TAAACATAAATCAATGATTTAATAATGA
+CTGTTATCCC-TAAGGTAACTTAATCTTATGATCATAAATTATGGATCATA-CAAACATAAATAAATGATTTAATAATGA
+CTGTTATCCC-TAAGGTAACTTAATCTTTTGATCATAAATTATGGATCAAA-TAAACATAAATCAATGATTTAATAATGA
+CTGTTATCCC-TAAGGTAACTTAATCTTTTGATCATAAATTATGGATCAAA-TAAACATAAATCAATGATTTAATAATGA
+CTGTTATCCC-TAAGGTAACTTAATCTTATGATCATAAATTATGGATCATT-TAAACATAAATTAATGATTAAATAATGA
+CTGTTATCCC-TAAGGTAACTTAATCTTATGATCATAAATTATGGATCATT-TAAACATAAATTAATGATTTAATAATGA
+CTGTTATCCC-TAAGGTAACTTAATCTTATGATCATAAATTATGGATCAAAATAAACATAAATCAATGATTTTATAATGA
+CTGTTATCCCCTAAGGTATCTTTATCTCAT-TCCATAA-TACAGGA-CAATTAACCCAATCATCA-TAGTTAAA-CATAA
+
+AGA-GTTTAATTATTCTTCATGTCACCCCAACAAAACAT--AAAAATTAAC-ATT--AAAAT-TAAACTATATAATAAAA
+AGA-GTTTATTTATCCTTCAAGTCGCCCCAACTAAACATTAAAAAACTTA-TATTAAAAGA--TAAGCTAAAATAA-TAC
+AGA-GTTTAATTATTCTTCAAGTCACCCCAACTAAACATTTAAAAATTTA-TACTTAAAAA--TAAACTAAAATAA-TAT
+AGA-GTTTCTTTATTCTTCAAGTCACCCCAACCAAACATTAAAAATCT-A-TATTAAAAGA--TAAACTAAAATAA-TAA
+AGA-GTTTAATTATTCTTCAAGTCACCCCAACTAAACATTATAAAAATT-ATACTTAAAAA--TAAACCAAAATAA-TAT
+AGA-GTTTAATTATTCTTCAAGTCACCCCAACTAAACATTATAAAAATTT-TATTTTAAAA--TAAACCAAAATAA-TAT
+AGA-GTTTAATTATTCTTCAAGTCACCCCAACTAAACACTATAAAAATTT-TATTTTAAAA--TAAACCAAAATAA-TAT
+AGA-GTTTAATTATTCTTCAAGTCACCCCAACTAAACATTAAAAAACTTA-TACTTAAAAA--TAGACAAAAATAA-TAT
+AGA-GTTTAATTATTCTTCAAGTCACCCCAACTAAACATAGTAAAAATT-ATATTTAAAAA--TAAACCAAAATAA-TAT
+AGA-GTTTAATTATTCTTCATGTCACCCCAACCTAACATTATAAAAATT-ATATTTAAAAATCTAAACTAAAATAA-TAT
+AGA-GTTTAATTATTCTTCATGTCACCCCAACCTAACATTATAAAAATT-ATATTTAAAAATATAAACTAAAATAA-TAT
+AGA-GTTTTTTTATTCTTCATGTCACCCCAACAAAACAT--CAACATTTAATATAAAAAGA--TAA--TAAAAGAAATAT
+AGA-GTTTATTTATTCTTCATGTCACCCCAACAAAACAT--CAACAATTAATATAAAAAGA--TAA--TAAAAAAAATAT
+AGA-GTTTATTTATTCTTCATGTCACCCCAACAAAACATT--ATCTTAAAATATAAAAA-A-----ACTATCTAAAA-AT
+AGAAGTTAAATTATTCTCT-TGTCACCCCAACCAAATAC-AATATATTAACATCTATAAAAAACCACCAAACA--TAAAT
+
+ATTAATTTAA--TAAA-TGTAAAGCTCTATAGGGTCTTCTCGTCC-CAAGAAATATTTAAGCCTTTTAACTTAAAAGTTA
+AAAA-CTTA---TAAA-TGTTAAGCTCTATAGGGTCTTCTCGTCCTAAAGAAATATTTAAGCCTTTTAACTTAAAAGTTA
+AAATT-TTAAAA-A---TGTTAAGCTCTATAGGGTCTTCTCGTCCTCAAGGAAAATTTAAGCCTTTTAACTTAAAAGTTA
+AAAAT-TAT---TAAA-TGTTAAGCTCTATAGGGTCTTCTCGTCCTAAAGAAATATTTAAGCCTTTTAACTTAAAAGTTA
+AAAAA-TAAAAA-----TGTCAAGCTCTATAGGGTCTTCTCGTCCTAAAGAAAAATTTGAGCCTTTTAACTCAAAAGTTA
+AAAAA-TAAAAA-----TGTCAAGCTCTATAGGGTCTTCTCGTCCTAAAGAAATATTTGAGCCTTTTAACTCAAAAGTTA
+AAAAA-TAAAAA-----TGTCAAGCTCTATAGGGTCTTCTCGTCCTAAAGAAATATTTGAGCCTTTTAACTCAAAAGTTA
+AAA-TCTAAAAA-----TGTTAAGCTCTATAGGGTCTTCTCGTCCTAAAGAAATATTTAAGCCTTTTAACTCAAAAGTTA
+AAAAT-TAAA-TA----TGTCAAGATCTATAGGGTCTTCTCGTCCTAAAGAAACATTTGAGCCTTTTAACTCAAAAGTTA
+AAAATCTTTAA-A----TGTAAAGCTCTATAGGGTCTTCTCGTCCTAAAAAAAAATTTAAGCCTTCTAACTCAAAAGTTA
+AAAATCTTTAA-A----TGTAAAGCTCTATAGGGTCTTCTCGTCCTAAAAAAAAATTTAAGCCTTCTAACTCAAAAGTTA
+TAAA--TTAAAA-----TGTAAAGCTCTATAGGGTCTTCTCGTCCTAAAGAAAAATTTAAGCCTTTTAACTTAAAAGTTA
+TAAA--TTAAAA-----TGTAAAGCTCTATAGGGTCTTCTCGTCCTAAAGAAAAATTTAAGCCTTTTAACTTAAAAGTTA
+TATA--TTAAAATAAAATGTCAAGCTCTATAGGGTCTTCTCGTCTTTAAGAAAAATTTAAGCCTTTTAACTCAAAAGTTA
+TTCAATATAA--TAA--TATTAAGATCTATAGGGTCTTCTCGTCCCCTATTTTCATTTGAGCCTTTTAACTCAAAAGTAA
+
+AATTCTAAAAATATTATTAAGAGACAGTAAATTT-CTCGTCCAACCATTCATTCCAGTCCCCAATTAAGAAACTAA-TGA
+AATTCTA-AAAT-TTATTAAGAGACAGTTAATTT-CTCGTGAAACCATTCATTCAAGCCTCTAATTAAGAAACTAA-TGA
+AATTCTA-AAAC-TTATTAAGAGACAGTTAATTT-CTCGTCAAACCATTCATACAAGCCCCTAATTAAGAAACTAA-TGA
+AATTCTA-AAAT-TTATTAAGAGACAGTTAATTT-CTCGTCAAACCATTCATACAAGCCTCTAATTAAGAAACTAA-TGA
+AATTCTA-AAAT-TTAATAAGAGACAGTTAATTT-CTCGT-AAACCATTCATTCAAGCCTCTAATTAAGAAACTAA-TGA
+AATTCTA-AAAT-TTATTAAGAGACAGTTAATTT-CTCGTTAAACCATTCATTCAAGCCTCTAATTAAGAGACTAA-TGA
+AATTCTA-AAAT-TTATTAAGAGACAGTTAATTT-CTCGTTAAACCATTCATTCAAGCCCCCAATTAAGAGACTAA-TGA
+AATTCCA-AAAT-TTATTAAGAGACAGTTAATTT-CTCGTTAAACCATTCATTCAAGCCCCTAATTAAGAATCTAA-TGA
+AATTCTA-AAAT-TTATTAAGAGACAGTCAATTT-CTCGTTAAACCATTCATTCAAGCCTCTAATTAAGAAACTAACTGA
+AATTCTA-AATT-TTATTAAGAGGCATTTTGTTTTCTCGTCAATCCCTTCATTCAAGCCCCTAATTAAGAAACTAA-TGA
+AATTCTA-AAAA-TTATTAAGAGACATTTTGTTTTCTCGTCAATCCATTCATTCAAGCCCCTAATTAAGAAACTAA-TGA
+AATTATA-AAAT-TTATTAAGAGACAGTTAATTT-CTCGTCAAACCATTCATTCCAGCCTCTAATTAAGAAACTAA-TGA
+AATTATA-AAAT-TTATTAAGAGACAGTTAATTT-CTCGTCAAACCATTCATTCCAGCCTCTAATTAAGAAACTAA-TGA
+AATTCTATTA-T-TTAATAAGAGACAGTTAATTT-CTCGTCAAGCCATTCATTCCAGCCCCTAATTAAGAGACTAA-TGA
+AATTCAATA--CATCACTCCGAGACAGTTATTATT-TCGTCAAACCATTCATTCCAGCCTCCAATTAAAAGACTAA-TGA
+
+TTATGGT-ACCATTTGCACAGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCTTACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
+TTATGCT-ACC-TTTGCACGGT
diff --git a/src/conversion/Sequence.cpp b/src/conversion/Sequence.cpp
new file mode 100644
index 0000000..6bf3eee
--- /dev/null
+++ b/src/conversion/Sequence.cpp
@@ -0,0 +1,201 @@
+// $Id: Sequence.cpp,v 1.13 2010/03/17 17:25:58 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "Converter_Sequence.h"
+#include "Converter_DataSourceException.h"
+#include "random.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+Sequence::Sequence (const string &src)
+{
+    m_sequence = AsStringVec(src);
+    trimSequence();
+    string sequence = asString();
+    validate(sequence);
+    setDataType ( "DNA" );  // Default Data Type
+    Random random; // uses system clock
+    setName ( random.Name() );
+}                               // Sequence::Sequence
+
+Sequence::Sequence (const string &src,
+                    const string &dataType,
+                    const string &name)
+{
+    if (dataType == lamarcstrings::DNA || dataType == lamarcstrings::SNP)
+    {
+        m_sequence = AsStringVec(src);
+        trimSequence();
+    }
+    else if (dataType == lamarcstrings::MICROSAT || dataType == lamarcstrings::KALLELE)
+    {
+        m_sequence.push_back(src);
+    }
+
+    if (dataType == lamarcstrings::DNA)
+    {
+        string sequence = asString();
+        validate(sequence);
+    }
+
+    setDataType ( dataType );
+    setName ( name );
+}                               // Sequence::Sequence
+
+Sequence::Sequence (const vector<string> &src,
+                    const string &dataType,
+                    const string &name)
+{
+    m_sequence = src;
+    if (dataType == lamarcstrings::DNA || dataType == lamarcstrings::SNP ) trimSequence();
+
+    if (dataType == lamarcstrings::DNA)
+    {
+        string sequence = asString();
+        validate(sequence);
+    }
+
+    setDataType ( dataType );
+    setName ( name );
+}                               // Sequence::Sequence
+
+//------------------------------------------------------------------------------------
+
+Sequence::~Sequence ()
+{
+}                               // Sequence::~Sequence
+
+//------------------------------------------------------------------------------------
+
+// If only the standard allowed for a string::string(char&) ctor,
+// then we'd just use
+//   vector<string>::assign(src.begin(),src.end())
+// in place of this whole messy function!
+vector<string> Sequence::AsStringVec(const string& src) const
+{
+    vector<string> sequence;
+    string::const_iterator marker;
+    for(marker = src.begin(); marker != src.end(); ++marker)
+        sequence.push_back(string(1L,*marker));
+
+    return sequence;
+
+} // AsStringVec
+
+//------------------------------------------------------------------------------------
+
+string
+Sequence::asString() const
+{
+    string sequence;
+    vector<string>::const_iterator seq;
+    for(seq = m_sequence.begin(); seq != m_sequence.end(); ++seq)
+        sequence += *seq;
+    return sequence;
+}
+
+// ____________________________________________________
+
+long
+Sequence::getSequenceLength() const
+{
+    long length = m_sequence.size();
+    return length;
+}
+
+void
+Sequence::setDataType( const string &dataType)
+{
+    m_dataType = dataType;
+}
+
+void
+Sequence::validate(const string &src) const
+{
+    // find_first_not_of() returns string::npos when every character is legal,
+    // so test against npos instead of forcing the result into an int.
+    string::size_type illegalPosition =
+        src.find_first_not_of ("agctumrwsykvhdbnox?-AGCTUMRWSYKVHDBNOX");
+
+    if (illegalPosition != string::npos)
+    {
+        string err("Invalid character in sequence: ");
+        err += src[illegalPosition];
+        err += ";";
+
+        throw InvalidNucleotideError(err);
+    }
+}
+
+void
+Sequence::trimSequence()
+{
+    string sequence = asString();
+    // Trim leading and trailing whitespace.
+    size_t beg = sequence.find_first_not_of(" \n\t");
+    size_t end = sequence.find_last_not_of(" \n\t");
+
+    if (beg == string::npos)   // nothing but whitespace (or empty)
+    {
+        m_sequence.clear();
+        return;
+    }
+
+    // Mary fix:  off by one error (STL uses half-open intervals) 2/20/02
+    sequence = sequence.substr(beg, end - beg + 1);
+
+    m_sequence = AsStringVec(sequence);
+}
+
+string
+Sequence::getXML(unsigned int numTabs) const
+{
+    string sequenceXML;
+
+    addTabs(numTabs, sequenceXML);
+    sequenceXML += "<sample name=\"" + m_name + "\">\n";
+
+    ++numTabs;
+    addTabs(numTabs, sequenceXML);
+    sequenceXML += "<datablock type=\"" + m_dataType + "\">\n";
+
+    ++numTabs;
+    addTabs(numTabs, sequenceXML);
+    string sequence;
+    if (m_dataType == lamarcstrings::DNA || m_dataType == lamarcstrings::SNP)
+        sequence = asString();
+    else
+    {
+        vector<string>::const_iterator mseq;
+        for(mseq = m_sequence.begin(); mseq != m_sequence.end(); ++mseq)
+        {
+            sequence += *mseq + " ";
+        }
+    }
+    sequenceXML += sequence;
+    sequenceXML += "\n";
+
+    --numTabs;
+    addTabs(numTabs, sequenceXML);
+    sequenceXML += "</datablock>\n";
+
+    --numTabs;
+    addTabs(numTabs, sequenceXML);
+    sequenceXML += "</sample>\n";
+
+    return sequenceXML;
+}
+
+//____________________________________________________________________________________
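
For orientation, here is a minimal driver sketch showing how the three-argument
Sequence constructor and getXML() above fit together.  The standalone main(),
the sample name, and the assumption that "constants.h" supplies
lamarcstrings::DNA (as the string "DNA") are illustrative only, not part of the
converter build.

    #include <iostream>
    #include "Converter_Sequence.h"
    #include "constants.h"   // assumed home of lamarcstrings::DNA

    int main()
    {
        // Build a DNA sequence and print its LAMARC XML representation.
        Sequence seq("ACGTACGTAA", lamarcstrings::DNA, "sample_1");
        std::cout << seq.getXML(0);
        // Expected shape (real output indents with one tab per level):
        // <sample name="sample_1">
        //   <datablock type="DNA">
        //     ACGTACGTAA
        //   </datablock>
        // </sample>
        return 0;
    }
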
diff --git a/src/conversion/SpaceConverter.cpp b/src/conversion/SpaceConverter.cpp
new file mode 100644
index 0000000..48465a9
--- /dev/null
+++ b/src/conversion/SpaceConverter.cpp
@@ -0,0 +1,165 @@
+// $Id: SpaceConverter.cpp,v 1.13 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <vector>
+#include <iostream>
+#include <sstream>
+
+#include "Converter_SpaceConverter.h"
+#include "Converter_DataSourceException.h"
+#include "Converter_LamarcDS.h"
+#include "stringx.h"
+
+//----------------------------------------------------------------------
+
+SpaceConverter::SpaceConverter(LamarcDS& lamarc) :
+    ConverterIf(), m_lamarc(lamarc)
+{
+} // SpaceConverter::ctor
+
+//----------------------------------------------------------------------
+
+SpaceConverter::~SpaceConverter()
+{
+} // SpaceConverter::dtor
+
+//----------------------------------------------------------------------
+
+void SpaceConverter::ValidateSpaceInfo(const SpaceMap& spaces,
+                                       const string& fname) const
+{
+    // now make sure that every region that is addressed in the space info
+    // exists in the data under the same name
+    SpaceMap::const_iterator spaceit;
+    for(spaceit = spaces.begin(); spaceit != spaces.end(); ++spaceit)
+    {
+        if (!m_lamarc.doesRegionNameExist(spaceit->first))
+        {
+            string errormsg = "Region name " + spaceit->first;
+            errormsg += " was mentioned in file " + fname;
+            errormsg += "\nbut no corresponding region name was found in the data.\n";
+            errormsg += "Make sure that region names are used";
+            errormsg += " consistently in all parts of data entry.";
+            throw FileFormatError(errormsg);
+        }
+    }
+
+} // SpaceConverter::ValidateSpaceInfo
+
+//----------------------------------------------------------------------
+
+SpaceMap SpaceConverter::ReadSpacingInfo(const string& filename) const
+{
+    SpaceMap spacings;
+
+    ifstream spacefile(filename.c_str(),ios::in);
+
+    if (!spacefile)
+    {
+        string errormsg = "I could not find file, " + filename;
+        throw FileFormatError(errormsg);
+    }
+
+    string line, somestring;
+    if (!getLine(spacefile,line))
+    {
+        string errormsg = "Your file, " + filename + ", appears";
+        errormsg += " to be empty.";
+        throw FileFormatError (errormsg);
+    }
+
+    bool notdone = true;
+    bool firsttime  = true;
+    long linenumber = 1;
+    string regionname;
+    vector<long> positions;
+
+    // now parse the file line by line
+    while(notdone)
+    {
+        istringstream linestream(line);
+
+        skipWhiteSpace(linestream);
+        somestring.erase();
+
+        // process the first token in the line.
+        if (getName(linestream,somestring))
+        {
+            if (!firsttime)  // insert previous finished region
+                spacings.insert(make_pair(regionname,positions));
+            firsttime = false;
+            regionname = somestring;
+            positions.clear();
+        }
+        else
+        {
+            if (IsInteger(somestring))
+            {
+                long position = FlagCheck(somestring,
+                                          string("position number"));
+                positions.push_back(position);
+            }
+            else
+            {
+                string errormsg = "An illegal region name was found";
+                errormsg += " on line " + ToString(linenumber);
+                errormsg += " of file " + filename +"\nTo be legal";
+                errormsg += " a region name must have at least one";
+                errormsg += " alphabetic character in it.";
+                throw FileFormatError (errormsg);
+            }
+        }
+
+        // process the rest of the line, which must be numbers
+        skipWhiteSpace(linestream);
+        while (linestream.good())
+        {
+            somestring.erase();
+            if (getNumber(linestream,somestring))
+            {
+                long position = FlagCheck(somestring,
+                                          string("position number"));
+                positions.push_back(position);
+            }
+            else
+            {
+                string errormsg = "The non-number " + somestring + " was";
+                errormsg += " encountered while reading region " + regionname;
+                errormsg += " in file " + filename + " on line ";
+                errormsg += ToString(linenumber);
+                throw FileFormatError (errormsg);
+            }
+            skipWhiteSpace(linestream);
+        }
+
+        line.erase();
+        notdone = getLine(spacefile,line);
+        ++linenumber;
+    }
+    // add the last region to the map
+    spacings.insert(make_pair(regionname,positions));
+
+#ifndef GUI
+    ValidateSpaceInfo(spacings,filename);
+#endif
+
+    return spacings;
+
+} // SpaceConverter::ReadSpacingInfo
+
+//----------------------------------------------------------------------
+
+void SpaceConverter::addConvertedLamarcDS(LamarcDS& lamarc)
+{
+    lamarc.mergeTo(m_lamarc);
+} // SpaceConverter::addConvertedLamarcDS
+
+//____________________________________________________________________________________
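
For reference, the spacing file parsed by ReadSpacingInfo() above is line
oriented: each region begins with a token containing at least one alphabetic
character (the region name), followed by whitespace-separated integer
positions, which may continue on later lines until the next region name
appears.  A made-up example of an accepted layout:

    region1   5  27  103  250
    region2   1  14
              88  140

Every region name used in such a file must also exist in the LAMARC data, or
ValidateSpaceInfo() throws a FileFormatError (that check is skipped when the
converter is built with GUI defined).
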
diff --git a/src/conversion/SpacingDS.cpp b/src/conversion/SpacingDS.cpp
new file mode 100644
index 0000000..b3611fd
--- /dev/null
+++ b/src/conversion/SpacingDS.cpp
@@ -0,0 +1,187 @@
+// $Id: SpacingDS.cpp,v 1.16 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <algorithm>
+
+#include "Converter_SpacingDS.h"
+#include "Converter_DataSourceException.h"
+#include "stringx.h"
+
+//------------------------------------------------------------------------------------
+
+SpacingDS::SpacingDS()
+    : DataSourceIf(),
+      m_length(0),
+      m_offset(1),
+      m_map_position(1)
+{
+} // SpacingDS::default ctor
+
+//------------------------------------------------------------------------------------
+
+void SpacingDS::CheckConsistency(long nmarkers) const
+{
+    if (m_positions.back() - m_positions.front() >= m_length)
+    {
+#if 0
+        string errormsg = "The input length is less than the length ";
+        errormsg += "covered by the markers.";
+        throw InconsistentDataError(errormsg);
+#endif
+        throw MarkerLengthMismatchDataError(  m_positions.front(),
+                                              m_positions.back(),
+                                              m_length);
+    }
+
+#if 0
+    if (m_offset > m_positions.front() || m_offset + m_length <= m_positions.back())
+    {
+        string errormsg = "The given start of the region places some markers ";
+        errormsg += "outside of the region.";
+        throw InconsistentDataError(errormsg);
+    }
+#endif
+
+    if (m_offset > m_positions.front())
+    {
+        throw OffsetAfterFirstPositionDataError(m_offset,m_positions.front());
+    }
+
+    if (m_offset + m_length <= m_positions.back())
+    {
+        throw RegionEndBeforeLastPositionDataError(m_offset,m_length,m_positions.back());
+    }
+
+    long npositions = static_cast<long>(m_positions.size());
+
+    if (nmarkers != npositions)
+    {
+#if 0
+        string errormsg = "There are " + ToString(nmarkers);
+        errormsg += " markers in your data and ";
+        errormsg += ToString(npositions) + " positions for them to occupy.";
+
+        throw InconsistentDataError(errormsg);
+#endif
+
+        throw MarkerPositionMismatchDataError(nmarkers,npositions);
+    }
+
+} // SpacingDS::CheckConsistency
+
+//------------------------------------------------------------------------------------
+
+SpacingDS::SpacingDS(long length, long nmarkers) : DataSourceIf(), m_length(length),
+                                                   m_offset(1), m_map_position(1)
+{
+    // This constructor makes a SpacingDS when no information except
+    // length is given.  It assumes that all of the markers are lined
+    // up at the left end of the given length.  Data files produced
+    // in this way MUST NOT BE USED TO INFER RECOMBINATION.  The facility
+    // is provided only for non-recombination uses.
+
+    long i;
+    for (i = 0; i < nmarkers; ++i)
+    {
+        m_positions.push_back(i);
+    }
+
+} // SpacingDS::ctor
+
+//------------------------------------------------------------------------------------
+
+SpacingDS::SpacingDS(const vector<long>& positions, long length, long nmarkers) :
+    DataSourceIf(), m_positions(positions), m_length(length)
+{
+    std::sort(m_positions.begin(), m_positions.end());
+
+    m_offset = 1;
+    m_map_position = 1;
+
+    CheckConsistency(nmarkers);
+
+} // SpacingDS::ctor
+
+//------------------------------------------------------------------------------------
+
+SpacingDS::SpacingDS(const vector<long>& positions, long length,
+                     long offset, long map_position, long nmarkers) :
+    DataSourceIf(), m_positions(positions), m_length(length), m_offset(offset),
+    m_map_position(map_position)
+{
+    std::sort(m_positions.begin(), m_positions.end());
+
+    CheckConsistency(nmarkers);
+
+} // SpacingDS::maximal ctor
+
+//------------------------------------------------------------------------------------
+
+SpacingDS::~SpacingDS()
+{
+} // SpacingDS::default dtor
+
+//------------------------------------------------------------------------------------
+
+string SpacingDS::getXML(unsigned int numTabs) const
+{
+    string spaceXML;
+
+    if (m_length == 0) return spaceXML;
+
+    addTabs(numTabs,spaceXML);
+    spaceXML += string("<spacing>\n");
+
+    ++numTabs;
+    addTabs(numTabs,spaceXML);
+    spaceXML += string("<block>\n"); // this will cover loci later, so
+    // will have a loop here
+
+    ++numTabs;
+    addTabs(numTabs,spaceXML);
+    spaceXML += string("<map_position> ") + ToString(m_map_position);
+    spaceXML += string(" </map_position>\n");
+
+    addTabs(numTabs,spaceXML);
+    spaceXML += string("<length> ") + ToString(m_length);
+    spaceXML += string(" </length>\n");
+
+    if (!m_positions.empty())
+    {
+        addTabs(numTabs,spaceXML);
+        spaceXML += string("<locations> ");
+        vector<long>::const_iterator pos;
+        for(pos = m_positions.begin(); pos != m_positions.end(); ++pos)
+        {
+            spaceXML += ToString(*pos) + " ";
+        }
+        spaceXML += "\n";
+        addTabs(numTabs,spaceXML);
+        spaceXML += string("</locations>\n");
+    }
+
+    addTabs(numTabs,spaceXML);
+    spaceXML += string("<offset> ") + ToString(m_offset);
+    spaceXML += string(" </offset>\n");
+
+    --numTabs;
+    addTabs(numTabs,spaceXML);
+    spaceXML += string("</block>\n");
+
+    --numTabs;
+    addTabs(numTabs,spaceXML);
+    spaceXML += string("</spacing>\n");
+
+    return spaceXML;
+
+} // SpacingDS::getXML
+
+//____________________________________________________________________________________
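
As a sketch of what SpacingDS::getXML() above emits, with illustrative numbers
(real output indents with one tab per nesting level, and the method returns an
empty string when m_length is zero):

    <spacing>
        <block>
            <map_position> 1 </map_position>
            <length> 501 </length>
            <locations> 5 27 103 250
            </locations>
            <offset> 1 </offset>
        </block>
    </spacing>
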
diff --git a/src/conversion/UserFileUtil.cpp b/src/conversion/UserFileUtil.cpp
new file mode 100644
index 0000000..812867e
--- /dev/null
+++ b/src/conversion/UserFileUtil.cpp
@@ -0,0 +1,26 @@
+// $Id: UserFileUtil.cpp,v 1.4 2010/03/17 17:25:58 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "Converter_UserFileUtil.h"
+
+//----------------------------------------------------------------------
+
+bool UserFileUtil::IsFilePresent(const string& filename) const
+{
+    ifstream newfile(filename.c_str(), ios::in);
+
+    if (!newfile) return false;
+
+    return true;
+
+} // UserFileUtil::IsFilePresent
+
+//____________________________________________________________________________________
diff --git a/src/conversion/XmlParserUtil.cpp b/src/conversion/XmlParserUtil.cpp
new file mode 100644
index 0000000..4633cbe
--- /dev/null
+++ b/src/conversion/XmlParserUtil.cpp
@@ -0,0 +1,276 @@
+// $Id: XmlParserUtil.cpp,v 1.7 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002 Patrick Colacurcio, Peter Beerli, Mary Kuhner,
+  Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <sstream>
+
+#include "Converter_XmlParserUtil.h"
+#include "Converter_DataSourceException.h"
+#include "Converter_DataSourceException.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+XmlParserUtil::XmlParserUtil() {}
+
+XmlParserUtil::~XmlParserUtil() {}
+
+string
+XmlParserUtil::getNextTag (istream& is, map<string,string>& tagInfo)
+{
+    int i = 0;
+    int ch;
+    int firstCh;
+    string buffer;
+
+    int beginChar = '<';
+
+    skipToChar(is, beginChar);
+
+    //  if it's a comment, skip it.
+    firstCh=is.get();
+    if ((ch=is.get()) == '!')
+    {
+        return getNextTag(is, tagInfo);
+    }
+    else
+    {
+        is.putback(ch);
+        is.putback(firstCh);
+    }
+
+    while ((ch=is.get()) != EOF )
+    {
+        if ( (ch != '>') )
+        {
+            i = 1;
+            buffer += ch;
+        }
+        else if ( i == 1 )
+            break;
+    }
+    if (ch == EOF)
+    {
+        if (i == 0)
+        {
+            // putback the EOF
+            is.putback(ch);
+            return buffer;
+        }
+        else
+        {
+            // throw here
+            is.putback(ch);
+            return buffer;
+        }
+    }
+    if(i == 1)
+    {
+        // don't putback the last '>'
+        // but stick it into the buffer.
+        buffer += ">";
+        // is.putback(ch);
+    }
+    else
+        // throw here
+        return "";
+
+    bool startTag = stripTag(buffer, tagInfo);
+
+    if (startTag)
+    {
+        m_tagStack.push_back(tagInfo["TagName"]);
+    }
+    else
+    {
+        if (m_tagStack.empty())
+        {
+            // TODO Error
+            // Break
+            return "";
+        }
+        string lastTag = m_tagStack.back();
+        m_tagStack.pop_back();
+        if (lastTag != tagInfo["TagName"])
+        {
+            string err = "Invalid XML.  Unbalanced Tags: <" + lastTag + ">...<" +
+                tagInfo["TagName"] + ">.";
+            throw FileFormatError(err);
+        }
+    }
+    return buffer;
+
+}                               // XmlParserUtil::getNextTag
+
+bool
+XmlParserUtil::stripTag(string& tag, map<string, string>& tagInfo) const
+{
+    istringstream tagStream(tag);
+
+    int ch;
+    bool startTag;
+
+    // pull the initial '<'
+    if ((ch = tagStream.get()) != '<')
+    {
+        string err = "Invalid XML.  Could not parse tag: " + tag;
+        throw FileFormatError(err);
+    }
+
+    skipWhiteSpace(tagStream);
+
+    // if the next character is a '/', this is an end tag rather than a start tag
+    if ((ch = tagStream.get()) == '/')
+    {
+        startTag = false;
+    }
+    else
+    {
+        // put back whatever it was...
+        tagStream.putback(ch);
+        startTag = true;
+    }
+
+    // Next, get the next token.  (the tagName)
+    string tagName;
+    bool success = getToken(tagStream, tagName);
+
+    if (!success)
+    {
+        string err = "Invalid XML.  Could not parse tagName: " + tag;
+        throw FileFormatError(err);
+    }
+
+    tagInfo.insert(make_pair(string("TagName"), tagName));
+
+    // LAME! -plc
+    string startTagString = "true";
+    if (!startTag)
+    {
+        startTagString = "false";
+    }
+
+    tagInfo.insert(make_pair(string("IsStartTag"), startTagString));
+
+    skipWhiteSpace(tagStream);
+
+    string attributeName;
+    string attributeValue;
+
+    bool getName = true;
+    bool inQuotes = false;
+
+    //  Now, get the attributes.
+    while (((ch=tagStream.get()) != EOF) && (ch != '>'))
+    {
+        if ( (ch != '=') && (ch != '"') )
+        {
+            if (getName)
+            {
+                attributeName += ch;
+            }
+            else
+            {
+                if (inQuotes)
+                {
+                    attributeValue += ch;
+                }
+            }
+        }
+        else if (ch == '=')
+        {
+            getName = false;
+        }
+        else if (ch == '"')
+        {
+            if (inQuotes)
+            {
+                inQuotes = false;
+                getName = true;
+                skipWhiteSpace(tagStream);
+                tagInfo.insert(make_pair(attributeName, attributeValue));
+                attributeName = "";
+                attributeValue = "";
+            }
+            else
+            {
+                inQuotes = true;
+            }
+        }
+    }
+    return startTag;
+}                               // XmlParserUtil::stripTag
+
+bool
+XmlParserUtil::getTagValue (istream& is, string& buffer)
+{
+    string currentLocation = getLocation();
+
+    int ch;
+
+    while ((ch=is.get()) != EOF )
+    {
+        if ( (ch != '<') )
+        {
+            buffer += ch;
+        }
+        else
+        {
+            map<string, string> tagInfo;  // Note that we don't actually use this here.
+            is.putback(ch);
+            string nextTag = getNextTag(is, tagInfo);
+            // If the tag-stack location is now "less" than where we started
+            // (in string-comparison terms), a tag has been popped off the
+            // stack and we're done.
+            if (currentLocation > getLocation() )
+            {
+                break;
+            }
+            // otherwise, add the tag info to the deal.
+            buffer += nextTag;
+        }
+    }
+    if (ch == EOF)
+    {
+        return false;
+    }
+    else
+    {
+        return true;
+    }
+}                               // XmlParserUtil::getTagValue
+
+string
+XmlParserUtil::getLocation() const
+{
+    string curDict;  //  The current subtree that we're in,
+    //  e.g. "lamarc forces replication"
+
+    if (m_tagStack.empty())
+        return "";
+
+    vector<string>::const_iterator it;
+    for (it = m_tagStack.begin(); it != m_tagStack.end(); it++)
+    {
+        curDict += *it + " ";
+    }
+
+    return curDict;
+}                               // XmlParserUtil::getLocation
+
+string
+XmlParserUtil::getTopNodeName() const
+{
+    return m_tagStack.back();
+}                               // XmlParserUtil::getTopNodeName
+
+//____________________________________________________________________________________
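
To make the contract of getNextTag()/stripTag() above concrete: a made-up
start tag such as <sample name="foo"> is pushed onto m_tagStack and comes back
with the tagInfo map filled in roughly as

    TagName    -> "sample"
    IsStartTag -> "true"
    name       -> "foo"

while a closing tag such as </sample> yields IsStartTag -> "false" and pops
the matching entry off m_tagStack (throwing FileFormatError if the tags do not
balance).
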
diff --git a/src/conversion/nomenuglobals.cpp b/src/conversion/nomenuglobals.cpp
new file mode 100644
index 0000000..9b9dadc
--- /dev/null
+++ b/src/conversion/nomenuglobals.cpp
@@ -0,0 +1,36 @@
+// $Id: nomenuglobals.cpp,v 1.7 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "nomenuglobals.h"
+#ifdef JSIM
+#include "constants.h"  // for datatype const string (DNA, etc)
+
+// the following are mandatory--they must exist
+string   convstr::GENETICDATAFILENAME              = "tempdna";
+string   convstr::OUTFILENAME                      = "outfile";
+// end mandatory
+
+const string   convstr::JSIMPARMFILE               = "";
+string   convstr::HAPFILENAME                      = "";
+string   convstr::MAPFILENAME                      = "";
+
+string   convstr::GENETICDATATYPE                  = lamarcstrings::DNA;
+string   convstr::GENETICDATAFORMAT                = "Phylip";
+bool          convstr::GENETICDATAINTERLEAVED      = true;
+
+long          convstr::REGIONLENGTH                = 2000;
+long          convstr::REGIONOFFSET                = 0;
+long          convstr::LOCUSMAPPOS                 = 0;
+
+bool convstr::MICROREGIONS                         = false;
+
+#endif  // JSIM
+
+//____________________________________________________________________________________
diff --git a/src/conversion/nomenuglobals.h b/src/conversion/nomenuglobals.h
new file mode 100644
index 0000000..faa2175
--- /dev/null
+++ b/src/conversion/nomenuglobals.h
@@ -0,0 +1,48 @@
+// $Id: nomenuglobals.h,v 1.9 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// This file defines the behaviour of the converter when compiled with the JSIM flag
+
+#ifndef CONV_JSIMGLOBALS_H
+#define CONV_JSIMGLOBALS_H
+
+#ifdef JSIM
+#include <string>
+
+using std::string;
+
+class convstr
+{
+  public:
+    // the following are mandatory--they must exist
+    static string GENETICDATAFILENAME;
+    static string OUTFILENAME;
+    // end mandatory
+
+    static const string    JSIMPARMFILE; // empty or filename
+
+    static string    HAPFILENAME;          // empty or filename
+    static string    MAPFILENAME;          // empty or filename
+    static string    GENETICDATATYPE;
+    static string    GENETICDATAFORMAT;    // Phylip or Migrate
+    static bool      GENETICDATAINTERLEAVED;
+    static long      REGIONLENGTH;
+    static long      REGIONOFFSET;
+    static long      LOCUSMAPPOS;          // what's the locus' map-position?
+    // empty = no mapping
+    static bool      MICROREGIONS;         // are microsats unlinked
+    // (and therefore analyzed as separate regions)?
+};
+
+#endif  // JSIM
+
+#endif // CONV_JSIMGLOBALS_H
+
+//____________________________________________________________________________________
diff --git a/src/conversion/testfiles/fin-hel1 b/src/conversion/testfiles/fin-hel1
new file mode 100644
index 0000000..cc68b85
--- /dev/null
+++ b/src/conversion/testfiles/fin-hel1
@@ -0,0 +1,8 @@
+6 68 W
+Weights    01234121010101010101010101010132222020202020202020202020202020202020
+germ_10    TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+germ_11    AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+germ_20    ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+germ_21    ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA 
+germ_30    ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+germ_31    ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
diff --git a/src/conversion/testfiles/fin-hel1r b/src/conversion/testfiles/fin-hel1r
new file mode 100644
index 0000000..40f3108
--- /dev/null
+++ b/src/conversion/testfiles/fin-hel1r
@@ -0,0 +1,9 @@
+n 1 1 helsinki
+68
+6 germ1
+germ_10    TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+germ_11    AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+germ_20    ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+germ_21    ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+germ_30    ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+germ_31    ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
diff --git a/src/conversion/testfiles/fin-hel2 b/src/conversion/testfiles/fin-hel2
new file mode 100644
index 0000000..71b74b5
--- /dev/null
+++ b/src/conversion/testfiles/fin-hel2
@@ -0,0 +1,7 @@
+6 68
+germ_10    TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+germ_11    AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+germ_20    ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+germ_21    ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+germ_30    ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+germ_31    ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
diff --git a/src/conversion/testfiles/fin-hel2r b/src/conversion/testfiles/fin-hel2r
new file mode 100644
index 0000000..ba38c75
--- /dev/null
+++ b/src/conversion/testfiles/fin-hel2r
@@ -0,0 +1,9 @@
+n 1 1 helsinki
+68
+6 germ2
+germ_10    TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+germ_11    AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+germ_20    ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+germ_21    ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+germ_30    ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+germ_31    ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
diff --git a/src/conversion/testfiles/fin-hel3 b/src/conversion/testfiles/fin-hel3
new file mode 100644
index 0000000..71b74b5
--- /dev/null
+++ b/src/conversion/testfiles/fin-hel3
@@ -0,0 +1,7 @@
+6 68
+germ_10    TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+germ_11    AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+germ_20    ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+germ_21    ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+germ_30    ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+germ_31    ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
diff --git a/src/conversion/testfiles/fin-hel3r b/src/conversion/testfiles/fin-hel3r
new file mode 100644
index 0000000..b86d5a2
--- /dev/null
+++ b/src/conversion/testfiles/fin-hel3r
@@ -0,0 +1,9 @@
+n 1 1 helsinki
+68
+6 germ3
+germ_10    TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+germ_11    AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+germ_20    ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+germ_21    ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+germ_30    ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+germ_31    ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
diff --git a/src/conversion/testfiles/fin-kar1 b/src/conversion/testfiles/fin-kar1
new file mode 100644
index 0000000..a74ae6f
--- /dev/null
+++ b/src/conversion/testfiles/fin-kar1
@@ -0,0 +1,11 @@
+   10   501
+0_8           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCCTGAGCCGGTGCACGCCCACAATTCGGAATTGCGGATTTTCGCGAGGTCAGGAAGTAAACGACCTCTTTAAATACTTAGTAACATTTGCTTGGATTAGGACTGGAATTCGATGGCGCCAAGTGTAAAATTCCGGACTCTGACGAGGTTGCTGTCGAAAAAAAAGCTCAGTTCTGTGGTAAGAAACTCCGGCCGTCTTAATGTTTAATCAGTTGCCTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCCGCCCCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTCTACCAGTGTAGCGTGGGATAACTCATTGCCTACTGTTAATTTTTCGTGAAGACAGTAGGGTGGCTGAGATATTCGACTGAAGTGCCCAAGCAGCGATGAGGGTTCGAAGGAGCGTCCTGGGTTACCGGCA [...]
+0_4           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACTTTAAGCTGATGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCACGAAGTAAACGGCCTCTTTGAATACTTAGTAGCACTTGATTGGGTTATGACTGGAAGTCGATGGCGCCAAGTGTAAGATTTGAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCGGTGGTAAGAAAATCCGGCCGTTTTAATCTTTAATCAGTTTCTTTGCGATCAGTAGGACTAATAACTTGTTTGCCGTCTACCTTCAAATGTAATTGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGGA [...]
+0_7           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAACTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_2           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGGTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAAGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCTGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTAGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_9           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACGGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_0           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_3           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_1           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCTCAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_5           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_6           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
diff --git a/src/conversion/testfiles/fin-kar1r b/src/conversion/testfiles/fin-kar1r
new file mode 100644
index 0000000..4b27aae
--- /dev/null
+++ b/src/conversion/testfiles/fin-kar1r
@@ -0,0 +1,13 @@
+n 1 1 karelia
+501
+10 germ1
+0_8           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCCTGAGCCGGTGCACGCCCACAATTCGGAATTGCGGATTTTCGCGAGGTCAGGAAGTAAACGACCTCTTTAAATACTTAGTAACATTTGCTTGGATTAGGACTGGAATTCGATGGCGCCAAGTGTAAAATTCCGGACTCTGACGAGGTTGCTGTCGAAAAAAAAGCTCAGTTCTGTGGTAAGAAACTCCGGCCGTCTTAATGTTTAATCAGTTGCCTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCCGCCCCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTCTACCAGTGTAGCGTGGGATAACTCATTGCCTACTGTTAATTTTTCGTGAAGACAGTAGGGTGGCTGAGATATTCGACTGAAGTGCCCAAGCAGCGATGAGGGTTCGAAGGAGCGTCCTGGGTTACCGGCA [...]
+0_4           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACTTTAAGCTGATGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCACGAAGTAAACGGCCTCTTTGAATACTTAGTAGCACTTGATTGGGTTATGACTGGAAGTCGATGGCGCCAAGTGTAAGATTTGAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCGGTGGTAAGAAAATCCGGCCGTTTTAATCTTTAATCAGTTTCTTTGCGATCAGTAGGACTAATAACTTGTTTGCCGTCTACCTTCAAATGTAATTGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGGA [...]
+0_7           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAACTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_2           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGGTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAAGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCTGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTAGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_9           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACGGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_0           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_3           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_1           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCTCAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_5           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_6           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
diff --git a/src/conversion/testfiles/fin-kar2 b/src/conversion/testfiles/fin-kar2
new file mode 100644
index 0000000..a74ae6f
--- /dev/null
+++ b/src/conversion/testfiles/fin-kar2
@@ -0,0 +1,11 @@
+   10   501
+0_8           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCCTGAGCCGGTGCACGCCCACAATTCGGAATTGCGGATTTTCGCGAGGTCAGGAAGTAAACGACCTCTTTAAATACTTAGTAACATTTGCTTGGATTAGGACTGGAATTCGATGGCGCCAAGTGTAAAATTCCGGACTCTGACGAGGTTGCTGTCGAAAAAAAAGCTCAGTTCTGTGGTAAGAAACTCCGGCCGTCTTAATGTTTAATCAGTTGCCTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCCGCCCCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTCTACCAGTGTAGCGTGGGATAACTCATTGCCTACTGTTAATTTTTCGTGAAGACAGTAGGGTGGCTGAGATATTCGACTGAAGTGCCCAAGCAGCGATGAGGGTTCGAAGGAGCGTCCTGGGTTACCGGCA [...]
+0_4           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACTTTAAGCTGATGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCACGAAGTAAACGGCCTCTTTGAATACTTAGTAGCACTTGATTGGGTTATGACTGGAAGTCGATGGCGCCAAGTGTAAGATTTGAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCGGTGGTAAGAAAATCCGGCCGTTTTAATCTTTAATCAGTTTCTTTGCGATCAGTAGGACTAATAACTTGTTTGCCGTCTACCTTCAAATGTAATTGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGGA [...]
+0_7           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAACTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_2           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGGTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAAGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCTGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTAGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_9           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACGGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_0           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_3           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_1           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCTCAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_5           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_6           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
diff --git a/src/conversion/testfiles/fin-kar2r b/src/conversion/testfiles/fin-kar2r
new file mode 100644
index 0000000..1dbb876
--- /dev/null
+++ b/src/conversion/testfiles/fin-kar2r
@@ -0,0 +1,13 @@
+n 1 1 karelia
+501
+10 germ2
+0_8           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCCTGAGCCGGTGCACGCCCACAATTCGGAATTGCGGATTTTCGCGAGGTCAGGAAGTAAACGACCTCTTTAAATACTTAGTAACATTTGCTTGGATTAGGACTGGAATTCGATGGCGCCAAGTGTAAAATTCCGGACTCTGACGAGGTTGCTGTCGAAAAAAAAGCTCAGTTCTGTGGTAAGAAACTCCGGCCGTCTTAATGTTTAATCAGTTGCCTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCCGCCCCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTCTACCAGTGTAGCGTGGGATAACTCATTGCCTACTGTTAATTTTTCGTGAAGACAGTAGGGTGGCTGAGATATTCGACTGAAGTGCCCAAGCAGCGATGAGGGTTCGAAGGAGCGTCCTGGGTTACCGGCA [...]
+0_4           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACTTTAAGCTGATGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCACGAAGTAAACGGCCTCTTTGAATACTTAGTAGCACTTGATTGGGTTATGACTGGAAGTCGATGGCGCCAAGTGTAAGATTTGAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCGGTGGTAAGAAAATCCGGCCGTTTTAATCTTTAATCAGTTTCTTTGCGATCAGTAGGACTAATAACTTGTTTGCCGTCTACCTTCAAATGTAATTGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGGA [...]
+0_7           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAACTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_2           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGGTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAAGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCTGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTAGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_9           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACGGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_0           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_3           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_1           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCTCAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_5           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_6           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
diff --git a/src/conversion/testfiles/fin-kar3 b/src/conversion/testfiles/fin-kar3
new file mode 100644
index 0000000..a74ae6f
--- /dev/null
+++ b/src/conversion/testfiles/fin-kar3
@@ -0,0 +1,11 @@
+   10   501
+0_8           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCCTGAGCCGGTGCACGCCCACAATTCGGAATTGCGGATTTTCGCGAGGTCAGGAAGTAAACGACCTCTTTAAATACTTAGTAACATTTGCTTGGATTAGGACTGGAATTCGATGGCGCCAAGTGTAAAATTCCGGACTCTGACGAGGTTGCTGTCGAAAAAAAAGCTCAGTTCTGTGGTAAGAAACTCCGGCCGTCTTAATGTTTAATCAGTTGCCTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCCGCCCCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTCTACCAGTGTAGCGTGGGATAACTCATTGCCTACTGTTAATTTTTCGTGAAGACAGTAGGGTGGCTGAGATATTCGACTGAAGTGCCCAAGCAGCGATGAGGGTTCGAAGGAGCGTCCTGGGTTACCGGCA [...]
+0_4           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACTTTAAGCTGATGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCACGAAGTAAACGGCCTCTTTGAATACTTAGTAGCACTTGATTGGGTTATGACTGGAAGTCGATGGCGCCAAGTGTAAGATTTGAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCGGTGGTAAGAAAATCCGGCCGTTTTAATCTTTAATCAGTTTCTTTGCGATCAGTAGGACTAATAACTTGTTTGCCGTCTACCTTCAAATGTAATTGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGGA [...]
+0_7           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAACTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_2           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGGTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAAGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCTGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTAGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_9           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACGGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_0           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_3           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_1           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCTCAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_5           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_6           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
diff --git a/src/conversion/testfiles/fin-kar3r b/src/conversion/testfiles/fin-kar3r
new file mode 100644
index 0000000..60e481c
--- /dev/null
+++ b/src/conversion/testfiles/fin-kar3r
@@ -0,0 +1,13 @@
+n 1 1 karelia
+501
+10 germ3
+0_8           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCCTGAGCCGGTGCACGCCCACAATTCGGAATTGCGGATTTTCGCGAGGTCAGGAAGTAAACGACCTCTTTAAATACTTAGTAACATTTGCTTGGATTAGGACTGGAATTCGATGGCGCCAAGTGTAAAATTCCGGACTCTGACGAGGTTGCTGTCGAAAAAAAAGCTCAGTTCTGTGGTAAGAAACTCCGGCCGTCTTAATGTTTAATCAGTTGCCTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCCGCCCCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTCTACCAGTGTAGCGTGGGATAACTCATTGCCTACTGTTAATTTTTCGTGAAGACAGTAGGGTGGCTGAGATATTCGACTGAAGTGCCCAAGCAGCGATGAGGGTTCGAAGGAGCGTCCTGGGTTACCGGCA [...]
+0_4           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACTTTAAGCTGATGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCACGAAGTAAACGGCCTCTTTGAATACTTAGTAGCACTTGATTGGGTTATGACTGGAAGTCGATGGCGCCAAGTGTAAGATTTGAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCGGTGGTAAGAAAATCCGGCCGTTTTAATCTTTAATCAGTTTCTTTGCGATCAGTAGGACTAATAACTTGTTTGCCGTCTACCTTCAAATGTAATTGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGGA [...]
+0_7           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAACTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_2           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGGTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAAGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCTGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTAGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_9           CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACGGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_0           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_3           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_1           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCTCAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_5           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
+0_6           CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCA [...]
diff --git a/src/conversion/testfiles/helsinkimap b/src/conversion/testfiles/helsinkimap
new file mode 100644
index 0000000..a15007e
--- /dev/null
+++ b/src/conversion/testfiles/helsinkimap
@@ -0,0 +1 @@
+helsinki 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 58 59 61 74 78 88 91 101 111 119 160 261 262 364 1064 1165 1166 1267 1323
diff --git a/src/conversion/testfiles/helsinkiphase b/src/conversion/testfiles/helsinkiphase
new file mode 100644
index 0000000..4ff28a0
--- /dev/null
+++ b/src/conversion/testfiles/helsinkiphase
@@ -0,0 +1 @@
+2 adjacent 3 all
diff --git a/src/conversion/testfiles/kareliaphase b/src/conversion/testfiles/kareliaphase
new file mode 100644
index 0000000..d044e9d
--- /dev/null
+++ b/src/conversion/testfiles/kareliaphase
@@ -0,0 +1 @@
+5 adjacent 2 all
diff --git a/src/conversion/testfiles/msatr b/src/conversion/testfiles/msatr
new file mode 100644
index 0000000..c9fe67f
--- /dev/null
+++ b/src/conversion/testfiles/msatr
@@ -0,0 +1,49 @@
+M  2 10 .  Example: Microsatellite data set
+25   population number   0
+0BBK 0BAH 16.19 16.19 19.20 16.24 20.21 19.20 23.20 19.17 24.19 22.20 
+0BBG 0BBM 16.18 16.18 18.20 16.24 20.21 18.20 22.22 19.17 24.19 22.20 
+0BAJ 0BAB 16.16 16.18 18.20 15.24 20.21 18.22 22.22 19.18 24.20 22.20 
+0BAL 0BBL 16.16 16.18 19.20 16.25 20.21 18.21 22.22 18.19 24.20 23.20 
+0BAR 0BBW 15.16 17.18 19.20 16.25 20.21 18.20 21.22 18.19 23.20 24.20 
+0BAQ 0BBS 15.21 16.18 20.21 14.19 19.21 18.20 21.22 19.20 23.20 23.19 
+0BAS 0BBT 15.23 16.18 20.20 12.19 20.21 18.22 21.22 19.20 23.20 23.19 
+0BBH 0BAA 15.17 16.18 19.20 12.20 20.21 18.22 21.19 19.18 23.20 22.19 
+0BAX 0BCA 15.17 16.18 19.20 12.19 20.21 18.22 21.20 19.19 23.20 21.19 
+0BBB 0BBX 15.17 16.18 19.20 23.19 20.21 18.22 23.20 19.19 23.20 21.19 
+0BBE 0BBN 15.15 16.19 19.20 24.20 20.21 18.22 20.20 19.19 23.20 21.20 
+0BAC 0BBU 16.17 16.16 19.20 24.20 20.19 16.22 20.20 19.19 23.20 20.20 
+0BCB 0BAM 15.17 16.16 19.22 28.19 20.19 16.22 20.20 19.20 23.22 20.20 
+0BBR 0BAW 15.17 16.16 19.22 22.21 20.19 16.22 20.20 19.20 22.19 20.20 
+0BAU 0BAP 16.17 16.16 19.22 24.21 20.19 19.21 20.20 19.20 22.19 20.20 
+0BBO 0BBJ 16.17 16.16 19.21 23.21 21.19 18.21 20.20 19.19 22.19 20.20 
+0BAK 0BAD 16.15 16.15 18.21 25.20 21.19 19.21 20.20 20.19 19.19 19.20 
+0BAN 0BAT 17.22 16.16 18.21 25.22 21.19 18.24 20.20 19.19 18.20 18.15 
+0BBF 0BAE 17.22 17.16 21.22 25.22 21.19 18.24 20.20 19.19 19.20 18.17 
+0BAV 0BAO 17.22 17.16 21.22 24.23 21.20 18.24 20.20 18.20 19.20 18.17 
+0BBQ 0BBD 18.19 17.17 21.22 25.21 21.20 18.24 20.20 16.22 20.20 18.22 
+0BAF 0BBV 18.25 17.17 21.21 25.21 21.20 21.24 20.20 16.22 19.19 19.23 
+0BBA 0BAI 17.24 17.16 21.21 25.21 21.20 21.24 20.20 15.22 19.19 19.20 
+0BBP 0BBC 17.25 18.21 21.21 24.21 21.20 22.24 20.20 16.22 21.19 20.20 
+0BAG 0BBI 17.25 18.20 20.21 24.21 21.20 20.24 20.20 16.22 21.19 20.20 
+21   population number   1
+1BAA 1BAF 15.17 16.16 19.20 13.23 20.21 18.21 22.23 19.15 25.20 25.20 
+1BAQ 1BAN 16.16 17.16 19.20 16.23 20.21 18.20 22.23 19.17 25.19 25.20 
+1BBK 1BAS 16.16 17.16 19.20 16.23 20.21 18.21 22.23 19.17 24.19 25.20 
+1BAL 1BAV 18.16 16.16 18.20 16.19 20.21 18.21 22.19 19.18 20.19 23.20 
+1BAE 1BAI 18.17 16.17 19.20 15.19 20.21 18.21 22.19 19.18 22.19 23.19 
+1BAP 1BBP 18.17 16.17 20.20 14.19 20.21 18.21 22.20 19.20 23.19 23.19 
+1BBF 1BAO 18.21 16.17 20.20 14.19 20.21 17.21 22.20 19.20 23.19 22.19 
+1BBM 1BAM 17.21 16.17 19.20 15.19 20.21 17.21 22.20 19.20 23.19 22.19 
+1BBG 1BAU 16.21 16.17 19.21 21.19 20.21 17.22 21.20 18.19 23.19 22.17 
+1BBJ 1BBC 18.22 16.18 19.20 21.20 20.21 17.22 21.20 20.19 23.19 22.20 
+1BBL 1BAK 16.22 16.18 19.20 23.20 20.21 17.21 21.21 19.19 23.19 22.19 
+1BBB 1BBR 21.22 16.18 19.20 26.21 20.21 18.22 21.21 19.19 21.19 22.19 
+1BBD 1BAG 21.22 16.18 19.20 23.21 20.21 18.22 21.21 19.20 21.19 23.19 
+1BBH 1BBO 21.19 16.18 19.20 25.20 20.19 18.22 21.21 19.19 21.19 23.17 
+1BBE 1BAJ 23.19 16.16 18.20 25.22 20.19 21.21 23.21 19.20 18.19 23.20 
+1BBQ 1BBI 23.19 16.21 18.20 24.21 20.19 21.21 23.21 19.20 21.18 23.20 
+1BAC 1BAB 23.26 16.21 18.22 25.20 20.19 21.21 23.21 19.23 20.19 22.15 
+1BAH 1BAT 23.25 16.21 21.22 25.20 20.19 21.21 23.19 19.23 20.19 22.15 
+1BBN 1BAX 23.27 16.22 20.21 25.20 20.20 21.21 23.19 20.23 19.18 22.15 
+1BAD 1BAW 22.27 16.22 20.21 25.20 20.21 21.24 23.20 19.22 20.19 21.20 
+1BAR 1BBA 21.29 16.20 20.21 25.20 21.20 21.24 23.21 15.22 20.20 21.20 
diff --git a/src/conversion/testfiles/phyout b/src/conversion/testfiles/phyout
new file mode 100644
index 0000000..a7c9e72
--- /dev/null
+++ b/src/conversion/testfiles/phyout
@@ -0,0 +1,404 @@
+<lamarc>
+<!-- Created from the LamarcDS DataStore -->
+	<forces>
+		<coalescence>
+			<start-values> 0.01 0.01 0.01 </start-values>
+			<method> Watterson Watterson Watterson </method>
+			<max-events> 1000 </max-events>
+		</coalescence>
+		<migration>
+			<start-values> 0.0 1 1 1 0.0 1 1 1 0.0  </start-values>
+			<method> FST FST FST FST FST FST FST FST FST </method>
+			<max-events> 1000 </max-events>
+		</migration>
+	</forces>
+	<!-- -->
+	<chains>
+		<replicates> 1 </replicates>
+		<heating>
+			<temperatures> 1 </temperatures>
+			<swap-interval> 1 </swap-interval>
+		</heating>
+		<strategy>
+			<resimulating> 1 </resimulating>
+		</strategy>
+			<initial>
+				<number> 1 </number>
+				<samples> 40 </samples>
+				<discard> 10 </discard>
+				<interval> 10 </interval>
+			</initial>
+		<final>
+			<number> 1 </number>
+			<samples> 40 </samples>
+			<discard> 10 </discard>
+			<interval> 10 </interval>
+		</final>
+	</chains>
+	<!-- -->
+	<format>
+		<verbosity> verbose </verbosity>
+		<progress-reports> verbose </progress-reports>
+		<echo> true </echo>
+		<plotting>
+			<profile> false </profile>
+			<posterior> false </posterior>
+		</plotting>
+		<seed> 1005 </seed>
+		<parameter-file> parmfile </parameter-file>
+		<results-file> outfile </results-file>
+		<summary-file> sumfile </summary-file>
+	</format>
+	<!-- -->
+	<data>
+		<region name="helsinki">
+			<model name="F84">
+				<base-freqs> 0.25 0.25 0.25 0.25 </base-freqs>
+				<ttratio> 2 </ttratio>
+				<categories>
+					<num-categories> 1 </num-categories>
+					<rates> 1 </rates>
+					<probabilities> 1 </probabilities>
+				</categories>
+			</model>
+<spacing>
+				<block>
+					<map_position> 0 </map_position>
+					<length> 1324 </length>
+					<locations> 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 58 59 61 74 78 88 91 101 111 119 160 261 262 364 1064 1165 1166 1267 1323 
+					</locations>
+					<offset> 0 </offset>
+				</block>
+			</spacing>
+			<population name="germ1">
+			<!--  Population origin file for helsinki/germ1: fin-hel1 -->
+				<individual name="AC-1416111666">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 </phase>
+					<sample name="germ_10   ">
+						<datablock type="SNP">
+							TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+						</datablock>
+					</sample>
+					<sample name="germ_11   ">
+						<datablock type="SNP">
+							AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+						</datablock>
+					</sample>
+					<sample name="germ_20   ">
+						<datablock type="SNP">
+							ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="CC809148025">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 </phase>
+					<sample name="germ_21   ">
+						<datablock type="SNP">
+							ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+						</datablock>
+					</sample>
+					<sample name="germ_30   ">
+						<datablock type="SNP">
+							ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+						</datablock>
+					</sample>
+					<sample name="germ_31   ">
+						<datablock type="SNP">
+							ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+			<population name="germ2">
+				<individual name="AA-1347866141">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 </phase>
+					<sample name="germ_10   ">
+						<datablock type="SNP">
+							TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+						</datablock>
+					</sample>
+					<sample name="germ_11   ">
+						<datablock type="SNP">
+							AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+						</datablock>
+					</sample>
+					<sample name="germ_20   ">
+						<datablock type="SNP">
+							ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="AG-1262439246">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 </phase>
+					<sample name="germ_21   ">
+						<datablock type="SNP">
+							ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+						</datablock>
+					</sample>
+					<sample name="germ_30   ">
+						<datablock type="SNP">
+							ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+						</datablock>
+					</sample>
+					<sample name="germ_31   ">
+						<datablock type="SNP">
+							ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+			<population name="germ3">
+				<individual name="CG-1264639891">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 </phase>
+					<sample name="germ_10   ">
+						<datablock type="SNP">
+							TCATTTCCGGTGCAAACCGAATCTCAGCTTGATTAATCTGGATCACCCAGAAGAGCTCTTAAAACGCA
+						</datablock>
+					</sample>
+					<sample name="germ_11   ">
+						<datablock type="SNP">
+							AGATTTGGGGTGCAATGGGAATCTCTCGTTGATTATAGTGGATCAGGGAGAAGAGGACTTAAAACCGT
+						</datablock>
+					</sample>
+					<sample name="germ_20   ">
+						<datablock type="SNP">
+							ACTTTTGCCGTGCATAGCCAATCTGACCATGATTTAACAGGATCTCGCTGAAGACCAGATAAAAGGGA
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="CT-1903169788">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 </phase>
+					<sample name="germ_21   ">
+						<datablock type="SNP">
+							ACAATTGCGCTGCTAAGCGTATCACACCTAGATAAAACTCGATGACGCACAAGTGCAGTAAAATCGGA
+						</datablock>
+					</sample>
+					<sample name="germ_30   ">
+						<datablock type="SNP">
+							ACATATGCGGAGGAAAGCGATTGTCACCTTCAATAAACTGCAACACGCAGTACAGCAGTTTATACGGA
+						</datablock>
+					</sample>
+					<sample name="germ_31   ">
+						<datablock type="SNP">
+							ACATTAGCGGTCCAAAGCGAAACTCACCTTGTTTAAACTGGTTCACGCAGATGAGCAGTTATAACGGA
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+		<region name="karelia">
+			<model name="F84">
+				<base-freqs> 0.25 0.25 0.25 0.25 </base-freqs>
+				<ttratio> 2 </ttratio>
+				<categories>
+					<num-categories> 1 </num-categories>
+					<rates> 1 </rates>
+					<probabilities> 1 </probabilities>
+				</categories>
+			</model>
+			<population name="germ1">
+			<!--  Population origin file for karelia/germ1: fin-kar1 -->
+				<individual name="GG-1213039616">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_8       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCCTGAGCCGGTGCACGCCCACAATTCGGAATTGCGGATTTTCGCGAGGTCAGGAAGTAAACGACCTCTTTAAATACTTAGTAACATTTGCTTGGATTAGGACTGGAATTCGATGGCGCCAAGTGTAAAATTCCGGACTCTGACGAGGTTGCTGTCGAAAAAAAAGCTCAGTTCTGTGGTAAGAAACTCCGGCCGTCTTAATGTTTAATCAGTTGCCTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCCGCCCCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTCTACCAGTGTAGCGTGGGATAACTCATTGCCTACTGTTAATTTTTCGTGAAGACAGTAGGGTGGCTGAGATATTCGACTGAAGTGCCCAAGCAGCGATGAGGGTTCGAAGGAGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_4       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACTTTAAGCTGATGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCACGAAGTAAACGGCCTCTTTGAATACTTAGTAGCACTTGATTGGGTTATGACTGGAAGTCGATGGCGCCAAGTGTAAGATTTGAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCGGTGGTAAGAAAATCCGGCCGTTTTAATCTTTAATCAGTTTCTTTGCGATCAGTAGGACTAATAACTTGTTTGCCGTCTACCTTCAAATGTAATTGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGGACTACCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GG-1269630573">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_7       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAACTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_2       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGGTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAAGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCTGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTAGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="CA1317981730">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_9       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACGGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_0       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="CT-948561059">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_3       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_1       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCTCAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="TG-1980932940">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_5       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_6       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+			<population name="germ2">
+				<individual name="CA-1164768391">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_8       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCCTGAGCCGGTGCACGCCCACAATTCGGAATTGCGGATTTTCGCGAGGTCAGGAAGTAAACGACCTCTTTAAATACTTAGTAACATTTGCTTGGATTAGGACTGGAATTCGATGGCGCCAAGTGTAAAATTCCGGACTCTGACGAGGTTGCTGTCGAAAAAAAAGCTCAGTTCTGTGGTAAGAAACTCCGGCCGTCTTAATGTTTAATCAGTTGCCTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCCGCCCCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTCTACCAGTGTAGCGTGGGATAACTCATTGCCTACTGTTAATTTTTCGTGAAGACAGTAGGGTGGCTGAGATATTCGACTGAAGTGCCCAAGCAGCGATGAGGGTTCGAAGGAGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_4       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACTTTAAGCTGATGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCACGAAGTAAACGGCCTCTTTGAATACTTAGTAGCACTTGATTGGGTTATGACTGGAAGTCGATGGCGCCAAGTGTAAGATTTGAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCGGTGGTAAGAAAATCCGGCCGTTTTAATCTTTAATCAGTTTCTTTGCGATCAGTAGGACTAATAACTTGTTTGCCGTCTACCTTCAAATGTAATTGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGGACTACCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="CT-954059520">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_7       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAACTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_2       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGGTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAAGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCTGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTAGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="AG-829184365">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_9       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACGGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_0       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="CA1911914274">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_3       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_1       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCTCAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="CA-567992227">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_5       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_6       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+			<population name="germ3">
+				<individual name="TG-1119826216">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_8       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCCTGAGCCGGTGCACGCCCACAATTCGGAATTGCGGATTTTCGCGAGGTCAGGAAGTAAACGACCTCTTTAAATACTTAGTAACATTTGCTTGGATTAGGACTGGAATTCGATGGCGCCAAGTGTAAAATTCCGGACTCTGACGAGGTTGCTGTCGAAAAAAAAGCTCAGTTCTGTGGTAAGAAACTCCGGCCGTCTTAATGTTTAATCAGTTGCCTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCCGCCCCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTCTACCAGTGTAGCGTGGGATAACTCATTGCCTACTGTTAATTTTTCGTGAAGACAGTAGGGTGGCTGAGATATTCGACTGAAGTGCCCAAGCAGCGATGAGGGTTCGAAGGAGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_4       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACTTTAAGCTGATGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCACGAAGTAAACGGCCTCTTTGAATACTTAGTAGCACTTGATTGGGTTATGACTGGAAGTCGATGGCGCCAAGTGTAAGATTTGAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCGGTGGTAAGAAAATCCGGCCGTTTTAATCTTTAATCAGTTTCTTTGCGATCAGTAGGACTAATAACTTGTTTGCCGTCTACCTTCAAATGTAATTGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGGACTACCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GT-956456629">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_7       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAACTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_2       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGGTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAAGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCTGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTAGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="TC1911004218">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_9       ">
+						<datablock type="DNA">
+							CTAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTCAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCTACCTCCTAATGTAATCGTCCATCGTATTTCAGCCTGCGTCTATTAGAGCAGTGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGTGAAGACGGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_0       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCGCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAAGTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="TC-16056491">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_3       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_1       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCTCAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+				<individual name="GT1043708940">
+<phase type="unknown"> 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 [...]
+					<sample name="0_5       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+					<sample name="0_6       ">
+						<datablock type="DNA">
+							CCAGTGATATGACGTAGCGTCTCCTGCCTTACACCTTAAGCTGCTGCACGCCCACAATTCGGACTTGCGGGTTTTCGTGAGGTTAGGAAGTAAACGACCTCTTTGAATACTTAGTAACACTTGCTTGGGTTATGACTGGAATTCGATGGCGCCAAGTGTAAGATTTCAGACTCTGGCGAGACTGCTGACGAAAAAAAAGCTCAGTTCCGTGGTAAGAAAATCCGGCCGTCTTAATCTTTAATCAGTTTCTTTGCGATCTGTAGGACTAATAGCTTGTTTGCCGTCAACCTCCTAATGTAATCGTCCATCGTATCTCAGCCTGCGCCTATTAGAGCAGGGAGGCGGGTGCACCAGTGTAGCGTGGGATGACTCATTGCCCACTGTTAATTTTTCGCGAAGACAGTTGGGTGGCTGAGATATTCGACTGAAGCGCCCAAGTACCGATGAGGGTTCGAAGATGCGTCCTGGGTTACCGGCACTGTCTG [...]
+						</datablock>
+					</sample>
+				</individual>
+			</population>
+		</region>
+	</data>
+</lamarc>
+
diff --git a/src/datalike/calculators.cpp b/src/datalike/calculators.cpp
new file mode 100644
index 0000000..cde8eb2
--- /dev/null
+++ b/src/datalike/calculators.cpp
@@ -0,0 +1,452 @@
+// $Id: calculators.cpp,v 1.27 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <functional>
+#include <numeric>
+#include <map>
+
+#include "calculators.h"
+#include "datapack.h"
+#include "dlmodel.h"
+#include "locus.h"
+#include "mathx.h" //for ScaleToSumToOne
+#include "region.h"
+#include "registry.h"
+
+using std::map;
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d FrequenciesFromData(long int regionId, long int locusId, model_type modelType)
+{
+    const Region& thisRegion = registry.GetDataPack().GetRegion(regionId);
+    const Locus& thisLocus = thisRegion.GetLocus(locusId);
+    return FrequenciesFromData(thisLocus,modelType);
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d FrequenciesFromData(const Locus& locus, model_type modelType)
+{
+    assert(modelType == F84 || modelType == GTR);
+    DoubleVec1d basefreqs(BASES,0.0);
+    double totalmarkers = 0.0;
+    const vector<TipData>& tips = locus.GetTipData();
+
+    vector<TipData>::const_iterator tit;
+    for (tit = tips.begin(); tit != tips.end(); ++tit)
+    {
+        StringVec1d data = tit->data;
+        StringVec1d::const_iterator sit;
+
+        for (sit = data.begin(); sit != data.end(); ++sit)
+        {
+            double errorRate= locus.GetPerBaseErrorRate();
+            DoubleVec1d site = NucModel::StaticDataToLikes(*sit,errorRate);
+            double total = site[baseA]+site[baseC]+site[baseG]+site[baseT];
+            basefreqs[baseA] += site[baseA]/total;
+            basefreqs[baseC] += site[baseC]/total;
+            basefreqs[baseG] += site[baseG]/total;
+            basefreqs[baseT] += site[baseT]/total;
+        }
+        totalmarkers += locus.GetNmarkers();
+    }
+
+    basefreqs[baseA] /= totalmarkers;
+    basefreqs[baseC] /= totalmarkers;
+    basefreqs[baseG] /= totalmarkers;
+    basefreqs[baseT] /= totalmarkers;
+    ScaleToSumToOne(basefreqs);
+
+    // check for any zero freqs; if found, try the global freqs, then fall back to the minimum legal frequency
+    double zero(0.0);
+    if (std::count(basefreqs.begin(),basefreqs.end(),zero))
+    {
+        basefreqs = registry.GetDataPack().CountOverallBaseFreqs();
+        ScaleToSumToOne(basefreqs);
+        if (std::count(basefreqs.begin(),basefreqs.end(),zero))
+        {
+            DoubleVec1d::iterator baseit;
+            for (baseit = basefreqs.begin(); baseit != basefreqs.end(); ++baseit)
+            {
+                if (*baseit == 0.0) *baseit = defaults::minLegalFrequency;
+            }
+        }
+    }
+
+    return basefreqs;
+}
+
+//------------------------------------------------------------------------------------
+
+// MDEBUG LOCUS: function not currently used, but retained for the
+// multi-locus code.
+// EWFIX.P3 TEST -- IMPORTANT -- this function may contain code that sets
+// the estimate to a default value when we don't want it to.  We need
+// to see whether we can avoid that and instead post-process unsuitable
+// values to correct them when appropriate.  For example, when using
+// this function to get an initial estimate we may want to default
+// to a reasonable value, but when reporting the value we do not.
+
+void MigFSTLocus(const DataPack& dpack, const Locus& loc, long int numMigs,
+                 DoubleVec1d& estimate, std::deque<bool>& isCalculated)
+{
+    estimate.clear();
+    estimate = DoubleVec1d(numMigs,0.0);
+    assert(isCalculated.size() == estimate.size());
+    DoubleVec1d fw = loc.GetDataTypePtr()->CalcFw(loc);
+    DoubleVec1d fb = loc.GetDataTypePtr()->CalcFb(loc);
+
+    long int index1, index2;
+    long int pop1, pop2, npops = fw.size();
+    assert(numMigs ==  (npops * npops));
+
+    for(pop1 = 0; pop1 < npops; ++pop1)
+    {
+        for(pop2 = pop1+1; pop2 < npops; ++pop2)
+        {
+            index1 = pop1 * npops + pop2;  // position in linear vector
+            double fb12 = fb[index1];
+            double denom = -2.0 * fb12 + fw[pop1] + fw[pop2];
+            if (denom == 0.0)
+            {
+                isCalculated[index1] = false;
+                estimate[index1] = defaults::migration;
+            }
+            else estimate[index1] = 2.0 * fb12 / denom;
+            if (estimate[index1] < 0.0)
+            {
+                isCalculated[index1] = false;
+                estimate[index1] = defaults::migration;
+            }
+            index2 = pop2 * npops + pop1;  // lower triangle
+            estimate[index2] = estimate[index1];
+        }
+    }
+} // MigFSTLocus
+
+//------------------------------------------------------------------------------------
+
+void MigFSTMultiregion(const DataPack & dpack, const DoubleVec2d & regionalmuratios,
+                       DoubleVec1d & perRegionMigsAccumulator, std::deque<bool> & isCalculated)
+{
+    // MDEBUG: this is currently disabled for force_DIVMIG.  It should
+    // eventually be enabled, for contemporary populations only.
+
+    // get MIG npop and DIVMIG npop
+    long int migpopCount  = dpack.GetNPartitionsByForceType(force_MIG);
+    long int divmigpopCount  = dpack.GetNPartitionsByForceType(force_DIVMIG);
+
+    if (migpopCount == 0 && divmigpopCount <= 1)
+        throw data_error("Data error--zero populations?  There should be at least one.");
+
+    if (migpopCount <= 1) return;
+
+    long int popCount = migpopCount;
+
+    isCalculated.clear();
+    perRegionMigsAccumulator.clear();
+    long int numMigs = popCount * popCount;
+    perRegionMigsAccumulator = DoubleVec1d(numMigs,0.0);
+    isCalculated = std::deque<bool>(numMigs,true);
+
+    const long int numRegions = dpack.GetNRegions();
+
+    for(long int regionIndex = 0; regionIndex < numRegions; regionIndex++)
+    {
+        const Region& region = dpack.GetRegion(regionIndex);
+        long int numLoci = region.GetNloci();
+        DoubleVec1d perLociMigsAccumulator(numMigs,0.0);
+
+        for(long int locusIndex = 0; locusIndex < numLoci; locusIndex++)
+        {
+            const Locus& locus = region.GetLocus(locusIndex);
+            DoubleVec1d migsForOneLocus;
+            MigFSTLocus(dpack,locus,numMigs,migsForOneLocus,isCalculated);
+
+            // rescale for relative mutation rate
+            std::transform(migsForOneLocus.begin(),
+                           migsForOneLocus.end(),
+                           migsForOneLocus.begin(),
+                           std::bind2nd(std::multiplies<double>(),
+                                        regionalmuratios[regionIndex][locusIndex]));
+
+            // and add to the totals by locus
+            std::transform(  migsForOneLocus.begin(),
+                             migsForOneLocus.end(),
+                             perLociMigsAccumulator.begin(),
+                             perLociMigsAccumulator.begin(),
+                             std::plus<double>());
+        }
+        // average over the per-locus data
+        std::transform(  perLociMigsAccumulator.begin(),
+                         perLociMigsAccumulator.end(),
+                         perLociMigsAccumulator.begin(),
+                         std::bind2nd(std::divides<double>(),static_cast<double>(numLoci)));
+
+        // and add to the totals by region
+        std::transform(  perLociMigsAccumulator.begin(),
+                         perLociMigsAccumulator.end(),
+                         perRegionMigsAccumulator.begin(),
+                         perRegionMigsAccumulator.begin(),
+                         std::plus<double>());
+    }
+    // finally, average over the per-region data
+    std::transform(  perRegionMigsAccumulator.begin(),
+                     perRegionMigsAccumulator.end(),
+                     perRegionMigsAccumulator.begin(),
+                     std::bind2nd(std::divides<double>(),static_cast<double>(numRegions)));
+}
+
+//------------------------------------------------------------------------------------
+
+// EWFIX.P3 TEST -- IMPORTANT -- this function may contain code that sets
+// the estimate to a default value when we don't want it to.  We need
+// to see whether we can avoid that and instead post-process unsuitable
+// values to correct them when appropriate.  For example, when using
+// this function to get an initial estimate we may want to default
+// to a reasonable value, but when reporting the value we do not.
+
+void ThetaFSTLocus(const DataPack& dpack, const Locus& loc, const long int numThetas,
+                   vector<map<force_type,string> > tipids, DoubleVec1d& estimates,
+                   std::deque<bool>& isCalculated)
+{
+    assert(numThetas > 1);
+
+    StringVec3d data;
+    for(long int xpart = 0; xpart < numThetas; xpart++)
+    {
+        data.push_back(loc.GetCrossPartitionGeneticData(tipids[xpart]));
+    }
+
+    DoubleVec1d fw = loc.GetDataTypePtr()->CalcXFw(loc,data);
+    DoubleVec1d fb = loc.GetDataTypePtr()->CalcXFb(loc,data);
+
+    long int index1;
+    assert(fw.size() == static_cast<unsigned long int>(numThetas));
+
+    DoubleVec1d thetaEstimates(numThetas,defaults::theta);
+
+    for(long int xpart = 0; xpart < numThetas; xpart++)
+    {
+        double estimate = 0.0;
+        for(long int otherpart = 0; otherpart < numThetas; ++otherpart)
+        {
+            if (otherpart == xpart) continue;
+            index1 = xpart * numThetas + otherpart;  // position in linear vector
+            double fb12 = fb[index1];
+            double numer = -2.0 * fb12 + fw[xpart] + fw[otherpart];
+            double denom = -2.0 * fb12 * fb12 + fw[xpart] + fw[otherpart] + fw[xpart] * fw[xpart];
+
+            if (denom != 0.0)
+            {
+                estimate += (numer * (1.0 - fw[xpart])) / denom;
+            }
+        }
+
+        if (estimate > 0.0)
+        {
+            thetaEstimates[xpart] = estimate / numThetas;
+        }
+        else
+        {
+            isCalculated[xpart] = false;
+        }
+
+    }
+    estimates = thetaEstimates;
+} // ThetaFSTLocus
+
+//------------------------------------------------------------------------------------
+
+void ThetaFSTMultiregion(const DataPack& dpack, const DoubleVec1d & regionalthetascalars,
+                         const DoubleVec2d & regionalmuratios, DoubleVec1d & estimates,
+                         std::deque<bool> & isCalculated)
+{
+    const long int numRegions = dpack.GetNRegions();
+    const long int numThetas  = dpack.GetNCrossPartitions();
+    vector<map<force_type,string> > tipids = dpack.GetCrossPartitionIds(true);
+    isCalculated.clear();
+    isCalculated = std::deque<bool>(numThetas,true);
+
+    DoubleVec1d OverallThetasAccumulator(numThetas,0.0);
+
+    for(long int regionIndex = 0; regionIndex < numRegions; regionIndex++)
+    {
+        const Region& region = dpack.GetRegion(regionIndex);
+        long int numLoci = region.GetNloci();
+        DoubleVec1d RegionalThetasAccumulator(numThetas,0.0);
+
+        for(long int locusIndex = 0; locusIndex < numLoci; locusIndex++)
+        {
+            const Locus& locus = region.GetLocus(locusIndex);
+            DoubleVec1d thetasForOneLocus;
+            ThetaFSTLocus(dpack,locus,numThetas,tipids,thetasForOneLocus,isCalculated);
+            // correct for varying mutation rates
+            std::transform(thetasForOneLocus.begin(),
+                           thetasForOneLocus.end(),
+                           thetasForOneLocus.begin(),
+                           std::bind2nd(std::divides<double>(),
+                                        regionalmuratios[regionIndex][locusIndex]));
+
+            // and add to the totals by locus
+            std::transform(  thetasForOneLocus.begin(),
+                             thetasForOneLocus.end(),
+                             RegionalThetasAccumulator.begin(),
+                             RegionalThetasAccumulator.begin(),
+                             std::plus<double>());
+        }
+
+        // average over the per-locus data; while scaling appropriately
+        // for the current region
+        std::transform(  RegionalThetasAccumulator.begin(),
+                         RegionalThetasAccumulator.end(),
+                         RegionalThetasAccumulator.begin(),
+                         std::bind2nd(std::divides<double>(),regionalthetascalars[regionIndex]*numLoci));
+
+        // and add to the totals by region
+        std::transform(  RegionalThetasAccumulator.begin(),
+                         RegionalThetasAccumulator.end(),
+                         OverallThetasAccumulator.begin(),
+                         OverallThetasAccumulator.begin(),
+                         std::plus<double>());
+    }
+    // finally, average over the per-region data
+    std::transform(  OverallThetasAccumulator.begin(),
+                     OverallThetasAccumulator.end(),
+                     OverallThetasAccumulator.begin(),
+                     std::bind2nd(std::divides<double>(),static_cast<double>(numRegions)));
+    estimates = OverallThetasAccumulator;
+
+} // ThetaFSTMultiregion
+
+//------------------------------------------------------------------------------------
+// EWFIX.P3 TEST -- IMPORTANT -- this function may contain code that sets the estimate to a default value
+// when we don't want to.  Need to see if we can avoid that and instead post-process unsuitable values
+// to correct them when appropriate. for example, if using this function to get an initial estimate,
+// we may want to default to a reasonable value, but if reporting the value we do not.
+
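+// Overview: computes a per-site Watterson estimate for each cross-partition,
+//     theta_W = S / (L * sum_{i=1}^{n-1} 1/i),
+// where S is the number of variable markers, L the number of sites, and n the
+// number of sequences in that cross-partition; if n < 2 the entry is flagged
+// false in isCalculated and left at the default theta.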
+void ThetaWattersonLocus(const DataPack& dpack, const Locus& locus, const long int numThetas,
+                         const vector<map<force_type,string> > tipids, DoubleVec1d& estimates,
+                         std::deque<bool>& isCalculated)
+{
+    const long int nsites = locus.GetNsites();
+    DoubleVec1d thetaEstimates(numThetas,defaults::theta);
+
+    for(long int thetaIndex = 0; thetaIndex < numThetas; thetaIndex++)
+    {
+        long int nvarmarkers = 0;
+        const StringVec2d data = locus.GetCrossPartitionGeneticData(tipids[thetaIndex]);
+        if (!data.empty())
+        {
+            nvarmarkers = locus.GetDataTypePtr()->CalcNVarMarkers(data);
+        }
+
+        const long int nseqs = locus.GetNTips(tipids[thetaIndex]);
+
+        if(nseqs > 1)
+        {
+            double harmonic = 0.0;
+            for(long int seq = 1; seq < nseqs; seq++)
+            {
+                harmonic += 1.0/seq;
+            }
+            thetaEstimates[thetaIndex] = nvarmarkers / (nsites * harmonic);
+        }
+        else
+        {
+            isCalculated[thetaIndex] = false;
+        }
+    }
+    estimates = thetaEstimates;
+} // ThetaWattersonLocus
+
+//------------------------------------------------------------------------------------
+
+void ThetaWattersonMultiregion(const DataPack& dpack,
+                               const DoubleVec1d& regionalthetascalars,
+                               const DoubleVec2d& regionalmuratios,
+                               DoubleVec1d& estimates,
+                               std::deque<bool>& isCalculated)
+{
+    const long int numRegions = dpack.GetNRegions();
+    const long int numThetas  = dpack.GetNCrossPartitions();
+    isCalculated.assign(numThetas,true);
+
+    DoubleVec1d OverallThetasAccumulator(numThetas,0.0);
+
+    for(long int regionIndex = 0; regionIndex < numRegions; regionIndex++)
+    {
+        const Region& region = dpack.GetRegion(regionIndex);
+        DoubleVec1d RegionalThetasAccumulator;
+        ThetaWattersonRegion(dpack,region,regionalthetascalars[regionIndex],
+                             regionalmuratios[regionIndex],RegionalThetasAccumulator,isCalculated);
+
+        // and add to the totals by region
+        std::transform(  RegionalThetasAccumulator.begin(),
+                         RegionalThetasAccumulator.end(),
+                         OverallThetasAccumulator.begin(),
+                         OverallThetasAccumulator.begin(),
+                         std::plus<double>());
+    }
+    // finally, average over the per-region data
+    std::transform(  OverallThetasAccumulator.begin(),
+                     OverallThetasAccumulator.end(),
+                     OverallThetasAccumulator.begin(),
+                     std::bind2nd(std::divides<double>(),static_cast<double>(numRegions)));
+    estimates = OverallThetasAccumulator;
+
+} // ThetaWattersonMultiregion
+
+//------------------------------------------------------------------------------------
+
+void ThetaWattersonRegion(const DataPack& dpack,
+                          const Region& region,
+                          double thetascalar,
+                          const DoubleVec1d& muratios,
+                          DoubleVec1d& estimates,
+                          std::deque<bool>& isCalculated)
+{
+    long int numLoci = region.GetNloci();
+    long int numThetas = dpack.GetNCrossPartitions();
+    DoubleVec1d ThetasAccumulator(numThetas,0.0);
+    vector<map<force_type,string> > tipids = dpack.GetCrossPartitionIds(true);
+
+    for(long int locusIndex = 0; locusIndex < numLoci; locusIndex++)
+    {
+        const Locus& locus = region.GetLocus(locusIndex);
+        DoubleVec1d thetasForOneLocus;
+        ThetaWattersonLocus(dpack,locus,numThetas,tipids,thetasForOneLocus,isCalculated);
+
+        // rescale the locus values for their relative mutation rates
+        std::transform(thetasForOneLocus.begin(),
+                       thetasForOneLocus.end(),
+                       thetasForOneLocus.begin(),
+                       std::bind2nd(std::divides<double>(),muratios[locusIndex]));
+
+        // and add to the totals by region
+        std::transform(  thetasForOneLocus.begin(),
+                         thetasForOneLocus.end(),
+                         ThetasAccumulator.begin(),
+                         ThetasAccumulator.begin(),
+                         std::plus<double>());
+    }
+    // average over the per-locus data; while scaling appropriately
+    // for the current region
+    std::transform(  ThetasAccumulator.begin(),
+                     ThetasAccumulator.end(),
+                     ThetasAccumulator.begin(),
+                     std::bind2nd(std::divides<double>(),thetascalar*numLoci));
+
+    estimates = ThetasAccumulator;
+} // ThetaWattersonRegion
+
+//____________________________________________________________________________________
diff --git a/src/datalike/calculators.h b/src/datalike/calculators.h
new file mode 100644
index 0000000..865562e
--- /dev/null
+++ b/src/datalike/calculators.h
@@ -0,0 +1,63 @@
+// $Id: calculators.h,v 1.13 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef CALCULATORS_H
+#define CALCULATORS_H
+
+#include <map>
+#include <string>
+#include <vector>
+#include <deque>
+#include "defaults.h"
+#include "vectorx.h"
+
+class DataModel;
+class DataPack;
+class Region;
+class Locus;
+
+using std::map;
+using std::string;
+using std::vector;
+
+DoubleVec1d FrequenciesFromData(const Locus& locus, model_type);
+
+DoubleVec1d FrequenciesFromData(long regionId, long locusId, model_type);
+
+void MigFSTLocus(const DataPack& dpack,const Locus& loc, long nmigs,
+                 DoubleVec1d& estimate, std::deque<bool>& isCalculated);
+
+// EWFIX.P5 DIMENSIONS -- change to 2d  EWFIX.P5 LOCUS
+
+void MigFSTMultiregion(const DataPack& dpack, const DoubleVec2d& regionalmuratios,
+                       DoubleVec1d& perRegionMigsAccumulator, std::deque<bool>&); // EWFIX.P5 DIMENSIONS
+
+void ThetaFSTMultiregion(const DataPack& dpack, const DoubleVec1d&
+                         regionalthetascalars, const DoubleVec2d& regionalmuratios, DoubleVec1d&
+                         estimates, std::deque<bool>&);
+
+void ThetaFSTLocus(const DataPack& dpack, const Locus& locus, DoubleVec1d&
+                   estimates, std::deque<bool>&);
+
+void ThetaWattersonMultiregion(const DataPack& dpack, const DoubleVec1d&
+                               regionalthetascalars, const DoubleVec2d& regionalmuratios,
+                               DoubleVec1d& estimates, std::deque<bool>&);
+
+void ThetaWattersonLocus(const DataPack&,const Locus&,long numThetas,
+                         vector<map<force_type,string> > tipids,
+                         DoubleVec1d&,
+                         std::deque<bool>&);
+
+void ThetaWattersonRegion(const DataPack&, const Region&, double thetascalar,
+                          const DoubleVec1d& muratios, DoubleVec1d& estimates, std::deque<bool>&);
+
+#endif // CALCULATORS_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/cellmanager.cpp b/src/datalike/cellmanager.cpp
new file mode 100644
index 0000000..f928c45
--- /dev/null
+++ b/src/datalike/cellmanager.cpp
@@ -0,0 +1,62 @@
+// $Id: cellmanager.cpp,v 1.5 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// This file contains the implementation code for the manager of data-likelihood
+// storage, CellManager
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+#include "cellmanager.h"
+
+//------------------------------------------------------------------------------------
+
+cellarray CellManager::GetArray(triplet id, DLCell& requester)
+{
+    FreeStore::iterator it = m_store.find(id);
+
+    if (it != m_store.end())            // found one
+    {
+        cellarray result = it->second;
+        m_store.erase(it);
+        return result;
+    }
+    else
+        return requester.MakeArray();
+
+} // GetArray
+
+//------------------------------------------------------------------------------------
+
+void CellManager::FreeArray(triplet id, cellarray array)
+{
+    m_store.insert(std::make_pair(id, array));
+
+} // FreeArray
+
+//------------------------------------------------------------------------------------
+
+void CellManager::ClearStore()
+{
+    // assumes all cells are 3-D contiguous storage gotten with new[]!
+    cellarray ar;
+    FreeStore::iterator it = m_store.begin();
+    for ( ; it != m_store.end(); ++it)
+    {
+        ar = it->second;
+        delete [] ar[0][0];
+        delete [] ar[0];
+        delete [] ar;
+    }
+    m_store.erase(m_store.begin(), m_store.end());
+} // ClearStore
+
+//____________________________________________________________________________________
diff --git a/src/datalike/cellmanager.h b/src/datalike/cellmanager.h
new file mode 100644
index 0000000..44e792b
--- /dev/null
+++ b/src/datalike/cellmanager.h
@@ -0,0 +1,59 @@
+// $Id: cellmanager.h,v 1.3 2011/02/20 04:14:09 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/******************************************************************
+
+The CellManager maintains a private internal store of the arrays
+used by class DLCell and its children.  This is a speedup; we avoid
+allocating and deallocating these arrays and reuse them instead.
+This functionality was originally static in DLCell but that design
+suffered order-of-static-deallocation issues especially on
+Macs.  Here it is a Singleton which is abandoned on the heap, producing
+the appearance of a memory leak, but at least no crashes.  DO NOT
+give this class a destructor, as destroying it will reintroduce the
+order-of-deallocation bug!
+
+After each Region you should call ClearStore to get rid of old
+arrays; they are unlikely to be useful in the new Region and will
+eat up space.
+
+The CellManager code assumes that all of the data-likelihood
+arrays are three-dimensional contiguous allocations made with new[].  If you
+write one that isn't, do not use this code to manage it.
+
+********************************************************************/
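+
+/*
+  Illustrative usage sketch (the names "manager", "cell" and "dims" are
+  placeholders, not part of the interface): "manager" stands for however the
+  singleton instance is obtained, "cell" for a DLCell whose array has
+  dimensions "dims".
+
+      cellarray arr = manager.GetArray(dims, cell);  // reuse a pooled array, or let the cell make one
+      // ... use arr for data-likelihood storage ...
+      manager.FreeArray(dims, arr);                  // return it to the pool for later reuse
+      manager.ClearStore();                          // once per Region, to discard stale arrays
+*/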
+
+#ifndef CELLMANAGER_H
+#define CELLMANAGER_H
+
+#include "types.h"  // for definition of cellarray and FreeStore
+#include "dlcell.h" // for definition of triplet and DLCell
+
+class CellManager
+{
+  private:
+    FreeStore     m_store;  // multimap of triplet and cellarray
+    cellarray     MakeArray();
+
+    // disabled functionality follows
+    ~CellManager();  // undefined to prevent destruction
+    CellManager(const CellManager&);
+    CellManager&  operator=(const CellManager&);
+
+  public:
+    CellManager() {};
+    cellarray     GetArray(triplet dimensions, DLCell& requester);
+    void          FreeArray(triplet dimensions, cellarray array);
+    void          ClearStore();
+};
+
+#endif // CELLMANAGER_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/datapack.cpp b/src/datalike/datapack.cpp
new file mode 100644
index 0000000..5a968c4
--- /dev/null
+++ b/src/datalike/datapack.cpp
@@ -0,0 +1,722 @@
+// $Id: datapack.cpp,v 1.70 2013/10/25 17:00:52 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <functional>     // for plus<> used in CountOverallBaseFreqs
+#include <iostream>
+#include <numeric>        // for accumulate() used in CountOverallBaseFreqs
+#include <fstream>        // for WriteFlucFile()
+
+#include "datapack.h"
+#include "region.h"
+#include "registry.h"
+#include "constants.h"
+#include "individual.h"
+#include "xml_strings.h"  // for class xmlstr in DataPack::ToXML()
+#include "stringx.h"      // for MakeTag(), MakeCloseTag(), etc in
+                          //    DataPack::ToXML()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DataPack::~DataPack()
+{
+    /* EWFIX.P3 -- causes seg fault in static destruction on mac
+       EWFIX.P3 -- LEAK
+       // once and for all, clean out those DLCell arrays
+       DLCell::ClearStore();
+    */
+
+    // clean out the regions
+    vector<Region*>::iterator rit;
+    for(rit = m_regions.begin(); rit != m_regions.end(); ++rit)
+        delete *rit;
+} // DataPack::~DataPack
+
+//------------------------------------------------------------------------------------
+
+long DataPack::GetNTips() const
+{
+    long ntips = 0;
+    vector<Region*>::const_iterator rit;
+    for(rit = m_regions.begin(); rit != m_regions.end(); ++rit)
+        ntips += (*rit)->GetNTips();
+
+    return ntips;
+} // DataPack::GetNTips
+
+//------------------------------------------------------------------------------------
+
+long DataPack::GetNumTipPopulations() const
+{
+    set<string> popnames;
+    vector<Region*>::const_iterator rit;
+    for(rit = m_regions.begin(); rit != m_regions.end(); ++rit)
+        (*rit)->AddUniqueNamesTo(popnames);
+
+    long ntippops(popnames.size());
+    return ntippops;
+} // DataPack::GetNumTipPopulations
+
+//------------------------------------------------------------------------------------
+
+long DataPack::AddPartition(force_type forceType, const string& partname)
+{
+    // create the new force entry (if it doesn't already exist) and
+    // make sure the new partition entry doesn't already exist.
+    // NB:  the code doesn't look like it adds a new force, but due to
+    // std::map behavior, it does!
+    StringVec1d& partitions(m_partnames[forceType]);
+    long part, nparts = partitions.size();
+    for(part = 0; part < nparts; ++part)
+        if (partitions[part] == partname)
+            return part;
+
+    // new partition to add
+    partitions.push_back(partname);
+    return partitions.size()-1;
+} // DataPack::AddPartition
+
+//------------------------------------------------------------------------------------
+
+long DataPack::GetNPartitionsByForceType(force_type forceType) const
+{
+    PartitionNames::const_iterator pnames = m_partnames.find(forceType);
+    if (pnames != m_partnames.end()) return pnames->second.size();
+
+    // NB:  This is a hack, but what to do?  The function is widely used
+    // in both phase 1 and phase 2, and its appropriate answer for
+    // MIG depends on whether DIVMIG exists.
+    // (The following if statement is "If there's MIG and no DIVMIG".)
+
+    if (forceType == force_MIG && m_partnames.find(force_DIVMIG) == m_partnames.end())
+        return 1L;
+    else return 0L;
+} // DataPack::GetNPartitionsByForceType
+
+//------------------------------------------------------------------------------------
+
+string DataPack::GetPartitionName(force_type forceType, long index) const
+{
+    assert(static_cast<unsigned long>(index) < m_partnames.find(forceType)->second.size());
+    return m_partnames.find(forceType)->second[index];
+} // DataPack::GetPartitionName
+
+//------------------------------------------------------------------------------------
+
+long DataPack::GetPartitionNumber(force_type forceType, const string& partname) const
+{
+    const StringVec1d& partitions(m_partnames.find(forceType)->second);
+    long part, nparts = partitions.size();
+    for(part = 0; part < nparts; ++part)
+        if (partitions[part] == partname)
+            return part;
+
+    assert(false);  // failed to find anything!
+    return FLAGLONG;
+} // DataPack::GetPartitionNumber
+
+//------------------------------------------------------------------------------------
+
+StringVec1d DataPack::GetAllPartitionNames(force_type forceType) const
+{
+    StringVec1d emptyvec;
+    PartitionNames::const_iterator pnames = m_partnames.find(forceType);
+
+    if (pnames != m_partnames.end())
+    {
+        return pnames->second;
+    }
+    else if (forceType == force_MIG || forceType == force_DIVMIG)
+    {
+        return m_outputpopnames;
+    }
+
+    return emptyvec;
+} // DataPack::GetAllPartitionNames
+
+//------------------------------------------------------------------------------------
+
+StringVec1d DataPack::GetAllCrossPartitionNames() const
+{
+    vector<map<force_type,string> > ids = GetCrossPartitionIds(false);
+    StringVec1d xpnames;
+
+    vector<map<force_type,string> >::iterator idit;
+    for(idit = ids.begin(); idit != ids.end(); ++idit)
+    {
+        string xpname = idit->begin()->second;
+        if (idit->size() > 1) xpname += " (";
+        map<force_type,string>::iterator nameit;
+        map<force_type,string>::iterator startit(idit->begin());
+        for(nameit = ++startit; nameit != idit->end(); ++nameit)
+        {
+            if (nameit != startit) xpname += ", ";
+            xpname += nameit->second;
+        }
+        if (idit->size() > 1) xpname += ")";
+        xpnames.push_back(xpname);
+    }
+
+    return xpnames;
+} // DataPack::GetAllCrossPartitionNames
+
+//------------------------------------------------------------------------------------
+
+vector<map<force_type,string> >
+DataPack::GetCrossPartitionIds(bool ignoresinglepop) const
+{
+    if (m_partnames.empty())   // there are no partition forces
+    {
+        vector<map<force_type,string> > ids(1L);
+        if (!ignoresinglepop)
+        {
+            assert(m_outputpopnames.size() == 1);
+            // force_type ft = registry.GetForceSummary().GetNonLocalPartitionForceTag();
+            //
+            // ids[0].insert(make_pair(ft,m_outputpopnames[0]));
+            //
+            // cannot use the above lines as the ForceSummary doesn't yet exist in at least
+            // one of the function callers
+            ids[0].insert(make_pair(force_MIG,m_outputpopnames[0]));
+        }
+
+        return ids;   // return the saved population names
+    }
+
+    // setup separate collections of forcenames, first partition name and
+    // iterator-last partition name, all dimensioned by the number of forces
+    vector<force_type> fname;
+    vector<StringVec1d::const_iterator> startname;
+    vector<StringVec1d::const_iterator> endname;
+    PartitionNames::const_iterator forceit;
+    for(forceit = m_partnames.begin(); forceit != m_partnames.end(); ++forceit)
+    {
+        fname.push_back(forceit->first);
+        startname.push_back(forceit->second.begin());
+        endname.push_back(forceit->second.end());
+    }
+
+    // initialize the "current name" to the first partition name of each
+    // force
+    vector<StringVec1d::const_iterator> curname(startname);
+
+    // loop over each cross partition, building up and adding the appropriate
+    // map of names for each cross partition and then storing the map away
+    // in the return collection "ids".
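+    // (For example, with two partition forces whose partition names are {A, B}
+    // and {x, y}, the cross partitions come out ordered (A,x), (A,y), (B,x), (B,y).)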
+    long nxparts = GetNCrossPartitions();
+    vector<map<force_type,string> > ids(nxparts);
+    long xpart;
+    for(xpart = 0; xpart < nxparts; ++xpart)
+    {
+        map<force_type,string> id;
+        long force, nforces = m_partnames.size();
+        for(force = 0; force < nforces; ++force)
+            id.insert( make_pair( fname[force],*(curname[force])));
+        ids[xpart] = id;
+
+        // advance the string iterators for the next cross partition
+        for(force = nforces - 1; force > -1; --force)
+        {
+            if (++curname[force] != endname[force]) break;
+            curname[force] = startname[force];
+        }
+    }
+
+    return ids;
+} // DataPack::GetCrossPartitionIds
+
+//------------------------------------------------------------------------------------
+
+long DataPack::GetNPartitionForces() const
+{
+    return m_partnames.size();
+} // DataPack::GetNPartitionForces
+
+//------------------------------------------------------------------------------------
+
+long DataPack::GetNCrossPartitions() const
+{
+    long count = 1;
+    PartitionNames::const_iterator pnames;
+    for(pnames = m_partnames.begin(); pnames != m_partnames.end(); ++pnames)
+        count *= pnames->second.size();
+
+    return count;
+} // DataPack::GetNCrossPartitions()
+
+//------------------------------------------------------------------------------------
+
+long DataPack::GetNPartitions(unsigned long index) const
+{
+    assert(index < m_finalcount.size());
+    return m_finalcount[index];
+} // DataPack::GetNPartitions
+
+//------------------------------------------------------------------------------------
+
+LongVec1d DataPack::GetAllNPartitions() const
+{
+    return m_finalcount;
+} // DataPack::GetAllNPartitions
+
+//------------------------------------------------------------------------------------
+
+long DataPack::GetCrossPartitionIndex(const LongVec1d& membership) const
+{
+    if (membership.empty()) return 0L;
+
+    // The following code computes a cross-partition index based on
+    // the number of states in each partition force and the given
+    // membership vector.  The index is equivalent to writing the
+    // forces out in a table which varies the last force first
+    // (like counting in binary, except with arbitrary bases for each
+    // force) and then numbering the table entries starting at zero.
+
+    // How the code actually works:  It initializes the index with the
+    // state of the last force.  Then, for each of the remaining forces,
+    // it computes a multiplier based on all later forces and
+    // multiplies the force's state by this before adding it to the
+    // index.  (For example, if the last force has eight
+    // states, each entry of the next-to-last force is equivalent to
+    // a table of eight entries.)
+
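+    // Worked example: with partition counts {2, 3} and membership {1, 2},
+    // the index is 1*3 + 2 = 5.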
+    long lastforce = membership.size() - 1;
+    long index = membership[lastforce], force, totalcount = 1;
+    for (force = lastforce-1; force > -1; --force)
+    {
+        totalcount *= GetNPartitions(force+1);
+        index += totalcount * membership[force];
+    }
+
+    return index;
+} // DataPack::GetCrossPartitionIndex
+
+//------------------------------------------------------------------------------------
+
+map<force_type,string> DataPack::GetTipId(long xpartindex) const
+{
+    map<force_type,string> tipid;
+    LongVec1d membership(GetBranchMembership(xpartindex));
+
+    PartitionNames::const_iterator force;
+    for(force = m_partnames.begin(); force != m_partnames.end(); ++force)
+    {
+        force_type forceType = force->first;
+        long partitionindex = membership[registry.GetForceSummary().GetPartIndex(forceType)];
+        string partitionname = GetPartitionName(forceType,partitionindex);
+        tipid.insert(make_pair(forceType,partitionname));
+    }
+
+    return tipid;
+} // DataPack::GetTipId
+
+//------------------------------------------------------------------------------------
+
+LongVec1d DataPack::GetBranchMembership(long xpartindex) const
+{
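+    // This is the inverse of GetCrossPartitionIndex: e.g., with partition
+    // counts {2, 3}, xpartindex 5 decodes back to membership {1, 2}.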
+    long npartforces = GetNPartitionForces();
+
+    // first build up the divisor
+    long force, divisor = 1;
+    for(force = 0; force < npartforces; ++force)
+        divisor *= GetNPartitions(force);
+
+    // now build the vector
+    LongVec1d membership(npartforces);
+    long remainder = xpartindex;
+    for(force = 0; force < npartforces; ++force)
+    {
+        divisor /= GetNPartitions(force);
+        membership[force] = remainder / divisor;
+        remainder %= divisor;
+    }
+
+    return membership;
+} // DataPack::GetBranchMembership
+
+//------------------------------------------------------------------------------------
+
+void DataPack::SetFinalPartitionCounts(LongVec1d pcounts)
+{
+    m_finalcount = pcounts;
+} // DataPack::SetFinalPartitionCounts
+
+//------------------------------------------------------------------------------------
+
+void DataPack::SetRegion(Region* r)
+{
+    r->SetID(m_regions.size());
+    m_regions.push_back(r);
+} // DataPack::SetRegion
+
+//------------------------------------------------------------------------------------
+
+long DataPack::GetNMaxLoci() const
+{
+    long maxLoci = 0;
+    vector<Region*>::const_iterator iter;
+    for(iter=m_regions.begin(); iter != m_regions.end(); iter++)
+    {
+        long numLoci = (*iter)->GetNloci();
+        if(numLoci > maxLoci)
+        {
+            maxLoci = numLoci;
+        }
+    }
+    return maxLoci;
+}
+
+//------------------------------------------------------------------------------------
+
+long DataPack::GetNumMovingLoci(long reg) const
+{
+    return GetRegion(reg).GetNumMovingLoci();
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d DataPack::GetAllRegionNames() const
+{
+    StringVec1d rnames;
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+        rnames.push_back((*region)->GetRegionName());
+
+    return rnames;
+} // DataPack::GetAllRegionNames
+
+//------------------------------------------------------------------------------------
+
+StringVec2d DataPack::GetAllLociNames() const
+{
+    StringVec2d lnames;
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+        lnames.push_back((*region)->GetAllLociNames());
+
+    return lnames;
+} // DataPack::GetAllLociNames
+
+//------------------------------------------------------------------------------------
+
+StringVec2d DataPack::GetAllLociDataTypes() const
+{
+    StringVec2d lnames;
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+        lnames.push_back((*region)->GetAllLociDataTypes());
+
+    return lnames;
+} // DataPack::GetAllLociDataTypes
+
+//------------------------------------------------------------------------------------
+
+StringVec2d DataPack::GetAllLociMuRates() const
+{
+    StringVec2d lnames;
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+        lnames.push_back((*region)->GetAllLociMuRates());
+
+    return lnames;
+} // DataPack::GetAllLociMuRates
+
+//------------------------------------------------------------------------------------
+
+StringVec1d DataPack::ToXML(unsigned long nspaces) const
+{
+    StringVec1d xmllines;
+    string line = MakeIndent(MakeTag(xmlstr::XML_TAG_DATA),nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+    {
+        StringVec1d regxml((*region)->ToXML(nspaces));
+        xmllines.insert(xmllines.end(),regxml.begin(),regxml.end());
+    }
+    nspaces -= INDENT_DEPTH;
+
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_DATA),nspaces);
+    xmllines.push_back(line);
+
+    return xmllines;
+} // DataPack::ToXML
+
+//------------------------------------------------------------------------------------
+
+bool DataPack::CanHapArrange() const
+{
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+    {
+        if ((*region)->CanHaplotype()) return true;
+    }
+    return false;
+} // DataPack::CanHapArrange
+
+//------------------------------------------------------------------------------------
+// True if you have at least two linked markers.
+
+bool DataPack::CanMeasureRecombination() const
+{
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+    {
+        size_t nMarkersThisRegion = 0;
+
+        long nLoci = (*region)->GetNloci();
+        for(long i=0; i < nLoci ; i++)
+        {
+            const Locus & locusRef = (*region)->GetLocus(i);
+            if(! locusRef.IsMovable())
+            {
+                long nMarkersThisLocus = locusRef.GetNmarkers();
+                if(nMarkersThisLocus > 1)
+                {
+                    return true;
+                }
+                nMarkersThisRegion += nMarkersThisLocus;
+            }
+        }
+
+        if (nMarkersThisRegion > 1 )
+        {
+            return true;
+        }
+    }
+    return false;
+} // DataPack::CanMeasureRecombination
+
+//------------------------------------------------------------------------------------
+
+bool DataPack::AnyMapping() const
+{
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+    {
+        if ((*region)->AnyMapping()) return true;
+    }
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+void DataPack::RemoveUneededPartitions()
+{
+    // Use in Phase 1 only.
+    // Of all partition forces, only force_MIG might have had partitions mentioned
+    // in the XML and then turned out to have only one partition, meaning the
+    // force was not, in fact, active.
+
+    force_type ft = force_MIG;
+    if (GetNPartitionsByForceType(ft) == 1)
+    {
+        vector<Region*>::const_iterator region;
+        for(region = m_regions.begin(); region != m_regions.end(); ++region)
+            (*region)->RemovePartitionFromLoci(ft);
+        m_outputpopnames = m_partnames[ft];
+        m_partnames.erase(ft);
+    }
+} // DataPack::RemoveUneededPartitions
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d DataPack::CountOverallBaseFreqs() const
+{
+    DoubleVec1d basecnts(BASES,0.0);
+    double nbases(0.0);
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+    {
+        DoubleVec1d regbasecnts((*region)->CountNBases());
+        transform(basecnts.begin(),basecnts.end(),regbasecnts.begin(),
+                  basecnts.begin(), plus<double>());
+        double initial(0.0);
+        nbases += accumulate(regbasecnts.begin(),regbasecnts.end(),initial);
+    }
+
+    DoubleVec1d basefreqs(BASES);
+    transform(basecnts.begin(),basecnts.end(),basefreqs.begin(),
+              bind2nd(divides<double>(),nbases));
+
+    return basefreqs;
+} // DataPack::CountOverallBaseFreqs
+
+//------------------------------------------------------------------------------------
+
+DoubleVec2d DataPack::CreateParamScalarVector() const
+{
+    DoubleVec2d pscalars;
+    for (unsigned long reg=0; reg<m_regions.size(); reg++)
+    {
+        ForceParameters fp(reg);
+        pscalars.push_back(fp.GetRegionalScalars());
+    }
+
+    return pscalars;
+} // DataPack::CreateParamScalarVector
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d DataPack::GetRegionalPopSizeScalars() const
+{
+    DoubleVec1d pscalars;
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+        pscalars.push_back((*region)->GetEffectivePopSize());
+
+    return pscalars;
+} // DataPack::GetRegionalPopSizeScalars
+
+//------------------------------------------------------------------------------------
+
+LongVec1d DataPack::GetNumAllLociPerRegion() const
+{
+    LongVec1d nloci;
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+        nloci.push_back((*region)->GetNumAllLoci());
+
+    return nloci;
+}
+
+//------------------------------------------------------------------------------------
+
+bool DataPack::HasMultiLocus() const
+{
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+    {
+        if ((*region)->GetNumAllLoci() > 1)
+        {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+bool DataPack::IsDuplicateRegionName(const string& newname) const
+{
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+    {
+        if ((*region)->GetRegionName() == newname) return true;
+    }
+
+    return false;
+} // DataPack::IsDuplicateRegionName
+
+//------------------------------------------------------------------------------------
+
+Region* DataPack::GetRegionByName(const string& newname)
+{
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+    {
+        if ((*region)->GetRegionName() == newname) return *region;
+    }
+
+    return NULL;
+} // DataPack::GetRegionByName
+
+//------------------------------------------------------------------------------------
+// WriteFlucFile is only used when JSIM is defined (as of 3/30/06 --LS)
+
+void DataPack::WriteFlucFile(const string& outputfilename) const
+{
+    ofstream ofile;
+    ofile.open(outputfilename.c_str(),ios::out | ios::trunc);
+    ofile << m_regions.size() << endl;
+    ofile.close();
+
+    vector<Region*>::const_iterator region;
+    for(region = m_regions.begin(); region != m_regions.end(); ++region)
+    {
+        (*region)->WriteFlucFile(outputfilename,false);
+    }
+} // DataPack::WriteFlucFile
+
+//------------------------------------------------------------------------------------
+// doesn't handle spacing or loci
+// WritePopulationXMLFiles used only when JSIM defined (3/30/06 --LS)
+
+void DataPack::WritePopulationXMLFiles(bool separate_regions) const
+{
+    if (separate_regions)
+    {
+        vector<Region*>::const_iterator region;
+        for(region = m_regions.begin(); region != m_regions.end(); ++region)
+        {
+            (*region)->WritePopulationXMLFiles();
+        }
+    }
+    else
+    {
+        long xmlindent(INDENT_DEPTH);
+        StringVec3d xmldata; // region X pop X data
+        vector<Region*>::const_iterator region;
+        for(region = m_regions.begin(); region != m_regions.end(); ++region)
+        {
+            xmldata.push_back((*region)->MakeByPopXML(xmlindent));
+        }
+
+        // write the all-populations file
+        ofstream afile;
+        afile.open("popall", ios::out | ios::trunc);
+        afile << "<lamarc>" << endl;
+        afile << "<data>" << endl;
+        long reg, nregions(GetNRegions());
+        for(reg = 0; reg < nregions; ++reg)
+        {
+            m_regions[reg]->WriteToXMLFileUsing(afile,xmldata[reg],true);
+        }
+        afile << "</data>" << endl;
+#ifndef JSIM
+        afile << "</lamarc>" << endl;
+#endif
+        afile.close();
+
+        // write the single population files
+        force_type ft = registry.GetForceSummary().GetNonLocalPartitionForceTag();
+        long pop, npops = GetNPartitionsByForceType(ft);
+        for(pop = 0; pop < npops; ++pop)
+        {
+            string fname("pop"+ToString(pop));
+            ofstream ofile;
+            ofile.open(fname.c_str(), ios::out | ios::trunc);
+
+            ofile << "<lamarc>" << endl;
+            ofile << "<data>" << endl;
+            for(reg = 0; reg < nregions; ++reg)
+            {
+                m_regions[reg]->WriteToXMLFileUsing(ofile,xmldata[reg][pop]);
+            }
+            ofile << "</data>" << endl;
+#ifndef JSIM
+            ofile << "</lamarc>" << endl;
+#endif
+            ofile.close();
+        }
+    }
+} // DataPack::WritePopulationXMLFiles
+
+//____________________________________________________________________________________
diff --git a/src/datalike/datapack.h b/src/datalike/datapack.h
new file mode 100644
index 0000000..af3466b
--- /dev/null
+++ b/src/datalike/datapack.h
@@ -0,0 +1,152 @@
+// $Id: datapack.h,v 1.58 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// In general, a DataPack owns a container of pointer to Regions, which
+// own a container of Locus objects, which own a container of TipData objects.
+//
+// DataPack--a storage class, with a container of pointers to all
+//    Regions programwide.  DataPack owns what the pointers point to.
+//    It also tracks some population level data, number of populations
+//    and population names.  It does not track any migration specific
+//    info (which is handled by class ForceSummary).
+//
+// Written by Jim Sloan, heavily revised by Jon Yamato
+// 2002/01/03 changed Tipdata::data to a vector for generality--Mary Kuhner
+
+// NB  This code distinguishes between "markers" and "sites".  A marker
+// is a site for which we have data.  In SNP data, for example, every base
+// pair is a site, but only the SNPs are markers.  Data likelihoods are
+// calculated on markers; recombination probabilities are calculated on
+// sites (links, actually).  Please keep these straight!
+
+#ifndef DATAPACK_H
+#define DATAPACK_H
+
+#include <cassert>                      // May be needed for inline definitions.
+#include <memory>
+#include <stdlib.h>
+#include <string>
+#include <vector>
+
+#include "constants.h"
+#include "partition.h"
+#include "types.h"
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+class Region;
+
+//------------------------------------------------------------------------------------
+
+class DataPack
+{
+  private:
+    DataPack(const DataPack&);          // undefined
+    DataPack& operator=(const DataPack&); // undefined
+
+    vector<Region *> m_regions;
+
+    PartitionNames m_partnames;
+    StringVec1d m_outputpopnames;       // In the single population case save the MIG/DIVMIG partition names here.
+    LongVec1d m_finalcount;             // This is set by SetFinalPartitionCount.
+
+  public:
+
+    DataPack() {};
+    ~DataPack();
+
+    long GetNTips() const;              // Return the total number of tips in all regions.
+
+    long AddPartition(force_type, const string& partname);
+
+    long GetNPartitionsByForceType(force_type) const;
+    string GetPartitionName(force_type, long index) const;
+    long GetPartitionNumber(force_type, const string& name) const;
+    StringVec1d GetAllPartitionNames(force_type) const;
+    StringVec1d GetAllCrossPartitionNames() const;
+
+    // This function is provided to interface with tipdata objects.  It is also used by GetAllCrossPartitionNames,
+    // which needs to pass false as an argument (it cares about partition forces with only one partition).
+    std::vector<std::map<force_type,std::string> >
+    GetCrossPartitionIds(bool ignoresinglepop = true) const;
+
+    // This function will return 0 if the membership vector is empty.
+    // It does this to interface with the BranchBuffer and Branch::ScoreEvent.
+    long GetCrossPartitionIndex(const LongVec1d& membership) const;
+    LongVec1d GetBranchMembership(long xpartindex) const;
+    std::map<force_type,string> GetTipId(long xpartindex) const;
+
+    long GetNPartitionForces() const;
+    long GetNCrossPartitions() const;
+    long GetNPartitions(unsigned long index) const;
+    LongVec1d GetAllNPartitions() const;
+    long GetNumTipPopulations() const;
+
+    void SetFinalPartitionCounts(LongVec1d pcounts); // Called by ForceSummary::SummarizeData
+
+    void SetRegion(Region* r);
+
+    long        GetNMaxLoci()             const;
+    long        GetNRegions()             const {return m_regions.size();};
+    vector<Region*>& GetAllRegions()            {return m_regions;};
+    const vector<Region*>& GetAllRegions() const {return m_regions;};
+
+    Region&     GetRegion(long n)               {assert(n < static_cast<long>(m_regions.size()));
+        return *m_regions[n];};
+    const Region&     GetRegion(long n)   const {assert(n < static_cast<long>(m_regions.size()));
+        return *m_regions[n];};
+
+    long GetNumMovingLoci(long reg) const;
+    StringVec1d GetAllRegionNames() const;
+    StringVec2d GetAllLociNames() const; // dim: region X locus
+    StringVec2d GetAllLociDataTypes() const; // dim: region X locus
+    StringVec2d GetAllLociMuRates() const; // dim: region X locus
+
+    StringVec1d ToXML(unsigned long nspaces) const; // used by XMLOutfile::Display()
+
+    // The next function is a helper function for the Menu subclass dealing with rearrangement.
+    bool        CanHapArrange()           const;
+
+    // True if you have at least two linked markers.
+    bool        CanMeasureRecombination()   const;
+
+    // This function is for the reportpage.
+    bool        AnyMapping()              const;
+
+    // Helper function for the XML-parser.
+    void        RemoveUneededPartitions();
+
+    // Helper function for the F84 data model.
+    DoubleVec1d CountOverallBaseFreqs() const;
+
+    // Helper function for lamarc FinishRegistry().
+    DoubleVec2d CreateParamScalarVector() const;
+
+    // Helper function for reportpage filling.
+    DoubleVec1d GetRegionalPopSizeScalars() const;
+    LongVec1d   GetNumAllLociPerRegion() const;
+    bool HasMultiLocus() const;
+
+    // Helper functions for XML parsing.
+    bool IsDuplicateRegionName(const string& newname) const;
+    Region* GetRegionByName(const string& name);
+
+    // The following two functions are used only when JSIM is defined:
+    // Parallel run of fluctuate.
+    void WriteFlucFile(const string& outputfilename) const;
+    //
+    // Doesn't handle loci or spacing.
+    void WritePopulationXMLFiles(bool separate_regions) const;
+};
+
+#endif // DATAPACK_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/datatype.cpp b/src/datalike/datatype.cpp
new file mode 100644
index 0000000..e412465
--- /dev/null
+++ b/src/datalike/datatype.cpp
@@ -0,0 +1,707 @@
+// $Id: datatype.cpp,v 1.38 2013/10/25 17:00:52 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <cstring>
+
+#include "datatype.h"
+#include "dlcalc.h"
+#include "dlcell.h"
+#include "dlmodel.h"
+#include "datapack.h"
+#include "locus.h"
+#include "errhandling.h"
+#include "stringx.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using std::vector;
+
+//------------------------------------------------------------------------------------
+
+string ToString(data_type dtype)
+{
+    switch(dtype)
+    {
+        case dtype_DNA:
+            return "DNA";
+        case dtype_SNP:
+            return "SNP";
+        case dtype_msat:
+            return "Microsatellite";
+        case dtype_kallele:
+            return "K-Allele";
+    }
+
+    assert(false); //uncaught data type;
+    return "unknown";
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(data_source dsource)
+{
+    switch(dsource)
+    {
+        case dsource_study:
+            return "Study";
+            break;
+        case dsource_panel:
+            return "Panel";
+            break;
+    }
+
+    assert(false); // uncaught data source
+    return "unknown";
+}
+
+//------------------------------------------------------------------------------------
+
+long DataType::GetPairwiseMarkers(const Locus& locus) const
+{
+    return locus.GetNmarkers();
+} // DataType::GetPairwiseMarkers
+
+//------------------------------------------------------------------------------------
+
+double DataType::DifferenceWithinPop(StringVec2d::const_iterator
+                                     begin1, StringVec2d::const_iterator end) const
+{
+    double ndiffs = 0.0;
+
+    // half triangular matrix
+    StringVec2d::const_iterator group1 = begin1;
+    StringVec2d::const_iterator group2;
+
+    for ( ; group1 != end; ++group1)
+    {
+        group2 = group1;
+        ++group2;  // workaround for compilers that don't have += for iterators
+
+        for ( ; group2 != end; ++group2)
+        {
+            StringVec1d::const_iterator site1, site2;
+            for (site1 = group1->begin(), site2 = group2->begin();
+                 site1 != group1->end();
+                 ++site1, ++site2)
+            {
+                if (!IsEquivalent(*site1, *site2)) ++ndiffs;
+            }
+        }
+    }
+
+    return ndiffs;
+} // DifferenceWithinPop
+
+//------------------------------------------------------------------------------------
+
+double DataType::DifferenceBetweenPops(StringVec2d::const_iterator begin1,
+                                       StringVec2d::const_iterator end1, StringVec2d::const_iterator begin2,
+                                       StringVec2d::const_iterator end2) const
+{
+    double ndiffs = 0.0;
+    StringVec2d::const_iterator group1 = begin1;
+    StringVec2d::const_iterator group2 = begin2;
+
+    for ( ; group1 != end1; ++group1)
+    {
+        for ( ; group2 != end2; ++group2)
+        {
+            StringVec1d::const_iterator site1, site2;
+            for (site1 = group1->begin(), site2 = group2->begin();
+                 site1 != group1->end();
+                 ++site1, ++site2)
+            {
+                if (!IsEquivalent(*site1, *site2)) ++ndiffs;
+            }
+        }
+    }
+
+    return ndiffs;
+} // DifferenceBetweenPops
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d DataType::CalcFw(const Locus& locus) const
+{
+    // NB This cannot be used with DIVMIG
+    StringVec3d data(locus.GetPartitionGeneticData(force_MIG));
+    DoubleVec1d fwithin(data.size(),0.0);
+    DoubleVec1d::iterator fw;
+    StringVec3d::const_iterator pop;
+
+    for(pop = data.begin(), fw = fwithin.begin(); pop != data.end(); ++pop, ++fw)
+    {
+        double nn = GetPairwiseMarkers(locus) * (pop->size()*pop->size() - pop->size())/2.0;
+        double within = DifferenceWithinPop(pop->begin(), pop->end());
+
+        if (nn > 0.0) within /= nn;
+
+        *fw = 1.0 - within;
+    }
+
+    return fwithin;
+} // NucleotideType::CalcFw
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d DataType::CalcFb(const Locus& locus) const
+{
+    // NB This cannot be used with DIVMIG
+    StringVec3d data(locus.GetPartitionGeneticData(force_MIG));
+    long npops = data.size();
+    DoubleVec1d fbetween(npops*npops,0.0);
+    long pop1, pop2;
+
+    for(pop1 = 0; pop1 < npops; ++pop1)
+    {
+        for(pop2 = pop1 + 1; pop2 < npops; ++pop2)
+        {
+            double nn = GetPairwiseMarkers(locus) * data[pop1].size() * data[pop2].size();
+            double between = DifferenceBetweenPops(data[pop1].begin(),
+                                                   data[pop1].end(), data[pop2].begin(),
+                                                   data[pop2].end());
+
+
+            if (nn > 0.0) between /= nn;
+
+            fbetween[pop1 * npops + pop2] = 1.0 - between;
+            fbetween[pop2 * npops + pop1] = 1.0 - between;
+        }
+    }
+
+    return fbetween;
+} // NucleotideType::CalcFb
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d DataType::CalcXFw(const Locus& locus,const StringVec3d& data) const
+{
+    DoubleVec1d fwithin;
+
+    StringVec3d::const_iterator part;
+    for(part = data.begin(); part != data.end(); ++part)
+    {
+        double nn = GetPairwiseMarkers(locus) * (part->size()*part->size() - part->size())/2.0;
+        double within = DifferenceWithinPop(part->begin(), part->end());
+
+        if (nn > 0.0)
+        {
+            within /= nn;
+        }
+
+        fwithin.push_back(1.0 - within);
+    }
+
+    return fwithin;
+} // NucleotideType::CalcXFw
+
+//------------------------------------------------------------------------------------
+
+// argument "data" is locus.GetAllCrossPartitionGeneticData()
+DoubleVec1d DataType::CalcXFb(const Locus& locus, const StringVec3d& data) const
+{
+    long nparts = data.size();
+    DoubleVec1d fbetween(nparts*nparts,0.0);
+    long part1, part2;
+
+    for(part1 = 0; part1 < nparts; ++part1)
+    {
+        for(part2 = part1 + 1; part2 < nparts; ++part2)
+        {
+            double nn = GetPairwiseMarkers(locus) * data[part1].size() * data[part2].size();
+            double between = DifferenceBetweenPops(data[part1].begin(),
+                                                   data[part1].end(), data[part2].begin(),
+                                                   data[part2].end());
+
+            if (nn > 0.0) between /= nn;
+
+            fbetween[part1 * nparts + part2] = 1.0 - between;
+            fbetween[part2 * nparts + part1] = 1.0 - between;
+        }
+    }
+
+    return fbetween;
+} // NucleotideType::CalcXFb
+
+//------------------------------------------------------------------------------------
+
+model_type NucleotideType::DefaultModelType()
+{
+    return F84;
+} // DefaultModelType
+
+//------------------------------------------------------------------------------------
+
+long DataType::CalcNVarMarkers(const StringVec2d& data) const
+{
+    long nvarmarkers = 0;
+    long marker, nmarkers = data[0].size();
+
+    for (marker = 0; marker < nmarkers; ++ marker)
+    {
+        bool varying = false;
+        long indiv, nindiv = data.size();
+
+        for(indiv = 1; indiv < nindiv && !varying; ++indiv)
+            if (!IsEquivalent(data[0][marker],data[indiv][marker]))
+                varying = true;
+
+        if (varying) ++nvarmarkers;
+    }
+
+    return nvarmarkers;
+} // CalcNVarMarkers
+
+//------------------------------------------------------------------------------------
+
+wakestats DataType::CalcNPairDiffs(const StringVec2d& data) const
+{
+    wakestats sums(0L,0L);
+    StringVec2d::const_iterator indiv1;
+    for(indiv1 = data.begin(); indiv1 != data.end(); ++indiv1)
+    {
+        StringVec2d::const_iterator indiv2;
+        for(indiv2 = indiv1+1; indiv2 != data.end(); ++indiv2)
+        {
+            assert(indiv1->size() == indiv2->size());
+            long pos, npos = indiv1->size(), count = 0;
+            for(pos = 0; pos < npos; ++pos)
+                count += ((IsEquivalent((*indiv1)[pos],(*indiv2)[pos])) ?
+                          0L : 1L);
+            sums.first += count;
+            sums.second += count * count;
+        }
+    }
+
+    return sums;
+} // CalcNPairDiffs
+
+//------------------------------------------------------------------------------------
+
+bool DataType::IsEquivalent(const string& data1, const string& data2) const
+{
+    return (data1 == data2);
+} // IsEquivalent
+
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+bool NucleotideType::Proofread(const string &raw, StringVec1d &clean,
+                               string& baddata) const
+{
+    long rawsize, position;
+    char ch;
+
+    clean.clear();
+    rawsize = raw.size();
+
+    for (position = 0; position < rawsize; ++position)
+    {
+        ch = toupper(raw[position]);
+        if (isspace(ch))
+            continue;
+
+        if (!strchr("ACGTUMRWSYKVHDBNOX?-", ch))
+        {
+            baddata = ToString(ch);
+            return false;
+        }
+
+        string cleanstr;
+        cleanstr += ch;
+        clean.push_back(cleanstr);
+    }
+
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
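+// For example, "t" and "U" compare as equivalent (both mean T), as do "N" and
+// "-" (both mean unknown); "A" and "G" do not.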
+bool NucleotideType::IsEquivalent(const string& data1, const string& data2) const
+{
+    if (CaselessStrCmp(data1, data2)) return true;
+
+    // otherwise, they might still be equivalent due to code ambiguities
+
+    string unknowns = "NOXnox?-";  // all of these mean 'unknown'
+
+    bool isunknown1 = (data1.find_first_of(unknowns) != string::npos);
+    bool isunknown2 = (data2.find_first_of(unknowns) != string::npos);
+
+    if (isunknown1 && isunknown2) return true;
+
+    string ts = "TUtu";            // all of these mean T
+
+    bool ist1 = (data1.find_first_of(ts) != string::npos);
+    bool ist2 = (data2.find_first_of(ts) != string::npos);
+
+    if (ist1 && ist2) return true;
+
+    return false;
+} // NucleotideType::IsEquivalent
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DNASeqType::DNASeqType()
+{
+    m_name   = "DNA Sequence";
+    m_xmltag = lamarcstrings::DNA;
+}
+
+//------------------------------------------------------------------------------------
+
+LocusCell DNASeqType::CreateDLCell(const Locus& locus) const
+{
+    long len = locus.GetNmarkers();
+    long cat = locus.GetDataModel()->GetNcategories();
+    vector<Cell_ptr> newcells;
+    Cell_ptr cell(new DNACell(len, cat));
+
+    newcells.push_back(cell);
+
+    return LocusCell(newcells);
+} // CreateDLCell
+
+//------------------------------------------------------------------------------------
+
+LocusCell DNASeqType::CreateInitializedDLCell(const Locus& locus,
+                                              const StringVec1d& tdata) const
+{
+    long len = locus.GetNmarkers();
+    long cat = locus.GetDataModel()->GetNcategories();
+    vector<Cell_ptr> newcells;
+    Cell_ptr cell(new DNACell(len, cat));
+
+    cell->Initialize(tdata, locus.GetDataModel());
+    newcells.push_back(cell);
+
+    return LocusCell(newcells);
+} // CreateInitializedDLCell
+
+//------------------------------------------------------------------------------------
+
+DLCalc_ptr DNASeqType::CreateDLCalculator(const Locus& locus) const
+{
+    return DLCalc_ptr(new DNACalculator(locus));
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+SNPDataType::SNPDataType()
+{
+    m_name   = "SNP data";
+    m_xmltag = lamarcstrings::SNP;
+}
+
+//------------------------------------------------------------------------------------
+
+long SNPDataType::GetPairwiseMarkers(const Locus& locus) const
+{
+    return locus.GetNsites();
+} // SNPDataType::GetPairwiseMarkers
+
+//------------------------------------------------------------------------------------
+
+LocusCell SNPDataType::CreateDLCell(const Locus& locus) const
+{
+    long len = locus.GetNmarkers();
+    int  cat = locus.GetDataModel()->GetNcategories();
+    vector<Cell_ptr> newcells;
+
+    // A SNP data type actually has *two* DLCells, one for variable and one
+    // for non-variable sites
+    Cell_ptr variant(new DNACell(len, cat));
+    Cell_ptr invariant(new SNPCell(5, cat));
+
+    newcells.push_back(variant);
+    newcells.push_back(invariant);
+    return LocusCell(newcells);
+}
+
+//------------------------------------------------------------------------------------
+
+LocusCell SNPDataType::CreateInitializedDLCell(const Locus& locus,
+                                               const StringVec1d& tdata) const
+{
+    long len = locus.GetNmarkers();
+    int  cat = locus.GetDataModel()->GetNcategories();
+    vector<Cell_ptr> newcells;
+
+    // A SNP data type actually has *two* DLCells, one for variable and one
+    // for non-variable sites
+
+    Cell_ptr variant(new DNACell(len, cat));
+    variant->Initialize(tdata, locus.GetDataModel());
+
+    Cell_ptr invariant(new SNPCell(5, cat));
+    invariant->Initialize(tdata, locus.GetDataModel());
+
+    newcells.push_back(variant);
+    newcells.push_back(invariant);
+
+    return LocusCell(newcells);
+} // CreateInitializedDLCell
+
+//------------------------------------------------------------------------------------
+
+DLCalc_ptr SNPDataType::CreateDLCalculator(const Locus& locus) const
+{
+    return DLCalc_ptr(new SNPCalculator(locus));
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+LocusCell AlleleType::CreateDLCell(const Locus& locus) const
+{
+    long len = locus.GetNmarkers();
+    long cat = locus.GetDataModel()->GetNcategories();
+    long bins = locus.GetDataModel()->GetNbins();
+    vector<Cell_ptr> newcells;
+    Cell_ptr cell(new AlleleCell(len, cat, bins));
+
+    newcells.push_back(cell);
+
+    return LocusCell(newcells);
+} // CreateDLCell
+
+//------------------------------------------------------------------------------------
+
+LocusCell AlleleType::CreateInitializedDLCell(const Locus& locus,
+                                              const StringVec1d& tdata) const
+{
+    long len = locus.GetNmarkers();
+    long cat = locus.GetDataModel()->GetNcategories();
+    long bins = locus.GetDataModel()->GetNbins();
+    vector<Cell_ptr> newcells;
+    Cell_ptr cell(new AlleleCell(len, cat, bins));
+
+    cell->Initialize(tdata, locus.GetDataModel());
+    newcells.push_back(cell);
+
+    return LocusCell(newcells);
+} // CreateInitializedDLCell
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+MSType::MSType()
+    : AlleleType()
+{
+    m_name   = "Microsatellite";
+    m_xmltag = lamarcstrings::MICROSAT;
+}
+
+//------------------------------------------------------------------------------------
+
+bool MSType::Proofread(const string &raw, StringVec1d &clean,
+                       string& baddata) const
+{
+    unsigned long index;
+    char c;
+    string allele;
+
+    for (index = 0; index < raw.size(); ++index)
+    {
+        c = raw[index];
+        if (!isspace(c)) allele += c;
+        else
+        {
+            if (!allele.empty()) clean.push_back(allele);
+            allele.erase();
+        }
+    }
+
+    // needed in case the last allele is not followed by spaces....
+    if (!allele.empty()) clean.push_back(allele);
+
+    // error checking:  microsat data must be either an integer or ?
+    for (index = 0; index < clean.size(); ++index)
+    {
+        if (clean[index] == "?") continue;  // means unknown data
+        if (!IsInteger(clean[index]))
+        {
+            baddata = clean[index];
+            return false;  // illegal data found
+        }
+    }
+
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+DLCalc_ptr MSType::CreateDLCalculator(const Locus& locus) const
+{
+    return DLCalc_ptr(new AlleleCalculator(locus));
+}
+
+//------------------------------------------------------------------------------------
+
+model_type MSType::DefaultModelType()
+{
+    return Brownian;
+} // DefaultModelType
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+KAlleleType::KAlleleType()
+    : AlleleType()
+{
+    m_name   = "K-Allele";
+    m_xmltag = lamarcstrings::KALLELE;
+} // KAlleleType constructor
+
+//------------------------------------------------------------------------------------
+
+bool KAlleleType::Proofread(const string& raw, StringVec1d& clean,
+                            string& baddata) const
+{
+    unsigned long index;
+    char c;
+    string allele;
+
+    for (index = 0; index < raw.size(); ++index)
+    {
+        c = raw[index];
+        if (!isspace(c)) allele += c;
+        else
+        {
+            if (!allele.empty()) clean.push_back(allele);
+            allele.erase();
+        }
+    }
+
+    // needed in case the last allele is not followed by spaces....
+    if (!allele.empty()) clean.push_back(allele);
+
+    // no error checking possible, as any character but
+    // whitespace is allowable
+
+    return true;
+} // Proofread
+
+//------------------------------------------------------------------------------------
+
+DLCalc_ptr KAlleleType::CreateDLCalculator(const Locus& locus) const
+{
+    return DLCalc_ptr(new AlleleCalculator(locus));
+} // CreateDLCalculator
+
+//------------------------------------------------------------------------------------
+
+model_type KAlleleType::DefaultModelType()
+{
+    return KAllele;
+} // DefaultModelType
+
+//------------------------------------------------------------------------------------
+
+
+// Free helper function
+
+DataType* CreateDataType(const string tag)
+{
+    if (CaselessStrCmp(tag, lamarcstrings::DNA)) return new DNASeqType;
+    if (CaselessStrCmp(tag, lamarcstrings::SNP)) return new SNPDataType;
+    if (CaselessStrCmp(tag, lamarcstrings::MICROSAT)) return new MSType;
+    if (CaselessStrCmp(tag, lamarcstrings::KALLELE)) return new KAlleleType;
+    data_error e("Unknown data type encountered: " + tag);
+    throw e;
+} // CreateDataType
+
+
+//------------------------------------------------------------------------------------
+
+bool ModelTypeAcceptsDataType(model_type modelType, data_type dtype)
+{
+    switch(modelType)
+    {
+        case F84:
+        case GTR:
+            // Nucleotide models can accept only DNA and SNP data types.
+            switch(dtype)
+            {
+                case dtype_DNA:
+                case dtype_SNP:
+                    return true;
+                case dtype_msat:
+                case dtype_kallele:
+                    return false;
+            }
+            assert(false); //uncaught data type.
+            return false;
+            break;
+        case Brownian:
+        case Stepwise:
+        case MixedKS:
+            // Brownian model is appropriate only for microsat data because it
+            //  relies on ordering of alleles.
+            // The stepwise model can accept only microsatellite data.
+            // The MixedKS model includes a Stepwise aspect, and therefore has
+            //  the same restrictions.
+            switch(dtype)
+            {
+                case dtype_msat:
+                    return true;
+                case dtype_DNA:
+                case dtype_SNP:
+                case dtype_kallele:
+                    return false;
+            }
+            assert(false); //uncaught data type.
+            return false;
+            break;
+        case KAllele:
+            // KAllele model can be used for electrophoretic, phenotypic,
+            // or possibly microsatellite types.
+            switch(dtype)
+            {
+                case dtype_msat:
+                case dtype_kallele:
+                    return true;
+                case dtype_DNA:
+                case dtype_SNP:
+                    return false;
+            }
+            assert(false); //uncaught data type.
+            return false;
+            break;
+    };
+
+    assert(false); //uncaught data model.
+    return false;
+}
+
+model_type DefaultModelForDataType(data_type dtype)
+{
+    switch(dtype)
+    {
+        case dtype_DNA:
+        case dtype_SNP:
+            return F84;
+        case dtype_msat:
+            return Brownian;
+        case dtype_kallele:
+            return KAllele;
+    }
+
+    assert(false); //uncaught data type.
+    return F84;
+}
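+
+// Illustrative consequence of the two switches above:
+// DefaultModelForDataType(dtype_msat) returns Brownian, while
+// ModelTypeAcceptsDataType(Brownian, dtype_DNA) returns false, so a Brownian
+// model may not be paired with DNA data.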
+
+//____________________________________________________________________________________
diff --git a/src/datalike/datatype.h b/src/datalike/datatype.h
new file mode 100644
index 0000000..f71daf8
--- /dev/null
+++ b/src/datalike/datatype.h
@@ -0,0 +1,228 @@
+// $Id: datatype.h,v 1.29 2011/03/07 06:08:48 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*********************************************************************
+ This class is the key control class for polymorphism based on
+ the type of the input data (SNP, DNA, microsatellite, etc.)  It is
+ subclassed along datatype lines.
+
+ Each DataType should have a short tag (related to XML file reading)
+ and a textual name for its type.  It should have expertise to
+ Proofread() raw input data and factory functions to create a
+ datatype-appropriate DLCell and DLCalculator.
+
+ A free factory function, CreateDataType, can be used to create a DataType
+ object corresponding to a tag.  Whenever a new DataType subclass
+ is added this function must be updated.
+
+ DataType written by Jim Sloan, revised by Mary Kuhner
+
+ added quickcalculator support Jon Yamato 2001/09/12:
+    CalcNVarMarkers(), CalcNPairDiffs(), CalcFw(), CalcFb()
+ deleted DataTranslator helper class Mary Kuhner 2002/01/02
+    (this functionality now in DataModel)
+ added m_source and supporting code Jim McGill 2010/03/16
+******************************************************************/
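+
+/* Illustrative usage sketch; "someLocus" and "rawSeq" are hypothetical
+   placeholders for a configured Locus and a raw data string, everything else
+   is declared below in this header:
+
+       DataType* dt = CreateDataType(lamarcstrings::DNA);   // factory by XML tag
+       StringVec1d clean;
+       string bad;
+       if (dt->Proofread(rawSeq, clean, bad))                // validate raw input
+       {
+           LocusCell cell = dt->CreateInitializedDLCell(someLocus, clean);
+       }
+       delete dt;
+*/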
+
+#ifndef DATATYPE_H
+#define DATATYPE_H
+
+#include <vector>
+#include <string>
+#include <map>
+#include <stdlib.h>
+#include "types.h"
+#include "constants.h"
+#include "vectorx.h"
+#include "locuscell.h"
+
+class Tree;
+class Locus;
+class DLCalculator;
+class TipData;
+
+enum data_type {dtype_DNA, dtype_SNP, dtype_msat, dtype_kallele };
+std::string ToString(data_type dtype);
+
+//------------------------------------------------------------------------------------
+// The DataType base class
+
+class DataType
+{
+  private:
+    DataType(const DataType&);      // undefined
+    DataType& operator=(DataType&);  // undefined
+
+  protected:
+    string m_name;        // the full name used in report file writing
+    string m_xmltag;      // the xml tag associated with the type
+
+    virtual long GetPairwiseMarkers(const Locus& locus) const;
+    double DifferenceWithinPop(StringVec2d::const_iterator group1,
+                               StringVec2d::const_iterator done) const;
+    double DifferenceBetweenPops(StringVec2d::const_iterator group1,
+                                 StringVec2d::const_iterator done1,
+                                 StringVec2d::const_iterator group2,
+                                 StringVec2d::const_iterator done2) const;
+
+  public:
+    DataType() {};
+    virtual        ~DataType() {};
+    string GetName() const                       { return m_name; };
+    string GetXMLTag() const                     { return m_xmltag; };
+
+    virtual data_type GetType() const = 0; //RTTI
+    virtual bool   Proofread(const string& raw, StringVec1d& clean,
+                             string& baddata) const = 0;
+    DoubleVec1d CalcFw(const Locus& locus) const;
+    DoubleVec1d CalcFb(const Locus& locus) const;
+    DoubleVec1d CalcXFw(const Locus& locus,const StringVec3d& data) const;
+    DoubleVec1d CalcXFb(const Locus& locus,const StringVec3d& data) const;
+    virtual long   CalcNVarMarkers(const StringVec2d& data) const;
+    virtual wakestats CalcNPairDiffs(const StringVec2d& data) const;
+    virtual bool   IsEquivalent(const string& data1, const string& data2) const;
+    virtual bool   IsNucleotideData() const { return false; };
+
+    // user pretty printing
+    virtual string GetDelimiter() const { return string(""); };
+
+    // Factory functions
+    virtual LocusCell CreateDLCell(const Locus& locus) const   = 0;
+    virtual LocusCell CreateInitializedDLCell(const Locus& locus,
+                                              const StringVec1d& tipdata) const = 0;
+    virtual DLCalc_ptr CreateDLCalculator(const Locus& locus) const = 0;
+    virtual model_type    DefaultModelType() = 0;
+
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// Nucleotide sequence type
+
+class NucleotideType : public DataType
+{
+  public:
+    NucleotideType() : DataType() {};
+    virtual      ~NucleotideType() {};
+    virtual data_type GetType() const = 0; //RTTI
+    virtual bool Proofread(const string& raw, StringVec1d& clean,
+                           string& baddata) const;
+    virtual bool IsEquivalent(const string& data1, const string& data2) const;
+    virtual bool IsNucleotideData() const { return true; };
+
+    virtual model_type      DefaultModelType();
+};
+
+//------------------------------------------------------------------------------------
+// DNA sequence type
+
+class DNASeqType : public NucleotideType
+{
+  public:
+    DNASeqType();
+    virtual      ~DNASeqType() {};
+    virtual data_type GetType() const {return dtype_DNA;}; //RTTI
+
+    // Factory functions
+    virtual LocusCell CreateDLCell(const Locus& locus) const;
+    virtual LocusCell CreateInitializedDLCell(const Locus& locus,
+                                              const StringVec1d& tipdata) const;
+    virtual DLCalc_ptr CreateDLCalculator(const Locus& locus) const;
+};
+
+//------------------------------------------------------------------------------------
+// SNP data type
+
+class SNPDataType : public NucleotideType
+{
+  protected:
+    virtual long GetPairwiseMarkers(const Locus& locus) const;
+
+  public:
+    SNPDataType();
+    virtual ~SNPDataType() {};
+    virtual data_type GetType() const {return dtype_SNP;}; //RTTI
+
+    // Factory functions
+    virtual LocusCell CreateDLCell(const Locus& locus) const;
+    virtual LocusCell CreateInitializedDLCell(const Locus& locus,
+                                              const StringVec1d& tipdata) const;
+    virtual DLCalc_ptr CreateDLCalculator(const Locus& locus) const;
+
+};
+
+//------------------------------------------------------------------------------------
+// Allele type
+
+class AlleleType : public DataType
+{
+  public:
+    AlleleType() : DataType() {};
+    virtual        ~AlleleType() {};
+
+    virtual data_type GetType() const = 0; //RTTI
+    virtual string GetDelimiter() const { return string(" "); };
+    virtual LocusCell CreateDLCell(const Locus& locus) const;
+    virtual LocusCell CreateInitializedDLCell(const Locus& locus,
+                                              const StringVec1d& tipdata) const;
+};
+
+//------------------------------------------------------------------------------------
+// Microsatellite type
+
+class MSType : public AlleleType
+{
+  public:
+    MSType();
+    virtual      ~MSType() {};
+    virtual data_type GetType() const {return dtype_msat;}; //RTTI
+
+    virtual bool Proofread(const string& content, StringVec1d& data,
+                           string& baddata) const;
+
+    // Factory functions
+    virtual DLCalc_ptr CreateDLCalculator(const Locus& locus) const;
+    virtual model_type    DefaultModelType();
+
+};
+
+//------------------------------------------------------------------------------------
+// K-Allele type (for disease status)
+
+class KAlleleType : public AlleleType
+{
+  public:
+    KAlleleType();
+    virtual  ~KAlleleType() {};
+    virtual data_type GetType() const {return dtype_kallele;}; //RTTI
+
+    virtual bool Proofread(const string& content, StringVec1d& data,
+                           string& baddata) const;
+
+    // Factory functions
+    virtual DLCalc_ptr CreateDLCalculator(const Locus& locus) const;
+    virtual model_type    DefaultModelType();
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// Free function, a polymorphic creator for DataTypes.
+// If you add a new DataType subclass you must update this
+// function.
+
+DataType* CreateDataType(const string tag);
+
+bool ModelTypeAcceptsDataType(model_type,data_type);
+model_type DefaultModelForDataType(data_type);
+
+#endif // DATATYPE_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/dlcalc.cpp b/src/datalike/dlcalc.cpp
new file mode 100644
index 0000000..8bafff7
--- /dev/null
+++ b/src/datalike/dlcalc.cpp
@@ -0,0 +1,1263 @@
+// $Id: dlcalc.cpp,v 1.113 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>
+#include <fstream>
+
+#include "local_build.h"
+
+#include "dlcalc.h"
+#include "datapack.h"
+#include "locus.h"
+#include "dlmodel.h"
+#include "tree.h"
+#include "timelist.h"
+#include "region.h"
+#include "registry.h"
+#include "random.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+DLCalculator::DLCalculator(const Locus& locus)
+    : m_datamodel(*(locus.GetDataModel())),
+      m_markerpos(locus.GetMarkerLocations())
+{
+    // deliberately blank
+} // DLCalculator::DLCalculator
+
+//------------------------------------------------------------------------------------
+
+DLCalculator::DLCalculator(const DLCalculator& src)
+    : m_datamodel(src.m_datamodel),
+      m_markerpos(src.m_markerpos)
+{
+    // intentionally blank
+} // DLCalculator copy constructor
+
+//------------------------------------------------------------------------------------
+
+ChildInfo
+DLCalculator::GetChildInfo(Branch_ptr branch, long locus, long childindex,
+                           long cellindex, long posn, bool moving) const
+{
+    Branch_ptr child = branch->GetValidChild(branch->Child(childindex), posn);
+    double length = branch->HowFarTo(*child);
+    return ChildInfo(child->GetDLCell(locus, cellindex, moving), length);
+} // GetChildInfo
+
+//------------------------------------------------------------------------------------
+
+ChildInfo DLCalculator::NullChildInfo() const
+{
+    Cell_ptr nullcell(new NullCell);
+    double length = FLAGDOUBLE;
+    return ChildInfo(nullcell, length);
+} // NullChildInfo
+
+//------------------------------------------------------------------------------------
+
+rangepair DLCalculator::SitePairToMarkerPair(rangepair sites)
+{
+    long firstmrkr = FLAGLONG;
+    long lastmrkr = FLAGLONG;
+    long firstsite = sites.first;
+    long lastsite = sites.second;
+    long nmarkers = m_markerpos.size();
+
+    assert (firstsite < lastsite);
+
+    --lastsite;    // calling code assumes half-open intervals
+
+    // if we are at the extreme right or left, outside the range
+    // of markers, return immediately
+
+    if (lastsite < m_markerpos.front() || firstsite > m_markerpos.back())
+        return(rangepair(FLAGLONG, FLAGLONG));
+
+    // find first marker that is within site range, inclusive
+    long marker;
+    for (marker = 0; marker < nmarkers; ++marker)
+    {
+        if (m_markerpos[marker] > lastsite) // we've passed the end
+        {
+            return(rangepair(FLAGLONG,FLAGLONG));
+        }
+        if (m_markerpos[marker] >= firstsite) // found it
+        {
+            firstmrkr = marker;
+            break;
+        }
+    }
+
+    // Find first marker past the end of site range (which may be the
+    // non-existent marker after all markers; this is a half-open
+    // interval).
+    for (marker = nmarkers - 1; marker >= 0; --marker)
+    {
+        if (m_markerpos[marker] <= lastsite)
+        {
+            lastmrkr = marker + 1;  // found it; we return the
+                                    // next marker for half-open
+            break;
+        }
+    }
+
+    assert(firstmrkr != FLAGLONG && lastmrkr != FLAGLONG);
+    assert(lastmrkr > firstmrkr);
+
+    return(rangepair(firstmrkr, lastmrkr));
+
+} // SitePairToMarkerPair
+
+//------------------------------------------------------------------------------------
+
+void DLCalculator::SimulateData(Tree& tree, Locus& locus)
+{
+    locus.ClearVariability();
+    rangepair span = locus.GetSiteSpan();
+
+    // Need vector instead of single rate for autocorrelation.
+    DoubleVec1d rates = m_datamodel.ChooseRandomRates(span.second - span.first);
+
+    LongVec1d markers = locus.GetMarkerLocations();
+    for (unsigned long marker=0; marker<markers.size(); marker++)
+    {
+        long site = markers[marker];
+        double rate = rates[site - span.first];
+
+        // reverse iterate through the tree simulating data upwards
+        TimeList& timelist = tree.GetTimeList();
+        for (Branchconstriter brrit = timelist.RBeginBranch(); brrit != timelist.REndBranch(); brrit++)
+        {
+            if ((*brrit)->Event() == btypeCoal ||
+                (*brrit)->Event() == btypeTip)
+            {
+                // try to find applicable parent
+                Branch_ptr pParent = (*brrit)->GetValidParent(site);
+
+                DoubleVec1d state;
+                if (pParent == Branch::NONBRANCH)
+                {
+                    // if no parent found:
+                    state = m_datamodel.ChooseAncestralState(marker);
+                }
+                else
+                {
+                    double length = pParent->HowFarTo(**brrit);
+                    state = m_datamodel.SimulateMarker(rate * length,
+                                                       marker,
+                                                       pParent->GetDLCell(locus.GetIndex(), markerCell, locus.IsMoving())->GetStateFor(marker, 0));
+                }
+                //Find where to save the information and then save it.
+                Cell_ptr childdl = (*brrit)->GetDLCell(locus.GetIndex(), markerCell, locus.IsMoving());
+                childdl->SetAllCategoriesTo(state, marker);
+                if ((*brrit)->Event() == btypeTip)
+                {
+                    //Keep track of the variability of the simulated data.
+                    locus.SaveState(state, marker, boost::dynamic_pointer_cast<TBranch>(*brrit)->m_label);
+                    //LS TEST
+                    //cerr << childdl->DLsToString(marker, marker) << endl;
+                }
+            }
+        }
+    }
+} // SimulateData
+
+//------------------------------------------------------------------------------------
+
+void DLCalculator::CopyDataFrom(Locus& destloc, Locus& origloc, Tree& tree)
+{
+    destloc.ClearVariability();
+    LongVec1d markers = destloc.GetMarkerLocations();
+
+    for (unsigned long marker=0; marker<markers.size(); marker++)
+    {
+        long site = markers[marker];
+        assert(origloc.SiteInLocus(site));
+
+        TimeList& timelist = tree.GetTimeList();
+        DoubleVec2d origstates;
+        DoubleVec2d deststates;
+        long n_destalleles = (*timelist.BeginBranch())->GetDLCell(destloc.GetIndex(), markerCell, destloc.IsMoving())->GetStateFor(marker, 0).size();
+        DoubleVec1d zeroes(n_destalleles, 0.0);
+        for (Branchconstiter brit = timelist.FirstTip(); brit != timelist.EndBranch(); brit = timelist.NextTip(brit))
+        {
+            origstates.push_back((*brit)->GetDLCell(origloc.GetIndex(), markerCell, origloc.IsMoving())->GetStateFor(origloc.SiteToMarker(site), 0));
+            deststates.push_back(zeroes); //Just to be sure.
+        }
+
+        //Now.  The trick is that the two loci may have different numbers
+        // of possible alleles.  So if the destination locus has an equal number
+        // or more alleles than the original, we'll simply overwrite the first X
+        // alleles with the new alleles, leaving any extra alleles at zero.
+        //
+        //When there are 'missing' alleles in the destination that are not
+        // in the original, we must punt, and assign multiple original alleles
+        // to the same destination allele.  We want to be able to preserve any
+        // diversity, however:  it's no good if the only two alleles the original
+        // visited are lumped together in the destination.  So.  We first check
+        // to see what the most frequent allele is in the original, and assign
+        // that to the first allele of the destination.  Then we assign the next
+        // most frequent allele to the second allele of the destination.  We keep
+        // doing this until we run out of destination alleles.  Then we add
+        // any remaining original alleles to the final destination allele.
+        //
+        //Whew!
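+        //Illustrative example (the allele counts are hypothetical):  if the
+        // original locus has three alleles with summed frequencies 10, 1 and 6,
+        // and the destination has only two bins, the most frequent allele goes
+        // to bin 0, the next most frequent (summed frequency 6) to bin 1, and
+        // the remaining rare allele is folded into that final bin as well.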
+
+        //So, OK.  first:  Sum the originals to figure out who's the greatest.
+        DoubleVec1d origsums(origstates[0].size(), 0.0);
+        for (unsigned long i=0; i<origstates.size(); i++)
+        {
+            transform(origstates[i].begin(),
+                      origstates[i].end(),
+                      origsums.begin(),
+                      origsums.begin(),
+                      plus<double>());
+        }
+
+        //Now make a multimap so we can loop over it.
+        multimap<double, unsigned long> mindexes;
+        for (unsigned long i=0; i<origsums.size(); i++)
+        {
+            mindexes.insert(make_pair(origsums[i], i));
+        }
+        long destindex = 0;
+        for (multimap<double, unsigned long>::reverse_iterator mindex=mindexes.rbegin();
+             mindex != mindexes.rend(); mindex++)
+        {
+            long origindex = mindex->second;
+
+            //We now have the index of the original allele and of the destination
+            // allele, so we're good to go.
+            if (mindex->first != 0)
+            {
+                for (unsigned long tip=0; tip<origstates.size(); tip++)
+                {
+                    deststates[tip][destindex] += origstates[tip][origindex];
+                }
+            }
+
+            if (destindex < n_destalleles - 1)
+            {
+                destindex++;
+            }
+        }
+
+        //Now copy everything back into the dlcells of the destination
+        unsigned long tip=0;
+        for (Branchconstiter brit = timelist.FirstTip(); brit != timelist.EndBranch(); brit = timelist.NextTip(brit), tip++)
+        {
+            Cell_ptr childdl = (*brit)->GetDLCell(destloc.GetIndex(), markerCell, destloc.IsMoving());
+            //Copy the information
+            childdl->SetAllCategoriesTo(deststates[tip], marker);
+            destloc.SaveState(deststates[tip], marker, boost::dynamic_pointer_cast<TBranch>(*brit)->m_label);
+        }
+    }
+} // DLCalculator::CopyDataFrom
+
+//------------------------------------------------------------------------------------
+
+void DLCalculator::Randomize(Locus& destloc, rangeset rset, Tree& tree)
+{
+    cerr << "Deleted range: " << ToString(rset) << endl;
+
+    // Take all sites in rset and equilibrate their dlcells.
+    LongVec1d markers = destloc.GetMarkerLocations();
+
+    for (unsigned long marker=0; marker<markers.size(); marker++)
+    {
+        long site = markers[marker];
+        if (rset == AddPairToRange(make_pair(site, site+1), rset))
+        {
+            TimeList& timelist = tree.GetTimeList();
+            long n_destalleles = (*timelist.BeginBranch())->GetDLCell(destloc.GetIndex(), markerCell, destloc.IsMoving())->GetStateFor(marker, 0).size();
+            DoubleVec1d ones(n_destalleles, 1.0);
+            for (Branchconstiter brit = timelist.FirstTip(); brit != timelist.EndBranch(); brit = timelist.NextTip(brit))
+            {
+                Cell_ptr childdl = (*brit)->GetDLCell(destloc.GetIndex(), markerCell, destloc.IsMoving());
+                //Copy the information
+                childdl->SetAllCategoriesTo(ones, marker);
+            }
+        }
+    }
+} //DLCalculator::Randomize
+
+//------------------------------------------------------------------------------------
+
+void DLCalculator::MarkPanelBranches(Tree& tree, const Locus& locus)
+{
+    // Walk down the tree and mark each branch.
+    // The tips are marked at create time as 0 = panel, 1 = sample.
+    // Anything but a coalescence gets marked the same as its child.
+    // A coalescence counts all the children that have value > 0.
+    // Thus if both branches coming into a coalescence ultimately point to
+    // samples or coalescences with value > 0, the coalescence has a score of 2.
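+    // For example, a coalescence whose two children each lead to sample tips
+    // (or to coalescences already scored 2) ends up with m_isSample == 2, while
+    // one whose children are both panel tips stays at 0.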
+    TimeList& timelist = tree.GetTimeList();
+    Branchiter brit;
+    Branch_ptr branch = Branch::NONBRANCH;
+    int node = 0;
+    for (brit = timelist.FirstBody(); brit != timelist.EndBranch();
+         brit = timelist.NextBody(brit))
+    {
+        // propagate down tree from tips - which were marked at creation time
+        branch = *brit;
+        node++;
+        // MDEBUG wasCoalCalced appears to be dead code--it is NEVER set true anywhere anymore
+        branch->m_wasCoalCalced = false;
+        branch->m_isSample = 0;
+        // currently there are always 2 children, but this allows for more
+        for (long nc=0; nc<branch->NChildren(); nc++)
+        {
+            if (branch->Child(nc)!= Branch::NONBRANCH)
+            {
+                if (branch->Child(nc)->Event()== btypeCoal)
+                {
+                    if (branch->Child(nc)->m_isSample > 1)
+                    {
+                        branch->m_isSample++;
+                    }
+                }
+                else
+                {
+                    if (branch->Child(nc)->m_isSample > 0)
+                    {
+                        branch->m_isSample++;
+                    }
+                }
+            }
+        }
+    }
+} // DLCalculator::MarkPanelBranches
+
+//------------------------------------------------------------------------------------
+
+NucCalculator::NucCalculator(const Locus& locus)
+    : DLCalculator(locus)
+{
+    // deliberately blank
+} // NucCalculator ctor
+
+//------------------------------------------------------------------------------------
+
+NucCalculator::NucCalculator(const NucCalculator& src)
+    : DLCalculator(src)
+{
+    // deliberately blank
+} // NucCalculator copy constructor
+
+//------------------------------------------------------------------------------------
+
+LongVec1d NucCalculator::CalculateAliasesFromData(const vector<DoubleVec2d>& data) const
+{
+    // compute aliases, starting at position 1 because 0 can never be aliased.
+    // If an alias partner is not found, the marker's alias will remain at the
+    // -1 (no alias) with which it was initialized
+    long nmarkers = data.size();
+    LongVec1d alias(nmarkers, -1L);
+#ifndef LAMARC_QA_NOALIAS
+    long partner, marker;
+    for (marker = 1; marker < nmarkers; ++marker)
+    {
+        for (partner = marker - 1; partner >= 0; --partner)
+        {
+            if (data[marker] == data[partner])
+            {
+                // found an alias
+                alias[marker] = partner;
+                break;
+            }
+        }
+    }
+#endif
+
+    return alias;
+} // CalculateAliasesFromData
+
+//------------------------------------------------------------------------------------
+
+LongVec1d NucCalculator::RecalculateAliases(const Tree& tree, const Locus& locus) const
+{
+    const TimeList& timelist = tree.GetTimeList();
+    long cat = 0; // we alias based on first category
+
+    long ntips = timelist.GetNTips();
+    long nmarkers = locus.GetNmarkers();
+    DoubleVec1d basepattern(4,0.0);
+    DoubleVec2d tippattern(ntips, basepattern);
+    vector<DoubleVec2d> markerpattern(nmarkers, tippattern);
+
+    Branchconstiter brit = timelist.FirstTip();
+    long tip, marker, base;
+    for (tip = 0; brit != timelist.EndBranch(); brit = timelist.NextTip(brit), ++tip)
+    {
+        Cell_ptr dlcell = (*brit)->GetDLCell(locus.GetIndex(), markerCell, locus.IsMoving());
+        //Note:  We never worry about aliasing for moving loci.
+        for (marker = 0; marker < nmarkers; ++marker)
+        {
+            SiteDL siteDLs = dlcell->GetSiteDLs(marker);
+            for (base = 0; base < 4; ++base)
+            {
+                markerpattern[marker][tip][base] = siteDLs[cat][base];
+            }
+        }
+    }
+
+    assert(tip == ntips);
+    return CalculateAliasesFromData(markerpattern);
+
+} // RecalculateAliases
+
+//------------------------------------------------------------------------------------
+
+LongVec1d NucCalculator::SetupAliases(const Locus& locus) const
+{
+    long nmarkers = locus.GetNmarkers();
+
+    long cat = 0; // we alias based on the first category
+
+    long ntips = locus.GetNTips();
+    const vector<LocusCell>& tipcells = locus.GetTipCells();
+    DoubleVec1d basepattern(4,0.0);
+    DoubleVec2d tippattern(ntips, basepattern);
+    vector<DoubleVec2d> markerpattern(nmarkers, tippattern);
+
+    // dump the data into a usable form
+    long marker, tip, base;
+    for (tip = 0; tip < ntips; ++tip)
+    {
+        Cell_ptr dlcell = tipcells[tip][0]; // the primary Cell
+        for (marker = 0; marker < nmarkers; ++marker)
+        {
+            SiteDL siteDLs = dlcell->GetSiteDLs(marker);
+            for (base = 0; base < 4; ++base)
+            {
+                markerpattern[marker][tip][base] = siteDLs[cat][base];
+            }
+        }
+    }
+
+    return CalculateAliasesFromData(markerpattern);
+
+} // SetupAliases
+
+//------------------------------------------------------------------------------------
+
+void NucCalculator::CalculateSite(Cell_ptr child1, Cell_ptr child2, Cell_ptr newcell, long pos, long alias)
+{
+    SiteDL newsiteDLs;
+
+    SiteDL child1DLs = child1->GetSiteDLs(pos);
+    SiteDL child2DLs = child2->GetSiteDLs(pos);
+
+    // if the alias is invalid, calculate; else use the alias
+    if (alias == -1)
+    {
+        newsiteDLs = dynamic_cast<NucModel&>(m_datamodel).
+            ComputeSiteDLs(child1DLs,child2DLs);
+        if (m_datamodel.ShouldNormalize())
+        {
+            double newnorm = newcell->Normalize(newsiteDLs) +
+                child1->GetNorms(pos) + child2->GetNorms(pos);
+            newcell->SetNorms(newnorm, pos);
+        }
+    }
+    else
+    {
+        // cerr << " " << pos;
+        newsiteDLs = newcell->GetSiteDLs(alias);
+        newcell->SetNorms(newcell->GetNorms(alias),pos);
+    }
+
+    newcell->SetSiteDLs(pos,newsiteDLs);
+
+} // NucCalculator::CalculateSite
+
+//------------------------------------------------------------------------------------
+
+void NucCalculator::Breakalias(LongVec1d& aliases, const rangevector& subtrees)
+{
+    long tr;
+    long nsubtrees(subtrees.size());
+
+    // skip the first subtree because it can't break any aliases;
+    // for all other subtrees, break all aliases that cross the
+    // subtree boundary
+    for (tr = 1; tr < nsubtrees; ++tr)
+    {
+        rangepair marker = SitePairToMarkerPair(subtrees[tr]);
+        long pos;
+        for (pos = marker.first; pos < marker.second; ++pos)
+        {
+            assert(static_cast<unsigned long>(pos) < aliases.size());
+            if (aliases[pos] < marker.first) aliases[pos] = -1;
+        }
+    }
+
+} // NucCalculator::Breakalias
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DLCalculator* DNACalculator::Clone() const
+{
+    DNACalculator* pnewcalc = new DNACalculator(*this);
+
+    return(pnewcalc);
+
+} // DNACalculator::Clone
+
+//------------------------------------------------------------------------------------
+
+double DNACalculator::Calculate(Tree& tree, const Locus& locus, bool moving)
+{
+    assert(tree.IsValidTree());
+
+    Branch_ptr branch = Branch::NONBRANCH;
+    Cell_ptr dlcell;
+    double totallike = 0.0;
+    long nsubtrees, tr;
+    NucModel& datmodel = dynamic_cast<NucModel&>(m_datamodel);
+    ChildInfo child0, child1;
+    long curmarker;
+    long loc = locus.GetIndex();
+    LongVec1d aliases = tree.GetAliases(locus.GetIndex());
+
+    // Find the subtrees
+    rangepair span = locus.GetSiteSpan();
+    rangevector subtrees = tree.GetLocusSubtrees(span);
+    nsubtrees = subtrees.size();
+
+    // Update the aliases.
+    Breakalias(aliases, subtrees);
+
+    // Initialize the model categories storage
+    datmodel.ResetCatCells();
+
+    // Step through the subtrees to compute the data likelihoods
+    TimeList& timelist = tree.GetTimeList();
+    for (tr = 0; tr < nsubtrees; tr++)
+    {
+        rangepair marker = SitePairToMarkerPair(subtrees[tr]);
+        // bail out if there are no markers in this subtree
+        if (marker.first == FLAGLONG) continue;
+
+        long firstsite = subtrees[tr].first;
+
+        Branchiter brit;
+        Branch_ptr last_active_coal_branch = Branch::NONBRANCH;
+        for (brit = timelist.FirstCoal(); brit != timelist.EndBranch();
+             brit = timelist.NextCoal(brit))
+        {
+            branch = *brit;
+            if (branch->CanCalcDL(firstsite))
+            {
+                // we want the last such branch saved after fall-through, since
+                // it will be the correct place to calculate the likelihood
+                last_active_coal_branch = branch;
+
+                if (branch->ShouldCalcDL(firstsite))
+                {
+                    // Find "real" children and appropriate branch lengths.
+                    child0 = GetChildInfo(branch, loc, 0, markerCell, firstsite, moving);
+                    child1 = GetChildInfo(branch, loc, 1, markerCell, firstsite, moving);
+
+                    // Precalculate exponential terms
+                    datmodel.RescaleLengths(child0.m_length, child1.m_length);
+
+                    // Calculate the data likelihood at each position.
+                    dlcell = branch->GetDLCell(loc, markerCell, moving);
+                    for(curmarker = marker.first; curmarker < marker.second;++curmarker)
+                    {
+                        CalculateSite(child0.m_cell, child1.m_cell, dlcell, curmarker,aliases[curmarker]);
+                    }
+                }
+            }
+        }
+
+        assert(last_active_coal_branch != Branch::NONBRANCH);
+        // We want to calculate likelihood at last_active_coal_branch,
+        // so we set the dlcell to point there--whether it does
+        // already or not.  This makes it legal to compute the
+        // data likelihood of a tree with no flags set, which is
+        // useless but hard to prevent and thus needs to work.
+        dlcell = last_active_coal_branch->GetDLCell(loc, markerCell, moving);
+
+        totallike += datmodel.ComputeSubtreeDL(*dlcell,
+                                               dlcell->GetSiteDLs(marker.first),
+                                               dlcell->GetSiteDLs(marker.second),
+                                               marker.first);
+    }
+
+    return totallike;
+} // DNACalculator::Calculate
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+SNPCalculator::SNPCalculator(const Locus& locus)
+    : NucCalculator(locus),
+      m_invarmodel(dynamic_cast<NucModel*>(m_datamodel.Clone()))
+{
+    assert(m_invarmodel);
+    m_invarmodel->SetNmarkers(INVARIANTS+1); // the invariant model should have
+    // 1 "marker" for each base-type plus one extra for the conjoint number
+
+} // SNPCalculator::SNPCalculator
+
+//------------------------------------------------------------------------------------
+
+SNPCalculator::SNPCalculator(const SNPCalculator& src)
+    : NucCalculator(src),
+      m_invarmodel(dynamic_cast<NucModel*>(src.m_invarmodel->Clone()))
+{
+    assert(m_invarmodel);
+} // SNPCalculator::copy ctor
+
+//------------------------------------------------------------------------------------
+
+SNPCalculator::~SNPCalculator()
+{
+    delete m_invarmodel;
+} // SNPCalculator::dtor
+
+//------------------------------------------------------------------------------------
+
+DLCalculator* SNPCalculator::Clone() const
+{
+    SNPCalculator* pnewcalc = new SNPCalculator(*this);
+    return(pnewcalc);
+} // SNPCalculator::Clone
+
+//------------------------------------------------------------------------------------
+
+void SNPCalculator::CalculateInvarSite(Cell_ptr child1, Cell_ptr child2, Cell_ptr newcell, long pos)
+{
+    SiteDL child1DLs = child1->GetSiteDLs(pos);
+    SiteDL child2DLs = child2->GetSiteDLs(pos);
+
+    // no alias for invariant sites
+
+    SiteDL newsiteDLs = m_invarmodel->ComputeSiteDLs(child1DLs,child2DLs);
+
+    if (m_invarmodel->ShouldNormalize())
+    {
+        double norm1 = child1->GetNorms(pos);
+        double norm2 = child2->GetNorms(pos);
+        double newnorm = newcell->Normalize(newsiteDLs) + norm1 + norm2;
+        newcell->SetNorms(newnorm,pos);
+    }
+
+    newcell->SetSiteDLs(pos,newsiteDLs);
+
+} // SNPCalculator::CalculateInvarSite
+
+//------------------------------------------------------------------------------------
+
+// EWFIX -- add comment and/or change name of "long i" to be more clear
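+// Here "i" selects which mask to apply: values 0 .. INVARIANTS-1 pick the single
+// base SINGLEBASES[i], while INVARIANTS (or anything larger) means "unknown", in
+// which case all four bases are masked as equally probable.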
+void SNPCalculator::MaskPanelTips(TimeList& timeList, long loc, long first, long last, long i)
+{
+    // determine the mask
+    DoubleVec1d mask(INVARIANTS,0.0);
+    if (i<INVARIANTS)
+    {
+        mask = m_invarmodel->DataToLikes(SINGLEBASES[i]);
+
+    }
+    else
+    {
+        // unknown so all equally probable
+        mask[0] = 1.0;
+        mask[1] = 1.0;
+        mask[2] = 1.0;
+        mask[3] = 1.0;
+    }
+
+    // apply the mask to all panel tips
+    Branchconstiter brit;
+    Branch_ptr branch;
+    for (brit = timeList.FirstTip(); brit != timeList.EndBranch(); brit = timeList.NextTip(brit))
+    {
+        branch = *brit;
+        if (branch->m_isSample == 0)
+        {
+            // found a panel tip, set its mask
+            Cell_ptr dlcell = branch->GetDLCell(loc, markerCell, false);
+            long curmarker;
+            for (curmarker = first; curmarker < last; ++curmarker)
+            {
+                dlcell->SetAllCategoriesTo(mask, curmarker);
+            }
+        }
+    }
+
+}  // MaskPanelTips
+
+//------------------------------------------------------------------------------------
+
+// A horrible truth to remember about this code:  the storage used
+// for invariant site calculations is *re-used*.  You can never assume
+// that that information is still around; the next subtree will wipe
+// it out.  This is why we recalculate invariant sites throughout the
+// whole tree every time, even though, in theory, this is not necessary.
+// There is a possible but very difficult OPTIMIZATION opportunity here.
+// Mary June 11 2002
+//
+// Preconditions (non-panel):
+//    Sample tips have correct markercells and invarcells
+//    Tree is correctly marked with updateDL flags
+// Postconditions (non-panel):
+//    Sample tips have correct markercells and invarcells
+//    markercells (ONLY!) contain correct likelihood values in all
+//       branches where the site was live and not FC
+//    INVARCELLS DO NOT CONTAIN VALID LIKELIHOODS EXCEPT AT TIPS
+//    Value returned is correct data likelihood
+//    UpdateDL status has not changed
+//
+// Preconditions (panel):
+//    Sample tips have correct markercells and invarcells
+//    Panel tips have correct invarcells BUT NOT MARKERCELLS
+// Postconditions (panel):
+//    Sample tips have correct markercells and invarcells
+//    Panel tips have correct invarcells BUT NOT MARKERCELLS
+//    NO CELLS CONTAIN VALID LIKELIHOODS EXCEPT AT TIPS
+//    Value returned is correct data likelihood
+//    All branches are marked as needing updating
+
+double SNPCalculator::Calculate(Tree& tree, const Locus& locus, bool moving)
+{
+    if (moving == true)  // This is not an appropriate Calculator for a floating locus.
+    {
+        assert(false);
+        throw implementation_error ("Floating loci may not use the SNP model.");
+    }
+
+    // Determine subtrees
+    double totallike = 0.0;
+    rangepair span = locus.GetSiteSpan();
+    rangevector subtrees = tree.GetLocusSubtrees(span);
+    long nsubtrees = subtrees.size();
+    long loc = locus.GetIndex();
+
+    // Update the aliases.
+    LongVec1d aliases = tree.GetAliases(locus.GetIndex());
+    Breakalias(aliases, subtrees);
+
+    // Initialize the model categories storage
+    // We reference m_datamodel through a local NucModel& because this is SNP-only
+    // code, so we know the model really is a NucModel.  m_invarmodel needs no such
+    // treatment because it is ours alone.
+    NucModel& varmodel = dynamic_cast<NucModel&>(m_datamodel);
+    varmodel.ResetCatCells();
+    m_invarmodel->ResetCatCells();
+
+    // check if there are panels
+    if(!tree.GetSnpPanelFlag())
+    {
+        // no-panel case
+        for (long ntree = 0; ntree < nsubtrees; ++ntree)
+        {
+            // loop over the subtrees
+            totallike += CalcNoPanel(tree, loc, subtrees[ntree], aliases);
+        }
+    }
+    else
+    {
+        // panel case
+        long curmarker;
+        TimeList& timeList = tree.GetTimeList();
+        Branch_ptr root = timeList.Root();
+        Cell_ptr rootcell_m = root->GetDLCell(loc, markerCell, false);
+        Cell_ptr rootcell_i = root->GetDLCell(loc, invariantCell, false);
+
+        // Temporary DLCells for scratch use
+        Cell_ptr basecell(rootcell_m->Clone());  // sample markers only
+        Cell_ptr sumcell(rootcell_m->Clone());   // sum of sample+panel markers
+        Cell_ptr invarcell(rootcell_i->Clone()); // panel invariant sites only
+
+        // mark the coalescences  // MDEBUG dead code?
+        MarkPanelBranches(tree, locus);
+
+        // Regrettably, we have to recalculate every node every time, so the updateDL
+        // information is set accordingly here.
+        // Calculating a node does NOT unset its updateDL flag (for subtree reasons) so
+        // once these are set they stay set until data likelihood calculation is complete.
+        //
+        // We should cache these for benefit of further loci -- MDEBUG
+        timeList.SetAllUpdateDLs();
+
+        // loop over the subtrees
+        for (long ntree = 0; ntree < nsubtrees; ++ntree)
+        {
+            // clear working storage
+            sumcell->EmptyCell();
+
+            rangepair marker = SitePairToMarkerPair(subtrees[ntree]);
+            DoubleVec2d markerstates;
+
+            // work on the variable sites, if they exist
+            if (marker.first != FLAGLONG)
+            {
+                // sum up the single base panel corrections
+                for (int i=0; i<INVARIANTS; i++)
+                {
+                    MaskPanelTips(timeList, loc, marker.first, marker.second, i);
+                    Branch_ptr topBranch = CalcPanel(tree, loc, subtrees[ntree], aliases);
+                    sumcell->AddTo(topBranch->GetDLCell(loc, markerCell, false));
+                }
+
+                // subtract the summed single base panel corrections from the unknown panel values
+                MaskPanelTips(timeList, loc, marker.first, marker.second, INVARIANTS);
+                Branch_ptr topBranch = CalcPanel(tree, loc, subtrees[ntree], aliases);
+                basecell->CopyInitialize(*(topBranch->GetDLCell(loc, markerCell, false)));
+                basecell->SubtractFrom(sumcell);
+            }
+
+            // now calculate unobserved sites invariant correction,
+            // (masks set when tips created in timelist.cpp)
+            Branch_ptr topBranch = CalcPanelInvariants(tree, loc, subtrees[ntree]);
+            invarcell->CopyInitialize(*(topBranch->GetDLCell(loc, invariantCell, false)));
+
+            totallike += SumPanel(basecell, subtrees[ntree], invarcell);
+        }
+    }
+
+    return totallike;
+
+} // SNPCalculator::Calculate
+
+//------------------------------------------------------------------------------------
+
+double SNPCalculator::CalcNoPanel(Tree& tree, long loc, pair<long,long> sitespan, LongVec1d aliases)
+{
+    Branch_ptr branch = Branch::NONBRANCH;
+    Branch_ptr child1 = Branch::NONBRANCH;
+    Branch_ptr child2 = Branch::NONBRANCH;
+
+    // set up some necessary values
+    NucModel& varmodel = dynamic_cast<NucModel&>(m_datamodel);
+    TimeList& timelist = tree.GetTimeList();
+
+    // The case of no markers in this subtree is signaled by the rangepair
+    // "marker" having the same value in both first and second
+    // (currently FLAGLONG).
+    rangepair marker = SitePairToMarkerPair(sitespan);
+    long firstsite = sitespan.first;
+
+    bool snpsPresent = (marker.first != FLAGLONG);
+
+    Cell_ptr dlcell;
+    Cell_ptr dlcell_invar;
+    Cell_ptr dlcell1;
+    Cell_ptr dlcell2;
+    double length1 = 0.0;
+    double length2 = 0.0;
+
+    // Data likelihood for ONE subtree
+
+    Branchiter brit;
+    for (brit = timelist.FirstCoal(); brit != timelist.EndBranch();
+         brit = timelist.NextCoal(brit))
+    {
+        branch = *brit;
+        if (branch->CanCalcDL(firstsite))
+        {
+            // obtain the children of this coalescence and their branch lengths
+            child1 = branch->GetValidChild(branch->Child(0),firstsite);
+            length1 = branch->HowFarTo(*child1);
+            child2 = branch->GetValidChild(branch->Child(1),firstsite);
+            length2 = branch->HowFarTo(*child2);
+
+            // note the marker DLCell of this coalescence--needed later even if not updated now
+            dlcell  = branch->GetDLCell(loc, markerCell, false);
+
+            // Calculate the data likelihood at each position for variable sites,
+            // if appropriate; we skip this if the branch is marked "doesn't need updating"
+            if (branch->ShouldCalcDL(firstsite))
+            {
+                dlcell1 = child1->GetDLCell(loc, markerCell, false);
+                dlcell2 = child2->GetDLCell(loc, markerCell, false);
+                varmodel.RescaleLengths(length1, length2);
+                long curmarker;
+                for (curmarker = marker.first; curmarker < marker.second; ++curmarker)
+                {
+                    CalculateSite(dlcell1,dlcell2,dlcell,curmarker,aliases[curmarker]);
+                }
+            }
+
+            // Calculate the invariant data likelihood for this branch,
+            // using baseA for allAs, baseC for allCs, etc.  Must be done even if "doesn't need updating"
+            dlcell_invar  = branch->GetDLCell(loc, invariantCell, false);
+            dlcell1 = child1->GetDLCell(loc, invariantCell, false);
+            dlcell2 = child2->GetDLCell(loc, invariantCell, false);
+            m_invarmodel->RescaleLengths(length1, length2);
+            long curbase;
+            for (curbase = baseA; curbase <= baseT; ++curbase)
+            {
+                CalculateInvarSite(dlcell1,dlcell2,dlcell_invar,curbase);
+            }
+        }
+    }
+    // Code above sets dlcell and dlcell_invar only in a two-legged coalescence.  Therefore,
+    // the last time it sets them, it is at the subtree root.  The last setting of dlcell
+    // and dlcell_invar thus contains the subtree root values and will be used for summing.
+
+    return SumNoPanel(dlcell, dlcell_invar, sitespan);
+} //SNPCalculator::CalcNoPanel
+
+//------------------------------------------------------------------------------------
+
+double SNPCalculator::SumNoPanel(Cell_ptr varcell, Cell_ptr invarcell, pair<long,long> sitespan)
+{
+    double totallike = 0.0;
+    NucModel& varmodel = dynamic_cast<NucModel&>(m_datamodel);
+
+    rangepair marker = SitePairToMarkerPair(sitespan);
+    // if the subtree contains no markers skip computing marker datalike
+    if (marker.first != FLAGLONG)
+    {
+        totallike += varmodel.ComputeSubtreeDL(*varcell,
+                                               varcell->GetSiteDLs(marker.first),
+                                               varcell->GetSiteDLs(marker.second),
+                                               marker.first);
+    }
+
+    // Calculate the invariant marker likelihood for the subtree,
+    // using baseA for allAs, baseC for allCs, etc.
+    // number of invariant sites = total sites - markers
+    long ninvarsites = sitespan.second - sitespan.first;
+    if (marker.first != FLAGLONG)
+        ninvarsites -= marker.second - marker.first;
+
+    if (ninvarsites > 0)
+    {
+        // We are going to use a trick here, and add together the four
+        // invariant site likelihoods into the next (fifth) bin.
+        // This is ugly, but necessary to keep
+        // SNP-specific code out of the DataModel class.  The DataModel
+        // needs to treat the four invariant markers as one big
+        // supermarker, and cannot determine this itself; so we help it
+        // out.
+
+        invarcell->SumMarkers(baseA, baseEnd, m_invarmodel->ShouldNormalize());
+
+        //We multiply the log likelihood by the number of invariant markers
+        // because this is equivalent to raising the likelihood to the power
+        // of the number of invariants.
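+        // (In symbols: n * log(L_invar) is the same as log(L_invar ^ n).)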
+        totallike += ninvarsites * m_invarmodel->
+            ComputeSubtreeDL(*invarcell,
+                             invarcell->GetSiteDLs(baseEnd),
+                             invarcell->GetSiteDLs(baseEnd+1),    // one past the end
+                             baseEnd);
+    }
+
+    return totallike;
+} // SNPCalculator::SumNoPanel
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr SNPCalculator::CalcPanel(Tree& tree, long loc, pair<long,long> sitespan, LongVec1d aliases)
+{
+    Branch_ptr branch = Branch::NONBRANCH;
+    Branch_ptr child1 = Branch::NONBRANCH;
+    Branch_ptr child2 = Branch::NONBRANCH;
+
+    // set up some necessary values
+    NucModel& varmodel = dynamic_cast<NucModel&>(m_datamodel);
+    TimeList& timelist = tree.GetTimeList();
+
+    // The "no markers in this subtree" case is signaled by the rangepair
+    // 'marker' having the same value in both first and second.
+    // Currently that value is FLAGLONG.
+    rangepair marker = SitePairToMarkerPair(sitespan);
+    long firstsite = sitespan.first;
+
+    Cell_ptr dlcell;
+    Cell_ptr dlcell1;
+    Cell_ptr dlcell2;
+    double length1 = 0.0;
+    double length2 = 0.0;
+    long curmarker;
+
+    // Compute marker likelihood for one subtree
+    Branch_ptr last_calc_branch = Branch::NONBRANCH;
+    Branchiter brit;
+    for (brit = timelist.FirstCoal(); brit != timelist.EndBranch();
+         brit = timelist.NextCoal(brit))
+    {
+        branch = *brit;
+        if (branch->CanCalcDL(firstsite))
+        {
+            // store which node we did; last one is subtree root and is returned
+            last_calc_branch = branch;
+            dlcell  = branch->GetDLCell(loc, markerCell, false);
+
+            // obtain the children of this coalescence and their branch lengths
+            child1 = branch->GetValidChild(branch->Child(0),firstsite);
+            length1 = branch->HowFarTo(*child1);
+            dlcell1 = child1->GetDLCell(loc, markerCell, false);
+            child2 = branch->GetValidChild(branch->Child(1),firstsite);
+            length2 = branch->HowFarTo(*child2);
+            dlcell2 = child2->GetDLCell(loc, markerCell, false);
+
+            // Precalculate the exponential values based on branch length.
+            varmodel.RescaleLengths(length1, length2);
+            for (curmarker = marker.first; curmarker < marker.second; ++curmarker)
+            {
+                CalculateSite(dlcell1,dlcell2,dlcell,curmarker,aliases[curmarker]);
+            }
+        }
+    }
+
+    return last_calc_branch;
+} // SNPCalculator::CalcPanel
+//------------------------------------------------------------------------------------
+
+Branch_ptr SNPCalculator::CalcPanelInvariants(Tree& tree, long loc, pair<long,long> sitespan)
+{
+    Branch_ptr branch = Branch::NONBRANCH;
+    Branch_ptr child1 = Branch::NONBRANCH;
+    Branch_ptr child2 = Branch::NONBRANCH;
+
+    // set up some necessary values
+    TimeList& timelist = tree.GetTimeList();
+
+    // The "no markers in this subtree" case is signaled by the rangepair
+    // 'marker' having the same value in both first and second.
+    // Currently that value is FLAGLONG.
+    rangepair marker = SitePairToMarkerPair(sitespan);
+    long firstsite = sitespan.first;
+
+    Cell_ptr dlcell;
+    Cell_ptr dlcell1;
+    Cell_ptr dlcell2;
+    double length1 = 0.0;
+    double length2 = 0.0;
+    long curbase;
+
+    // Compute invariant-site likelihood for one subtree
+    Branch_ptr last_calc_branch = Branch::NONBRANCH;
+    Branchiter brit;
+    for (brit = timelist.FirstCoal(); brit != timelist.EndBranch();
+         brit = timelist.NextCoal(brit))
+    {
+        branch = *brit;
+        if (branch->CanCalcDL(firstsite))
+        {
+            // this is how we remember the final coalescence point
+            // for this subtree
+            last_calc_branch = branch;
+
+            // obtain the children of this coalescence and their branch lengths
+            child1 = branch->GetValidChild(branch->Child(0),firstsite);
+            length1 = branch->HowFarTo(*child1);
+            child2 = branch->GetValidChild(branch->Child(1),firstsite);
+            length2 = branch->HowFarTo(*child2);
+            dlcell1 = child1->GetDLCell(loc, invariantCell, false);
+            dlcell2 = child2->GetDLCell(loc, invariantCell, false);
+            dlcell  = branch->GetDLCell(loc, invariantCell, false);
+            m_invarmodel->RescaleLengths(length1, length2);
+            for (curbase = 0; curbase < INVARIANTS; ++curbase)
+            {
+                CalculateInvarSite(dlcell1,dlcell2,dlcell,curbase);
+            }
+        }
+    }
+    return last_calc_branch;
+}
+
+//------------------------------------------------------------------------------------
+
+double SNPCalculator::SumPanel(Cell_ptr varcell, pair<long,long> sitespan, Cell_ptr invarcell)
+{
+    double totallike = 0.0;
+    NucModel& varmodel = dynamic_cast<NucModel&>(m_datamodel);
+    rangepair marker = SitePairToMarkerPair(sitespan);
+
+    // if the subtree contains no markers skip computing marker datalike
+    if (marker.first != FLAGLONG)
+    {
+        // calculate variant panel likelihood for this subtree
+        totallike = varmodel.ComputeSubtreeDL(*varcell,
+                                              varcell->GetSiteDLs(marker.first),
+                                              varcell->GetSiteDLs(marker.second),
+                                              marker.first);
+
+    }
+
+    // Calculate the invariant marker likelihood for the subtree,
+    // using baseA for allAs, baseC for allCs, etc.
+    long ninvarmarkers = sitespan.second - sitespan.first;
+    if (marker.first != FLAGLONG)
+        ninvarmarkers -= marker.second - marker.first;
+
+    // don't bother if there aren't any
+    if (ninvarmarkers > 0)
+    {
+        double invarlike = 0;
+        // We are going to use a trick here, and add together the four
+        // invariant site likelihoods into the next (fifth) bin, baseEnd.
+        // This is ugly, but necessary to keep
+        // SNP-specific code out of the DataModel class.  The DataModel
+        // needs to treat the four invariant markers as one big
+        // supermarker, and cannot determine this itself; so we help it
+        // out.
+
+        invarcell->SumMarkers(baseA, baseEnd, m_invarmodel->ShouldNormalize());
+
+        // We multiply the log likelihood by the number of invariant markers
+        // because this is equivalent to raising the likelihood to the power
+        // of the number of invariants.
+        invarlike = m_invarmodel->ComputeSubtreeDL(*invarcell,
+                                                   invarcell->GetSiteDLs(baseEnd),
+                                                   invarcell->GetSiteDLs(baseEnd+1),    // one past the end
+                                                   baseEnd);
+
+        invarlike *= ninvarmarkers;
+        totallike += invarlike;
+    }
+
+    return totallike;
+} // SNPCalculator::SumPanel
+
+//------------------------------------------------------------------------------------
+
+// A horrible truth to remember about this code:  the storage used
+// for invariant site calculations is *re-used*.  You can never assume
+// that that information is still around; the next subtree will wipe
+// it out.  This is why we recalculate invariant sites throughout the
+// whole tree every time, even though, in theory, this is not necessary.
+// There is a possible but very difficult OPTIMIZATION opportunity here.
+// Mary June 11 2002
+
+//------------------------------------------------------------------------------------
+
+void SNPCalculator::SimulateData(Tree& tree, Locus& locus)
+{
+    // The default data simulator probably doesn't work with SNPs.
+    assert(false);
+
+} // SNPCalculator::SimulateData
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+AlleleCalculator::AlleleCalculator(const Locus& locus)
+    : DLCalculator(locus)
+{
+    // deliberately blank
+} // AlleleCalculator::AlleleCalculator
+
+//------------------------------------------------------------------------------------
+
+AlleleCalculator::AlleleCalculator(const AlleleCalculator& src)
+    : DLCalculator(src)
+{
+    // deliberately blank
+} // AlleleCalculator::copy ctor, for internal use only
+
+//------------------------------------------------------------------------------------
+
+// Similar to DNA case except:
+// no aliasing (identical microsats are too rare)
+// uses a unique approach to non-marker sites (different from SNPs)
+
+double AlleleCalculator::Calculate(Tree& tree, const Locus& locus, bool moving)
+{
+    Branch_ptr branch = Branch::NONBRANCH;
+    Cell_ptr dlcell;
+    double totallike = 0.0;
+    long posn1, posn2, firstsite, nsubtrees, tr, pos;
+    AlleleModel& datmodel = dynamic_cast<AlleleModel&>(m_datamodel);
+    DoubleVec1d scaled0, scaled1;
+    ChildInfo child0, child1;
+    if (moving)
+    {
+        //We need to recalculate the marker positions
+        m_markerpos = locus.GetMarkerLocations();
+        //LS DEBUG MAPPING:  we might need a similar thing for DNACalc:Calc,
+        // but only if/when we allow mapping something with a DNA model (we
+        // currently are restricted to K-Allele models).
+    }
+
+    long loc = locus.GetIndex();
+
+    // Initialize the model categories storage
+    datmodel.ResetCatCells();
+
+    rangepair span = locus.GetSiteSpan();
+    rangevector subtrees = tree.GetLocusSubtrees(span);
+    nsubtrees = subtrees.size();
+
+    // Step through the subtrees to compute the data likelihoods
+    TimeList& timelist = tree.GetTimeList();
+    for (tr = 0; tr < nsubtrees; tr++)
+    {
+        rangepair markers = SitePairToMarkerPair(subtrees[tr]);
+        if (markers.first == FLAGLONG) continue;  // no markers in this subtree
+        posn1 = markers.first;
+        posn2 = markers.second;
+        firstsite = subtrees[tr].first;
+
+        Branchiter brit;
+        Branch_ptr last_calc_branch = Branch::NONBRANCH;
+        for (brit = timelist.FirstCoal(); brit != timelist.EndBranch();
+             brit = timelist.NextCoal(brit))
+        {
+            branch = *brit;
+            if (branch->CanCalcDL(firstsite))
+            {
+                last_calc_branch = branch;
+
+                if (branch->ShouldCalcDL(firstsite))
+                {
+                    dlcell = branch->GetDLCell(loc, markerCell, moving);
+                    // Find "real" children and appropriate branch lengths.
+                    child0 = GetChildInfo(branch, loc, 0, markerCell, firstsite, moving);
+                    child1 = GetChildInfo(branch, loc, 1, markerCell, firstsite, moving);
+
+                    // Precalculate branchlength terms
+                    scaled0 = datmodel.RescaleLength(child0.m_length);
+                    scaled1 = datmodel.RescaleLength(child1.m_length);
+
+                    // Calculate the data likelihood at each position.
+                    for(pos = posn1; pos < posn2; ++pos)
+                    {
+                        datmodel.ComputeSiteDLs(child0.m_cell, child1.m_cell, dlcell, scaled0, scaled1, pos);
+                    }
+                }
+            }
+        }
+
+        // we want to calculate likelihood at last_calc_branch so set dlcell accordingly
+        dlcell = last_calc_branch->GetDLCell(loc, markerCell, moving);
+
+        totallike += datmodel.ComputeSubtreeDLs(*dlcell, dlcell->GetSiteDLs(posn1),
+                                                dlcell->GetSiteDLs(posn2), posn1);
+    }
+
+    // oddly enough, the microsat likelihood can be 1, and sometimes is, so
+    // this assert is inappropriate.
+    // assert(totallike != 0);  // this would be a likelihood=1, which is *too* likely
+
+    return totallike;
+
+} // AlleleCalculator::Calculate
+
+//____________________________________________________________________________________
diff --git a/src/datalike/dlcalc.h b/src/datalike/dlcalc.h
new file mode 100644
index 0000000..1e66bb3
--- /dev/null
+++ b/src/datalike/dlcalc.h
@@ -0,0 +1,213 @@
+// $Id: dlcalc.h,v 1.44 2011/10/11 16:42:16 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef DLCALCULATOR_H
+#define DLCALCULATOR_H
+
+#include <cmath>
+#include <vector>
+
+#include "vectorx.h"
+#include "constants.h"
+#include "rangex.h"
+#include "dlcell.h"
+#include "branch.h"
+
+/*********************************************************************
+ DLCalculator manages the calculation of data likelihoods on
+ a given locus specified at constructor time.  Call
+ the member function Calculate() to perform the actual calculation.
+ Calculate() makes use of DataModel member functions
+ ComputeExponentials(), ComputeSiteDLs() and ComputeTreeDL(),
+ and returns the likelihood.
+
+ The class is polymorphic on data type (not data model).
+
+ We assume that we start with a correct tree with all update flags set
+ appropriately.  We end with a tree with correct likelihoods.  This
+ class does NOT reset the update flags after updating the likelihood,
+ as the flags may be needed by subsequent calculations.
+
+ This file also holds the small helper class ChildInfo used to
+ simplify data likelihood calculation code.
+
+ Written by Jon Yamato, revised by Jim Sloan, revised by Jon Yamato
+ 2002/01/28 added microsatellite support -- Mary Kuhner
+            moved markerweights to base class
+ 2002/07/08 added k-allele model -- Mary
+ 2002/07/21 refactoring to remove duplicate code -- Mary
+ 2004/07/13 added trait-locus code -- Mary
+ 2005/03/01 moved aliases to tree -- Mary
+ 2005/09/22 deleted markerweights entirely.
+
+**********************************************************************/
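+
+// A minimal usage sketch (illustrative only; the local names are placeholders),
+// assuming the caller already holds a Tree and a Locus whose update flags are
+// set as described above:
+//
+//     DNACalculator calc(locus);                            // data-type-specific subclass
+//     double datalike = calc.Calculate(tree, locus, false); // third argument is the "moving" flag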
+
+// The following are used in the .cpp code:
+// #include "datapack.h" for access to
+//    datamodel, GetDataLength(), GetNTips()
+// #include "dlmodel.h" for access to
+//    Finalize(), ComputeExponentials(), ComputeSiteDLs(),
+//    ComputeTreeDLs()
+// #include "tree.h" for access to
+//    timelist, SetDLValue()
+// #include "timelist.h" for access to
+//    BeginBranch(), GetBranch(), BeginBody(), InRange()
+
+class Locus;
+class Tree;
+class DataModel;
+class NucModel;
+class ChildInfo;
+class TimeList;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class DLCalculator
+{
+  private:
+    DLCalculator();                               // undefined
+    DLCalculator& operator=(const DLCalculator&); // undefined
+
+  protected:
+    DataModel& m_datamodel;
+    LongVec1d m_markerpos;
+
+    DLCalculator(const DLCalculator& src);        // internal use
+
+    rangepair SitePairToMarkerPair(rangepair sites);
+    ChildInfo GetChildInfo(Branch_ptr branch, long locus, long childindex,
+                           long cellindex, long posn, bool moving) const;
+    ChildInfo NullChildInfo() const;
+
+  public:
+    DLCalculator(const Locus& locus);
+    virtual ~DLCalculator()            {};
+    virtual DLCalculator* Clone() const = 0;
+    virtual double Calculate(Tree& tree, const Locus& locus, bool moving) = 0;
+    virtual void SimulateData(Tree& tree, Locus& locus);
+    virtual void CopyDataFrom(Locus& destloc, Locus& origloc, Tree& tree);
+    virtual void Randomize(Locus& destloc, rangeset rset, Tree& tree);
+    virtual void MarkPanelBranches(Tree& tree, const Locus& locus);
+    virtual LongVec1d RecalculateAliases(const Tree&, const Locus&) const
+    { LongVec1d empty; return empty; };
+
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class NucCalculator : public DLCalculator
+{
+  private:
+    LongVec1d CalculateAliasesFromData(const std::vector<DoubleVec2d>& data) const;
+    LongVec1d SetupAliases(const Locus& locus) const;
+
+  protected:
+    typedef double** SiteDL;
+
+    NucCalculator(const NucCalculator& src);    // internal use
+    virtual void CalculateSite(Cell_ptr child1, Cell_ptr child2,
+                               Cell_ptr newcell, long pos, long alias);
+    void Breakalias(LongVec1d& aliases, const rangevector& subtrees);
+
+  public:
+    NucCalculator(const Locus& locus);
+    virtual ~NucCalculator() {};
+    virtual LongVec1d RecalculateAliases(const Tree& tree, const Locus& locus) const;
+
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class DNACalculator : public NucCalculator
+{
+  private:
+
+  protected:
+    DNACalculator(const DNACalculator& src) : NucCalculator(src) {};
+
+  public:
+    DNACalculator(const Locus& locus) : NucCalculator(locus) {};
+    virtual ~DNACalculator() {};
+    virtual DLCalculator* Clone() const;
+    virtual double Calculate(Tree& tree, const Locus& locus, bool moving);
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class SNPCalculator : public NucCalculator
+{
+  private:
+    NucModel* m_invarmodel;
+
+    void CalculateInvarSite(Cell_ptr child1, Cell_ptr child2, Cell_ptr newcell, long pos);
+    void MaskPanelTips(TimeList& timeList, long loci, long first, long last, long i);
+
+  protected:
+    SNPCalculator(const SNPCalculator& src);
+
+    // no-panel pathway
+    double CalcNoPanel(Tree& tree, long loc, std::pair<long,long> sitespan, LongVec1d aliases);
+    double SumNoPanel(Cell_ptr varcell, Cell_ptr invarcell, std::pair<long,long> sitespan);
+
+    // panel pathway
+    Branch_ptr CalcPanel(Tree& tree, long loc, std::pair<long,long> sitespan, LongVec1d aliases);
+    Branch_ptr CalcPanelInvariants(Tree& tree, long loc, std::pair<long,long> sitespan);
+    double SumPanel(Cell_ptr varcell, std::pair<long,long> sitespan, Cell_ptr sumcell);
+
+  public:
+    SNPCalculator(const Locus& locus);
+    virtual ~SNPCalculator();
+    virtual DLCalculator* Clone() const;
+    virtual double Calculate(Tree& tree, const Locus& locus, bool moving);
+    virtual void SimulateData(Tree& tree, Locus& locus);
+
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// This subclass is suitable for any type of allelic data, such as
+// microsats, electrophoretic alleles or the k-allele data type.
+// It used to have subclasses, but they weren't needed.
+
+class AlleleCalculator : public DLCalculator
+{
+  protected:
+    AlleleCalculator(const AlleleCalculator& src);  // internal use
+
+  public:
+    AlleleCalculator(const Locus& locus);
+    virtual ~AlleleCalculator() {};
+    virtual DLCalculator* Clone() const { return new AlleleCalculator(*this); };
+    virtual double Calculate(Tree& tree, const Locus& locus, bool moving);
+
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// This tiny class saves some information for use by data likelihood
+// calculations.  It is similar to std::pair.
+
+class ChildInfo
+{
+  public:
+    Cell_ptr m_cell;
+    double m_length;
+
+    ChildInfo(Cell_ptr cell, double len) : m_cell(cell), m_length(len) {};
+    ChildInfo() : m_length(0) {};
+};
+
+#endif // DLCALCULATOR_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/dlcell.cpp b/src/datalike/dlcell.cpp
new file mode 100644
index 0000000..158f68e
--- /dev/null
+++ b/src/datalike/dlcell.cpp
@@ -0,0 +1,697 @@
+// $Id: dlcell.cpp,v 1.44 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// This file contains the implementation code for the data-likelihood
+// storage object.
+
+#include <cassert>
+#include <cmath>
+#include <cstring>
+#include <iostream>                     //debug
+
+#include "dlcell.h"
+#include "dlmodel.h"
+#include "errhandling.h"
+#include "definitions.h"
+#include "registry.h"
+#include "cellmanager.h"
+#include "stringx.h"                    // for ToString() use in debug function DLsToString()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+// this turns on the data likelihood details for testing
+//#define DATA_LIKELIHOOD_DETAILS
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+// DLCell
+//------------------------------------------------------------------------------------
+
+DLCell::DLCell(long markers, long cats, long bins)
+    : Cell(),
+      m_nmarkers(markers),
+      m_ncats(cats),
+      m_nbins(bins),
+      m_norms(markers, 0.0)
+{
+    // obtain an internal array from the free store
+    m_identifier.first = m_nmarkers;
+    m_identifier.second = m_ncats;
+    m_identifier.third = m_nbins;
+
+    m_DLs = registry.GetCellManager().GetArray(m_identifier, *this);
+} // DLCell constructor
+
+//------------------------------------------------------------------------------------
+
+DLCell::~DLCell()
+{
+    // return the internal array to the free store
+    registry.GetCellManager().FreeArray(m_identifier, m_DLs);
+    m_DLs = NULL;
+} // DLCell destructor
+
+//------------------------------------------------------------------------------------
+
+Cell* DLCell::Copy() const
+{
+    Cell* pCell = Clone();
+    pCell->CopyInitialize(*this);
+    return pCell;
+
+} // Copy
+
+//------------------------------------------------------------------------------------
+
+void DLCell::EmptyCell()
+{
+    for (long posn = 0; posn < m_nmarkers; ++posn)
+    {
+        for (long cat = 0; cat < m_ncats; ++cat)
+        {
+            for (long base = 0; base < m_nbins; ++base)
+            {
+                m_DLs[posn][cat][base] = 0;
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void DLCell::CopyInitialize(const Cell& src)
+{
+    const DLCell& srcell = dynamic_cast<const DLCell&>(src);
+
+    assert(m_nmarkers == srcell.m_nmarkers);
+    assert(m_ncats == srcell.m_ncats);
+    assert(m_nbins == srcell.m_nbins);
+
+    m_norms = srcell.m_norms;
+    // m_nmarkers+1 because there is an extra cell at the end, to
+    // allow an STL-like interface where one past the end is a valid
+    // address
+
+    long arraySize = (m_nmarkers+1) * m_ncats * m_nbins;
+    //  cerr << "arraysize=" << arraySize << endl;
+    // memcpy for speed--be careful!
+    memcpy(m_DLs[0][0], srcell.m_DLs[0][0], arraySize*sizeof(double));
+
+} // CopyInitialize
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool DLCell::IsValidPosition(long pos) const
+{
+    // for some purposes, pos == m_nmarkers is in fact valid; this
+    // allows "one past the end" logic emulating the STL
+
+    if (0 <= pos && pos <= m_nmarkers) return true;
+    return false;
+} // IsValidPosition
+
+//------------------------------------------------------------------------------------
+// Make a datalikelihood array appropriately sized for this Cell
+
+cellarray DLCell::MakeArray()
+{
+    long len = m_nmarkers + 1;
+    // provide an STL-like interface into the m_DLs array
+    // (i.e. provide a legal reference one past the normal end
+    // of the array).
+
+    // set up a 3-dimensional array in which all positions are
+    // contiguous, so that memcpy can be used on it (a speed
+    // optimization).
+
+    long site, category;
+    cellarray dl;
+
+    dl = new double**[len];
+
+    dl[0] = new double*[len * m_ncats];
+    for (site = 0; site < len; ++site)
+    {
+        dl[site] = dl[0] + site * m_ncats;
+    }
+
+    dl[0][0] = new double[len * m_ncats * m_nbins];
+    for (site = 0; site < len; ++site)
+    {
+        for (category = 0; category < m_ncats; ++category)
+        {
+            dl[site][category] = dl[0][0] + site * m_ncats * m_nbins + category * m_nbins;
+        }
+    }
+
+    return dl;
+
+} // MakeArray
+
+//------------------------------------------------------------------------------------
+
+void DLCell::SwapDLs(Cell_ptr othercell, long pos)
+{
+    // This creates a 2D array (categories x bins) and uses it to
+    // swap the information for a single marker between two DLCells.
+    // All bets are off if they are not the same type!
+
+    // WARNING -- Not exception safe due to raw new.
+
+    // take pointers to the two things to be swapped
+    double** otherDLs = othercell->GetSiteDLs(pos);
+    double** myDLs = GetSiteDLs(pos);
+
+    // create temporary storage for the swap
+    double** newDLs = new double* [m_ncats];
+    newDLs[0] = new double [m_ncats * m_nbins];
+    long cat;
+    for(cat = 1; cat < m_ncats; ++cat)
+        newDLs[cat] = newDLs[0] + cat * m_nbins;
+
+    // memcpy for speed
+    memcpy(newDLs[0], myDLs[0], m_ncats * m_nbins * sizeof(double));
+
+    // swap
+    SetSiteDLs(pos, otherDLs);
+    othercell->SetSiteDLs(pos, newDLs);
+
+    // release temporary storage
+    delete [] newDLs[0];
+    delete [] newDLs;
+
+} // SwapDLs
+
+//------------------------------------------------------------------------------------
+
+void DLCell::SumMarkers(long startpos, long endpos, bool normalize)
+{
+    long cat, bin, pos;
+
+    if (normalize)
+    {
+
+        for (cat = 0; cat < m_ncats; ++cat)
+        {
+            for (bin = 0; bin < m_nbins; ++bin)
+            {
+                double result = 0.0;
+                for (pos = startpos; pos != endpos; ++pos)
+                {
+                    double markerval = log(m_DLs[pos][cat][bin]) + GetNorms(pos);
+                    if (markerval > EXPMIN) result += exp(markerval);
+                }
+#ifdef DATA_LIKELIHOOD_DETAILS
+                cerr << "SumMarkers-normalized cat:" << cat
+                     << " bin:" << bin
+                     << " result:" << result
+                     << endl;
+#endif
+                if (result == 0.0)   // never found a good value?
+                    m_DLs[endpos][cat][bin] = exp(EXPMIN);
+                else
+                    m_DLs[endpos][cat][bin] = result;
+            }
+        }
+
+        // renormalize and re-set norms
+        double newnorm = Normalize(m_DLs[endpos]);
+        SetNorms(newnorm, endpos);
+
+    }
+    else                                // no normalization
+    {
+
+        for (cat = 0; cat < m_ncats; ++cat)
+        {
+            for (bin = 0; bin < m_nbins; ++bin)
+            {
+                double result = 0.0;
+                for (pos = startpos; pos != endpos; ++pos)
+                {
+                    result += m_DLs[pos][cat][bin];
+                }
+#ifdef DATA_LIKELIHOOD_DETAILS
+                cerr << "SumMarkers-not normalized cat:" << cat
+                     << " bin:" << bin
+                     << " result:" << result
+                     << endl;
+#endif
+
+                m_DLs[endpos][cat][bin] = result;
+            }
+        }
+    }
+
+} // SumMarkers
+
+//------------------------------------------------------------------------------------
+
+bool DLCell::IsSameAs(const Cell_ptr othercell, long pos) const
+{
+    double** otherDLs = othercell->GetSiteDLs(pos);
+    double** myDLs = GetSiteDLs(pos);
+    long cat, bin;
+    for(cat = 0; cat < m_ncats; ++cat)
+        for (bin = 0; bin < m_nbins; ++bin)
+            if (myDLs[cat][bin] != otherDLs[cat][bin]) return false;
+
+    return true;
+} // DLCell::IsSameAs
+
+//------------------------------------------------------------------------------------
+
+long DLCell::DiffersFrom(Cell_ptr othercell) const
+{
+    long marker;
+    for(marker = 0; marker < m_nmarkers; ++marker)
+        if (!IsSameAs(othercell,marker)) return marker;
+
+    return FLAGLONG;
+
+} // DLCell::DiffersFrom
+
+//------------------------------------------------------------------------------------
+
+void DLCell::SetAllCategoriesTo(DoubleVec1d& state, long posn)
+{
+    for (long cat = 0; cat < m_ncats; cat++)
+    {
+        for (unsigned long nstate=0; nstate < state.size(); nstate++)
+        {
+            m_DLs[posn][cat][nstate] = state[nstate];
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d DLCell::GetStateFor(long posn, long cat) const
+{
+    DoubleVec1d state;
+    for (long nstate = 0; nstate < m_nbins; nstate++)
+    {
+        state.push_back(m_DLs[posn][cat][nstate]);
+    }
+    return state;
+}
+
+
+//------------------------------------------------------------------------------------
+
+void DLCell::SetStateTo (long posn, long cat, DoubleVec1d state)
+{
+    for (long nstate = 0; nstate < m_nbins; nstate++)
+    {
+        m_DLs[posn][cat][nstate] = state[nstate];
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void DLCell::AddTo(const Cell_ptr othercell)
+{
+    for (long pos = 0; pos < m_nmarkers; pos++)
+    {
+        double** otherDLs = othercell->GetSiteDLs(pos);
+        double** myDLs = GetSiteDLs(pos);
+        long cat, bin;
+        for(cat = 0; cat < m_ncats; ++cat)
+        {
+            for (bin = 0; bin < m_nbins; ++bin)
+            {
+                myDLs[cat][bin] += otherDLs[cat][bin];
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void DLCell::SubtractFrom(const Cell_ptr othercell)
+{
+    for (long pos = 0; pos < m_nmarkers; pos++)
+    {
+        double** otherDLs = othercell->GetSiteDLs(pos);
+        double** myDLs = GetSiteDLs(pos);
+        long cat, bin;
+        for(cat = 0; cat < m_ncats; ++cat)
+        {
+            for (bin = 0; bin < m_nbins; ++bin)
+            {
+                myDLs[cat][bin] -= otherDLs[cat][bin];
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void DLCell::MultiplyBy(double mult)
+{
+    for (long pos = 0; pos < m_nmarkers; pos++)
+    {
+        double** myDLs = GetSiteDLs(pos);
+        long cat, bin;
+        for(cat = 0; cat < m_ncats; ++cat)
+        {
+            for (bin = 0; bin < m_nbins; ++bin)
+            {
+                myDLs[cat][bin] *= mult;
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void DLCell::MultiplyBy(const Cell_ptr othercell)
+{
+    for (long pos = 0; pos < m_nmarkers; pos++)
+    {
+        double** otherDLs = othercell->GetSiteDLs(pos);
+        double** myDLs = GetSiteDLs(pos);
+        long cat, bin;
+        for(cat = 0; cat < m_ncats; ++cat)
+        {
+            for (bin = 0; bin < m_nbins; ++bin)
+            {
+                myDLs[cat][bin] *= otherDLs[cat][bin];
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+LongVec1d DLCell::GetOnes(long marker) const
+{
+    LongVec1d ones;
+    double** dls = GetSiteDLs(marker);
+    //Just mess with the first category
+    for (long bin = 0; bin < m_nbins; ++bin)
+    {
+        if (dls[0][bin] == 1.0)
+        {
+            ones.push_back(bin);
+        }
+    }
+    return ones;
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+string DLCell::DLsToString(long start, long end) const
+{
+    string lines;
+
+    long line;
+    for(line = start; line <= end; ++line)
+    {
+        lines += "marker " + ToString(line) + ": ";
+        double** dls = GetSiteDLs(line);
+        long cat;
+        for(cat = 0; cat < m_ncats; ++cat)
+        {
+            lines += "{";
+            long bin;
+            for(bin = 0; bin < m_nbins; ++bin)
+            {
+                lines += ToString(dls[cat][bin]);
+                if (bin != m_nbins - 1) lines += ",";
+            }
+            lines += "}";
+            if (cat != m_ncats - 1) lines += ", ";
+        }
+        lines += "\n";
+    }
+
+    return lines;
+
+} // DLCell::DLsToString
+
+//------------------------------------------------------------------------------------
+
+double DLCell::Normalize(double** siteDLs)
+{
+    double biggest = NEG_MAX;
+    long cat, bin;
+    for(cat = 0; cat < m_ncats; ++cat)
+    {
+        for(bin = 0; bin < m_nbins; ++bin)
+        {
+            if (siteDLs[cat][bin] > biggest) biggest = siteDLs[cat][bin];
+        }
+    }
+
+    for(cat = 0; cat < m_ncats; ++cat)
+    {
+        for(bin = 0; bin < m_nbins; ++bin)
+        {
+            siteDLs[cat][bin] /= biggest;
+        }
+    }
+
+    return log(biggest);
+
+} // Normalize
+
+//------------------------------------------------------------------------------------
+
+void DLCell::SetSiteDLs(long posn, double **siteDLs)
+{
+    assert(IsValidPosition(posn));
+    long siteSize = m_ncats * m_nbins;
+    memcpy(m_DLs[posn][0], siteDLs[0], siteSize * sizeof(double));
+}
+
+//------------------------------------------------------------------------------------
+
+void DLCell::AddToSiteDLs(long posn, double **siteDLs)
+{
+    assert(IsValidPosition(posn));
+    long cat, bin;
+    for(cat = 0; cat < m_ncats; ++cat)
+    {
+        for (bin = 0; bin < m_nbins; ++bin)
+        {
+            m_DLs[posn][cat][bin] += siteDLs[cat][bin];
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+// NullCell
+//------------------------------------------------------------------------------------
+
+NullCell::NullCell()
+    : Cell()
+{
+    // deliberately blank
+} // NullCell constructor
+
+//------------------------------------------------------------------------------------
+
+void NullCell::Initialize (const StringVec1d&, const DataModel_ptr)
+{
+    assert (false); // should never call this!
+
+} // Initialize
+
+DoubleVec1d NullCell::GetStateFor(long posn, long cat) const
+{
+    throw implementation_error("No state for this data likelihood.");
+}
+
+//------------------------------------------------------------------------------------
+// NucCell
+//------------------------------------------------------------------------------------
+
+NucCell::NucCell(long markers, long cats)
+    : DLCell(markers, cats, BASES)
+{
+    // deliberately left blank
+} // NucCell constructor
+
+//------------------------------------------------------------------------------------
+
+void NucCell::Initialize(const StringVec1d &sequence, const DataModel_ptr trans)
+{
+    long posn, base, cat;
+
+    string postring;
+    vector<double> likes;
+
+    // could be OPTIMIZED
+
+    for (posn = 0; posn < m_nmarkers; ++posn)
+    {
+        postring = sequence[posn];
+        likes = trans->DataToLikes(postring);
+        for (cat = 0; cat < m_ncats; ++cat)
+        {
+            for (base = 0; base < m_nbins; ++base)
+            {
+                m_DLs[posn][cat][base] = likes[base];
+            }
+        }
+    }
+} // Initialize
+
+//------------------------------------------------------------------------------------
+// DNACell
+//------------------------------------------------------------------------------------
+
+DNACell::DNACell(long markers, long cats)
+    : NucCell(markers, cats)
+{
+    // intentionally blank
+} // DNACell constructor
+
+//------------------------------------------------------------------------------------
+
+Cell *DNACell::Clone() const
+{
+    Cell *pDLCell = new DNACell(m_nmarkers, m_ncats);
+    return pDLCell;
+}
+
+//------------------------------------------------------------------------------------
+// SNPCell
+//------------------------------------------------------------------------------------
+
+SNPCell::SNPCell(long markers, long cats)
+    : NucCell(markers, cats)
+{
+    // deliberately blank
+} // SNPCell constructor
+
+//------------------------------------------------------------------------------------
+
+SNPCell::~SNPCell()
+{
+    // deliberately blank
+}
+
+//------------------------------------------------------------------------------------
+
+Cell *SNPCell::Clone() const
+{
+    Cell *pDLCell  = new SNPCell(m_nmarkers, m_ncats);
+    return pDLCell;
+}
+
+//------------------------------------------------------------------------------------
+
+void SNPCell::Initialize(const StringVec1d&, const DataModel_ptr trans)
+{
+    // We do not use the sequence input -- SNPCell::Initialize should
+    // only be used to initialize invariant cells. We keep the first
+    // argument for conformity with the base class interface.
+
+    long categ, invar, base;
+
+    // In the m_DLs array, the basic idea is to set all entries to 0.0
+    // except for the entries where the invariant equals the base,
+    // where they are set to 1.0.
+    // HOWEVER,
+    // since we've now got a data uncertainty model, each of the
+    // invariant bases needs to go through the error correction
+    // transformation first.
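+    // For instance (an illustrative sketch, not necessarily the exact numbers
+    // DataToLikes() produces): with no data uncertainty the all-A entry would be
+    // {1,0,0,0}; with a simple uniform per-base error rate e it would look
+    // roughly like {1-e, e/3, e/3, e/3}.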
+
+    for (invar = 0; invar < INVARIANTS; ++invar)
+    {
+        vector<double> likes = trans->DataToLikes(SINGLEBASES[invar]);
+        for (categ = 0; categ < m_ncats; ++categ)
+        {
+            for (base = 0; base < m_nbins; ++base)
+            {
+                m_DLs[invar][categ][base] = likes[base];
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+// AlleleCell
+//------------------------------------------------------------------------------------
+
+AlleleCell::AlleleCell(long markers, long cats, long bins)
+    : DLCell(markers, cats, bins)
+{
+    // deliberately blank
+} // AlleleCell constructor
+
+//------------------------------------------------------------------------------------
+
+Cell *AlleleCell::Clone() const
+{
+    Cell *pDLCell  = new AlleleCell(m_nmarkers, m_ncats, m_nbins);
+    return pDLCell;
+} // Clone
+
+//------------------------------------------------------------------------------------
+
+void AlleleCell::Initialize(const StringVec1d &sequence, const DataModel_ptr trans)
+{
+    long posn, bin, cat;
+    vector<double> likes;
+
+    // could be OPTIMIZED
+
+    for (posn = 0; posn < m_nmarkers; ++posn)
+    {
+        likes = trans->DataToLikes(sequence[posn], posn);
+        for (cat = 0; cat < m_ncats; ++cat)
+        {
+            for (bin = 0; bin < m_nbins; ++bin)
+            {
+                m_DLs[posn][cat][bin] = likes[bin];
+            }
+        }
+    }
+} // Initialize
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+triplet::triplet()
+    : first(0),
+      second(0),
+      third(0)
+{
+    // intentionally blank
+} // triplet default constructor
+
+//------------------------------------------------------------------------------------
+
+triplet::triplet(long f, long s, long t)
+    : first(f),
+      second(s),
+      third(t)
+{
+    // intentionally blank
+} // triplet constructor
+
+//------------------------------------------------------------------------------------
+
+bool triplet::operator<(const triplet& rhs) const
+{
+    if (first != rhs.first) return (first < rhs.first);
+    if (second != rhs.second) return (second < rhs.second);
+    return (third < rhs.third);
+} // triplet operator<
+
+//____________________________________________________________________________________
diff --git a/src/datalike/dlcell.h b/src/datalike/dlcell.h
new file mode 100644
index 0000000..2c7b659
--- /dev/null
+++ b/src/datalike/dlcell.h
@@ -0,0 +1,314 @@
+// $Id: dlcell.h,v 1.36 2011/03/08 08:16:33 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/******************************************************************
+ This file defines the branch data-likelihood storage object.  It maintains the data
+ likelihoods internally as a bare array for speed reasons (ability to use memcpy).
+
+ The DLCell constructors and Clone make fully functional *empty* DLCells suitable
+ for use on internal branches.  Copy() can be used to make a filled-up DLCell,
+ or you can Initialize() it with a sequence.
+
+ The base DLCell class maintains a private internal store of allocated arrays, as a speed-up.
+ This store should be cleared at the beginning of every region (by calling ClearStore()) or
+ it will turn into a major memory leak.  The freestore-management code assumes that all of
+ the data-likelihood arrays are three-dimensional contiguous allocation using new[].  If you
+ write one that isn't, derive from Cell, not from DLCell!
+
+ The file also contains the simple helper class 'triplet', analogous to std::pair.
+
+ NB:  It would be more efficient to do MSCells in some other way, but it would mean writing
+ a lot of new code, so I didn't.  If Microsats are too space-intensive to use, review this code.
+
+ Written by Jim Sloan, revised by Mary Kuhner
+    added datalikelihood branchwise-normalization -- Jon 2001/03/09
+    added NullCell -- Mary 2002/05/03
+    deleted SNPCell (replaced with a pair of DNACells) -- Mary 2002/05/28
+    put SNPCell back, much simplified -- Mary 2002/06/11
+    added KCell -- Mary 2002/07/08
+    collapsed the AlleleCell subclasses -- Mary 2002/07/22
+
+********************************************************************/
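+
+// A minimal lifecycle sketch (illustrative only; the local names are placeholders):
+//
+//     DNACell tipcell(nmarkers, ncats);          // empty cell for one branch
+//     tipcell.Initialize(sequence, datamodel);   // fill it from tip data
+//     Cell* internal = tipcell.Clone();          // empty cell of the same shape
+//     Cell* filled   = tipcell.Copy();           // filled duplicate
+//     DLCell::ClearStore();                      // call at the start of each new region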
+
+#ifndef DLCELL_H
+#define DLCELL_H
+
+#include <cassert>                      // May be needed for inline definitions.
+#include <iostream>
+#include <stdlib.h>
+#include <string>
+
+#include "constants.h"
+#include "types.h"
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// This struct is similar to std::pair, but holds three longs instead of two.
+
+struct triplet
+{
+  public:
+    long first;
+    long second;
+    long third;
+
+    triplet();
+    triplet(long f, long s, long t);
+    bool operator<(const triplet& rhs) const;
+
+    // We accept default destructor, copy constructor and operator=
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class Cell
+{
+  protected:
+    Cell() {};
+
+  public:
+    virtual          ~Cell() {};
+    virtual Cell*    Clone()         const                = 0;
+    virtual Cell*    Copy()          const                = 0;
+
+    // Initialize from data.
+    virtual void     Initialize(const StringVec1d& sequence, const DataModel_ptr trans) = 0;
+#if 1
+    virtual void     CopyInitialize(const Cell& src)         = 0;
+#endif
+    virtual void     EmptyCell()                             = 0;
+
+    // Retrieve individual marker DLs.
+    virtual void     SetSiteDLs(long posn, double** siteDLs)   = 0;
+    virtual void     AddToSiteDLs(long posn, double** siteDLs) = 0;
+    virtual double** GetSiteDLs(long posn) const               = 0;
+
+    virtual double** GetNextMarker(double** marker)          = 0;
+
+    // Manage the normalization coefficients.
+    virtual double   Normalize(double** siteDLs)             = 0;
+    virtual void     SetNorms(double val, long pos)          = 0;
+    virtual double   GetNorms(long pos) const                = 0;
+
+    virtual void SumMarkers(long startpos, long endpos, bool normalize) = 0;
+
+    // Swap two markers..
+    virtual void     SwapDLs(Cell_ptr other, long pos)          = 0;
+
+    // Compare DLCell contents.
+    virtual bool     IsSameAs(const Cell_ptr othercell, long pos) const = 0;
+    virtual long     DiffersFrom(Cell_ptr othercell) const = 0;
+
+    // Simulation functions.
+    virtual void     SetAllCategoriesTo(DoubleVec1d& state, long posn) = 0;
+    virtual DoubleVec1d GetStateFor(long posn, long cat) const = 0;
+    virtual void     SetStateTo(long posn, long cat, DoubleVec1d state) = 0;
+    virtual void     AddTo(const Cell_ptr othercell) = 0;
+    virtual void     SubtractFrom(const Cell_ptr othercell) = 0;
+    virtual void     MultiplyBy(double mult) = 0;
+    virtual void     MultiplyBy(const Cell_ptr othercell) = 0;
+
+    // For output.
+    virtual LongVec1d GetOnes(long marker) const = 0;
+
+    // Debugging functions.
+    virtual string   DLsToString(long start, long end) const = 0;
+    virtual long     GetNMarkers() = 0;
+    virtual long     GetNCats() = 0;
+    virtual long     GetNBins() = 0;
+
+}; // Cell
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class DLCell : public Cell
+{
+  private:
+    DLCell(const DLCell&);               // undefined
+    DLCell&    operator=(const DLCell&); // undefined
+
+  protected:
+    triplet     m_identifier;           // used to manage free store
+    long        m_nmarkers;             // number of markers
+    long        m_ncats;                // number of rate categories
+    long        m_nbins;                // number of allelic states
+    DoubleVec1d m_norms;                // normalization coefficients
+    cellarray   m_DLs;                  // array of data likelihoods: position X category X bin
+
+    // Error checking (debugging).
+    bool       IsValidPosition(long pos) const;
+
+  public:
+
+    DLCell(long markers, long cats, long bins);
+    virtual          ~DLCell();
+    virtual Cell*    Copy()          const;
+    cellarray  MakeArray();             // NB:  must not be virtual--it's called by base class constructor!
+    void EmptyCell();
+
+    virtual void     CopyInitialize(const Cell& src); // Initialize from data.
+
+    // Retrieve individual marker DLs.
+    virtual void     SetSiteDLs(long posn, double** siteDLs);
+    virtual void     AddToSiteDLs(long posn, double** siteDLs);
+    virtual double** GetSiteDLs(long posn) const {assert(IsValidPosition(posn)); return m_DLs[posn];};
+    virtual double** GetNextMarker(double** marker) {return marker + m_ncats;};
+
+    // Manage the normalization coefficients.
+    double   Normalize(double** siteDLs);
+    void     SetNorms(double val, long pos)  {assert(IsValidPosition(pos)); m_norms[pos] = val; };
+    double   GetNorms(long pos) const        {assert(IsValidPosition(pos)); return m_norms[pos]; };
+
+    // This function accumulates all the values from startpos, up to but not including endpos, *INTO* endpos.  Careful!
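+    // For example, the SNP code calls SumMarkers(baseA, baseEnd, ...) to fold the
+    // four invariant-base positions baseA..baseT into the extra slot at baseEnd,
+    // which it then treats as a single invariant "supermarker".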
+    void     SumMarkers(long startpos, long endpos, bool normalize);
+
+    // Swap two markers.
+    virtual void     SwapDLs(Cell_ptr other, long pos);
+
+    // Clear the free store.
+    // This is called by control code when a new region is begun, so that
+    // the old cellarrays, which are no longer useful, can be discarded.
+    static  void     ClearStore();
+
+    // Compare DLCell contents.
+    virtual bool     IsSameAs(const Cell_ptr othercell, long pos) const;
+
+    // DiffersFrom returns the first position that IsSameAs() thinks
+    // the 2 cells differ at, FLAGLONG if no position is found.
+    virtual long     DiffersFrom(Cell_ptr othercell) const;
+
+    // Simulation functions.
+    virtual void     SetAllCategoriesTo(DoubleVec1d& state, long posn);
+    virtual DoubleVec1d GetStateFor(long posn, long cat) const;
+    virtual void     SetStateTo(long posn, long cat, DoubleVec1d state);
+    virtual void     AddTo(const Cell_ptr othercell);
+    virtual void     SubtractFrom(const Cell_ptr othercell);
+    virtual void     MultiplyBy(double mult);
+    virtual void     MultiplyBy(const Cell_ptr othercell);
+
+    // For output.
+    virtual LongVec1d GetOnes(long marker) const;
+
+    // Debugging functions.
+    virtual string   DLsToString(long start, long end) const;
+    virtual long     GetNMarkers() {return m_nmarkers;};
+    virtual long     GetNCats() {return m_ncats;};
+    virtual long     GetNBins() {return m_nbins;};
+
+}; // DLCell
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// NullCell is a Cell that does nothing; it is useful for simplifying certain
+// algorithms in DLCalculator.  NullCells are not currently used in the Tree.
+
+class NullCell : public Cell
+{
+  public:
+    NullCell();
+    virtual          ~NullCell() {};
+    virtual Cell*    Clone() const { return new NullCell; };
+    virtual Cell*    Copy() const { return new NullCell; };
+    virtual void     Initialize(const StringVec1d&, const DataModel_ptr trans);
+#if 1
+    virtual void     CopyInitialize(const Cell&) { assert(false); }; // no! it's Null!
+#endif
+    virtual void     EmptyCell() {};
+    virtual void     SetSiteDLs(long, double**) { assert(false); };  // no! it's Null!
+    virtual void     AddToSiteDLs(long, double**) {};
+    virtual double** GetSiteDLs(long) const    { return NULL; };
+    virtual double   Normalize(double**) {return 0; };
+    virtual void     SetNorms(double, long) {};
+    virtual double   GetNorms(long) const {return 0; };
+    virtual void     SumMarkers(long, long, bool) { assert(false); };
+    virtual double** GetNextMarker(double**) { assert(false); return NULL; };
+    virtual void     SwapDLs(Cell_ptr, long) { assert(false); };
+
+    virtual bool     IsSameAs(const Cell_ptr, long) const
+    { assert(false); return false; };   // no! it's Null!
+
+    virtual long     DiffersFrom(Cell_ptr) const { assert(false); return 0; };
+
+    // Simulation functions.
+    virtual void     SetAllCategoriesTo(DoubleVec1d& state, long posn) {};
+    virtual DoubleVec1d GetStateFor(long posn, long cat) const;
+    virtual void     SetStateTo(long posn, long cat, DoubleVec1d state) {};
+    virtual void     AddTo(const Cell_ptr othercell) {};
+    virtual void     SubtractFrom(const Cell_ptr othercell) {};
+    virtual void     MultiplyBy(double mult) {};
+    virtual void     MultiplyBy(const Cell_ptr othercell) {};
+
+    // For output.
+    virtual LongVec1d GetOnes(long marker) const { return LongVec1d();};
+
+    // Debugging functions.
+    virtual string   DLsToString(long, long) const { return string(""); };
+    virtual long     GetNMarkers() {return 0;};
+    virtual long     GetNCats() {return 0;};
+    virtual long     GetNBins() {return 0;};
+}; // NullCell
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class NucCell : public DLCell
+{
+  public:
+    NucCell(long markers, long cats);
+    virtual          ~NucCell()          {};
+    virtual void     Initialize(const StringVec1d& sequence, const DataModel_ptr trans);
+
+}; // NucCell
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class DNACell : public NucCell
+{
+  public:
+    DNACell(long markers, long cats);
+    virtual         ~DNACell()            {};
+    virtual Cell* Clone()    const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class SNPCell : public NucCell
+{
+  public:
+    SNPCell(long markers, long cats);
+    virtual           ~SNPCell();
+    virtual  Cell*    Clone() const;
+    virtual  void     Initialize(const StringVec1d&, const DataModel_ptr trans);
+
+}; // SNPCell
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class AlleleCell : public DLCell
+{
+  public:
+    AlleleCell(long markers, long cats, long bins);
+    virtual          ~AlleleCell()               {};
+    virtual  Cell*   Clone() const;
+    virtual void     Initialize(const StringVec1d& sequence, const DataModel_ptr trans);
+
+}; // AlleleCell
+
+#endif // DLCELL_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/dlmodel.cpp b/src/datalike/dlmodel.cpp
new file mode 100644
index 0000000..23fecf6
--- /dev/null
+++ b/src/datalike/dlmodel.cpp
@@ -0,0 +1,3634 @@
+// $Id: dlmodel.cpp,v 1.138 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <algorithm>
+#include <cstring>
+#include <numeric>
+#include <iostream>
+
+#include "calculators.h"
+#include "datapack.h"
+#include "datatype.h"
+#include "defaults.h"                   // for defaults::threshhold in StepwiseModel::ctor
+#include "dlcell.h"
+#include "dlmodel.h"
+#include "errhandling.h"
+#include "funcMax.h"
+#include "locus.h"
+#include "mathx.h"
+#include "registry.h"
+#include "runreport.h"
+#include "stringx.h"
+#include "xml_strings.h"                // for ToXML()
+#include "xmlsum_strings.h"             // For WriteAlpha()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+// turns on detailed local variables for debugging
+// JRM 3/10
+//#define DEBUG_VARIABLES
+#ifdef DEBUG_VARIABLES
+int printdbgvar = 0;
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DataModel::DataModel(long nmarkers,
+                     long numCategories,
+                     DoubleVec1d categoryRates,
+                     DoubleVec1d categoryProbabilities,
+                     double userAutoCorrelationValue,
+                     bool doNormalize,
+                     long numBins,
+                     double relmurate)
+    :
+    m_ncategories(numCategories),
+    m_catrates(categoryRates),
+    m_catprobs(categoryProbabilities),
+    m_acratio(1.0 / userAutoCorrelationValue),
+    m_normalize(doNormalize),
+    m_nbins(numBins),
+    m_nmarkers(nmarkers),
+    m_relmurate(relmurate)
+{
+    m_ucratio = 1.0 - m_acratio;
+    ScaleCatProbabilities();
+    ScaleCatRates();
+    assert(DataModel::IsValidDataModel());
+
+} // DataModel constructor
+
+//------------------------------------------------------------------------------------
+
+string DataModel::GetDataModelName() const
+{
+    // "true" argument gets long version of name
+    return ToString(GetModelType(),true);
+}
+
+//------------------------------------------------------------------------------------
+
+string DataModel::GetDataModelShortName() const
+{
+    // "false" argument gets short version of name
+    return ToString(GetModelType(),false);
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d DataModel::ChooseRandomRates(long nsites) const
+{
+    DoubleVec1d rates;
+    Random& rand = registry.GetRandom();
+    double rate = m_catrates[0];
+    //choose a random rate to start
+    double r = rand.Float();
+    for (long i = 0; i < m_ncategories; ++i)
+    {
+        if (r < m_catprobs[i])
+        {
+            rate = m_catrates[i];
+            break;              // stop at the first category whose probability covers r
+        }
+        else r -= m_catprobs[i];
+    }
+
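+    // Walk along the sites: with probability m_acratio a fresh rate is drawn
+    // for the site (using the same cumulative-probability lookup as above);
+    // otherwise the previous site's rate is carried over, which produces the
+    // autocorrelated runs of rates this routine is meant to generate.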
+    for (long site=0; site<nsites; site++)
+    {
+        if (rand.Float() <= m_acratio)
+        {
+            //choose a new rate.
+            r = rand.Float();
+            for (long i = 0; i < m_ncategories; ++i)
+            {
+                if (r < m_catprobs[i])
+                {
+                    rate = m_catrates[i];
+                    break;      // first category whose probability covers r
+                }
+                else r -= m_catprobs[i];
+            }
+        }
+        rates.push_back(rate);
+    }
+    return rates;
+
+} // DataModel::ChooseRandomRates
+
+//------------------------------------------------------------------------------------
+
+#if 0 // Vestigial but possibly future code
+void DataModel::SetGamma(bool gam)
+{
+    usegamma = gam;
+    if (usegamma)
+    {
+        // DEBUG debug warning WARNING
+        // need to error check presence of gammashape and ncats > 1?
+        SetCatRates(gamma_rates(gammashape,m_ncategories));
+        DoubleVec1d newprobs(m_ncategories,1.0/m_ncategories);
+        SetCatProbabilities(newprobs);
+    }
+}
+#endif
+
+//------------------------------------------------------------------------------------
+
+bool DataModel::IsValidDataModel() const
+{
+    if (m_nbins < 1) return false;
+    if (m_nmarkers < 1) return false;
+
+    if (m_acratio < 0) return false;
+
+    if (m_ncategories < 1) return false;
+    size_t ncats = m_ncategories;
+    if (m_catrates.size() != ncats) return false;
+    if (m_catprobs.size() != ncats) return false;
+
+    size_t i;
+    double totalprob = 0.0;
+    for (i = 0; i < ncats; ++i)
+    {
+        if (m_catrates[i] < 0.0) return false;
+        if (m_catprobs[i] <= 0.0) return false;
+        if (m_catprobs[i] > 1.0) return false;
+        totalprob += m_catprobs[i];
+    }
+
+    if (fabs(1.0 - totalprob) > EPSILON) return false;
+
+    return true;
+} // DataModel::IsValidDataModel
+
+//------------------------------------------------------------------------------------
+
+StringVec1d DataModel::CreateDataModelReport() const
+{
+
+    StringVec1d report;
+    string line;
+
+    if (m_ncategories > 1)
+    {
+        line = ToString(m_ncategories) + " rate categories with correlated length " +
+            ToString(1.0 / m_acratio);
+        report.push_back(line);
+        long cat;
+        for (cat = 0; cat < m_ncategories; ++cat)
+        {
+            line = "Relative rate " + ToString(m_catrates[cat]) + "  Frequency " +
+                ToString(m_catprobs[cat]);
+            report.push_back(line);
+        }
+    }
+    if (m_relmurate != 1)
+    {
+        line = "The relative marker mutation rate for this model's segment was ";
+        line += ToString(m_relmurate) + ".";
+        report.push_back(line);
+    }
+
+    return report;
+} // CreateDataModelReport
+
+//------------------------------------------------------------------------------------
+
+StringVec1d DataModel::ToXML(size_t nspaces) const
+{
+    StringVec1d xmllines;
+    string line = MakeIndent(
+        MakeTagWithName(xmlstr::XML_TAG_MODEL,GetDataModelShortName()),
+        nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+    string mytag(MakeTag(xmlstr::XML_TAG_NORMALIZE));
+    line = MakeIndent(mytag,nspaces) + ToStringTF(ShouldNormalize()) +
+        MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    line = MakeIndent(MakeTag(xmlstr::XML_TAG_CATEGORIES),nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+    mytag = MakeTag(xmlstr::XML_TAG_NUM_CATEGORIES);
+    line = MakeIndent(mytag,nspaces) + ToString(GetNcategories()) +
+        MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_RATES);
+    line = MakeIndent(mytag,nspaces) + ToString(GetCatRates(),6) +
+        MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_PROBABILITIES);
+    line = MakeIndent(mytag,nspaces) + ToString(GetCatProbabilities()) +
+        MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_AUTOCORRELATION);
+    line = MakeIndent(mytag,nspaces) + ToString(GetUserAcratio()) +
+        MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    nspaces -= INDENT_DEPTH;
+
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_CATEGORIES),nspaces);
+    xmllines.push_back(line);
+
+    mytag = MakeTag(xmlstr::XML_TAG_RELATIVE_MURATE);
+    line = MakeIndent(mytag,nspaces) + ToString(m_relmurate) +
+        MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    nspaces -= INDENT_DEPTH;
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_MODEL),nspaces);
+    xmllines.push_back(line);
+
+    return xmllines;
+} // ToXML
+
+//------------------------------------------------------------------------------------
+
+void DataModel::ResetCatCells()
+{
+    m_catcells.assign(m_ncategories,1.0);
+} // DataModel::ResetCatCells
+
+//------------------------------------------------------------------------------------
+
+void DataModel::ScaleCatProbabilities()
+{
+    long cat;
+    double totalprob = 0.0;
+    m_ncategories = m_catprobs.size();
+    for(cat = 0; cat < m_ncategories; cat++)
+    {
+        totalprob += m_catprobs[cat];
+    }
+    if (fabs(1.0 - totalprob) > EPSILON)
+    {
+        for(cat = 0; cat < m_ncategories; cat++)
+        {
+            m_catprobs[cat] = m_catprobs[cat] / totalprob;
+        }
+    }
+} // DataModel::ScaleCatProbabilities
+
+//------------------------------------------------------------------------------------
+
+void DataModel::ScaleCatRates()
+{
+    // We renormalize the category rates to a weighted mean of 1.0
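+    // Concretely, each rate r[i] is divided by sum_i(r[i] * p[i]), so that the
+    // weighted mean of the rescaled rates is exactly 1.  For example, rates
+    // (1, 4) with probabilities (0.75, 0.25) have weighted mean 1.75 and
+    // become (1/1.75, 4/1.75), i.e. roughly (0.571, 2.286).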
+    double meanrate = 0.0;
+    size_t rate;
+
+    if (m_catrates.size() != m_catprobs.size())
+    {
+        string msg = "DataModel::ScaleCatRates() was called before ";
+        msg += "m_catrates and m_catprobs were properly initialized.";
+        throw implementation_error(msg);
+    }
+
+    size_t numrates = m_catrates.size();
+
+    for (rate = 0; rate < numrates; ++rate)
+    {
+        meanrate += m_catrates[rate] * m_catprobs[rate];
+    }
+
+    for (rate = 0; rate < numrates; ++rate)
+    {
+        m_catrates[rate] /= meanrate;
+    }
+
+} // DataModel::ScaleCatRates
+
+//------------------------------------------------------------------------------------
+
+void DataModel::TryToNormalizeAndThrow(long posn, model_type mtype)
+{
+    if (ShouldNormalize())
+    {
+        string errmsg("Encountered a subtree of zero likelihood at ");
+        errmsg += "position " + ToString(posn) + ".";
+        switch (mtype)
+        {
+            case F84:
+            case GTR:
+            case KAllele:
+                assert(false);
+                //Encountering zero data likelihood on a tree with DNA data
+                // is almost certainly a programming error, though occasionally
+                // one gets a tree so large that mispairing tips can be
+                // catastrophic.  Likewise, a K-allele model should allow mutations
+                // of a single step to have reasonable likelihoods even if the
+                // microsats are very disparate.
+                //
+                // However, a *user* is much more likely to have such a data set,
+                // (and hopefully we debug the program thoroughly before release)
+                // so we fall through here, throw the 'bad tree' error, and try
+                // a different tree.  --Lucian
+            case MixedKS:
+                // The MixedKS model has been seen to have problems here when it
+                // is optimizing.
+            case Brownian:
+            case Stepwise:
+                throw zero_dl_error(errmsg);
+        }
+    }
+    else
+    {
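+        // Normalization was off: switch it on and throw, so that this
+        // likelihood can be recomputed with per-site normalization enabled.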
+        SetNormalize(true);
+        throw datalikenorm_error("Datalikelihood normalization turned on.");
+    }
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+NucModel::NucModel(long nmarkers,
+                   long numCategories,
+                   DoubleVec1d categoryRates,
+                   DoubleVec1d categoryProbabilities,
+                   double userAutoCorrelationValue,
+                   bool doNormalize,
+                   double relMuRate,
+                   double freqA,
+                   double freqC,
+                   double freqG,
+                   double freqT,
+                   bool calcFreqsFromData,
+                   double perBaseErrorRate)
+    : DataModel(nmarkers,
+                numCategories,
+                categoryRates,
+                categoryProbabilities,
+                userAutoCorrelationValue,
+                doNormalize,
+                defaults::nucleotideBins,
+                relMuRate),
+      m_basefreqs(defaults::nucleotideBins),
+      m_freqsfromdata(calcFreqsFromData),
+      m_perBaseErrorRate(perBaseErrorRate)
+{
+    m_basefreqs[baseA] = freqA;
+    m_basefreqs[baseC] = freqC;
+    m_basefreqs[baseG] = freqG;
+    m_basefreqs[baseT] = freqT;
+    NormalizeBaseFrequencies();
+    assert(NucModel::IsValidDataModel());
+} // NucModel constructor
+
+//------------------------------------------------------------------------------------
+
+void NucModel::NormalizeBaseFrequencies()
+{
+    double sumOfFreqs = accumulate(m_basefreqs.begin(),m_basefreqs.end(),0.0);
+    transform(m_basefreqs.begin(), m_basefreqs.end(),
+              m_basefreqs.begin(),
+              bind2nd(divides<double>(),sumOfFreqs));
+}
+
+//------------------------------------------------------------------------------------
+
+bool NucModel::IsValidDataModel() const
+{
+    size_t index;
+    double totalfreqs = 0.0;
+    if(m_basefreqs.size() != 4) return false;
+    for (index = 0; index < m_basefreqs.size(); index++)
+    {
+        double thisFreq = m_basefreqs[index];
+        if(thisFreq <= 0.0) return false;
+        if(thisFreq >= 1.0) return false;
+        totalfreqs += thisFreq;
+    }
+    if (fabs(1.0 - totalfreqs) > EPSILON) return false;
+    return DataModel::IsValidDataModel();
+} // NucModel::IsValidDataModel
+
+//------------------------------------------------------------------------------------
+
+vector<double> NucModel::DataToLikes(const string& datum, long) const
+{
+    return NucModel::StaticDataToLikes(datum, GetPerBaseErrorRate());
+}
+
+//------------------------------------------------------------------------------------
+// This function implements the standard ambiguity codes for nucleotide data,
+// returning a vector of four doubles indicating the likelihood for A, C, G, T in that order.
+
+vector<double> NucModel::StaticDataToLikes(const string& datum, double perBaseErrorRate)
+{
+    // We assume this is a single nucleotide base, passed as a string only for generality
+    assert(datum.size() == 1);
+
+    vector<double> likes(BASES, 0.0);  // initialize to zero
+    char nucleotide = datum[0];
+
+    // resolve code
+    switch(nucleotide)
+    {
+        case 'A':
+            likes[baseA] = 1.0;
+            break;
+
+        case 'C':
+            likes[baseC] = 1.0;
+            break;
+
+        case 'G':
+            likes[baseG] = 1.0;
+            break;
+
+        case 'T':
+        case 'U':
+            likes[baseT] = 1.0;
+            break;
+
+        case 'M':
+            likes[baseA] = 1.0;
+            likes[baseC] = 1.0;
+            break;
+
+        case 'R':
+            likes[baseA] = 1.0;
+            likes[baseG] = 1.0;
+            break;
+
+        case 'W':
+            likes[baseA] = 1.0;
+            likes[baseT] = 1.0;
+            break;
+
+        case 'S':
+            likes[baseC] = 1.0;
+            likes[baseG] = 1.0;
+            break;
+
+        case 'Y':
+            likes[baseC] = 1.0;
+            likes[baseT] = 1.0;
+            break;
+
+        case 'K':
+            likes[baseG] = 1.0;
+            likes[baseT] = 1.0;
+            break;
+
+        case 'V':
+            likes[baseA] = 1.0;
+            likes[baseC] = 1.0;
+            likes[baseG] = 1.0;
+            break;
+
+        case 'H':
+            likes[baseA] = 1.0;
+            likes[baseC] = 1.0;
+            likes[baseT] = 1.0;
+            break;
+
+        case 'D':
+            likes[baseA] = 1.0;
+            likes[baseG] = 1.0;
+            likes[baseT] = 1.0;
+            break;
+
+        case 'B':
+            likes[baseC] = 1.0;
+            likes[baseG] = 1.0;
+            likes[baseT] = 1.0;
+            break;
+
+        case 'N':
+        case 'O':
+        case 'X':
+        case '?':
+        case '-':
+            likes[baseA] = 1.0;
+            likes[baseC] = 1.0;
+            likes[baseG] = 1.0;
+            likes[baseT] = 1.0;
+            break;
+
+        default:
+            assert(false);    // how did an unknown nucleotide get past proofreading?
+            likes[baseA] = 1.0;
+            likes[baseC] = 1.0;
+            likes[baseG] = 1.0;
+            likes[baseT] = 1.0;
+            break;
+    }
+
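+    // Fold in the per-base error rate: each base allowed by the code above
+    // keeps likelihood 1 - (4 - num_ones) * rate / 3, and each excluded base
+    // gets num_ones * rate / 3.  For an unambiguous call ('A', say) with
+    // error rate e this reduces to P(A) = 1 - e and P(C) = P(G) = P(T) = e/3,
+    // i.e. a miscalled base is treated as equally likely to be any other base.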
+    long   num_ones  = (long)(likes[baseA] + likes[baseC] + likes[baseG] + likes[baseT]);
+    double new_0     = (double)num_ones * perBaseErrorRate / 3.0;
+    double new_1     = 1.0 - (double)(4-num_ones) * perBaseErrorRate / 3.0;
+    likes[baseA] = (likes[baseA] > 0.5) ? new_1 : new_0;
+    likes[baseC] = (likes[baseC] > 0.5) ? new_1 : new_0;
+    likes[baseG] = (likes[baseG] > 0.5) ? new_1 : new_0;
+    likes[baseT] = (likes[baseT] > 0.5) ? new_1 : new_0;
+
+    return(likes);
+
+} // NucModel::StaticDataToLikes
+
+//------------------------------------------------------------------------------------
+
+StringVec1d NucModel::CreateDataModelReport() const
+{
+    StringVec1d report = DataModel::CreateDataModelReport();
+
+    string line = "Base frequencies: " + ToString(m_basefreqs[baseA]) + ", ";
+    line += ToString(m_basefreqs[baseC]) + ", ";
+    line += ToString(m_basefreqs[baseG]) + ", ";
+    line += ToString(m_basefreqs[baseT]);
+    report.push_back(line);
+
+    return report;
+
+} // NucModel::CreateDataModelReport
+
+//------------------------------------------------------------------------------------
+
+StringVec1d NucModel::ToXML(size_t nspaces) const
+{
+    StringVec1d xmllines(DataModel::ToXML(nspaces));
+
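+    // DataModel::ToXML ends with the closing model tag, so the base-frequency
+    // and per-base-error-rate lines below are spliced in just before that
+    // final line.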
+    nspaces += INDENT_DEPTH;
+
+    string line(MakeIndent(MakeTag(xmlstr::XML_TAG_BASE_FREQS),nspaces));
+    if (m_freqsfromdata)
+        line += " " + xmlstr::XML_TAG_CALCULATED + " ";
+    else
+        line += ToString(m_basefreqs,6);
+    line += MakeCloseTag(xmlstr::XML_TAG_BASE_FREQS);
+    StringVec1d::iterator endtag = --xmllines.end();
+    xmllines.insert(endtag,line);
+
+    string line2(MakeIndent(MakeTag(xmlstr::XML_TAG_PER_BASE_ERROR_RATE),nspaces));
+    line2 += ToString(m_perBaseErrorRate);
+    line2 += MakeCloseTag(xmlstr::XML_TAG_PER_BASE_ERROR_RATE);
+    endtag = --xmllines.end();
+    xmllines.insert(endtag,line2);
+
+    nspaces -= INDENT_DEPTH;
+    return xmllines;
+
+} // NucModel::ToXML
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d NucModel::ChooseAncestralState(long marker)
+{
+    DoubleVec1d result(BASES, 0.0);
+    double r = registry.GetRandom().Float();
+    long i;
+    for (i = 0; i < BASES; ++i)
+    {
+        if (r < m_basefreqs[i])
+        {
+            result[i] = 1.0;
+            return result;
+        }
+        else
+        {
+            r -= m_basefreqs[i];
+        }
+    }
+
+    // this code could be reached due to rounding errors
+    result[0] = 1.0;
+    return result;
+} // ChooseAncestralState
+
+//------------------------------------------------------------------------------------
+
+string NucModel::CellToData(Cell_ptr cell, long marker) const
+{
+    LongVec1d ones = cell->GetOnes(marker);
+    if (ones.size() == 1)
+    {
+        switch (ones[0])
+        {
+            case 0:
+                return "A";
+            case 1:
+                return "C";
+            case 2:
+                return "G";
+            case 3:
+                return "T";
+            default:
+                throw data_error("Tried to convert " + ToString(ones[0]+1)
+                                 + " to a nucleotide, but there should only be four bins.");
+        }
+    }
+    throw implementation_error("Cannot convert nucleotide data from the internal format"
+                               " when it is not simply one of the four bases.");
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+F84Model::F84Model(
+    long nmarkers,
+    long numCategories,
+    DoubleVec1d categoryRates,
+    DoubleVec1d categoryProbabilities,
+    double userAutoCorrelationValue,
+    bool doNormalize,
+    double relMuRate,
+    double freqA,
+    double freqC,
+    double freqG,
+    double freqT,
+    double ttRatio,
+    bool calculateFreqsFromData,
+    double perBaseErrorRate)
+    : NucModel(
+        nmarkers,
+        numCategories,
+        categoryRates,
+        categoryProbabilities,
+        userAutoCorrelationValue,
+        doNormalize,
+        relMuRate,
+        freqA,
+        freqC,
+        freqG,
+        freqT,
+        calculateFreqsFromData,
+        perBaseErrorRate),
+      m_ttratio(ttRatio),
+      // set some potentially useful defaults
+      computed1(true), computed2(true),
+      freqar(FLAGDOUBLE), freqcy(FLAGDOUBLE),
+      freqgr(FLAGDOUBLE), freqty(FLAGDOUBLE),
+      basefreqarray(NULL),
+      daughter1(NULL), daughter2(NULL), target(NULL)
+{
+    // The data-likelihood buffer vectors such as expA1 are default-constructed
+    // to an empty state, which is fine.
+    assert(F84Model::IsValidDataModel());
+    // cerr << "*****new F84 model created*****" << this << endl;
+    Finalize();
+
+} // F84Model::F84Model
+
+//------------------------------------------------------------------------------------
+
+void F84Model::EmptyBuffers()
+{
+    if (basefreqarray)
+    {
+        delete [] basefreqarray[0];
+        delete [] basefreqarray;
+    }
+    if (daughter1)
+    {
+        delete [] daughter1[0];
+        delete [] daughter1;
+    }
+    if (daughter2)
+    {
+        delete [] daughter2[0];
+        delete [] daughter2;
+    }
+    if (target)
+    {
+        delete [] target[0];
+        delete [] target;
+    }
+
+} // F84Model::EmptyBuffers
+
+//------------------------------------------------------------------------------------
+
+DataModel* F84Model::Clone() const
+{
+    DataModel* newmodel = new F84Model(*this);
+    return newmodel;
+}
+
+//------------------------------------------------------------------------------------
+
+void F84Model::AllocateBuffers()
+{
+    basefreqarray   = new double* [m_ncategories];
+    daughter1 = new double* [m_ncategories];
+    daughter2 = new double* [m_ncategories];
+    target = new double* [m_ncategories];
+    basefreqarray[0]   = new double [m_ncategories*BASES];
+    daughter1[0] = new double [m_ncategories*BASES];
+    daughter2[0] = new double [m_ncategories*BASES];
+    target[0] = new double [m_ncategories*BASES];
+    long cat;
+
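+    // Each matrix is a single contiguous m_ncategories x BASES block; the
+    // per-category pointers set below are just offsets into that block, which
+    // is why EmptyBuffers() deletes only element [0] plus the pointer array.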
+    for (cat = 0; cat < m_ncategories; ++cat)
+    {
+        basefreqarray[cat]   = basefreqarray[0] + cat*BASES;
+        daughter1[cat] = daughter1[0] + cat*BASES;
+        daughter2[cat] = daughter2[0] + cat*BASES;
+        target[cat] = target[0] + cat*BASES;
+        basefreqarray[cat][baseA] = m_basefreqs[baseA];
+        basefreqarray[cat][baseC] = m_basefreqs[baseC];
+        basefreqarray[cat][baseG] = m_basefreqs[baseG];
+        basefreqarray[cat][baseT] = m_basefreqs[baseT];
+    }
+
+} // F84Model::AllocateBuffers
+
+//------------------------------------------------------------------------------------
+
+F84Model::~F84Model()
+{
+    // If the buffers do not exist we assume the object was never Finalized;
+    // EmptyBuffers checks each pointer before deallocating.
+    EmptyBuffers();
+
+} // F84Model::~F84Model
+
+//------------------------------------------------------------------------------------
+
+void F84Model::CopyMembers(const F84Model& src)
+{
+    m_ttratio   = src.m_ttratio;
+
+    freqar    = src.freqar;
+    freqcy    = src.freqcy;
+    freqgr    = src.freqgr;
+    freqty    = src.freqty;
+
+    computed1 = src.computed1;
+    computed2 = src.computed2;
+
+    // don't copy the data likelihood buffers
+}
+
+//------------------------------------------------------------------------------------
+
+bool F84Model::IsValidDataModel() const
+{
+    if (m_ttratio <= 0.5) return false;
+    return NucModel::IsValidDataModel();
+
+} // F84Model::IsValidDataModel
+
+//------------------------------------------------------------------------------------
+
+void F84Model::CopyBuffers(const F84Model& src)
+{
+    xcatrates = src.xcatrates;
+    ycatrates = src.ycatrates;
+    expA1     = src.expA1;
+    expB1     = src.expB1;
+    expC1     = src.expC1;
+    expA2     = src.expA2;
+    expB2     = src.expB2;
+    expC2     = src.expC2;
+    catlikes = src.catlikes;
+
+    EmptyBuffers();
+    AllocateBuffers();
+
+    // Do not try to copy the buffers if they are unallocated
+    if (src.daughter1)
+        memcpy(daughter1[0],src.daughter1[0],m_ncategories*BASES*sizeof(double));
+    if (src.daughter2)
+        memcpy(daughter2[0],src.daughter2[0],m_ncategories*BASES*sizeof(double));
+    if (src.target)
+        memcpy(target[0],src.target[0],m_ncategories*BASES*sizeof(double));
+
+} // F84Model::CopyBuffers
+
+//------------------------------------------------------------------------------------
+
+F84Model::F84Model(const F84Model& src)
+    : NucModel(src), basefreqarray(NULL), daughter1(NULL), daughter2(NULL),
+      target(NULL)
+{
+    // cerr << "*****F84 model copied*****" << this << endl;
+
+    CopyMembers(src);
+    CopyBuffers(src);
+}
+
+//------------------------------------------------------------------------------------
+
+F84Model& F84Model::operator=(const F84Model& src)
+{
+    NucModel::operator=(src);
+    CopyMembers(src);
+    CopyBuffers(src);
+    return *this;
+}
+
+//------------------------------------------------------------------------------------
+
+void F84Model::SetTTratio(double tr)
+{
+    if (tr <= 0.5)
+    {
+        data_error e("Transition/transversion ratio must be > 0.5");
+        throw e;
+    }
+    m_ttratio = tr;
+}
+
+//------------------------------------------------------------------------------------
+
+void F84Model::Finalize()
+{
+    double pur, pyr, ag, ct, m, n, x, y;
+    long cat;
+
+    double freqa(m_basefreqs[baseA]), freqc(m_basefreqs[baseC]),
+        freqg(m_basefreqs[baseG]), freqt(m_basefreqs[baseT]);
+
+    pur    = freqa + freqg;
+    pyr    = freqc + freqt;
+    freqar = freqa / pur;
+    freqcy = freqc / pyr;
+    freqgr = freqg / pur;
+    freqty = freqt / pyr;
+
+    ag = freqa * freqg;
+    ct = freqc * freqt;
+    m  = m_ttratio * pur * pyr - (ag + ct);
+    n  = ag / pur + ct / pyr;
+
+    // code stolen from DNAMLK here
+    y = m / (m + n);
+    // if globalfreqs has failed....
+    if (y < 0) y = 0;
+    else if (y > 1) y = 1;
+    x = 1.0 - y;
+    double fracchange = y * (2.0 * freqa * freqgr + 2.0 * freqc * freqty)
+        + x * (1.0 - freqa * freqa - freqc * freqc - freqg * freqg - freqt * freqt);
+    y /= - fracchange;
+    x /= - fracchange;
+
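+    // After the division by -fracchange, x and y act as decay rates in the
+    // exponentials of RescaleLength1/2: x governs "general" events (a draw
+    // from all four bases, via expA) and y the additional within-class
+    // purine/pyrimidine events (via expB/expC).  The fracchange scaling makes
+    // a branch length of 1.0 correspond to one expected observable
+    // substitution per site, matching the scaling used for the GTR model.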
+    for (cat = 0; cat < m_ncategories; ++cat)
+    {
+        xcatrates.push_back(x*m_catrates[cat]);
+        ycatrates.push_back(y*m_catrates[cat]);
+    }
+
+    // Allocate additional space for likelihood calculations.
+    expA1.insert(expA1.begin(),m_ncategories,0.0);
+    expA2.insert(expA2.begin(),m_ncategories,0.0);
+    expB1.insert(expB1.begin(),m_ncategories,0.0);
+    expB2.insert(expB2.begin(),m_ncategories,0.0);
+    expC1.insert(expC1.begin(),m_ncategories,0.0);
+    expC2.insert(expC2.begin(),m_ncategories,0.0);
+
+    AllocateBuffers();
+
+    double zero = 0.0;
+    catlikes = CreateVec2d(m_nmarkers,m_ncategories,zero);
+
+} // F84Model::Finalize
+
+//------------------------------------------------------------------------------------
+
+void F84Model::RescaleLength1(double length1)
+{
+    long cat;
+    double n;
+
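+    // A length of FLAGDOUBLE marks the missing branch of a one-legged
+    // coalescence (see GTRModel::RescaleLengths); in that case the exponentials
+    // are not computed and ComputeSiteDLs falls back to the base frequencies.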
+    if (length1 > FLAGDOUBLE)
+    {
+#ifdef DEBUG_VARIABLES
+        if (printdbgvar > 0)
+        {
+            cerr << "RescaleLength1 raw length1: " << length1 << endl;
+            // cerr << " m_relmurate: " << m_relmurate << endl;
+        }
+#endif
+        length1 *= m_relmurate;
+        computed1 = true;
+
+        for(cat = 0; cat < m_ncategories; cat++)
+        {
+            n = exp(length1 * xcatrates[cat]);
+            expA1[cat] = 1.0 - n;
+            expB1[cat] = n * exp(length1 * ycatrates[cat]);
+            expC1[cat] = n - expB1[cat];
+#ifdef DEBUG_VARIABLES
+            if (0)
+            {
+                cerr << " cat: " << cat << endl;
+                cerr << " n: " << n << endl;
+                cerr << " xcatrates: " << xcatrates[cat] << endl;
+                cerr << " ycatrates: " << ycatrates[cat] << endl;
+                cerr << " expA1: " << expA1[cat] << endl;
+                cerr << " expB1: " << expB1[cat] << endl;
+                cerr << " expC1: " << expC1[cat] << endl;
+            }
+#endif
+        }
+    }
+    else
+    {
+        computed1 = false;
+    }
+#ifdef DEBUG_VARIABLES
+    if (printdbgvar > 0)
+    {
+        cerr << " scaled length1: " << length1 << " computed1: " << computed1 << endl << endl;
+    }
+#endif
+
+} // F84Model::RescaleLength1
+
+//------------------------------------------------------------------------------------
+
+void F84Model::RescaleLength2(double length2)
+{
+    long cat;
+    double n;
+
+    if (length2 > FLAGDOUBLE)
+    {
+#ifdef DEBUG_VARIABLES
+        if (printdbgvar > 0)
+        {
+            cerr << "RescaleLength2 raw length2: " << length2 << endl;
+            // cerr << " m_relmurate: " << m_relmurate << endl;
+        }
+#endif
+        length2 *= m_relmurate;
+        computed2 = true;
+
+        for(cat = 0; cat < m_ncategories; cat++)
+        {
+            n = exp(length2 * xcatrates[cat]);
+            expA2[cat] = 1.0 - n;
+            expB2[cat] = n * exp(length2 * ycatrates[cat]);
+            expC2[cat] = n - expB2[cat];
+#ifdef DEBUG_VARIABLES
+            if (0)
+            {
+                cerr << " cat: " << cat << endl;
+                cerr << " n: " << n << endl;
+                cerr << " xcatrates: " << xcatrates[cat] << endl;
+                cerr << " ycatrates: " << ycatrates[cat] << endl;
+                cerr << " expA2: " << expA2[cat] << endl;
+                cerr << " expB2: " << expB2[cat] << endl;
+                cerr << " expC2: " << expC2[cat] << endl;
+            }
+#endif
+        }
+    }
+    else
+    {
+        computed2 = false;
+    }
+#ifdef DEBUG_VARIABLES
+    if (printdbgvar > 0)
+    {
+        cerr << " scaled length2: " << length2 << " computed2: " << computed2 << endl << endl;
+    }
+#endif
+}
+
+//------------------------------------------------------------------------------------
+
+void F84Model::RescaleLengths(double length1, double length2)
+{
+    RescaleLength1(length1);
+    RescaleLength2(length2);
+
+} // F84Model::RescaleLengths
+
+//------------------------------------------------------------------------------------
+
+double** F84Model::ComputeSiteDLs(double** siteDLs1, double** siteDLs2)
+{
+    double sumAll, sumPur, sumPyr;
+
+    long cat;
+#ifdef DEBUG_VARIABLES
+    if (0)
+    {
+        cerr << "basefreqA: " << m_basefreqs[baseA] << endl;
+        cerr << "basefreqC: " << m_basefreqs[baseC] << endl;
+        cerr << "basefreqG: " << m_basefreqs[baseG] << endl;
+        cerr << "basefreqT: " << m_basefreqs[baseT] << endl;
+        if (computed1)
+            cerr << "computed1: true" << endl;
+        else
+            cerr << "computed1: false" << endl;
+        if (computed2)
+            cerr << "computed2: true" << endl;
+        else
+            cerr << "computed2: false" << endl;
+    }
+#endif
+
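+    // For each rate category the conditional likelihood passed up from a
+    // daughter branch is a three-term F84 sum: expA[cat] weights a draw from
+    // all four bases (sumAll), expB[cat] weights "no event" (the daughter's
+    // own likelihood for that base), and expC[cat] weights a draw restricted
+    // to the base's purine or pyrimidine class (sumPur / sumPyr).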
+    if (computed1)
+    {
+        for (cat = 0; cat < m_ncategories; ++cat)
+        {
+            double* catDLs1 = siteDLs1[cat];
+            sumAll  = expA1[cat] *
+                (m_basefreqs[baseA]*catDLs1[baseA] +
+                 m_basefreqs[baseC]*catDLs1[baseC] +
+                 m_basefreqs[baseG]*catDLs1[baseG] +
+                 m_basefreqs[baseT]*catDLs1[baseT]);
+
+            sumPur  = freqar*catDLs1[baseA] + freqgr*catDLs1[baseG];
+            sumPyr  = freqcy*catDLs1[baseC] + freqty*catDLs1[baseT];
+
+#ifdef DEBUG_VARIABLES
+            if (printdbgvar > 0)
+            {
+                // cerr << "cat: " << cat << endl;
+                // cerr << "sumA1: " << sumAll << endl;
+                // cerr << "freqar: " << freqar << endl;
+                // cerr << "freqcy: " << freqcy << endl;
+                // cerr << "freqgr: " << freqgr << endl;
+                // cerr << "freqty: " << freqty << endl;
+                // cerr << "sumPur1: " << sumPur << endl;
+                // cerr << "sumPyr1: " << sumPyr << endl;
+                cerr << "dls1: " << catDLs1[baseA] << " " << catDLs1[baseC] << " " << catDLs1[baseG] << " " << catDLs1[baseT] << endl;
+                // cerr << "dls1A: " << catDLs1[baseA] << endl;
+                // cerr << "dls1C: " << catDLs1[baseC] << endl;
+                // cerr << "dls1G: " << catDLs1[baseG] << endl;
+                // cerr << "dls1T: " << catDLs1[baseT] << endl;
+                // cerr << "expA1: " << expA1[cat] << endl;
+                // cerr << "expB1: " << expB1[cat] << endl;
+                // cerr << "expC1: " << expC1[cat] << endl << endl;
+            }
+#endif
+            double expC1cat = expC1[cat];
+            daughter1[cat][baseA] = sumAll + expB1[cat]*catDLs1[baseA] +
+                expC1cat*sumPur;
+            daughter1[cat][baseC] = sumAll + expB1[cat]*catDLs1[baseC] +
+                expC1cat*sumPyr;
+            daughter1[cat][baseG] = sumAll + expB1[cat]*catDLs1[baseG] +
+                expC1cat*sumPur;
+            daughter1[cat][baseT] = sumAll + expB1[cat]*catDLs1[baseT] +
+                expC1cat*sumPyr;
+        }
+    }
+    else
+    {
+        memcpy(daughter1[0],basefreqarray[0],m_ncategories*BASES*sizeof(double));
+    }
+
+    if (computed2)
+    {
+        for (cat = 0; cat < m_ncategories; cat++)
+        {
+            double* catDLs2 = siteDLs2[cat];
+            sumAll  = expA2[cat] *
+                (m_basefreqs[baseA]*catDLs2[baseA] +
+                 m_basefreqs[baseC]*catDLs2[baseC] +
+                 m_basefreqs[baseG]*catDLs2[baseG] +
+                 m_basefreqs[baseT]*catDLs2[baseT]);
+
+            sumPur  = freqar*catDLs2[baseA] + freqgr*catDLs2[baseG];
+            sumPyr  = freqcy*catDLs2[baseC] + freqty*catDLs2[baseT];
+
+#ifdef DEBUG_VARIABLES
+            if (printdbgvar > 0)
+            {
+                // cerr << "cat: " << cat << endl;
+                // cerr << "sumA2: " << sumAll << endl;// JRM debug
+                // cerr << "sumPur2: " << sumPur << endl;
+                // cerr << "sumPyr2: " << sumPyr << endl;
+                // cerr << "dls2A: " << catDLs2[baseA] << endl;
+                // cerr << "dls2C: " << catDLs2[baseC] << endl;
+                // cerr << "dls2G: " << catDLs2[baseG] << endl;
+                // cerr << "dls2T: " << catDLs2[baseT] << endl;
+                // cerr << "expA2: " << expA2[cat] << endl;
+                // cerr << "expB2: " << expB2[cat] << endl;
+                // cerr << "expC2: " << expC2[cat] << endl << endl;
+                cerr << "dls2: " << catDLs2[baseA] << " " << catDLs2[baseC] << " " << catDLs2[baseG] << " " << catDLs2[baseT] << endl;
+            }
+#endif
+
+            double expC2cat = expC2[cat];
+            daughter2[cat][baseA] = sumAll + expB2[cat]*catDLs2[baseA] +
+                expC2cat*sumPur;
+            daughter2[cat][baseC] = sumAll + expB2[cat]*catDLs2[baseC] +
+                expC2cat*sumPyr;
+            daughter2[cat][baseG] = sumAll + expB2[cat]*catDLs2[baseG] +
+                expC2cat*sumPur;
+            daughter2[cat][baseT] = sumAll + expB2[cat]*catDLs2[baseT] +
+                expC2cat*sumPyr;
+        }
+    }
+    else
+    {
+        memcpy(daughter2[0],basefreqarray[0],m_ncategories*BASES*sizeof(double));
+    }
+
+    for (cat = 0; cat < m_ncategories; cat++)
+    {
+        target[cat][baseA] = daughter1[cat][baseA] *
+            daughter2[cat][baseA];
+        target[cat][baseC] = daughter1[cat][baseC] *
+            daughter2[cat][baseC];
+        target[cat][baseG] = daughter1[cat][baseG] *
+            daughter2[cat][baseG];
+        target[cat][baseT] = daughter1[cat][baseT] *
+            daughter2[cat][baseT];
+#ifdef DEBUG_VARIABLES
+        if (printdbgvar > 0)
+        {
+            // cerr << "cat: " << cat << endl;
+            // cerr << "targetA: " << target[cat][baseA] << endl;
+            // cerr << "targetC: " << target[cat][baseC] << endl;
+            // cerr << "targetG: " << target[cat][baseG] << endl;
+            // cerr << "targetT: " << target[cat][baseT] << endl << endl;
+            cerr << "target: " << target[cat][baseA] << " " << target[cat][baseC] << " " << target[cat][baseG] << " " << target[cat][baseT] << endl;
+        }
+#endif
+    }
+
+    return target;
+
+} // F84Model::ComputeSiteDLs
+
+//------------------------------------------------------------------------------------
+
+double F84Model::ComputeSubtreeDL(Cell& rootdls, double** startmarker, double** endmarker, long posn)
+{
+    double total=0.0, subtotal;
+    double** marker;
+    long cat;
+    DoubleVec1d prior(m_ncategories);
+    long firstposn = posn;
+    int im = 0;
+
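+    // Per-site likelihood: weight each category's root conditional likelihoods
+    // by the base frequencies (prior[cat]), mix the categories by their
+    // probabilities, and accumulate the log-likelihood over sites.  When
+    // normalization is on, the per-site scaling factors stored in the root
+    // cell (GetNorms) are added back in.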
+    for (marker = startmarker; marker != endmarker;
+         marker = rootdls.GetNextMarker(marker))
+    {
+        subtotal = 0.0;
+
+        for (cat = 0; cat < m_ncategories; cat++)
+        {
+            prior[cat] = m_basefreqs[baseA]*marker[cat][baseA] +
+                m_basefreqs[baseC]*marker[cat][baseC] +
+                m_basefreqs[baseG]*marker[cat][baseG] +
+                m_basefreqs[baseT]*marker[cat][baseT];
+
+            subtotal += m_catprobs[cat] * prior[cat];
+#if 0
+            cerr << "marker " << im
+                 << " cat " << cat
+                 << " probs " << marker[cat][baseA]
+                 << " " << marker[cat][baseC]
+                 << " " << marker[cat][baseG]
+                 << " " << marker[cat][baseT]
+                 << " prior " << prior[cat]
+                 << endl;
+#endif
+        }
+        im++;
+
+        if (!subtotal)
+        {
+            DataModel::TryToNormalizeAndThrow(posn, GetModelType());
+        }
+
+        if (ShouldNormalize())
+        {
+            total += (log(subtotal) + rootdls.GetNorms(posn));
+        }
+        else
+        {
+            total +=  log(subtotal);
+        }
+
+        // de-normalization not needed here, since we are only interested in
+        // the ratio
+        if (m_ncategories > 1)
+        {
+            for (cat = 0; cat < m_ncategories; cat++)
+                catlikes[posn][cat] = prior[cat]/subtotal;
+        }
+        ++posn;
+    }
+
+    if (m_ncategories > 1) total += ComputeCatDL(firstposn, posn);
+    //    cerr << "total: " << total << endl;
+    return total;
+
+} // F84Model::ComputeSubtreeDL
+
+//------------------------------------------------------------------------------------
+
+double F84Model::ComputeCatDL(long startmarker, long endmarker)
+{
+    double subtotal;
+    long marker, cat;
+
+    DoubleVec1d like = m_catcells;
+    DoubleVec1d nulike(m_ncategories);
+
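+    // Hidden-Markov style recursion over sites for autocorrelated rate
+    // categories: with probability m_acratio the category is redrawn (the
+    // m_catprobs-weighted sum), and with probability m_ucratio it is carried
+    // over from the previous site; each term is multiplied by the site's
+    // per-category likelihood stored in catlikes.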
+    for (marker = startmarker; marker != endmarker; ++marker)
+    {
+        subtotal = 0.0;
+        for (cat = 0; cat < m_ncategories; cat++)
+            subtotal += m_catprobs[cat] * like[cat];
+
+        subtotal *= m_acratio;
+
+        for (cat = 0; cat < m_ncategories; cat++)
+            nulike[cat] = catlikes[marker][cat] *
+                (subtotal + m_ucratio * like[cat]);
+
+        // the following puts the nulike values into like.  It
+        // also puts the like values into nulike, but we will not
+        // be using values from nulike so we don't care.
+        like.swap(nulike);
+    }
+
+    subtotal = 0.0;
+    for (cat = 0; cat < m_ncategories; cat++)
+        subtotal += m_catprobs[cat] * like[cat];
+
+    // the following puts the like values into catcells for
+    // long-term storage.  It also puts the catcells values into
+    // like, but we don't care.
+    m_catcells.swap(like);
+
+    return log(subtotal);
+
+} // F84Model::ComputeCatDL
+
+//------------------------------------------------------------------------------------
+
+StringVec1d F84Model::CreateDataModelReport() const
+{
+    StringVec1d report = NucModel::CreateDataModelReport();
+
+    string line = "Transition/transversion ratio: " + ToString(m_ttratio);
+    report.push_back(line);
+
+    return report;
+
+} // F84Model::CreateDataModelReport
+
+//------------------------------------------------------------------------------------
+
+StringVec1d F84Model::ToXML(size_t nspaces) const
+{
+    StringVec1d xmllines(NucModel::ToXML(nspaces));
+
+    nspaces += INDENT_DEPTH;
+    string line(MakeIndent(MakeTag(xmlstr::XML_TAG_TTRATIO),nspaces));
+    line += ToString(GetTTratio());
+    line += MakeCloseTag(xmlstr::XML_TAG_TTRATIO);
+    nspaces -= INDENT_DEPTH;
+
+    StringVec1d::iterator endtag = --xmllines.end();
+    xmllines.insert(endtag,line);
+
+    return xmllines;
+
+} // F84Model::ToXML
+
+//------------------------------------------------------------------------------------
+
+// This routine simulates data for a single node under the F84
+// model.  It assumes that the given branch lengths have already
+// been rescaled for the rate category desired.
+// Nomenclature follows _Inferring Phylogenies_ pp. 202-203,
+//  printing date 2004 (earlier printings have different page numberings)
+
+// The second argument is unused since nucleotides do not have differing
+// numbers of states across marker positions.
+DoubleVec1d F84Model::SimulateMarker(double branchlength, long whichmarker, const DoubleVec1d& state) const
+{
+    double freqa(m_basefreqs[baseA]), freqc(m_basefreqs[baseC]), freqg(m_basefreqs[baseG]), freqt(m_basefreqs[baseT]);
+    double pur = freqa + freqg;
+    double pyr = freqc + freqt;
+    double beta = 1.0 / (2.0 * pur * pyr * (1.0 + m_ttratio));
+    double alpha = (pur * pyr * m_ttratio - freqa * freqg - freqc * freqt) /
+        (2.0 * (1.0 + m_ttratio) * (pyr * freqa * freqg + pur * freqc * freqt));
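+    // beta is the rate of "general" events (a draw from all four bases in
+    // proportion to their frequencies); alpha is the additional rate of
+    // within-class events (a draw restricted to purines or to pyrimidines),
+    // following the F84 parameterization referenced above.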
+
+    DoubleVec1d cumprob;
+    cumprob.push_back(freqa);
+    cumprob.push_back(freqa + freqc);
+    cumprob.push_back(freqa + freqc + freqg);
+
+    double expB = exp(-beta * branchlength);
+
+    DoubleVec1d answer(BASES, 0.0);
+
+    double general = 1.0 - expB;
+    double r = registry.GetRandom().Float();
+
+    // We compute a chance of drawing from a pool of all four nucleotides
+    // in proportion to their frequency; if that doesn't happen, we
+    // compute a chance of drawing from a pool of only purines or only
+    // pyrimidines; if that doesn't happen either there is no change
+    // and we return the initial state.
+
+    if (r < general)
+    {
+        // perform a draw from the general pool
+        double r2 = registry.GetRandom().Float();
+        long i;
+        for (i = 0; i < BASES-1; ++i)
+        {
+            if (r2 < cumprob[i])
+            {
+                answer[i] = 1.0;
+                return answer;
+            }
+        }
+        // flowthrough if previous bases not picked
+        answer[BASES-1] = 1.0;
+        return answer;
+    }
+    else
+    {
+        r -= general;
+        double transition = expB * (1.0 - exp(-alpha * branchlength));
+        if (r < transition)
+        {
+            // perform a draw from the transition pool
+            double r2 = registry.GetRandom().Float();
+            if (state[baseA] == 1.0 || state[baseG] == 1.0) // purine
+            {
+                if (r2 < freqa / (freqa + freqg))
+                {
+                    answer[baseA] = 1.0;
+                }
+                else
+                {
+                    answer[baseG] = 1.0;
+                }
+            }
+            else                        // pyrimidine
+            {
+                if (r2 < freqc / (freqc + freqt))
+                {
+                    answer[baseC] = 1.0;
+                }
+                else
+                {
+                    answer[baseT] = 1.0;
+                }
+            }
+            return answer;
+        }
+    }
+
+    // otherwise, no event happens
+    return state;
+
+} // F84Model::SimulateMarker
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+GTRModel::GTRModel(long nmarkers,
+                   long numCategories,
+                   DoubleVec1d categoryRates,
+                   DoubleVec1d categoryProbabilities,
+                   double userAutoCorrelationValue,
+                   bool doNormalize,
+                   double relMuRate,
+                   double freqA,
+                   double freqC,
+                   double freqG,
+                   double freqT,
+                   double freqAC,
+                   double freqAG,
+                   double freqAT,
+                   double freqCG,
+                   double freqCT,
+                   double freqTG,
+                   double perBaseErrorRate)
+    : NucModel(nmarkers,
+               numCategories,
+               categoryRates,
+               categoryProbabilities,
+               userAutoCorrelationValue,
+               doNormalize,
+               relMuRate,
+               freqA,
+               freqC,
+               freqG,
+               freqT,
+               false,          // calculate freqs from data
+               perBaseErrorRate),
+      AC(freqAC),
+      AG(freqAG),
+      AT(freqAT),
+      CG(freqCG),
+      CT(freqCT),
+      TG(freqTG),
+      // set some potentially useful defaults
+      basefreqarray(NULL),
+      daughter(NULL),target(NULL)
+{
+    // reset the names from the base values
+    computed.assign(2,true);
+    DoubleVec1d empty(BASES,0.0);
+    scratch.assign(BASES,empty);
+    Finalize();
+
+    assert(GTRModel::IsValidDataModel());
+} // GTRModel::GTRModel
+
+//------------------------------------------------------------------------------------
+
+GTRModel::GTRModel(const GTRModel& src)
+    : NucModel(src), basefreqarray(NULL),daughter(NULL),target(NULL)
+{
+    CopyMembers(src); // doesn't copy target or daughter buffers!
+} // GTRModel copy ctor
+
+//------------------------------------------------------------------------------------
+
+GTRModel::~GTRModel()
+{
+    EmptyBuffers();
+} // GTRModel dtor
+
+//------------------------------------------------------------------------------------
+
+DataModel* GTRModel::Clone() const
+{
+    DataModel* newmodel = new GTRModel(*this);
+    return newmodel;
+}
+
+//------------------------------------------------------------------------------------
+
+GTRModel& GTRModel::operator=(const GTRModel& src)
+{
+    NucModel::operator=(src);
+    CopyMembers(src);
+    return *this;
+}
+
+//------------------------------------------------------------------------------------
+
+void GTRModel::AllocateBuffers()
+{
+    DoubleVec1d zero(m_ncategories,0.0);
+    catlikes.assign(m_nmarkers,zero);
+
+    DoubleVec1d base1(BASES,0);
+    DoubleVec2d base2(BASES,base1);
+    DoubleVec3d empty(m_ncategories,base2);
+    pchange.assign(2,empty);
+
+    daughter         = new double** [2];
+    target           = new double* [m_ncategories];
+    basefreqarray    = new double* [m_ncategories];
+    daughter[0]      = new double* [2*m_ncategories];
+    daughter[0][0]   = new double [2*m_ncategories*BASES];
+    target[0]        = new double [m_ncategories*BASES];
+    basefreqarray[0] = new double [m_ncategories*BASES];
+    daughter[1] = daughter[0] + m_ncategories;
+    daughter[1][0] = daughter[0][0] + m_ncategories*BASES;
+    long cat;
+    for (cat = 0; cat < m_ncategories; ++cat)
+    {
+        basefreqarray[cat]   = basefreqarray[0] + cat*BASES;
+        daughter[0][cat] = daughter[0][0] + cat*BASES;
+        daughter[1][cat] = daughter[1][0] + cat*BASES;
+        target[cat] = target[0] + cat*BASES;
+        long base;
+        for(base = baseA; base <= baseT; ++base)
+            basefreqarray[cat][base] = m_basefreqs[base];
+    }
+} // GTRModel::AllocateBuffers
+
+//------------------------------------------------------------------------------------
+
+void GTRModel::EmptyBuffers()
+{
+    if (basefreqarray)
+    {
+        delete [] basefreqarray[0];
+        delete [] basefreqarray;
+    }
+    if (daughter)
+    {
+        delete [] daughter[0][0];
+        delete [] daughter[0];
+        delete [] daughter;
+    }
+    if (target)
+    {
+        delete [] target[0];
+        delete [] target;
+    }
+
+} // GTRModel::EmptyBuffers
+
+//------------------------------------------------------------------------------------
+
+void GTRModel::CopyMembers(const GTRModel& src)
+{
+    AG = src.AG;
+    AC = src.AC;
+    AT = src.AT;
+    CG = src.CG;
+    CT = src.CT;
+    TG = src.TG;
+    eigvals = src.eigvals;
+    eigvecs1 = src.eigvecs1;
+    eigvecs2 = src.eigvecs2;
+    pchange = src.pchange;
+    computed = src.computed;
+    scratch = src.scratch;
+    m_basefreqs = src.m_basefreqs;
+
+    if (src.basefreqarray)
+    {
+        EmptyBuffers();
+        AllocateBuffers();
+
+        memcpy(basefreqarray[0],src.basefreqarray[0],
+               m_ncategories*BASES*sizeof(double));
+    }
+
+    // we don't copy target or daughter!
+} // GTRModel::CopyMembers
+
+//------------------------------------------------------------------------------------
+
+void GTRModel::GTRDotProduct(const DoubleVec2d& first, const DoubleVec2d& second, DoubleVec2d& answer)
+// Matrix product of first and second, written into the PRE-EXISTING answer!
+// NOTE: second must be passed PRE-TRANSPOSED!!
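+// Example: to form the ordinary product A . B, pass the second matrix already
+// transposed, e.g. GTRDotProduct(A, Transpose(B), answer), with answer
+// pre-sized to the same dimensions (as is done in Finalize() below).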
+{
+    // should be square
+    assert(first.size() == first[0].size());
+    // and all the same size!
+    assert(first.size() == second.size());
+    assert(first.size() == answer.size());
+    double initial = 0.0;
+
+    long i, j, n=first.size();
+    for (i = 0; i < n; ++i)
+    {
+        for (j = 0; j < n; ++j)
+        {
+            answer[i][j] = inner_product(first[i].begin(), first[i].end(), second[j].begin(), initial);
+        }
+    }
+} // GTRModel::GTRDotProduct
+
+//------------------------------------------------------------------------------------
+
+void GTRModel::BarfOnBadGTRRates(const DoubleVec1d& rts) const
+{
+    if (rts.size() != 6)
+    {
+        data_error e("Incorrect number of GTR rates:  expected 6, found " + ToString(rts.size()));
+        throw e;
+    }
+
+    size_t i;
+    for (i = 0; i < rts.size(); ++i)
+    {
+        if (rts[i] <= 0.0)
+        {
+            data_error e("All rates for the GTR model must be greater than 0.");
+            throw e;
+        }
+    }
+} // GTRModel::BarfOnBadGTRRates
+
+//------------------------------------------------------------------------------------
+
+void GTRModel::SetRates(const DoubleVec1d& rts)
+{
+    BarfOnBadGTRRates(rts);
+
+    AC = rts[0];
+    AG = rts[1];
+    AT = rts[2];
+    CG = rts[3];
+    CT = rts[4];
+    TG = rts[5];
+} // GTRModel::SetRates
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d GTRModel::GetRates() const
+{
+    DoubleVec1d rts;
+    rts.push_back(AC);
+    rts.push_back(AG);
+    rts.push_back(AT);
+    rts.push_back(CG);
+    rts.push_back(CT);
+    rts.push_back(TG);
+
+    return rts;
+
+} // GTRModel::GetRates
+
+//------------------------------------------------------------------------------------
+
+void GTRModel::Finalize()
+{
+    AllocateBuffers();
+
+    // calculate eigvals and eigvecs1 & 2
+    // assemble rate matrix; using scratch
+    double* bf = basefreqarray[0];
+
+    // rescale rates to a mean of 1 event per unit branch length
+    double scalefactor = 2.0 * (AC * bf[baseA] * bf[baseC] +
+                                AG * bf[baseA] * bf[baseG] +
+                                AT * bf[baseA] * bf[baseT] +
+                                CG * bf[baseC] * bf[baseG] +
+                                CT * bf[baseC] * bf[baseT] +
+                                TG * bf[baseG] * bf[baseT]);
+    double nAC = AC / scalefactor;
+    double nAG = AG / scalefactor;
+    double nAT = AT / scalefactor;
+    double nCG = CG / scalefactor;
+    double nCT = CT / scalefactor;
+    double nTG = TG / scalefactor;
+
+    scratch[0][0] = -(nAC*bf[baseC] + nAG*bf[baseG] + nAT*bf[baseT])/bf[baseA];
+    scratch[0][1] = nAC;
+    scratch[0][2] = nAG;
+    scratch[0][3] = nAT;
+    scratch[1][0] = nAC;
+    scratch[1][1] = -(nAC*bf[baseA] + nCG*bf[baseG] + nCT*bf[baseT])/bf[baseC];
+    scratch[1][2] = nCG;
+    scratch[1][3] = nCT;
+    scratch[2][0] = nAG;
+    scratch[2][1] = nCG;
+    scratch[2][2] = -(nAG*bf[baseA] + nCG*bf[baseC] + nTG*bf[baseT])/bf[baseG];
+    scratch[2][3] = nTG;
+    scratch[3][0] = nAT;
+    scratch[3][1] = nCT;
+    scratch[3][2] = nTG;
+    scratch[3][3] = -(nAT*bf[baseA] + nCT*bf[baseC] + nTG*bf[baseG])/bf[baseT];
+
+    // assemble square root of base frequencies
+    DoubleVec1d zero(BASES,0.0);
+    DoubleVec2d diag(BASES,zero);
+    long i;
+    for (i = 0; i < BASES; ++i)
+    {
+        diag[i][i] = sqrt(bf[i]);
+    }
+
+    GTRDotProduct(diag,Transpose(scratch),scratch);
+    DoubleVec2d answ(BASES,zero);
+    GTRDotProduct(scratch,diag,answ);
+
+    EigenCalculator eig;
+    pair<DoubleVec1d,DoubleVec2d> eigsys(eig.Eigen(answ));
+    eigvecs1 = diag;  // pre-sized copy so GTRDotProduct's size asserts are satisfied
+    // We want diag . Transpose[eigsys.second]; since GTRDotProduct expects its
+    // second argument pre-transposed, passing eigsys.second unmodified yields
+    // exactly that product.
+    GTRDotProduct(diag,eigsys.second,eigvecs1);
+    eigvals = eigsys.first;
+    eigvecs2 = Transpose(Invert(eigvecs1));
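+
+    // With eigvecs2 stored as Transpose(Invert(eigvecs1)), RescaleLengths can
+    // rebuild the per-branch change probabilities (pchange) as
+    //    eigvecs1 . Exp[eigvals * rate * length] . Invert(eigvecs1)
+    // using two GTRDotProduct calls, which expect a pre-transposed second
+    // argument.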
+
+} // GTRModel::Finalize
+
+//------------------------------------------------------------------------------------
+// calculate pchange [eigvecs1 . Exp[eigvals*length] . eigvecs2]
+
+void GTRModel::RescaleLengths(double length1, double length2)
+{
+    computed.assign(2,true);
+
+    // if a length is FLAGDOUBLE we are in a one-legged coalescence
+    // and we avoid this whole computation
+    if (CloseEnough(length1, FLAGDOUBLE)) computed[0] = false;
+    if (CloseEnough(length2, FLAGDOUBLE)) computed[1] = false;
+
+    DoubleVec1d zero(BASES,0.0);
+    DoubleVec2d zeros(BASES,zero);
+    DoubleVec3d zeroes(m_ncategories,zeros);
+    DoubleVec4d diag(2,zeroes);
+
+    long cat;
+    double expon;
+
+    length1 *= m_relmurate;
+    length2 *= m_relmurate;
+
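+    // Build the diagonal Exp[eigval * rate * length] matrices, clamping each
+    // exponent to [EXPMIN, EXPMAX]: below EXPMIN the entry stays zero, above
+    // EXPMAX it saturates at EXP_OF_EXPMAX, guarding against floating-point
+    // under/overflow.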
+    for(cat = 0; cat < m_ncategories; ++cat)
+    {
+        long i;
+        for (i = 0; i < BASES; ++i)
+        {
+            double scalar = m_catrates[cat]*eigvals[i];
+
+            if (computed[0])
+            {
+                expon = length1 * scalar;
+                if (expon >= EXPMIN)
+                {
+                    if (expon <= EXPMAX)
+                        diag[0][cat][i][i] = exp(length1*scalar);
+                    else
+                        diag[0][cat][i][i] = EXP_OF_EXPMAX;
+                }
+                // else it remains zero
+            }
+
+            if (computed[1])
+            {
+                expon = length2 * scalar;
+                if (expon >= EXPMIN)
+                {
+                    if (expon <= EXPMAX)
+                        diag[1][cat][i][i] = exp(length2*scalar);
+                    else
+                        diag[1][cat][i][i] = EXP_OF_EXPMAX;
+                }
+                // else it remains zero
+            }
+        }
+    }
+
+    for(cat = 0; cat < m_ncategories; ++cat)
+    {
+        long br;
+        for(br = 0; br < 2; ++br)
+        {
+            if (!computed[br]) continue;
+            GTRDotProduct(eigvecs1,diag[br][cat],scratch);
+            GTRDotProduct(scratch,eigvecs2,pchange[br][cat]);
+        }
+    }
+
+} // GTRModel::RescaleLengths
+
+//------------------------------------------------------------------------------------
+
+double** GTRModel::ComputeSiteDLs(double** siteDL1, double** siteDL2)
+{
+    long cat, base;
+
+    if (computed[0])
+    {
+        for(cat = 0; cat < m_ncategories; ++cat)
+        {
+            double* catDLs = siteDL1[cat];
+            DoubleVec2d& prob = pchange[0][cat];
+            for(base = baseA; base <= baseT; ++base)
+            {
+                daughter[0][cat][base] = prob[baseA][base] * catDLs[baseA] +
+                    prob[baseC][base] * catDLs[baseC] +
+                    prob[baseG][base] * catDLs[baseG] +
+                    prob[baseT][base] * catDLs[baseT];
+            }
+        }
+    }
+    else
+    {
+        memcpy(daughter[0][0],basefreqarray[0],m_ncategories*BASES*sizeof(double));
+    }
+
+    if (computed[1])
+    {
+        for(cat = 0; cat < m_ncategories; ++cat)
+        {
+            double* catDLs = siteDL2[cat];
+            DoubleVec2d& prob = pchange[1][cat];
+            for(base = baseA; base <= baseT; ++base)
+            {
+                daughter[1][cat][base] = prob[baseA][base] * catDLs[baseA] +
+                    prob[baseC][base] * catDLs[baseC] +
+                    prob[baseG][base] * catDLs[baseG] +
+                    prob[baseT][base] * catDLs[baseT];
+            }
+        }
+    }
+    else
+    {
+        memcpy(daughter[1][0],basefreqarray[0],m_ncategories*BASES*sizeof(double));
+    }
+
+    for (cat = 0; cat < m_ncategories; cat++)
+        for(base = baseA; base <= baseT; ++base)
+            target[cat][base] = daughter[0][cat][base] *
+                daughter[1][cat][base];
+
+    return target;
+
+} // GTRModel::ComputeSiteDLs
+
+//------------------------------------------------------------------------------------
+
+double GTRModel::ComputeSubtreeDL(Cell& rootdls, double** startmarker, double** endmarker, long posn)
+{
+    double total=0.0, subtotal;
+    double** marker;
+    long cat;
+    DoubleVec1d prior(m_ncategories);
+    long firstposn = posn;
+
+    for (marker = startmarker; marker != endmarker;
+         marker = rootdls.GetNextMarker(marker))
+    {
+        subtotal = 0.0;
+
+        for (cat = 0; cat < m_ncategories; cat++)
+        {
+            prior[cat] = basefreqarray[cat][baseA]*marker[cat][baseA] +
+                basefreqarray[cat][baseC]*marker[cat][baseC] +
+                basefreqarray[cat][baseG]*marker[cat][baseG] +
+                basefreqarray[cat][baseT]*marker[cat][baseT];
+
+            subtotal += m_catprobs[cat] * prior[cat];
+        }
+
+        if (!subtotal)
+        {
+            DataModel::TryToNormalizeAndThrow(posn, GetModelType());
+        }
+
+        if (ShouldNormalize())
+        {
+            total += (log(subtotal) + rootdls.GetNorms(posn));
+        }
+        else
+        {
+            total += log(subtotal);
+        }
+
+        // de-normalization not needed here, since we are only interested in the ratio
+        if (m_ncategories > 1)
+        {
+            for (cat = 0; cat < m_ncategories; cat++)
+                catlikes[posn][cat] = prior[cat]/subtotal;
+        }
+        ++posn;
+    }
+
+    if (m_ncategories > 1) total += ComputeCatDL(firstposn, posn);
+    return total;
+
+} // GTRModel::ComputeSubtreeDL
+
+//------------------------------------------------------------------------------------
+
+double GTRModel::ComputeCatDL(long startmarker, long endmarker)
+{
+    double subtotal;
+    long marker, cat;
+
+    DoubleVec1d like = m_catcells;
+    DoubleVec1d nulike(m_ncategories);
+
+    for (marker = startmarker; marker != endmarker; ++marker)
+    {
+        subtotal = 0.0;
+        for (cat = 0; cat < m_ncategories; cat++)
+            subtotal += m_catprobs[cat] * like[cat];
+
+        subtotal *= m_acratio;
+
+        for (cat = 0; cat < m_ncategories; cat++)
+            nulike[cat] = catlikes[marker][cat] *
+                (subtotal + m_ucratio * like[cat]);
+
+        // the following puts the nulike values into like.  It
+        // also puts the like values into nulike, but we will not
+        // be using values from nulike so we don't care.
+        like.swap(nulike);
+    }
+
+    subtotal = 0.0;
+    for (cat = 0; cat < m_ncategories; cat++)
+        subtotal += m_catprobs[cat] * like[cat];
+
+    // the following puts the like values into catcells for
+    // long-term storage.  It also puts the catcells values into
+    // like, but we don't care.
+    m_catcells.swap(like);
+
+    return log(subtotal);
+
+} // GTRModel::ComputeCatDL
+
+//------------------------------------------------------------------------------------
+
+StringVec1d GTRModel::CreateDataModelReport() const
+{
+    StringVec1d report = NucModel::CreateDataModelReport();
+
+    string line = "Mutation parameters: ";
+    report.push_back(line);
+    line = "Between A and (C, G, T):  ";
+    line += ToString(AC) + ", " + ToString(AG) + ", " + ToString(AT);
+    report.push_back(line);
+    line = "Between C and (G, T):  ";
+    line += ToString(CG) + ", " + ToString(CT);
+    report.push_back(line);
+    line = "Between G and (T):  ";
+    line += ToString(TG);
+    report.push_back(line);
+
+    return report;
+
+} // GTRModel::CreateDataModelReport
+
+//------------------------------------------------------------------------------------
+
+StringVec1d GTRModel::ToXML(size_t nspaces) const
+{
+    StringVec1d xmllines(NucModel::ToXML(nspaces));
+
+    nspaces += INDENT_DEPTH;
+    string line(MakeIndent(MakeTag(xmlstr::XML_TAG_GTRRATES),nspaces));
+    line += ToString(GetRates(),6);
+    line += MakeCloseTag(xmlstr::XML_TAG_GTRRATES);
+    nspaces -= INDENT_DEPTH;
+
+    StringVec1d::iterator endtag = --xmllines.end();
+    xmllines.insert(endtag,line);
+
+    return xmllines;
+
+} // GTRModel::ToXML
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d GTRModel::SimulateMarker(double branchlength, long whichmarker,
+                                     const DoubleVec1d& state) const
+{
+    throw implementation_error("Cannot simulate data with the GTR model yet.");
+} // SimulateMarker
+
+//------------------------------------------------------------------------------------
+
+bool GTRModel::IsValidDataModel() const
+{
+    if (AC <= 0) return false;
+    if (AG <= 0) return false;
+    if (AT <= 0) return false;
+    if (CG <= 0) return false;
+    if (CT <= 0) return false;
+    if (TG <= 0) return false;
+    return NucModel::IsValidDataModel();
+} // GTRModel::IsValidDataModel
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+AlleleModel::AlleleModel(long nmarkers,
+                         long numCategories,
+                         DoubleVec1d categoryRates,
+                         DoubleVec1d categoryProbabilities,
+                         double userAutoCorrelationValue,
+                         bool doNormalize,
+                         long numBins,
+                         double relMuRate)
+    : DataModel(nmarkers,
+                numCategories,
+                categoryRates,
+                categoryProbabilities,
+                userAutoCorrelationValue,
+                doNormalize,
+                numBins,
+                relMuRate),
+      m_likes(CreateVec2d(m_nmarkers, m_ncategories, 0.0)),
+      m_bincounts()
+{
+    // intentionally blank
+} // AlleleModel constructor
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DoubleVec1d AlleleModel::RescaleLength(double length)
+{
+    length *= m_relmurate;
+
+    long cat;
+    DoubleVec1d scaled(m_ncategories);
+    for (cat = 0; cat < m_ncategories; ++cat)
+    {
+        scaled[cat] = length * m_catrates[cat];
+    }
+    return scaled;
+
+} // AlleleModel::RescaleLength
+
+//------------------------------------------------------------------------------------
+
+double AlleleModel::ComputeCatDL(long startmarker, long endmarker)
+{
+    double subtotal;
+    long marker, cat;
+
+    DoubleVec1d previous = m_catcells;
+    DoubleVec1d current(m_ncategories, 0.0);
+
+    for (marker = startmarker; marker != endmarker; ++marker)
+    {
+        subtotal = 0.0;
+        for (cat = 0; cat < m_ncategories; cat++)
+            subtotal += m_catprobs[cat] * previous[cat];
+
+        subtotal *= m_acratio;
+
+        for (cat = 0; cat < m_ncategories; cat++)
+            current[cat] = m_likes[marker][cat] *
+                (subtotal + m_ucratio * previous[cat]);
+
+        // This line puts the current values in previous cheaply.  We
+        // don't care that it puts the previous values in current,
+        // because current will be overwritten anyway.
+        previous.swap(current);
+    }
+
+    subtotal = 0.0;
+    for (cat = 0; cat < m_ncategories; cat++)
+        subtotal += m_catprobs[cat] * previous[cat];
+
+    m_catcells.swap(previous);
+
+    return log(subtotal);
+
+} // AlleleModel::ComputeCatDL
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d AlleleModel::ChooseAncestralState(long marker)
+{
+    long size = m_bincounts[marker];
+    DoubleVec1d result(size, 0.0);
+    result[registry.GetRandom().Long(size)] = 1.0;
+    return result;
+} // ChooseAncestralState
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+StepwiseModel::StepwiseModel(long nmarkers,
+                             const StringVec2d& uniqueAlleles,
+                             long numCategories,
+                             DoubleVec1d categoryRates,
+                             DoubleVec1d categoryProbabilities,
+                             double userAutoCorrelationValue,
+                             bool doNormalize,
+                             double relMuRate)
+    :   AlleleModel(nmarkers,
+                    numCategories,
+                    categoryRates,
+                    categoryProbabilities,
+                    userAutoCorrelationValue,
+                    doNormalize,
+                    defaults::bins,
+                    relMuRate),
+        m_allowance(defaults::step_allowance)
+{
+    Initialize(uniqueAlleles);
+} // StepwiseModel constructor
+
+//------------------------------------------------------------------------------------
+
+DataModel* StepwiseModel::Clone() const
+{
+    DataModel* newmodel = new StepwiseModel(*this);
+    return newmodel;
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d StepwiseModel::CreateDataModelReport() const
+{
+    StringVec1d report = DataModel::CreateDataModelReport();
+    string maxbins = "Maximum number of bins:  " + ToString(m_bincounts[0]);
+    report.push_back(maxbins);
+    return report;
+} // StepwiseModel::CreateDataModelReport
+
+//------------------------------------------------------------------------------------
+
+vector<double> StepwiseModel::DataToLikes(const string& datum, long marker) const
+{
+    if (m_bincounts.empty())
+    {
+        string msg = "StepwiseModel::DataToLikes() was called before m_bincounts ";
+        msg += "was initialized.";
+        throw implementation_error(msg);
+    }
+
+    if (datum == "?")       // unknown data fills all bins with 1
+    {
+        vector<double> result(m_bincounts[marker], 1.0);
+        return result;
+    }
+    else
+    {
+        vector<double> result(m_bincounts[marker], 0.0);
+        long allele;
+        FromString(datum,allele);
+        allele -= m_offsets[marker];
+        if (allele < 0 || allele >= m_bincounts[marker])
+        {
+            string msg = "StepwiseModel::DataToLikes() was called on an ";
+            msg += "uninitialized (or incorrectly initialized) object.";
+            throw implementation_error(msg);
+        }
+        result[allele] = 1.0;
+        return result;
+    }
+
+} // DataToLikes
+
+//------------------------------------------------------------------------------------
+
+void StepwiseModel::Initialize(const StringVec2d& uniqueAlleles)
+{
+    if (static_cast<unsigned long>(m_nmarkers) != uniqueAlleles.size())
+    {
+        string msg = "StepwiseModel::Initialize() encountered m_nmarkers = ";
+        msg += ToString(m_nmarkers) + " and uniqueAlleles.size() = ";
+        msg += ToString(uniqueAlleles.size()) + "; these numbers should be equal.";
+        throw implementation_error(msg);
+    }
+    long marker;
+
+    // find the biggest and smallest allele for each marker
+    // NB We assume that allele sizes are in number of repeats,
+    // *not* base pair count or anything else, and that "missing
+    // data" is coded as ?.
+
+    for (marker = 0; marker < m_nmarkers; ++marker)
+    {
+        bool real_allele = false;  // did we ever see a non-? allele
+        long smallone = MAXLONG, largeone = 0;
+        for (size_t nallele = 0; nallele<uniqueAlleles[marker].size();
+             nallele++)
+        {
+            // convert to number
+            // do not count "unknown data" markers
+            string allele = uniqueAlleles[marker][nallele];
+            if (allele == "?")
+            {
+                assert(false); //need to catch this earlier
+                continue;
+            }
+            real_allele = true;
+            long tipval;
+            FromString(allele, tipval);  // convert to long
+
+            if (tipval < smallone) smallone = tipval;
+            if (tipval > largeone) largeone = tipval;
+        }
+
+        // if no non-? were ever found, use arbitrary values
+        if (!real_allele)
+        {
+            smallone = 10;
+            largeone = 10;
+        }
+
+        long newoffset = max(smallone - m_allowance, 0L);
+        m_offsets.push_back(newoffset);
+        m_bincounts.push_back(largeone + m_allowance + 1 - newoffset);
+        if (real_allele && largeone != smallone)
+            // m_threshhold: +1 makes it work for (large-small) both odd/even
+            m_threshhold.push_back((largeone - smallone + 1L)/2L);
+        else
+            m_threshhold.push_back(1L);
+
+        // Pre-calculate table of steps
+        CalculateSteps(marker);
+    }
+
+    AlleleModel::m_nbins = *max_element(m_bincounts.begin(),
+                                        m_bincounts.end());
+    fill(AlleleModel::m_bincounts.begin(),
+         AlleleModel::m_bincounts.end(), AlleleModel::m_nbins);
+} // Initialize
+
+//------------------------------------------------------------------------------------
+// Adaptation of Peter Beerli's microsatellite likelihood nuview_micro routine from Migrate.
+
+void StepwiseModel::ComputeSiteDLs (Cell_ptr child1, Cell_ptr child2,
+                                    Cell_ptr thiscell, const DoubleVec1d& lengthOfBranchToChild1ScaledByRateCat,
+                                    const DoubleVec1d& lengthOfBranchToChild2ScaledByRateCat, long marker)
+{
+    double **pSiteDLsForChild1 = child1->GetSiteDLs(marker);
+    double **pSiteDLsForChild2 = child2->GetSiteDLs(marker);
+    double normForChild1 = child1->GetNorms(marker);
+    double normForChild2 = child2->GetNorms(marker);
+
+    if (!pSiteDLsForChild1 && !pSiteDLsForChild2) // they can't both be NULL
+    {
+        string msg = "StepwiseModel::ComputeSiteDLs() found no existing ";
+        msg += "site data-likelihoods for either child.";
+        throw implementation_error(msg);
+    }
+
+    // in case of a one-legged coalescence, copy values and return
+    if (!pSiteDLsForChild1)
+    {
+        thiscell->SetSiteDLs(marker, pSiteDLsForChild2);
+        thiscell->SetNorms(normForChild2, marker);
+        return;
+    }
+
+    if (!pSiteDLsForChild2)
+    {
+        thiscell->SetSiteDLs(marker, pSiteDLsForChild1);
+        thiscell->SetNorms(normForChild1, marker);
+        return;
+    }
+
+    // Note:  new[] takes an element count, not a byte count, so no sizeof() factor is needed.
+    double **jointProbChild1Child2 = new double*[m_ncategories];
+    jointProbChild1Child2[0] = new double[m_ncategories * m_bincounts[marker]];
+    for (long cat = 0; cat < m_ncategories; ++cat)
+        jointProbChild1Child2[cat] = jointProbChild1Child2[0] + cat * m_bincounts[marker];
+
+    long bmax = m_bincounts[marker];
+    long threshold = m_threshhold[marker];
+    double maxJointProbChild1Child2 = -DBL_MAX;
+    double mutationProbChild1, mutationProbChild2;
+
+    // Compute the bin contents for each possible microsat allele.
+    // "b" is the "bin number," i.e., the scaled value of the starting allele
+    // ("scaled" meaning "shifted so that the smallest allele gets set to zero").
+    // For each starting allele "b", we sweep through a range of alleles "a" to which
+    // allele "b" can mutate, from b - threshold to b + threshold.
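+    // For example, with threshold = 2 and b = 5 (and bmax > 7), the inner
+    // loop below sweeps the candidate alleles a = 3, 4, 5, 6, 7.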
+    for (long b = 0; b < bmax; b++)
+    {
+        for (long cat = 0; cat < m_ncategories; ++cat)
+        {
+            mutationProbChild1 = mutationProbChild2 = 0.0;
+            for (long a = max(0L, b - threshold); a <= min(b + threshold, bmax-1); a++)
+            {
+                // Note:  The probability of mutating n "steps" downward
+                // (e.g., from a microsat allele of 23 repeats to an allele of 20 repeats)
+                // equals the prob. of mutating n steps upward
+                // (e.g., from 23 repeats to 26 repeats).
+                long netNumPositiveSteps = labs(b - a);
+                if (pSiteDLsForChild1[cat][a] > 0)
+                {
+                    mutationProbChild1 += Probability(lengthOfBranchToChild1ScaledByRateCat[cat],
+                                                      netNumPositiveSteps, marker) * pSiteDLsForChild1[cat][a];
+                }
+                if (pSiteDLsForChild2[cat][a] > 0)
+                {
+                    mutationProbChild2 += Probability(lengthOfBranchToChild2ScaledByRateCat[cat],
+                                                      netNumPositiveSteps, marker) * pSiteDLsForChild2[cat][a];
+                }
+            }
+            jointProbChild1Child2[cat][b] = mutationProbChild1*mutationProbChild2;
+
+            if (jointProbChild1Child2[cat][b] > maxJointProbChild1Child2)
+            {
+                maxJointProbChild1Child2 = jointProbChild1Child2[cat][b];
+            }
+        }
+    }
+
+    // normalize to further protect against overflow, if requested
+    if (ShouldNormalize())
+    {
+        if (0.0 == maxJointProbChild1Child2)
+        {
+            thiscell->SetNorms(-DBL_MAX,marker);
+        }
+        else
+        {
+            for (long b = 0; b < bmax; b++)
+            {
+                for (long cat = 0; cat < m_ncategories; ++cat)
+                {
+                    jointProbChild1Child2[cat][b] /= maxJointProbChild1Child2;
+                }
+            }
+            thiscell->SetNorms(log(maxJointProbChild1Child2) +
+                               normForChild1 + normForChild2, marker);
+        }
+    }
+
+    thiscell->SetSiteDLs(marker, jointProbChild1Child2);
+
+    delete[] jointProbChild1Child2[0];
+    delete[] jointProbChild1Child2;
+} // ComputeSiteDLs
+
+//------------------------------------------------------------------------------------
+
+double StepwiseModel::ComputeSubtreeDLs(Cell& rootdls, double** startmarker, double** endmarker, long posn)
+{
+    double total=0.0, subtotal;
+    double** marker;
+    long cat, bin;
+    long firstposn = posn;
+
+    for (marker = startmarker; marker != endmarker;
+         marker = rootdls.GetNextMarker(marker))
+    {
+        subtotal = 0.0;
+        DoubleVec1d buffer(m_ncategories, 0.0);
+
+        for (cat = 0; cat < m_ncategories; ++cat)
+        {
+            for (bin = 0; bin < m_bincounts[posn]; ++bin)
+            {
+                // NB:  We assume a flat allele frequency prior here.
+                // cerr << "cat " << cat << ", bin " << bin << ", prob " << marker[cat][bin] << endl;
+                buffer[cat] += marker[cat][bin];
+            }
+            subtotal += m_catprobs[cat] * buffer[cat];
+        }
+
+        if (!subtotal)
+        {
+            DataModel::TryToNormalizeAndThrow(posn, GetModelType());
+        }
+
+        total += (log(subtotal) + rootdls.GetNorms(posn));
+
+        assert (total != 0);  // that would be *too* likely
+
+        if (m_ncategories > 1)
+        {
+            for (cat = 0; cat < m_ncategories; cat++)
+                m_likes[posn][cat] = buffer[cat]/subtotal;
+        }
+        ++posn;
+    }
+
+    if (m_ncategories > 1) total += ComputeCatDL(firstposn, posn);
+    return total;
+
+} // StepwiseModel::ComputeSubtreeDLs
+
+//------------------------------------------------------------------------------------
+
+// This method returns the probability of changing an allele by "diff" net steps
+// in time t.  For example, if the data type is microsatellite, we could compute
+// the probability that the sequence ACACAC (3 repeats) mutates to ACACACACAC
+// (5 repeats, for a "diff" of 2) along a branch whose length is scaled to be t.
+// "diff" received by this method is nonnegative, representing a net increase
+// in the size of the allele; the probability of decreasing by "diff" net steps
+// is defined to be the same as increasing by "diff" net steps.
+// All nonvanishingly unlikely net steps are considered; e.g., simply taking 2
+// steps along the branch, or taking 2 steps forward plus k steps backward
+// plus k steps forward, where k = 0, 1, 2, 3, ..., infinity.
+// The formula is Prob(i net steps in time t) =
+//    exp(-t)*sum_over_k((t/2)^(i+2k) / ((i+k)!k!)),
+// where the sum runs from k = 0 to k = infinity.
+// This formula is eq. 15.26 in Joe's "Inferring Phylogenies" book (p. 242).
+// The code looks different because the factorials are obtained via a precomputed
+// lookup table, and exp/log are used to counteract underflow.
+// Contributions to the sum drop off rapidly as k increases.
+// (Side note:  An equivalent and more compact version of this formula is
+// Prob(i,t) = exp(-t)*BesselI(i,t), where BesselI(n,x) is the
+// modified Bessel function of integer order n evaluated at real argument x.)
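+// As a rough worked instance:  for diff = 1 and t = 0.1, the k = 0 term is
+// (t/2)^1 / (1! 0!) = 0.05 and the k = 1 term is (t/2)^3 / (2! 1!) ~= 6.25e-5,
+// so Prob ~= exp(-0.1) * 0.0500625 ~= 0.045; later terms are negligible, which
+// is why the loop below stops once the contributions become trivial.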
+
+double StepwiseModel::Probability(double t, long diff, long marker) const
+{
+    long threshold = m_threshhold[marker]; // max. number of positive steps
+    if (diff > threshold)
+        return 0.0; // approximately infinitely unlikely to mutate that much in time t
+
+    double sum(0.0), oldsum(0.0);
+    const DoubleVec2d& PrecomputedTerms = m_steps[marker];
+    double log_tOver2 = log(0.5 * t);
+
+    for (long k = 0; k <= threshold; k++) // num steps = diff + k <= threshold
+    {
+        sum += exp(-t + log_tOver2*(diff + 2.0*k) - PrecomputedTerms[diff][k]);
+
+        // quit if the contributions have become trivial
+        if (fabs (oldsum - sum) < DBL_EPSILON) break;
+        oldsum = sum;
+    }
+
+    return sum;
+}
+
+//------------------------------------------------------------------------------------
+// Adaptation of Peter Beerli's calculate_steps routine from MIGRATE.
+// This routine precomputes values needed by Probability, for speed.
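+// Concretely, m_steps[marker][diff][k] holds logfac(diff + k) + logfac(k),
+// i.e. log((diff+k)! * k!), so Probability() can evaluate each term of the
+// series as exp(-t + (diff + 2k)*log(t/2) - m_steps[marker][diff][k]).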
+
+void StepwiseModel::CalculateSteps(long marker)
+{
+    long k, diff;
+    DoubleVec1d tempvec;
+    DoubleVec2d steps;
+    long threshhold = m_threshhold[marker];
+
+    for (diff = 0; diff <= threshhold; diff++)
+    {
+        tempvec.clear();
+        for (k = 0; k <= threshhold; k++)
+        {
+            tempvec.push_back(logfac (diff + k) + logfac (k));
+        }
+        steps.push_back(tempvec);
+    }
+
+    m_steps.push_back(steps); // Note:  This is okay from a speed standpoint,
+    // but not so good from a design standpoint,
+    // because this method assumes it's being called
+    // within a loop over markers.
+} // StepwiseModel::CalculateSteps
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d StepwiseModel::SimulateMarker(double branchlength, long whichmarker, const DoubleVec1d& state) const
+{
+    throw implementation_error("Cannot simulate data with the stepwise model yet.");
+} // SimulateMarker
+
+//------------------------------------------------------------------------------------
+
+string StepwiseModel::CellToData(Cell_ptr cell, long marker) const
+{
+    LongVec1d ones = cell->GetOnes(marker);
+    assert(static_cast<size_t>(marker) < m_offsets.size());
+    if (ones.size() == 1)
+    {
+        return ToString(ones[0] + m_offsets[marker]);
+    }
+    throw implementation_error
+        ("Cannot convert stepwise data from the internal format for data not simply a single number.");
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+BrownianModel::BrownianModel(long nmarkers,
+                             long numCategories,
+                             DoubleVec1d categoryRates,
+                             DoubleVec1d categoryProbabilities,
+                             double userAutoCorrelationValue,
+                             bool doNormalize,
+                             double relMuRate)
+    : AlleleModel(nmarkers,
+                  numCategories,
+                  categoryRates,
+                  categoryProbabilities,
+                  userAutoCorrelationValue,
+                  doNormalize,
+                  defaults::brownianBins, // 3, for mean, variance, and cumulative total
+                  relMuRate)
+{
+    // intentionally blank
+} // BrownianModel constructor
+
+//------------------------------------------------------------------------------------
+
+DataModel* BrownianModel::Clone() const
+{
+    DataModel* newmodel = new BrownianModel(*this);
+    return newmodel;
+}
+
+//------------------------------------------------------------------------------------
+
+void BrownianModel::SetNormalize(bool norm)
+{
+    if(norm)
+    {
+        data_error e("Normalization cannot be set for Brownian Model");
+        throw e;
+    }
+} // BrownianModel::SetNormalize
+
+//------------------------------------------------------------------------------------
+
+vector<double> BrownianModel::DataToLikes(const string& datum, long) const
+{
+    vector<double> result(m_nbins,0.0);
+
+    if (datum == "?")                   // unknown data
+    {
+        result[0] = 5.0;                // this is an arbitrary value
+        result[1] = DBL_BIG;
+        result[2] = 0.0;
+    }
+    else
+    {
+        FromString(datum,result[0]);
+        result[1] = 0.0;
+        result[2] = 0.0;
+    }
+
+    return result;
+
+} // DataToLikes
+
+//------------------------------------------------------------------------------------
+// Adaptation of Peter Beerli's nuview_brownian() from Migrate-1.2.4
+// by Jon Yamato 2002/05/06
+
+// N[Log[1/Sqrt[2 Pi]], 30]
+#define LOG2PIHALF -0.918938533204672741780329736406
+
+void BrownianModel::ComputeSiteDLs(Cell_ptr child1, Cell_ptr child2,
+                                   Cell_ptr thiscell, const DoubleVec1d& vv1, const DoubleVec1d& vv2,
+                                   long marker)
+{
+    double mean1, mean2, xx1, xx2, c12, v1, v2, vtot, f1, f2;
+    double **c1dls = child1->GetSiteDLs(marker),
+        **c2dls = child2->GetSiteDLs(marker);
+
+    if (!c1dls && !c2dls)
+    {
+        string msg = "BrownianModel::ComputeSiteDLs() failed to find ";
+        msg += "data likelihoods for either child.";
+        throw implementation_error(msg);
+    }
+
+    if (!c1dls)
+    {
+        thiscell->SetSiteDLs(marker,c2dls);
+        return;
+    }
+
+    if (!c2dls)
+    {
+        thiscell->SetSiteDLs(marker,c1dls);
+        return;
+    }
+
+    // temporary space needed for interface with dlcell::SetSiteDLs()
+    double **mydls = new double*[m_ncategories];
+    mydls[0] = new double[m_ncategories*m_nbins];
+    long cat;
+    for(cat = 1; cat < m_ncategories; ++cat)
+        mydls[cat] = mydls[0] + cat*m_nbins;
+
+    for (cat = 0; cat < m_ncategories; ++cat)
+    {
+
+        mean1 = c1dls[cat][0];
+        xx1 = c1dls[cat][2];
+        v1 = vv1[cat] + c1dls[cat][1];
+
+        mean2 = c2dls[cat][0];
+        xx2 = c2dls[cat][2];
+        v2 = vv2[cat] + c2dls[cat][1];
+
+        vtot = v1 + v2;
+
+        // the weights are set reciprocally so that the value coming from
+        // the shorter branch is given more weight.
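+        // For example, with v1 = 1.0 and v2 = 3.0, f1 = 3/4 and f2 = 1/4,
+        // so the mean from the lower-variance (shorter) branch dominates.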
+        if (vtot > 0.0) f1 = v2/vtot;
+        else f1 = 0.5;
+        f2 = 1.0 - f1;
+
+        mydls[cat][0] = f1*mean1 + f2*mean2;
+
+        mydls[cat][1] = v1*f1;
+
+        mydls[cat][2] = xx1 + xx2;
+
+        c12 = (mean1-mean2)*(mean1-mean2) / vtot;
+        mydls[cat][2] += min(0.0,-0.5 * (log(vtot)+c12) + LOG2PIHALF);
+    }
+
+    thiscell->SetSiteDLs(marker,mydls);
+
+    delete [] mydls[0];
+    delete [] mydls;
+
+} // BrownianModel::ComputeSiteDLs
+
+#undef LOG2PIHALF
+
+//------------------------------------------------------------------------------------
+
+double BrownianModel::ComputeSubtreeDLs(Cell& rootdls, double** startmarker, double** endmarker, long posn)
+{
+    // NB:  Brownian likelihoods are stored as logs!
+
+    double total=0.0, subtotal;
+    double** marker;
+    long cat;
+    long firstposn = posn;
+
+    for (marker = startmarker; marker != endmarker;
+         marker = rootdls.GetNextMarker(marker))
+    {
+
+        if (m_ncategories > 1)
+        {
+            // in order to add up likelihoods over categories, we
+            // must un-log them.  We normalize them first to avoid
+            // underflow.
+
+            DoubleVec1d buffer(m_ncategories, 0.0);
+            subtotal = 0.0;
+
+            double biggest = NEGMAX;
+
+            for (cat = 0; cat < m_ncategories; ++cat)
+            {
+                if (marker[cat][2] > biggest) biggest = marker[cat][2];
+            }
+
+            for (cat = 0; cat < m_ncategories; ++cat)
+            {
+                buffer[cat] += exp(marker[cat][2] - biggest);
+                subtotal += m_catprobs[cat] * buffer[cat];
+            }
+
+            for (cat = 0; cat < m_ncategories; cat++)
+                m_likes[posn][cat] = buffer[cat]/subtotal;
+
+            if (!subtotal)
+            {
+                // we shouldn't be here!  Normalization shouldn't happen
+                // for Brownian.
+                datalike_error ex("invalid subtree found in Brownian model");
+                throw(ex);
+            }
+
+            total += (log(subtotal) + biggest);
+
+        }
+        else
+        {
+            // If there is only one category, there is no normalization
+            // and we MUST NOT un-log the likelihood or it will underflow.
+            total += marker[0][2];
+        }
+
+        ++posn;
+    }
+
+    if (m_ncategories > 1) total += ComputeCatDL(firstposn, posn);
+
+    return total;
+
+} // BrownianModel::ComputeSubtreeDLs
+
+//------------------------------------------------------------------------------------
+
+StringVec1d BrownianModel::CreateDataModelReport() const
+{
+    StringVec1d report = DataModel::CreateDataModelReport();
+    string rptline("(The brownian approximation for microsatellite ");
+    rptline += "evolution has no extra parameters.)";
+    report.push_back(rptline);
+
+    return report;
+
+} // BrownianModel::CreateDataModelReport
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d BrownianModel::SimulateMarker(double branchlength, long whichmarker, const DoubleVec1d& state) const
+{
+    throw implementation_error("Cannot simulate data with the Brownian model yet.");
+} // SimulateMarker
+
+//------------------------------------------------------------------------------------
+
+string BrownianModel::CellToData(Cell_ptr cell, long marker) const
+{
+    double** dls = cell->GetSiteDLs(marker);
+    if (dls != NULL)
+    {
+        return ToString(dls[0][0]);
+    }
+    return "0";
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+KAlleleModel::KAlleleModel(
+    long nmarkers,
+    const StringVec2d& uniqueAlleles,
+    long numCategories,
+    DoubleVec1d categoryRates,
+    DoubleVec1d categoryProbabilities,
+    double userAutoCorrelationValue,
+    bool doNormalize,
+    double relMuRate
+    )
+    : AlleleModel(
+        nmarkers,
+        numCategories,
+        categoryRates,
+        categoryProbabilities,
+        userAutoCorrelationValue,
+        doNormalize,
+        defaults::bins,
+        relMuRate
+        )
+{
+    Initialize(uniqueAlleles);
+} // KAlleleModel constructor
+
+//------------------------------------------------------------------------------------
+
+void KAlleleModel::ComputeSiteDLs(Cell_ptr child1, Cell_ptr child2,
+                                  Cell_ptr thiscell, const DoubleVec1d& lengthOfBranchToChild1ScaledByRateCat,
+                                  const DoubleVec1d& lengthOfBranchToChild2ScaledByRateCat, long marker)
+{
+    double **pSiteDLsForChild1 = child1->GetSiteDLs(marker);
+    double **pSiteDLsForChild2 = child2->GetSiteDLs(marker);
+    double normForChild1 = child1->GetNorms(marker);
+    double normForChild2 = child2->GetNorms(marker);
+
+    if (!pSiteDLsForChild1 && !pSiteDLsForChild2)
+    {
+        string msg = "KAlleleModel::ComputeSiteDLs() found no existing ";
+        msg += "site data-likelihoods for either child.";
+        throw implementation_error(msg);
+    }
+
+    // in case of a one-legged coalescence, copy values and return
+    if (!pSiteDLsForChild1)
+    {
+        thiscell->SetSiteDLs(marker, pSiteDLsForChild2);
+        thiscell->SetNorms(normForChild2, marker);
+        return;
+    }
+
+    if (!pSiteDLsForChild2)
+    {
+        thiscell->SetSiteDLs(marker, pSiteDLsForChild1);
+        thiscell->SetNorms(normForChild1, marker);
+        return;
+    }
+
+    long smax = m_bincounts[marker];
+
+    // allocate temporary working space;
+    // OPTIMIZE should be done more cheaply!
+    double **jointProbChild1Child2 = new double*[m_ncategories];
+    jointProbChild1Child2[0] = new double[m_ncategories * m_bincounts[marker]];
+    for (long cat = 0; cat < m_ncategories; ++cat)
+        jointProbChild1Child2[cat] = jointProbChild1Child2[0] + cat * m_bincounts[marker];
+
+    // compute the bin contents for each possible allele size
+    long s, ss;
+    double prob1, prob2, a1, a2, b1, b2, temp1, temp2, sum1, sum2;
+    double maxJointProbChild1Child2(-DBL_MAX);
+
+    for (long cat = 0; cat < m_ncategories; ++cat)
+    {
+        prob1 = probMathFunc ( lengthOfBranchToChild1ScaledByRateCat[cat], (smax-1.0)/smax);
+        prob2 = probMathFunc ( lengthOfBranchToChild2ScaledByRateCat[cat], (smax-1.0)/smax);
+
+        a1 = 1.0 - prob1;
+        a2 = 1.0 - prob2;
+        b1 = prob1 / ( smax - 1.0); // smax was set to at least 2 in Initialize()
+        b2 = prob2 / ( smax - 1.0);
+        for (s = 0; s < smax; s++)
+        {
+            sum1 = 0; sum2 = 0;
+            for(ss = 0; ss < smax; ss++)
+            {
+                if (s == ss)
+                {
+                    temp1 = a1; temp2 = a2;
+                }
+                else
+                {
+                    temp1 = b1; temp2 = b2;
+                }
+                sum1 += pSiteDLsForChild1[cat][ss] * temp1;
+                sum2 += pSiteDLsForChild2[cat][ss] * temp2;
+            }
+
+            jointProbChild1Child2[cat][s] = sum1 * sum2;
+            if (jointProbChild1Child2[cat][s] > maxJointProbChild1Child2)
+                maxJointProbChild1Child2 = jointProbChild1Child2[cat][s];  // overflow protection
+        }
+    }
+
+    // normalize to further protect against overflow, if requested
+    if (ShouldNormalize())
+    {
+        if (maxJointProbChild1Child2 == 0.0)
+        {
+            thiscell->SetNorms(-DBL_MAX,marker);
+        }
+        else
+        {
+            for (s = 0; s < smax; s++)
+            {
+                for (long cat = 0; cat < m_ncategories; ++cat)
+                {
+                    jointProbChild1Child2[cat][s] /= maxJointProbChild1Child2;
+                }
+            }
+            thiscell->SetNorms(log(maxJointProbChild1Child2) + normForChild1 + normForChild2, marker);
+        }
+    }
+
+    thiscell->SetSiteDLs(marker, jointProbChild1Child2);
+
+    delete[] jointProbChild1Child2[0];
+    delete[] jointProbChild1Child2;
+} // KAlleleModel::ComputeSiteDLs
+
+//------------------------------------------------------------------------------------
+
+DataModel* KAlleleModel::Clone() const
+{
+    DataModel* newmodel = new KAlleleModel(*this);
+    return newmodel;
+} // KAlleleModel::Clone
+
+//------------------------------------------------------------------------------------
+
+double KAlleleModel::probMathFunc( double ut, double coef)
+{
+    double mathValue = coef*(1.0-exp(-1.0 / coef * ut));
+    if (systemSpecificIsnan(mathValue)) // BUGBUG Presumably some other action should be taken?
+        cerr << "coef:" << coef << endl
+             << "time:" << ut << endl;
+    return mathValue;
+} // KAlleleModel::probMathFunc
+
+//------------------------------------------------------------------------------------
+
+StringVec1d KAlleleModel::CreateDataModelReport() const
+{
+    StringVec1d report = DataModel::CreateDataModelReport();
+    string maxbins = "Maximum number of bins:  " + ToString(m_bincounts[0]);
+    report.push_back(maxbins);
+    return report;
+
+} // KAlleleModel::CreateDataModelReport
+
+//------------------------------------------------------------------------------------
+
+void KAlleleModel::Initialize(const StringVec2d& uniqueAlleles)
+{
+    if (static_cast<unsigned long>(m_nmarkers) != uniqueAlleles.size())
+    {
+        string msg = "KAlleleModel::Initialize() encountered m_nmarkers = ";
+        msg += ToString(m_nmarkers) + " and uniqueAlleles.size() = ";
+        msg += ToString(uniqueAlleles.size()) + "; these numbers should be equal.";
+        throw implementation_error(msg);
+    }
+
+    for (long marker = 0; marker < m_nmarkers; ++marker)
+    {
+        map <string,long> tmpMap;
+        m_allelemaps.push_back(tmpMap);
+        size_t nallele = 0;
+        for (; nallele<uniqueAlleles[marker].size(); nallele++)
+        {
+            string allelename = uniqueAlleles[marker][nallele];
+            if (allelename == "?")
+            {
+                assert(false); //we need to catch this earlier.
+                continue;
+            }
+            assert(m_allelemaps[marker].find(allelename)==m_allelemaps[marker].end());
+            m_allelemaps[marker].insert(make_pair(allelename, nallele));
+        }
+
+        if(nallele <= 1) nallele=2; // must be at least this much
+        m_bincounts.push_back(nallele);
+    }
+
+    AlleleModel::m_nbins = *max_element(m_bincounts.begin(),
+                                        m_bincounts.end());
+    fill(AlleleModel::m_bincounts.begin(),
+         AlleleModel::m_bincounts.end(), AlleleModel::m_nbins);
+} // KAlleleModel::Initialize
+
+//------------------------------------------------------------------------------------
+
+vector<double> KAlleleModel::DataToLikes(const string& datum, long marker) const
+{
+    if (m_bincounts.empty())
+    {
+        string msg = "KAlleleModel::DataToLikes() was called on an uninitialized ";
+        msg += "object.";
+        throw implementation_error(msg);
+    }
+
+    if (datum == "?")       // unknown data fills all bins with 1
+    {
+        vector<double> result(m_bincounts[marker], 1.0);
+        return result;
+    }
+    else
+    {
+        vector<double> result(m_bincounts[marker], 0.0);
+        map<string, long>::const_iterator allele = m_allelemaps[marker].find(datum);
+        assert(allele != m_allelemaps[marker].end()); //Uncounted allele
+        result[allele->second] = 1.0;
+        return result;
+    }
+
+} // DataToLikes
+
+//------------------------------------------------------------------------------------
+
+double KAlleleModel::ComputeSubtreeDLs(Cell& rootdls, double** startmarker, double** endmarker, long posn)
+{
+    double total=0.0, subtotal;
+    double** marker;
+    long cat, bin;
+    long firstposn = posn;
+
+    for (marker = startmarker; marker != endmarker;
+         marker = rootdls.GetNextMarker(marker))
+    {
+        subtotal = 0.0;
+        DoubleVec1d buffer(m_ncategories, 0.0);
+
+        for (cat = 0; cat < m_ncategories; ++cat)
+        {
+            for (bin = 0; bin < m_bincounts[posn]; ++bin)
+            {
+                // NB:  We assume a flat allele frequency prior here.
+                buffer[cat] += marker[cat][bin]/m_nbins;
+                //LS DEBUG MAPPING:  we eventually want unique allele freqs here instead
+                // of dividing by m_nbins.
+            }
+
+            subtotal += m_catprobs[cat] * buffer[cat];
+            // cerr << m_catprobs[cat] << endl << buffer[cat] << endl;
+            // assert(subtotal != 0);  // that would not be likely enough
+            if (!subtotal)
+            {
+                DataModel::TryToNormalizeAndThrow(posn, GetModelType());
+            }
+        }
+
+        total += (log(subtotal) + rootdls.GetNorms(posn));
+
+        assert (total != 0);  // that would be *too* likely
+
+        if (m_ncategories > 1)
+        {
+            for (cat = 0; cat < m_ncategories; cat++)
+                m_likes[posn][cat] = buffer[cat]/subtotal;
+        }
+        ++posn;
+    }
+
+    if (m_ncategories > 1) total += ComputeCatDL(firstposn, posn);
+
+    if(systemSpecificIsnan(total))
+    {
+        cerr << endl << "ComputeSubtreeDLs" << endl;
+    }
+
+    return total;
+
+} // KAlleleModel::ComputeSubtreeDLs
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+StringVec1d KAlleleModel::ToXML(size_t nspaces) const
+{
+    StringVec1d xmllines(DataModel::ToXML(nspaces));
+
+    // Yes, this model has no fields of its own.
+
+    return xmllines;
+
+} // ToXML
+
+//------------------------------------------------------------------------------------
+// This routine simulates data for a single node under the KAllele
+// model.  It assumes that the given branch lengths have already
+// been rescaled for the rate category desired.
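+// With k = nstates possible alleles, the chance that the lineage ends the
+// branch in a state different from its starting state is
+// ((k-1)/k) * (1 - exp(-(k/(k-1)) * t)), the same quantity probMathFunc()
+// computes with coef = (k-1)/k; the new state is then drawn uniformly from
+// the remaining k-1 states.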
+
+DoubleVec1d KAlleleModel::SimulateMarker(double branchlength, long whichmarker, const DoubleVec1d& state) const
+{
+    double rnd = registry.GetRandom().Float();
+    long nstates(m_bincounts[whichmarker]);
+    long oldstate = 0;
+    for (long staten=0; staten<static_cast<long>(state.size()); staten++)
+    {
+        if (state[staten] == 1.0) oldstate = staten;
+    }
+
+    // if something happens
+    // use floating-point division here; integer division would truncate the ratio to 1
+    double newratio = nstates/(nstates-1.0);
+    if (rnd < (1/newratio) * (1.0 - exp(-newratio * branchlength)))
+    {
+        long chosenstate = registry.GetRandom().Long(nstates-1);
+        if (chosenstate >= oldstate) chosenstate++;
+        DoubleVec1d answer(state.size(),0.0);
+        answer[chosenstate] = 1.0;
+        return answer;
+    }
+
+    // return nothing happens
+    return state;
+
+} // SimulateMarker
+
+//------------------------------------------------------------------------------------
+
+string KAlleleModel::CellToData(Cell_ptr cell, long marker) const
+{
+    LongVec1d ones = cell->GetOnes(marker);
+    if (ones.size() == 1)
+    {
+        for (map<string, long>::const_iterator key=m_allelemaps[marker].begin();
+             key != m_allelemaps[marker].end(); key++)
+        {
+            if ((*key).second == ones[0])
+            {
+                return (*key).first;
+            }
+        }
+        throw data_error("Cannot find any any alleles for bin " + ToString(ones[0]) + ".");
+    }
+    throw implementation_error
+        ("Cannot convert K-Allele data from the internal format if the data is not a single allele.");
+
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+MixedKSModel::MixedKSModel(
+    long nmarkers,
+    const StringVec2d& uniqueAlleles,
+    long numCategories,
+    DoubleVec1d categoryRates,
+    DoubleVec1d categoryProbabilities,
+    double userAutoCorrelationValue,
+    bool doNormalize,
+    double relMuRate,
+    double alphaVal,
+    bool doOptimization
+    )
+    : AlleleModel(
+        nmarkers,
+        numCategories,
+        categoryRates,
+        categoryProbabilities,
+        userAutoCorrelationValue,
+        doNormalize,
+        defaults::bins,
+        relMuRate
+        ),
+      m_allowance(defaults::mixedks_allowance),
+      m_isOpt(doOptimization),
+      m_alphaRpt()
+{
+    SetAlpha(alphaVal); // called instead of initializing in the member-initializer
+                        // list, because SetAlpha() checks the value of alpha
+    m_origAlpha = m_alpha;
+    Initialize(uniqueAlleles);
+}
+
+//------------------------------------------------------------------------------------
+
+DataModel* MixedKSModel::Clone() const
+{
+    DataModel* newmodel = new MixedKSModel(*this);
+    return newmodel;
+}
+
+//------------------------------------------------------------------------------------
+
+vector<double> MixedKSModel::DataToLikes(const string& datum, long marker) const
+{
+    if (m_bincounts.empty())
+    {
+        string msg = "MixedKSModel::DataToLikes() was called on an uninitialized ";
+        msg += "object.";
+        throw implementation_error(msg);
+    }
+
+    if (datum == "?")                   // unknown data fills all bins with 1
+    {
+        vector<double> result(m_bincounts[marker], 1.0);
+        return result;
+    }
+    else if (StringType(datum)!=2)
+    {
+        throw data_error ("Your data must consist of numbers, not letters or punctuation."
+                          "  The Mixed KS model is inappropriate for DNA or RNA data.");
+    }
+    else
+    {
+        vector<double> result(m_bincounts[marker], 0.0);
+        long allele;
+        FromString(datum,allele);
+        allele -= m_offsets[marker];
+        if (allele < 0 || allele >= m_bincounts[marker])
+        {
+            string msg = "MixedKSModel::DataToLikes() was called on an ";
+            msg += "uninitialized (or incorrectly initialized) object.";
+            throw implementation_error(msg);
+        }
+        result[allele] = 1.0;
+        return result;
+    }
+
+} // DataToLikes
+
+//------------------------------------------------------------------------------------
+
+void MixedKSModel::Initialize(const StringVec2d& uniqueAlleles)
+{
+    if (static_cast<unsigned long>(m_nmarkers) != uniqueAlleles.size())
+    {
+        string msg = "MixedKSModel::Initialize() encountered m_nmarkers = ";
+        msg += ToString(m_nmarkers) + " and uniqueAlleles.size() = ";
+        msg += ToString(uniqueAlleles.size()) + "; these numbers should be equal.";
+        throw implementation_error(msg);
+    }
+
+    // find the biggest and smallest allele for each marker
+    // NB We assume that allele sizes are in number of repeats,
+    // *not* base pair count or anything else, and that "missing
+    // data" is coded as ?.
+
+    long tipval;
+
+    for (long marker = 0; marker < m_nmarkers; ++marker)
+    {
+        bool real_allele = false;  // did we ever see a non-? allele
+        long smallone = MAXLONG, largeone = 0;
+
+        for (size_t nallele = 0; nallele<uniqueAlleles[marker].size();
+             nallele++)
+        {
+            // convert to number if possible;
+            // do not count "unknown data" markers
+            string allele = uniqueAlleles[marker][nallele];
+            if (allele == "?")
+            {
+                assert(false); // catch this earlier
+                continue;
+            }
+            real_allele = true;
+            FromString(allele, tipval);  // convert to long
+
+            if (tipval < smallone) smallone = tipval;
+            if (tipval > largeone) largeone = tipval;
+        }
+
+        // if no non-? were ever found, use arbitrary values
+        if (!real_allele)
+        {
+            smallone = 10;
+            largeone = 10;
+        }
+
+        long newoffset = max(smallone - m_allowance, 0L);
+        m_offsets.push_back(newoffset);
+        m_bincounts.push_back(largeone + m_allowance + 1 - newoffset);
+        if (real_allele && largeone != smallone)
+            // m_threshhold: +1 makes it work for (large-small) both odd/even
+            m_threshhold.push_back((largeone - smallone + 1L)/2L);
+        else
+            m_threshhold.push_back(1L);
+
+        // Pre-calculate table of steps
+        CalculateSteps(marker);
+    }
+
+    AlleleModel::m_nbins = *max_element(m_bincounts.begin(),
+                                        m_bincounts.end());
+    fill(AlleleModel::m_bincounts.begin(),
+         AlleleModel::m_bincounts.end(), AlleleModel::m_nbins);
+} // Initialize
+
+//------------------------------------------------------------------------------------
+
+void MixedKSModel::SetAlpha(double alphaVal)
+{
+    assert(alphaVal >= 0.0);
+    assert(alphaVal <= 1.0);
+    m_alpha = alphaVal;
+    m_beta = 1 - m_alpha;
+}
+
+//------------------------------------------------------------------------------------
+
+void MixedKSModel::SetAlpha(double alphaVal, long rep, long chain)
+{
+    if (m_alphaRpt.size() == static_cast<size_t>(rep))
+    {
+        ResetAlpha();
+    }
+    assert(m_alphaRpt[rep].size() == static_cast<size_t>(chain));
+    m_alphaRpt[rep].push_back(alphaVal);
+    SetAlpha(alphaVal);
+}
+
+//------------------------------------------------------------------------------------
+// Adaptation of Peter Beerli's microsatellite likelihood nuview_micro routine from Migrate.
+
+void MixedKSModel::ComputeSiteDLs (Cell_ptr child1, Cell_ptr child2,
+                                   Cell_ptr thiscell, const DoubleVec1d& lengthOfBranchToChild1ScaledByRateCat,
+                                   const DoubleVec1d& lengthOfBranchToChild2ScaledByRateCat, long marker)
+{
+    double **pSiteDLsForChild1 = child1->GetSiteDLs(marker);
+    double **pSiteDLsForChild2 = child2->GetSiteDLs(marker);
+    double normForChild1 = child1->GetNorms(marker);
+    double normForChild2 = child2->GetNorms(marker);
+
+    if (!pSiteDLsForChild1 && !pSiteDLsForChild2) // they can't both be NULL
+    {
+        string msg = "MixedKSModel::ComputeSiteDLs() found no existing ";
+        msg += "site data-likelihoods for either child.";
+        throw implementation_error(msg);
+    }
+
+    // in case of a one-legged coalescence, copy values and return
+    if (!pSiteDLsForChild1)
+    {
+        thiscell->SetSiteDLs(marker, pSiteDLsForChild2);
+        thiscell->SetNorms(normForChild2, marker);
+        return;
+    }
+
+    if (!pSiteDLsForChild2)
+    {
+        thiscell->SetSiteDLs(marker, pSiteDLsForChild1);
+        thiscell->SetNorms(normForChild1, marker);
+        return;
+    }
+
+    long smax = m_bincounts[marker];
+    long threshold = m_threshhold[marker];
+
+    // allocate temporary working space;
+    // OPTIMIZE should be done more cheaply!
+    double **jointProbChild1Child2 = new double*[m_ncategories];
+    jointProbChild1Child2[0] = new double[m_ncategories * m_bincounts[marker]];
+    for (long cat = 0; cat < m_ncategories; ++cat)
+        jointProbChild1Child2[cat] = jointProbChild1Child2[0] + cat * m_bincounts[marker];
+    double maxJointProbChild1Child2 = -DBL_MAX;
+    double mutationProbChild1, mutationProbChild2;
+
+    long s, a, diff;
+
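+    // The MixedKS site likelihood is a weighted mixture of the two submodels
+    // computed below:  m_alpha times the K-Allele term plus m_beta
+    // (= 1 - m_alpha, see SetAlpha()) times the stepwise term.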
+    //KAllele part
+    // compute the bin contents for each possible allele size
+    long ss;
+    double prob1, prob2, a1, a2, b1, b2, temp1, temp2, sum1, sum2;
+
+    for (long cat = 0; cat < m_ncategories; ++cat)
+    {
+        prob1 = probMathFunc (lengthOfBranchToChild1ScaledByRateCat[cat], (smax-1.0)/smax);
+        prob2 = probMathFunc (lengthOfBranchToChild2ScaledByRateCat[cat], (smax-1.0)/smax);
+
+        a1 = 1.0 - prob1;
+        a2 = 1.0 - prob2;
+        b1 = prob1 / ( smax - 1.0); //need to check when smax = 1
+        b2 = prob2 / ( smax - 1.0);
+        for (s = 0; s < smax; s++)
+        {
+            sum1 = 0; sum2 = 0;
+            for(ss = 0; ss < smax; ss++)
+            {
+                if (s == ss)
+                {
+                    temp1 = a1; temp2 = a2;
+                }
+                else
+                {
+                    temp1 = b1; temp2 = b2;
+                }
+                sum1 += pSiteDLsForChild1[cat][ss] * temp1;
+                sum2 += pSiteDLsForChild2[cat][ss] * temp2;
+            }
+
+            jointProbChild1Child2[cat][s] = sum1 * sum2 * m_alpha;  //alpha
+            if (jointProbChild1Child2[cat][s] > maxJointProbChild1Child2)
+                maxJointProbChild1Child2 = jointProbChild1Child2[cat][s];  // overflow protection
+        }
+    }
+    //LS NOTE:  In the past this warning fired because of a bug; if it fires
+    // again, look carefully to make sure that bug has not returned.
+    if(maxJointProbChild1Child2 < 0)
+    {
+        string msg = "Warning:  maximum probability value of "
+            + ToString(maxJointProbChild1Child2)
+            + " in MixedKS.";
+        registry.GetRunReport().ReportDebug(msg);
+    }
+
+    //StepWise part
+    // compute the bin contents for each possible allele size
+    for (s = 0; s < smax; s++)
+    {
+        for (long cat = 0; cat < m_ncategories; ++cat)
+        {
+            mutationProbChild1 = mutationProbChild2 = 0.0;
+            for (a = max (0L, s - threshold); a <= s + threshold && a < smax; a++)
+            {
+                diff = labs(s - a);
+                if (pSiteDLsForChild1[cat][a] > 0)
+                {
+                    mutationProbChild1 += Probability(lengthOfBranchToChild1ScaledByRateCat[cat],
+                                                      diff, marker) * pSiteDLsForChild1[cat][a];
+                }
+                if (pSiteDLsForChild2[cat][a] > 0)
+                {
+                    mutationProbChild2 += Probability(lengthOfBranchToChild2ScaledByRateCat[cat],
+                                                      diff, marker) * pSiteDLsForChild2[cat][a];
+                }
+            }
+            jointProbChild1Child2[cat][s] += mutationProbChild1 * mutationProbChild2 * m_beta;
+            if (jointProbChild1Child2[cat][s] > maxJointProbChild1Child2)
+                maxJointProbChild1Child2 = jointProbChild1Child2[cat][s];  // overflow protection
+        }
+    }
+
+    // normalize to further protect against overflow, if requested
+    if (ShouldNormalize())
+    {
+        if (maxJointProbChild1Child2 == 0.0)
+        {
+            thiscell->SetNorms(-DBL_MAX,marker);
+        }
+        else
+        {
+            for (s = 0; s < smax; s++)
+            {
+                for (long cat = 0; cat < m_ncategories; ++cat)
+                {
+                    jointProbChild1Child2[cat][s] /= maxJointProbChild1Child2;
+                }
+            }
+            thiscell->SetNorms(log(maxJointProbChild1Child2) + normForChild1 + normForChild2,
+                               marker);
+        }
+    }
+
+    thiscell->SetSiteDLs(marker, jointProbChild1Child2);
+
+    delete[] jointProbChild1Child2[0];
+    delete[] jointProbChild1Child2;
+} // ComputeSiteDLs
+
+//------------------------------------------------------------------------------------
+
+double MixedKSModel::ComputeSubtreeDLs(Cell& rootdls, double** startmarker, double** endmarker, long posn)
+{
+    double total=0.0, subtotal;
+    double** marker;
+    long cat, bin;
+    long firstposn = posn;
+
+    for (marker = startmarker; marker != endmarker;
+         marker = rootdls.GetNextMarker(marker))
+    {
+        subtotal = 0.0;
+        DoubleVec1d buffer(m_ncategories, 0.0);
+
+        for (cat = 0; cat < m_ncategories; ++cat)
+        {
+            for (bin = 0; bin < m_bincounts[posn]; ++bin)
+            {
+                // NB:  We assume a flat allele frequency prior here.
+                buffer[cat] += marker[cat][bin];
+            }
+
+            subtotal += m_catprobs[cat] * buffer[cat];
+        }
+
+        if (!subtotal)
+        {
+            DataModel::TryToNormalizeAndThrow(posn, GetModelType());
+        }
+
+        total += (log(subtotal) + rootdls.GetNorms(posn));
+
+        assert (total != 0);  // that would be *too* likely
+
+        if (m_ncategories > 1)
+        {
+            for (cat = 0; cat < m_ncategories; cat++)
+                m_likes[posn][cat] = buffer[cat]/subtotal;
+        }
+        ++posn;
+    }
+
+    if (m_ncategories > 1) total += ComputeCatDL(firstposn, posn);
+    return total;
+
+} // MixedKSModel::ComputeSubtreeDLs
+
+//------------------------------------------------------------------------------------
+
+StringVec1d MixedKSModel::ToXML(size_t nspaces) const
+{
+    StringVec1d xmllines(AlleleModel::ToXML(nspaces));
+
+    nspaces += INDENT_DEPTH;
+
+    string line(MakeIndent(MakeTag(xmlstr::XML_TAG_ISOPT),nspaces));
+    line += ToStringTF(m_isOpt);
+    line += MakeCloseTag(xmlstr::XML_TAG_ISOPT);
+    StringVec1d::iterator endtag = --xmllines.end();
+    xmllines.insert(endtag,line);
+
+    string line2(MakeIndent(MakeTag(xmlstr::XML_TAG_ALPHA),nspaces));
+    line2 += ToString(GetAlpha());
+    line2 += MakeCloseTag(xmlstr::XML_TAG_ALPHA);
+    endtag = --xmllines.end();
+    xmllines.insert(endtag,line2);
+
+    nspaces -= INDENT_DEPTH;
+    return xmllines;
+
+} // ToXML
+
+//------------------------------------------------------------------------------------
+// Adaptation of Peter Beerli's prob_micro routine from MIGRATE.
+// This routine computes the probability of a change of "diff" steps in time "t".
+//
+//   Mary Kuhner 2002/01/02
+
+double MixedKSModel::Probability (double t, long diff, long marker) const
+{
+    long threshold = m_threshhold[marker];
+    if (diff > threshold)
+        return 0.0; // approximately infinitely unlikely to mutate that much in time t
+
+    const DoubleVec2d& PrecomputedTerms = m_steps[marker];
+    double sum(0.0), oldsum(0.0), log_tOver2(log(0.5*t));
+
+    for (long k = 0; k <= threshold; k++) // num steps = diff + k <= threshold
+    {
+        sum += exp(-t + log_tOver2*(diff + 2.0*k) - PrecomputedTerms[diff][k]);
+
+        // quit if the contributions have become trivial
+        if (fabs (oldsum - sum) < DBL_EPSILON) break;
+        oldsum = sum;
+    }
+    return sum;
+} // Probability
+
+//------------------------------------------------------------------------------------
+// Adaptation of Peter Beerli's calculate_steps routine from MIGRATE.
+// This routine precomputes values needed by Probability, for speed.
+
+void MixedKSModel::CalculateSteps (long marker)
+{
+    long k, diff;
+    DoubleVec1d tempvec;
+    DoubleVec2d steps;
+    long threshhold = m_threshhold[marker];
+
+    for (diff = 0; diff <= threshhold; diff++)
+    {
+        tempvec.clear();
+        for (k = 0; k <= threshhold; k++)
+        {
+            tempvec.push_back(logfac (diff + k) + logfac (k));
+        }
+        steps.push_back(tempvec);
+    }
+
+    m_steps.push_back(steps);
+    // Note:  This is okay from a speed standpoint,
+    // but not so good from a design standpoint,
+    // because this method assumes it's being called
+    // within a loop over markers.
+} // CalculateSteps
+
+//------------------------------------------------------------------------------------
+
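+// Helper for the K-allele part of the likelihood: returns coef * (1 - exp(-ut/coef)).
+// Presumably coef = (K-1)/K for a marker with K alleles, which gives the familiar
+// K-allele (Jukes-Cantor-like) probability that a lineage carries a *different* allele
+// after accumulating ut expected mutations.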
+double MixedKSModel::probMathFunc( double ut, double coef)
+{
+    double mathValue = coef*(1.0-exp(-1.0 / coef * ut));
+    if (systemSpecificIsnan(mathValue))
+        cerr << "coef:" << coef << endl
+             << "time:" << ut << endl;
+    return mathValue;
+} // MixedKSModel::probMathFunc
+
+//------------------------------------------------------------------------------------
+
+StringVec1d MixedKSModel::CreateDataModelReport() const
+{
+    StringVec1d report = DataModel::CreateDataModelReport();
+
+    string line;
+    if (m_isOpt)
+    {
+        line = "Final ";
+    }
+    line += "Multistep:single-step ratio: " + ToString(m_alpha);
+    report.push_back(line);
+
+    string maxbins = "Maximum number of bins:  " + ToString(m_bincounts[0]);
+    report.push_back(maxbins);
+    if (m_isOpt)
+    {
+        report.push_back(string("Multistep:single-step ratio used for each chain (optimized from the previous):"));
+        //report alpha for each chain
+        for(size_t rep=0; rep<m_alphaRpt.size(); rep++)
+        {
+            for (size_t chain=0; chain<m_alphaRpt[rep].size(); chain++)
+            {
+                line = "";
+                if (m_alphaRpt.size() > 1)
+                {
+                    line = "Replicate " + indexToKey(rep) + ", ";
+                }
+                line += "Chain " + indexToKey(chain) + " :"
+                    + ToString(m_alphaRpt[rep][chain]);
+                report.push_back( line );
+            }
+        }
+    }
+
+    return report;
+
+} // MixedKSModel::CreateDataModelReport
+
+//------------------------------------------------------------------------------------
+
+bool MixedKSModel::OptimizeDataModel(Tree* tree, const Locus& locus)
+{
+    // Only do the optimization when 'm_isOpt' is turned on; otherwise do nothing.
+    if(!m_isOpt) return false;
+
+    FuncMax fm(*this, tree, locus);
+
+    FMState fms(100,      //max_iters
+                0.01,     //increment
+                0.0001,   //threshold
+                0,        //leftLimit
+                1,        //rightLimit
+                0.5       //initX
+        );
+
+    fm.setState(fms);
+    fm.run();
+    //Note:  The FuncMax object has a pointer to us, and uses it to set our alpha.
+    //report final alpha for each chain
+    m_alphaRpt[m_alphaRpt.size()-1].push_back(m_alpha);
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+void MixedKSModel::ResetAlpha()
+{
+    SetAlpha(m_origAlpha);
+    DoubleVec1d newalphas;
+    newalphas.push_back(m_origAlpha);
+    m_alphaRpt.push_back(newalphas);
+}
+
+//------------------------------------------------------------------------------------
+
+void MixedKSModel::WriteAlpha(ofstream& sumout, long loc, long rep, long chain)
+{
+    if (!m_isOpt) return;
+    assert(static_cast<size_t>(rep)<m_alphaRpt.size());
+    if (static_cast<size_t>(chain+1)>=m_alphaRpt[rep].size()) return;
+    //LS NOTE:  We write out the alpha that we calculated for this chain
+    // instead of the alpha we used for this chain, because of timing
+    // issues--we don't want to lose the information if sumfile writing stops
+    // unexpectedly.
+    sumout << "\t"
+           << xmlsum::ALPHA_START1 << " "
+           << xmlsum::ALPHA_START2 << " " << loc << " "
+           << xmlsum::ALPHA_START3 << " " << m_alphaRpt[rep][chain+1]
+           << " " << xmlsum::ALPHA_END << endl;
+
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d MixedKSModel::SimulateMarker(double branchlength, long whichmarker, const DoubleVec1d& state) const
+{
+    throw implementation_error("Cannot simulate data with the MixedKS model yet.");
+} // SimulateMarker
+
+//------------------------------------------------------------------------------------
+
+string MixedKSModel::CellToData(Cell_ptr cell, long marker) const
+{
+    LongVec1d ones = cell->GetOnes(marker);
+    assert(static_cast<size_t>(marker) < m_offsets.size());
+    if (ones.size() == 1)
+    {
+        return ToString(ones[0] + m_offsets[marker]);
+    }
+    throw implementation_error
+        ("Cannot convert stepwise data from the internal format for data not simply a single number.");
+}
+
+//____________________________________________________________________________________
diff --git a/src/datalike/dlmodel.h b/src/datalike/dlmodel.h
new file mode 100644
index 0000000..d4ddf83
--- /dev/null
+++ b/src/datalike/dlmodel.h
@@ -0,0 +1,671 @@
+// $Id: dlmodel.h,v 1.70 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*********************************************************************
+ DLModel is the key control class for handling polymorphism data,
+ organized by the type of data likelihood model in use.  It is
+ subclassed by model type.
+
+ A DataModel is an Abstract Factory (Design Patterns, pg 87)
+ responsible for creating the data model output report.
+
+ A DataModel also provides the implementation of a particular data
+ likelihood model.  Exactly what needs to be provided is laid out in
+ the appropriate DLCalculator class.  For example, each realization of
+ a nucleotide based model must provide 4 functions:
+ RescaleLengths(), ComputeSiteDLs(), ComputeSubtreeDL(),
+ and ComputeCatDL();
+
+ The DataToLikes() function allows input data to be translated to
+ likelihoods in a model-appropriate way.
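+ (For a nucleotide model, for instance, the datum "A" would typically map to the
+ likelihood vector (1,0,0,0) over (A,C,G,T), with ambiguity codes and any per-base
+ error rate spreading weight across several bases.)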
+
+ Each instantiable DataModel should have a distinct value of GetModelType()
+ taken from the model_type enum.
+
+ NB:  Lamarc includes two similar, but distinct, ways of looking at
+ data analysis.  Classes such as DataType and its dependencies
+ (DLCalculators, DLCells, etc.) classify the input data based on
+ how it was generated (is it full DNA, SNPs, electrophoretic alleles,
+ etc?)  DataModel, in contrast, classifies the data based on what
+ model will be used to analyze it.  These hierarchies are distinct,
+ because the same kind of data can be analyzed using different
+ models (e.g. F84 versus GTR for DNA data) and the same model can
+ occasionally be used for different kinds of data (e.g. F84 works
+ for both DNA and SNPs).  However, they are far from independent,
+ since any given model will only cover a few data types.
+
+ Written by Jim Sloan, rewritten by Jon Yamato.
+ winter 2001--removed Dialog factory capability--Mary Kuhner
+ 2002/01/02 moved DataToLikes into this class--Mary Kuhner
+ 2002/07/08 added K-Allele model--Mary Kuhner
+ 2004/09/09 Added data simulation capability
+
+**************************************************************/
+
+#ifndef DLMODEL_H
+#define DLMODEL_H
+
+#include <cmath>
+#include <deque>
+#include <string>
+#include <vector>
+
+#include "vectorx.h"
+#include "constants.h"
+#include "datatype.h" //for the data_type enum
+#include "types.h"   // for Cell_ptr type
+
+class Locus;
+class Cell;
+class DataType;
+class DLCalculator;
+class Tree;
+
+typedef std::map<std::string, long> alleleMap;
+typedef std::vector<alleleMap> alleleVec;
+typedef alleleMap::iterator alleleIt;
+typedef alleleMap::const_iterator alleleConstIt;
+
+//------------------------------------------------------------------------------------
+// The Data model base class
+
+class DataModel
+{
+  private:
+    DataModel();        // undefined, as we want to create only
+    // models with good member data values
+
+  protected:
+    long            m_ncategories;
+    DoubleVec1d     m_catrates;
+    DoubleVec1d     m_catprobs;
+    double          m_acratio;
+    double          m_ucratio;
+    bool            m_normalize;
+    long            m_nbins;
+    long            m_nmarkers;
+    DoubleVec1d     m_catcells;  // working storage for category computations
+    double          m_relmurate; // relative mutation rate of genetic marker
+
+    // to adjust cat probabilities that don't sum to 1.0
+    void ScaleCatProbabilities();
+    void ScaleCatRates();
+
+    // to try to catch & fix impossible subtrees, used by ComputeSubtreeDL()
+    // this function will throw either a datalikenorm_error or a zero_dl_error!
+    void TryToNormalizeAndThrow(long posn, model_type mtype);
+
+    // Validation
+    virtual bool IsValidDataModel()      const;
+
+  public:
+    DataModel(long nmarkers,
+              long numCategories,
+              DoubleVec1d categoryRates,
+              DoubleVec1d categoryProbabilities,
+              double userAutoCorrelationValue,
+              bool doNormalize,
+              long nbins,
+              double relmurate);
+    virtual ~DataModel()                         {};
+    virtual DataModel* Clone() const = 0;
+
+    virtual model_type GetModelType() const = 0;
+    // Data input
+    string GetDataModelName()        const;
+    string GetDataModelShortName()   const;
+    virtual vector<double> DataToLikes(const string& datum, long marker = 0) const = 0;
+
+    // Access
+    long        GetNcategories()          const   {return m_ncategories;};
+    DoubleVec1d GetCatRates()             const   {return m_catrates;};
+    DoubleVec1d GetCatProbabilities()     const   {return m_catprobs;};
+    double      GetUserAcratio()          const   {return 1.0/m_acratio;};
+    virtual bool ShouldNormalize()        const   {return m_normalize;};
+    long        GetNbins()                const   {return m_nbins;};
+    virtual double GetRelMuRate()         const   {return m_relmurate;}
+
+    virtual void SetNormalize(bool norm)                 {m_normalize = norm;};
+
+    // SNP support function, used in setting up the SNP invarmodel
+    void SetNmarkers(long nmark)                         {m_nmarkers = nmark;};
+
+    // Likelihood Calculation
+    void ResetCatCells();   // this needs to be called before each tree evaluation
+    virtual double ComputeCatDL(long startmarker, long endmarker) = 0;
+    // Optimize the data model; the default implementation does nothing.
+    virtual bool OptimizeDataModel(Tree* tree, const Locus& locus) {return false;};
+    virtual void WriteAlpha(std::ofstream&, long, long, long) {};
+    virtual void ResetAlpha() {};
+    virtual void SetAlpha(double alphaVal, long rep, long chain) {};
+
+    // Factories
+
+    virtual StringVec1d      CreateDataModelReport() const;
+    virtual StringVec1d      ToXML(std::size_t nspaces) const;
+
+    // Simulation
+    // Select a vector of rates at random, weighted by their probabilities
+    //  and correlated according to m_acratio.
+    DoubleVec1d ChooseRandomRates(long nsites) const;
+
+    // Select an ancestral state at random
+    virtual DoubleVec1d ChooseAncestralState(long marker) = 0;
+
+    // Simulate data on a branch
+    virtual DoubleVec1d SimulateMarker(double branchlength, long whichmarker,
+                                       const DoubleVec1d & state) const = 0;
+
+    //Convert bin number to a data string
+    virtual string CellToData(Cell_ptr cell, long marker) const = 0;
+
+};
+
+//------------------------------------------------------------------------------------
+// Nucleotide model
+
+class NucModel : public DataModel
+{
+  private:
+    NucModel();         // undefined, as we want to create only
+    // models with good member data values
+  protected:
+    DoubleVec1d m_basefreqs;
+    bool m_freqsfromdata;
+    double  m_perBaseErrorRate;
+
+    // Validation
+    void NormalizeBaseFrequencies();
+    virtual bool IsValidDataModel()               const;
+
+  public:
+    NucModel(long nmarkers,
+             long numCategories,
+             DoubleVec1d categoryRates,
+             DoubleVec1d categoryProbabilities,
+             double userAutoCorrelationValue,
+             bool doNormalize,
+             double relMuRate,
+             double freqA,
+             double freqC,
+             double freqG,
+             double freqT,
+             bool freqsFromData,
+             double perBaseErrorRate);
+    virtual ~NucModel() {};
+
+    // Data input
+    static vector<double> StaticDataToLikes(const string& datum, double perBaseErrorRate);
+    virtual vector<double> DataToLikes(const string& datum, long marker = 0) const;
+
+    // Access
+    DoubleVec1d GetBaseFrequencies()  const {return m_basefreqs;};
+    bool        FreqsFromData()       const {return m_freqsfromdata;};
+    double      GetPerBaseErrorRate() const {return m_perBaseErrorRate;};
+
+    // Data Likelihood Calculation
+    virtual void    RescaleLengths(double length1, double length2)      = 0;
+    virtual double** ComputeSiteDLs(double** siteDL1, double** siteDL2)      = 0;
+    virtual double  ComputeSubtreeDL(Cell& rootdls, double** startmarker,
+                                     double** endmarker, long posn) = 0;
+
+    // Factories
+    virtual StringVec1d      CreateDataModelReport() const;
+    virtual StringVec1d      ToXML(std::size_t nspaces) const;
+    virtual DoubleVec1d      ChooseAncestralState(long marker);
+    virtual string CellToData(Cell_ptr cell, long marker) const;
+
+};
+
+//------------------------------------------------------------------------------------
+// Felsenstein '84 model
+
+class F84Model : public NucModel
+{
+  private:
+    F84Model();     // undefined
+    // User-defined parameters
+    double m_ttratio;
+
+    // Buffers for data likelihood calculation
+    bool        computed1,computed2;
+    double      freqar, freqcy, freqgr, freqty;
+    DoubleVec1d xcatrates, ycatrates;
+    DoubleVec1d expA1, expA2, expB1, expB2, expC1, expC2;
+    // we own the following:
+    double **basefreqarray;          // used in memcpy(), ComputeSiteDL()
+    // this dimensionality is needed by memcpy!
+    double **daughter1, **daughter2; // used in a memcpy(), ComputeCatDL()
+    double **target;                 // used as return storage, ComputeSiteDL()
+    //    which is then used in a memcpy(),
+    //    NucCell::SetSiteDLs()
+    DoubleVec2d catlikes;           // dim: m_nmarkers * m_ncategories
+
+    void EmptyBuffers();
+    void AllocateBuffers();
+    void CopyMembers(const F84Model& src);
+    void CopyBuffers(const F84Model& src);
+
+    // helper functions for RescaleLengths(double, double);
+    void     RescaleLength1(double length1);
+    void     RescaleLength2(double length2);
+
+  protected:
+    // Validation
+    virtual bool IsValidDataModel()               const;
+
+  public:
+    F84Model(long nmarkers,
+             long numCategories,
+             DoubleVec1d categoryRates,
+             DoubleVec1d categoryProbabilities,
+             double userAutoCorrelationValue,
+             bool doNormalize,
+             double relMuRate,
+             double freqA,
+             double freqC,
+             double freqG,
+             double freqT,
+             double ttRatio,
+             bool calculateFrequenciesFromData,
+             double perBaseErrorRate);
+    virtual ~F84Model();
+    virtual DataModel* Clone() const;
+
+    F84Model& operator=(const F84Model& src);
+    F84Model(const F84Model& src);
+
+    virtual model_type GetModelType() const         { return F84; };
+
+    // Access
+    void SetTTratio(double tr);
+
+    double      GetTTratio()          const {return m_ttratio;};
+
+    // Data Likelihood Calculation
+    virtual void Finalize();
+    virtual void     RescaleLengths(double length1, double length2);
+    virtual double** ComputeSiteDLs(double** siteDL1, double** siteDL2);
+    virtual double   ComputeSubtreeDL(Cell& rootdls, double** startmarker,
+                                      double** endmarker, long posn);
+    virtual double   ComputeCatDL(long startmarker, long endmarker);
+
+    // Factories
+    virtual StringVec1d  CreateDataModelReport() const;
+    virtual StringVec1d  ToXML(std::size_t nspaces) const;
+
+    virtual DoubleVec1d  SimulateMarker(double branchlength,
+                                        long whichmarker, const DoubleVec1d & state) const;
+};
+
+//------------------------------------------------------------------------------------
+// GTR model
+
+class GTRModel : public NucModel
+{
+  private:
+    GTRModel();     // undefined
+    // user supplied info
+    double AC, AG, AT, CG, CT, TG;
+
+    // stuff derived from user data and set up in Finalize
+    DoubleVec1d eigvals;             // eigen values
+    DoubleVec2d eigvecs1, eigvecs2;  // eigvecs1 is the simple matrix of
+    // eigen vectors.
+    // eigvecs2 is the inverted and transposed
+    // eigvecs1.
+
+    // used to communicate between RescaleLengths() and ComputeSiteDLs()
+    std::deque<bool> computed;
+    DoubleVec4d pchange;             // transition probs scaled by length
+    // dim: 2 X m_ncategories X rate matrix
+
+    // used to communicate between ComputeSubtreeDL() and ComputeCatDL()
+    DoubleVec2d catlikes;            // dim: m_nmarkers X m_ncategories
+
+    // we own the following, used by ComputeSiteDLs():
+    double **basefreqarray;         // used in memcpy(), ComputeSiteDL()
+    // this dimensionality is needed by memcpy!
+    double ***daughter;             // used in a memcpy(), ComputeSiteDL()
+    double **target;                // used as return storage, ComputeSiteDL()
+    //    which is then used in a memcpy(),
+    //    NucCell::SetSiteDLs()
+
+    // helpers for ctor's and operator=
+    void AllocateBuffers();
+    void EmptyBuffers();
+    void CopyMembers(const GTRModel& src);
+
+    // pre-transposed dot product, used by GTR for speed
+    void GTRDotProduct(const DoubleVec2d& first, const DoubleVec2d& second,
+                       DoubleVec2d& answer);
+
+    // private validity checkers
+    void BarfOnBadGTRRates(const DoubleVec1d& rts) const;
+
+    // scratch pad matrix to avoid allocating lots of DoubleVec2ds
+    DoubleVec2d scratch;
+
+  protected:
+    // Validation
+    virtual bool IsValidDataModel()               const;
+
+  public:
+    GTRModel(long nmarkers,
+             long numCategories,
+             DoubleVec1d categoryRates,
+             DoubleVec1d categoryProbabilities,
+             double userAutoCorrelationValue,
+             bool doNormalize,
+             double relMuRate,
+             double freqA,
+             double freqC,
+             double freqG,
+             double freqT,
+             double freqAC,
+             double freqAG,
+             double freqAT,
+             double freqCG,
+             double freqCT,
+             double freqTG,
+             double perBaseErrorRate);
+    virtual ~GTRModel();
+    virtual DataModel* Clone() const;
+    GTRModel(const GTRModel& src);
+    GTRModel& operator=(const GTRModel& src);
+
+    virtual model_type GetModelType() const         { return GTR; };
+
+    // Access
+    void SetRates(const DoubleVec1d& rts);
+    DoubleVec1d GetRates() const;
+
+    // Data Likelihood Calculation
+    virtual void Finalize();
+    virtual void     RescaleLengths(double length1, double length2);
+    virtual double** ComputeSiteDLs(double** siteDL1, double** siteDL2);
+    virtual double   ComputeSubtreeDL(Cell& rootdls, double** startmarker,
+                                      double** endmarker, long posn);
+    virtual double   ComputeCatDL(long startmarker, long endmarker);
+
+    // Factories
+    virtual StringVec1d  CreateDataModelReport() const;
+    virtual StringVec1d  ToXML(std::size_t nspaces) const;
+
+    virtual DoubleVec1d  SimulateMarker(double branchlength,
+                                        long whichmarker, const DoubleVec1d & state) const;
+};
+
+//------------------------------------------------------------------------------------
+// Allele model
+
+class AlleleModel : public DataModel
+{
+  private:
+    AlleleModel();  // undefined
+  protected:
+    virtual ~AlleleModel() {};
+    AlleleModel(
+        long nmarkers,
+        long numCategories,
+        DoubleVec1d categoryRates,
+        DoubleVec1d categoryProbabilities,
+        double userAutoCorrelationValue,
+        bool doNormalize,
+        long nbins,
+        double relMuRate);
+
+    DoubleVec2d m_likes;   // likelihoods:  marker x category
+    LongVec1d   m_bincounts;  // per marker, total number of usable bins
+
+  public:
+
+    virtual DoubleVec1d RescaleLength(double length);
+    virtual double ComputeCatDL(long startmarker, long endmarker);
+    virtual void ComputeSiteDLs(Cell_ptr child1, Cell_ptr child2,
+                                Cell_ptr thiscell, const DoubleVec1d& vv1,
+                                const DoubleVec1d& vv2, long marker) = 0;
+    virtual double ComputeSubtreeDLs(Cell& rootdls,  double** startmarker,
+                                     double** endmarker, long posn) = 0;
+    virtual DoubleVec1d ChooseAncestralState(long marker);
+    virtual string CellToData(Cell_ptr cell, long marker) const = 0;
+};
+
+//------------------------------------------------------------------------------------
+// Step wise model
+
+class StepwiseModel : public AlleleModel
+{
+  private:
+
+    StepwiseModel();    // undefined
+
+    LongVec1d m_offsets;    // per marker, difference of least allele from 0
+    LongVec1d m_threshhold; // per marker, threshold for allelic differences
+    vector<DoubleVec2d> m_steps; // pre-computed constants for mutations of
+    // various sizes: nbins x nbins.
+    // Per marker, because it uses m_threshhold.
+    long m_allowance;      // extra bins to each side of actual markers
+
+    // helper functions for likelihood
+    // 2003/11/07 erynes: Changing member variable "threshhold" from hardcoded
+    //                    long to marker-specific LongVec1d required the passing
+    //                    of the current marker to these functions.
+    double Probability(double t, long diff, long marker) const;
+    void CalculateSteps(long marker);
+    void Initialize(const StringVec2d& uniqueAlleles);
+
+  public:
+    // accepting compiler constructed copy-ctor and operator=
+
+    StepwiseModel(
+        long nmarkers,
+        const StringVec2d& uniqueAlleles,
+        long numCategories,
+        DoubleVec1d categoryRates,
+        DoubleVec1d categoryProbabilities,
+        double userAutoCorrelationValue,
+        bool doNormalize,
+        double relMuRate
+        );
+    virtual ~StepwiseModel() {};
+    virtual DataModel* Clone() const;
+
+    virtual model_type GetModelType() const         { return Stepwise; };
+
+    virtual vector<double> DataToLikes(const string& datum, long marker = 0) const;
+
+    // Data Likelihood Calculation
+    virtual void ComputeSiteDLs(Cell_ptr child1, Cell_ptr child2, Cell_ptr thiscell,
+                                const DoubleVec1d& vv1, const DoubleVec1d& vv2, long marker);
+    virtual double ComputeSubtreeDLs(Cell& rootdls,
+                                     double** startmarker, double** endmarker, long posn);
+
+    // Factories
+    virtual StringVec1d CreateDataModelReport() const;
+
+    virtual DoubleVec1d SimulateMarker(double branchlength,
+                                       long whichmarker, const DoubleVec1d & state) const;
+    virtual string CellToData(Cell_ptr cell, long marker) const;
+};
+
+//------------------------------------------------------------------------------------
+// Brownian model
+
+class BrownianModel : public AlleleModel
+{
+  private:
+    BrownianModel();    // private
+
+  public:
+    BrownianModel(
+        long nmarkers,
+        long numCategories,
+        DoubleVec1d categoryRates,
+        DoubleVec1d categoryProbabilities,
+        double userAutoCorrelationValue,
+        bool doNormalize,
+        double relMuRate
+        );
+    virtual ~BrownianModel()   {};
+    virtual DataModel* Clone() const;
+
+    virtual bool ShouldNormalize() const {return false;};
+    virtual void SetNormalize(bool norm);
+
+    virtual model_type GetModelType() const         { return Brownian; };
+    virtual vector<double> DataToLikes(const string& datum, long marker = 0) const;
+
+    // Data Likelihood Calculation
+    virtual void ComputeSiteDLs(Cell_ptr child1, Cell_ptr child2, Cell_ptr thiscell,
+                                const DoubleVec1d& vv1, const DoubleVec1d& vv2, long marker);
+    virtual double ComputeSubtreeDLs(Cell& rootdls,
+                                     double** startmarker, double** endmarker, long posn);
+
+    // Factories
+    virtual StringVec1d CreateDataModelReport() const;
+
+    virtual DoubleVec1d SimulateMarker(double branchlength,
+                                       long whichmarker, const DoubleVec1d & state) const;
+    virtual string CellToData(Cell_ptr cell, long marker) const;
+};
+
+//------------------------------------------------------------------------------------
+// K-Allele model
+
+class KAlleleModel : public AlleleModel
+{
+  private:
+    KAlleleModel();   // undefined
+    std::vector< std::map<string, long> > m_allelemaps; //one map per marker
+
+    // helper functions for likelihood
+    double probMathFunc( double ut, double coef);
+    void Initialize(const StringVec2d& uniqueAlleles);
+
+  public:
+    KAlleleModel(
+        long nmarkers,
+        const StringVec2d& unknownHaplotypeAlleles,
+        long numCategories,
+        DoubleVec1d categoryRates,
+        DoubleVec1d categoryProbabilities,
+        double userAutoCorrelationValue,
+        bool doNormalize,
+        double relMuRate
+        );
+
+    virtual ~KAlleleModel() {};
+    virtual DataModel* Clone() const;
+
+    virtual bool       ShouldNormalize()    const { return m_normalize; };
+    virtual model_type GetModelType()       const { return KAllele; };
+
+    virtual vector<double> DataToLikes(const string& datum, long marker = 0) const;
+
+    // Data Likelihood Calculation
+    virtual void ComputeSiteDLs(Cell_ptr child1, Cell_ptr child2, Cell_ptr thiscell,
+                                const DoubleVec1d& vv1, const DoubleVec1d& vv2, long marker);
+    virtual double ComputeSubtreeDLs(Cell& rootdls,
+                                     double** startmarker, double** endmarker, long posn);
+
+    // Factories
+    virtual StringVec1d CreateDataModelReport() const;
+    virtual StringVec1d ToXML(std::size_t nspaces) const;
+
+    virtual DoubleVec1d SimulateMarker(double branchlength,
+                                       long whichmarker,
+                                       const DoubleVec1d& state) const;
+    virtual string CellToData(Cell_ptr cell, long marker) const;
+};
+
+//------------------------------------------------------------------------------------
+// MixedKS model
+
+class MixedKSModel : public AlleleModel
+{
+  private:
+    MixedKSModel();   // undefined
+
+    LongVec1d m_offsets;    // per marker, difference of least allele from 0
+    LongVec1d m_threshhold; // per marker, threshold for allelic differences
+    vector<DoubleVec2d> m_steps; // pre-computed constants for mutations of
+    // various sizes: nbins x nbins.
+    // Per marker, because it uses m_threshhold.
+    long m_allowance;      // extra bins to each side of actual markers
+
+    // helper functions for likelihood
+    // 2003/11/07 erynes: Changing member variable "threshhold" from hardcoded
+    //                    long to marker-specific LongVec1d required the passing
+    //                    of the current marker to these functions.
+    double Probability(double t, long diff, long marker) const;
+    void CalculateSteps(long marker);
+
+    virtual void Initialize(const StringVec2d& uniqueAlleles);
+  public:
+    // accepting compiler constructed copy-ctor and operator=
+
+    MixedKSModel(
+        long nmarkers,
+        const StringVec2d& uniqueAlleles,
+        long numCategories,
+        DoubleVec1d categoryRates,
+        DoubleVec1d categoryProbabilities,
+        double userAutoCorrelationValue,
+        bool doNormalize,
+        double relMuRate,
+        double alphaVal,
+        bool doOptimization);
+
+    virtual ~MixedKSModel() {};
+    virtual DataModel* Clone() const;
+
+    virtual model_type GetModelType() const { return MixedKS; };
+
+    virtual vector<double> DataToLikes(const string& datum, long marker = 0) const;
+
+    // Data Likelihood Calculation
+    virtual void ComputeSiteDLs(Cell_ptr child1,Cell_ptr child2,Cell_ptr thiscell,
+                                const DoubleVec1d& vv1, const DoubleVec1d& vv2,
+                                long marker);
+    virtual double ComputeSubtreeDLs(Cell& rootdls,
+                                     double** startmarker, double** endmarker,
+                                     long posn);
+
+    // Factories
+    virtual StringVec1d CreateDataModelReport() const;
+    virtual StringVec1d ToXML(std::size_t nspaces) const;
+
+    double GetAlpha() const { return m_alpha; };
+
+    void SetAlpha(double alphaVal);
+    virtual void SetAlpha(double alphaVal, long rep, long chain);
+    virtual void ResetAlpha();
+
+    virtual DoubleVec1d SimulateMarker(double branchlength, long whichmarker,
+                                       const DoubleVec1d& state) const;
+    virtual bool OptimizeDataModel(Tree* tree, const Locus& locus);
+    virtual void WriteAlpha(std::ofstream&, long loc, long rep, long chain);
+    virtual string CellToData(Cell_ptr cell, long marker) const;
+
+  private:
+
+    // helper function for the likelihood, as used in the KAllele model
+    double probMathFunc( double ut, double coef);
+    double m_alpha;
+    double m_origAlpha;
+    double m_beta;        //beta = 1 - alpha;
+    bool m_isOpt;         //whether to optimize
+    DoubleVec2d m_alphaRpt;
+};
+
+#endif // DLMODEL_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/funcMax.cpp b/src/datalike/funcMax.cpp
new file mode 100644
index 0000000..b607da5
--- /dev/null
+++ b/src/datalike/funcMax.cpp
@@ -0,0 +1,164 @@
+// $Id: funcMax.cpp,v 1.10 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <iostream>
+#include <fstream>
+
+#include "errhandling.h"
+#include "funcMax.h"
+
+using namespace std;
+
+FMState::FMState(int _max_iters,
+                 double _increment,
+                 double _threshold,
+                 double _leftLimit,
+                 double _rightLimit,
+                 double _initX
+    )
+{
+    max_iters = _max_iters;
+    increment = _increment;
+    threshold = _threshold;
+    leftLimit = _leftLimit;
+    rightLimit = _rightLimit;
+    initX = _initX;
+    //pf = _pf;
+}
+
+FuncMax::FuncMax(MixedKSModel& _mksModel,
+                 Tree* _tree,
+                 const Locus& _locus)
+    :mksModel(_mksModel),
+     dlcalc(_locus.GetDLCalc()),
+     tree(_tree),
+     locus(_locus),
+     m_moving(_locus.IsMoving())
+{
+}
+
+void FuncMax::setState(FMState fms)
+{
+    max_iters = fms.max_iters;
+    increment = fms.increment;
+    threshold = fms.threshold;
+    leftLimit = fms.leftLimit;
+    rightLimit = fms.rightLimit;
+    initX = fms.initX;
+    pf = fms.pf;
+}
+
+FMState FuncMax::getCurState()
+{
+    FMState fms(max_iters,
+                increment,
+                threshold,
+                leftLimit,
+                rightLimit,
+                curX      //instead of initX
+        );
+    return fms;
+}
+
+double FuncMax::getCurX()
+{
+    return curX;
+}
+
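+// Bisection search for the alpha that maximizes the data likelihood on
+// [leftLimit, rightLimit]: at each step, test whether the likelihood is still increasing
+// at the midpoint (isInc) and keep the half that should contain the maximum, stopping
+// when the bracket is narrower than 'threshold' or after max_iters iterations.  This
+// assumes the likelihood is unimodal in alpha.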
+void FuncMax::run()
+{
+    char leftMark, rightMark;
+
+    for(int i=0; i<max_iters; i++)
+    {
+#ifdef DEBUG_FILE
+        *fs << "[org] ";
+#endif
+        curX = (leftLimit+rightLimit)/2;
+        if(isInc(curX, rightLimit))
+        {
+            leftLimit = curX;
+            leftMark = '*';
+            rightMark = ' ';
+        }
+        else
+        {
+            rightLimit = curX;
+            leftMark = ' ';
+            rightMark = '*';
+        }
+
+#ifdef DEBUG_FILE
+        *fs << "[" << leftMark
+            << setw(10) << leftLimit << "  " << rightMark
+            << setw(10) << rightLimit << "]" << endl;
+#endif
+        if(fabs(rightLimit-leftLimit) < threshold)
+        {
+#ifdef DEBUG_FILE
+            *fs << "Stop since reached threshold." << endl;
+#endif
+            return;
+        }
+    }
+#ifdef DEBUG_FILE
+    *fs << "Stop after " << max_iters << " iterations." << endl;
+#endif
+}
+
+double FuncMax::eval(double x)
+{
+    //set alpha
+    mksModel.SetAlpha(x);
+    tree->GetTimeList().SetAllUpdateDLs();
+    double y=EXPMIN;
+    try
+    {
+        y=dlcalc->Calculate(*tree, locus, m_moving);
+    }
+    catch (datalikenorm_error e)
+    {
+        //We turned on normalization.  This should only happen once.
+        tree->GetTimeList().SetAllUpdateDLs();
+        try
+        {
+            y=dlcalc->Calculate(*tree, locus, m_moving);
+        }
+        catch (zero_dl_error e)
+        {
+            //We'll say that this is due to a very bad alpha.
+            return EXPMIN;
+        }
+    }
+    catch (zero_dl_error e)
+    {
+        //We'll say that this is due to a very bad alpha.
+        return EXPMIN;
+    }
+#ifdef DEBUG_FILE
+    static int i = 0;
+    *fs << setw(5) << i++;
+    *fs << "x " << setw(10) << x;
+    *fs << "y " << setw(10) << y;
+#endif
+    return y;
+}
+
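+// Returns true if the likelihood increases when stepping from x to min(x + increment, max),
+// i.e., a one-sided finite-difference check of the slope's sign.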
+bool FuncMax::isInc(double x, double max)
+{
+    double y = eval(x);
+#ifdef DEBUG_FILE
+    *fs << "[inc] ";
+#endif
+    double _y = eval(min(x+increment, max));
+    return _y > y;
+}
+
+//____________________________________________________________________________________
diff --git a/src/datalike/funcMax.h b/src/datalike/funcMax.h
new file mode 100644
index 0000000..922cb5d
--- /dev/null
+++ b/src/datalike/funcMax.h
@@ -0,0 +1,78 @@
+// $Id: funcMax.h,v 1.8 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef FUNCMAX_H
+#define FUNCMAX_H
+
+#include <iomanip>
+#include <fstream>
+#include <cmath>
+
+#include "dlcalc.h"
+#include "dlmodel.h"
+#include "locus.h"
+#include "tree.h"
+
+class MixedKSModel;
+
+typedef double (*dblpf) (double);
+
+struct FMState
+{
+    int max_iters;
+    double increment;
+    double threshold;
+    double leftLimit;
+    double rightLimit;
+    double initX;
+    dblpf pf;
+
+    FMState(int _max_iters,
+            double _increment,
+            double _threshold,
+            double _leftLimit,
+            double _rightLimit,
+            double _initX
+        );
+};
+
+class FuncMax
+{
+    int max_iters;
+    double increment;
+    double threshold;
+    double leftLimit;
+    double rightLimit;
+    double initX;
+    dblpf pf;
+
+    MixedKSModel& mksModel;
+    std::ofstream* fs;
+    DLCalc_ptr dlcalc;
+    Tree* tree;
+    const Locus& locus;
+    bool m_moving;
+
+  public:
+    FuncMax(MixedKSModel& _mksModel, Tree* tree, const Locus& locus);
+    void setState(FMState fms);
+    void run();
+    FMState getCurState();
+    double getCurX();
+
+  private:
+    double curX;
+    double eval(double x);
+    bool isInc(double x, double max);
+};
+
+#endif  // FUNCMAX_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/haplotypes.cpp b/src/datalike/haplotypes.cpp
new file mode 100644
index 0000000..bdcf940
--- /dev/null
+++ b/src/datalike/haplotypes.cpp
@@ -0,0 +1,359 @@
+// $Id: haplotypes.cpp,v 1.13 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2006  Lucian Smith, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>                     // debug only
+
+#include "errhandling.h"
+#include "haplotypes.h"
+#include "locus.h"
+#include "locuscell.h"
+#include "registry.h"
+#include "region.h"
+#include "stringx.h"
+#include "xml_strings.h"
+
+using std::multiset;
+using std::cerr;
+using std::endl;
+
+//------------------------------------------------------------------------------------
+
+Haplotypes::Haplotypes(long regnum, string lname)
+    : m_regionnum(regnum),
+      m_locusname(lname),
+      m_haplotype_alleles(),
+      m_penetrances(),
+      m_haplotype_dlcells(),
+      m_current_hapindex(FLAGLONG)
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+Haplotypes::Haplotypes(Haplotypes oldhaps, bool clear)
+    : m_regionnum(oldhaps.m_regionnum),
+      m_locusname(oldhaps.m_locusname),
+      m_haplotype_alleles(),
+      m_penetrances(),
+      m_haplotype_dlcells(),
+      m_current_hapindex(FLAGLONG)
+{
+    assert(clear == true);
+}
+
+//------------------------------------------------------------------------------------
+
+void Haplotypes::ConvertAllelesToDLCells()
+{
+    assert(m_haplotype_alleles.size() > 0);
+    assert(m_haplotype_alleles.size() == m_penetrances.size());
+    assert(m_haplotype_dlcells.size() == 0);
+
+    for (unsigned long hap=0; hap<m_haplotype_alleles.size(); hap++)
+    {
+        vector<LocusCell> resolution;
+        for (unsigned long cell=0; cell<m_haplotype_alleles[hap].size(); cell++)
+        {
+            StringVec1d onecell;
+            onecell.push_back(m_haplotype_alleles[hap][cell]);
+            const Locus& locus=registry.GetDataPack().GetRegion(m_regionnum).GetLocus(m_locusname);
+            resolution.push_back(locus.GetDataTypePtr()->CreateInitializedDLCell(locus, onecell));
+        }
+        //Now we multiply the final LocusCell by the appropriate penetrance.  We
+        // could choose any of them if we wanted to, but choose the last so that
+        // the CollapseHaplotypeDLs() routine doesn't have to worry about
+        // penetrances.
+        resolution[resolution.size()-1] *= m_penetrances[hap];
+        m_haplotype_dlcells.push_back(resolution);
+    }
+    // Combine haplotype resolutions which share their identity at a tip
+    //  by simply adding the DLCells at the other tip.
+    CollapseHaplotypeDLs();
+}
+
+//------------------------------------------------------------------------------------
+
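+// Merge haplotype resolutions that are identical at every tip but the last by summing
+// their last-tip DLCells.  This is safe because ConvertAllelesToDLCells() folded each
+// resolution's penetrance into its last cell, so the sum weights the alternatives correctly.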
+void Haplotypes::CollapseHaplotypeDLs()
+{
+    vector<vector<LocusCell> >::iterator dlList = m_haplotype_dlcells.begin();
+    for (;dlList != m_haplotype_dlcells.end();dlList++)
+    {
+        vector<vector<LocusCell> >::iterator dlComp = dlList;
+        dlComp++;
+        for (;dlComp != m_haplotype_dlcells.end();)
+        {
+            bool allbutlastmatch = true;
+            //Actually, the last can match, too, but that should never happen.
+            for (size_t allele=0; allele<((*dlList).size()-1); allele++)
+            {
+                if (!((*dlList)[allele] == (*dlComp)[allele]))
+                {
+                    allbutlastmatch = false;
+                }
+            }
+            if (allbutlastmatch)
+            {
+                //We can collapse them into one DLcell
+                (*dlList)[(*dlList).size()-1] += (*dlComp)[(*dlComp).size()-1];
+                //Now delete the compared one.
+                dlComp = m_haplotype_dlcells.erase(dlComp);
+            }
+            else
+            {
+                dlComp++;
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Haplotypes::AddHaplotype(StringVec1d alleles, double penetrance)
+{
+    if (m_haplotype_alleles.size() > 0)
+    {
+        if (alleles.size() != m_haplotype_alleles[0].size())
+        {
+            string msg = "The haplotype resolution \"";
+            for (unsigned long i=0; i<alleles.size(); i++)
+            {
+                msg += alleles[i] + " ";
+            }
+            msg += "\" has a different number of alleles than the haplotype "
+                "resolution \"";
+            for (unsigned long i=0; i<m_haplotype_alleles[0].size(); i++)
+            {
+                msg += m_haplotype_alleles[0][i] + " ";
+            }
+            msg += "\".  Remember that spaces are not allowed in allele names.  Also, if you have samples"
+                " with multiple ploidies (if you have samples from an X chromosome, say) each phenotype must match"
+                " a set of genotypes of the same ploidy -- even if the phenotype of 'X0' matches the phenotype"
+                " of 'XX', they must be defined separately.";
+            throw data_error(msg);
+        }
+    }
+    for (size_t hap=0; hap<m_haplotype_alleles.size(); hap++)
+    {
+        if (m_haplotype_alleles[hap] == alleles)
+        {
+            return;
+        }
+    }
+    m_haplotype_alleles.push_back(alleles);
+    m_penetrances.push_back(penetrance);
+}
+
+//------------------------------------------------------------------------------------
+
+void Haplotypes::AddHaplotype(multiset<string> alleles, double penetrance)
+{
+    StringVec2d allAlleles = SetToVecs(alleles);
+    for (size_t alleleVec=0; alleleVec != allAlleles.size(); alleleVec++)
+    {
+        AddHaplotype(allAlleles[alleleVec], penetrance);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+vector<LocusCell> Haplotypes::ChooseNewHaplotypes()
+{
+    if (m_haplotype_dlcells.size() == 0)
+    {
+        ConvertAllelesToDLCells();
+    }
+    if (m_haplotype_dlcells.size() == 1)
+    {
+        return m_haplotype_dlcells[0];
+    }
+    //Choose a new haplotype index
+    long newindex = m_current_hapindex;
+    while (newindex == m_current_hapindex)
+    {
+        newindex = registry.GetRandom().Long(m_haplotype_dlcells.size());
+    }
+
+    m_current_hapindex = newindex;
+    return m_haplotype_dlcells[newindex];
+}
+
+//------------------------------------------------------------------------------------
+
+vector<LocusCell> Haplotypes::ChooseRandomHaplotypes()
+{
+    if (m_haplotype_dlcells.size() == 0)
+    {
+        ConvertAllelesToDLCells();
+    }
+    //Choose a new haplotype index
+    m_current_hapindex = registry.GetRandom().Long(m_haplotype_dlcells.size());
+    return m_haplotype_dlcells[m_current_hapindex];
+}
+
+//------------------------------------------------------------------------------------
+
+vector<LocusCell> Haplotypes::ChooseFirstHaplotypes()
+{
+    if (m_haplotype_dlcells.size() == 0)
+    {
+        ConvertAllelesToDLCells();
+    }
+    //Choose haplotype zero.
+    m_current_hapindex = 0;
+    return m_haplotype_dlcells[m_current_hapindex];
+}
+
+//------------------------------------------------------------------------------------
+
+vector<LocusCell> Haplotypes::ChooseNextHaplotypes()
+{
+    assert(m_haplotype_dlcells.size() != 0);
+    //Choose the next haplotype index
+    if (static_cast<unsigned long>(m_current_hapindex) == m_haplotype_dlcells.size()-1)
+    {
+        //Already at the last one.
+        vector<LocusCell> blankcells;
+        return blankcells;
+    }
+    m_current_hapindex++;
+    return m_haplotype_dlcells[m_current_hapindex];
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Haplotypes::GetAlleles() const
+{
+    StringVec1d retvec;
+    for (unsigned long res=0; res<m_haplotype_alleles.size(); res++)
+    {
+        for (unsigned long allele=0; allele<m_haplotype_alleles[res].size(); allele++)
+        {
+            retvec.push_back(m_haplotype_alleles[res][allele]);
+        }
+    }
+    return retvec;
+}
+
+//------------------------------------------------------------------------------------
+
+string Haplotypes::GetMarkerData() const
+{
+    assert(m_haplotype_alleles.size() == m_penetrances.size());
+    string markerdata;
+    for (unsigned long hapres=0; hapres<m_haplotype_alleles.size(); hapres++)
+    {
+        markerdata += ToString(m_haplotype_alleles[hapres]) + "  ";
+        if (m_penetrances[hapres] < 1)
+        {
+            markerdata += "(" + ToString(m_penetrances[hapres]) + ") ";
+        }
+    }
+    return markerdata;
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Haplotypes::GetHaplotypesXML(long nspaces) const
+{
+    string spaces(nspaces, ' ');
+    string spaces2(nspaces+2, ' ');
+
+    StringVec1d retvec;
+    for (unsigned long hap=0; hap<m_penetrances.size(); hap++)
+    {
+        retvec.push_back(spaces + MakeTag(xmlstr::XML_TAG_HAPLOTYPES));
+        retvec.push_back(spaces2 + MakeTag(xmlstr::XML_TAG_PENETRANCE) + " "
+                         + ToString(m_penetrances[hap]) + " "
+                         + MakeCloseTag(xmlstr::XML_TAG_PENETRANCE));
+        retvec.push_back(spaces2 + MakeTag(xmlstr::XML_TAG_ALLELES)
+                         + ToString(m_haplotype_alleles[hap]) + " "
+                         + MakeCloseTag(xmlstr::XML_TAG_ALLELES));
+        retvec.push_back(spaces + MakeCloseTag(xmlstr::XML_TAG_HAPLOTYPES));
+    }
+    return retvec;
+}
+
+//------------------------------------------------------------------------------------
+
+bool Haplotypes::MultipleHaplotypes()  const
+{
+    if (m_haplotype_dlcells.size() > 0)
+    {
+        return (m_haplotype_dlcells.size() > 1); //we're in phase 2
+    }
+    else
+    {
+        return (m_haplotype_alleles.size() > 1); //still in phase 1
+    }
+}
+
+//------------------------------------------------------------------------------------
+
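+// Recursively expand a multiset of allele strings into the list of its orderings: remove
+// each element in turn, enumerate the orderings of the remainder, and append the removed
+// element to each.  Duplicate orderings arising from repeated alleles are filtered out
+// later by AddHaplotype().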
+StringVec2d Haplotypes::SetToVecs(multiset<string> stringSet) const
+{
+    StringVec2d retvecs;
+    for (multiset<string>::iterator newstring=stringSet.begin();
+         newstring != stringSet.end(); newstring++)
+    {
+        multiset<string> partialSet = stringSet;
+        partialSet.erase(partialSet.find(*newstring));
+        if (partialSet.size() == 0)
+        {
+            StringVec1d strings;
+            strings.push_back(*newstring);
+            retvecs.push_back(strings);
+            return retvecs;
+        }
+        StringVec2d partialVecs = SetToVecs(partialSet);
+        for (StringVec2d::iterator partialVec = partialVecs.begin();
+             partialVec != partialVecs.end(); partialVec++)
+        {
+            (*partialVec).push_back(*newstring);
+            retvecs.push_back(*partialVec);
+        }
+    }
+    return retvecs;
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Haplotypes::PrintCellsAndAlleles() const
+{
+    cerr << "Here are the original strings:" << endl;
+    for (size_t set=0; set<m_haplotype_alleles.size(); set++)
+    {
+        cerr << ToString(m_haplotype_alleles[set]) << endl;
+    }
+    cerr << "And here are the corresponding DLCells.  They should match!" << endl;
+    for (size_t set=0; set<m_haplotype_dlcells.size(); set++)
+    {
+        for (size_t allele=0; allele<m_haplotype_dlcells[set].size(); allele++)
+        {
+            cerr << registry.GetDataPack().GetRegion(m_regionnum).GetLocus(m_locusname).GetDataModel()->
+                CellToData(m_haplotype_dlcells[set][allele][0], 0)
+                 << " ";
+        }
+    }
+    cerr << endl;
+    cerr << "And finally, the DLCells as raw data." << endl;
+    for (size_t set=0; set<m_haplotype_dlcells.size(); set++)
+    {
+        for (size_t allele=0; allele<m_haplotype_dlcells[set].size(); allele++)
+        {
+            cerr << m_haplotype_dlcells[set][allele][0]->DLsToString(0,0);
+        }
+    }
+    cerr << endl;
+}
+
+//____________________________________________________________________________________
diff --git a/src/datalike/haplotypes.h b/src/datalike/haplotypes.h
new file mode 100644
index 0000000..32efe15
--- /dev/null
+++ b/src/datalike/haplotypes.h
@@ -0,0 +1,64 @@
+// $Id: haplotypes.h,v 1.10 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2006  Lucian Smith, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// This file defines the class that stores haplotype-specific
+// information.
+
+#ifndef HAPLOTYPES_H
+#define HAPLOTYPES_H
+
+#include <set>
+#include "vectorx.h"
+
+class LocusCell;
+
+class Haplotypes
+{
+  private:
+    Haplotypes(); //undefined
+    long m_regionnum;
+    string m_locusname;
+    StringVec2d  m_haplotype_alleles; //Phase 1
+    DoubleVec1d  m_penetrances;
+
+    vector<vector<LocusCell> > m_haplotype_dlcells; //Phase 2
+    long m_current_hapindex;
+
+    void ConvertAllelesToDLCells();
+    void CollapseHaplotypeDLs();
+    StringVec2d SetToVecs(std::multiset<std::string> stringSet) const;
+
+  public:
+    Haplotypes(long regnum, string lname);
+    Haplotypes(Haplotypes hap, bool clear);
+    ~Haplotypes() {};
+    //We accept the default for:
+    //Haplotype& operator=(const Haplotype& src);
+    //Haplotype(const Haplotype& src);
+
+    void AddHaplotype(StringVec1d alleles, double penetrance);
+    void AddHaplotype(std::multiset<std::string> alleles, double penetrance);
+    vector<LocusCell> ChooseNewHaplotypes();
+    vector<LocusCell> ChooseRandomHaplotypes();
+    vector<LocusCell> ChooseFirstHaplotypes();
+    vector<LocusCell> ChooseNextHaplotypes();
+    StringVec1d GetAlleles() const; //phase 1
+    string GetMarkerData() const; //phase 3 (output)
+    StringVec1d GetHaplotypesXML(long nspaces) const; //menuinfile XML
+    bool MultipleHaplotypes() const;
+
+    // Debugging function.
+    void PrintCellsAndAlleles() const;
+
+};
+
+#endif // HAPLOTYPES_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/locus.cpp b/src/datalike/locus.cpp
new file mode 100644
index 0000000..b7ce4a5
--- /dev/null
+++ b/src/datalike/locus.cpp
@@ -0,0 +1,1285 @@
+// $Id: locus.cpp,v 1.82 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <functional>                   // for Locus::CountNNucleotides()
+#include <fstream>
+#include <iostream>                     // debugging
+
+#include "constants.h"
+#include "dlcalc.h"
+#include "dlmodel.h"
+#include "force.h"                      // for TipData::GetBranchPartitions()
+#include "individual.h"                 // for use of Individuals in setting up Locus objects
+#include "locus.h"
+#include "mathx.h"                      // for IsEven
+#include "rangex.h"                     // LS DEBUG SIM
+#include "registry.h"
+#include "runreport.h"
+#include "stringx.h"
+#include "xml_strings.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Locus::Locus(long int ind, bool movable, string name)
+    : m_index(ind),
+      m_name(name),
+      m_nmarkers(FLAGLONG),
+      m_nsites(FLAGLONG),
+      m_regionalmapposition(0),
+      m_globalmapposition(0),
+      m_offset(0),
+      m_movable(movable),
+      m_type(mloc_data),
+      m_defaultlocations(true),
+      m_positions(),
+      m_pDatatype(),
+      m_pDatamodel(),
+      m_tipdata(),
+      m_protoCell(),
+      m_pDLCalculator(),
+      m_tipcells(),
+      m_allowedrange(),
+      m_variablerange(),
+      m_unknownrange(),
+      m_map(),
+      m_simulate(false),
+      m_truesite(FLAGLONG),
+      m_variability(),
+      m_phenotypes(name)
+{
+    if (movable)
+    {
+        //this is a different sort of locus:
+        SetNmarkers(1); //This requires alleles of 1 marker
+        SetNsites(1);
+        SetPositions();
+    }
+} // Locus constructor
+
+//------------------------------------------------------------------------------------
+
+string Locus::GetName() const
+{
+    if (!m_name.empty()) return m_name;
+    string tempname("#");
+    tempname += ToString(GetIndex()+1);
+    return tempname;
+} // GetName
+
+//------------------------------------------------------------------------------------
+
+long int Locus::GetNsites() const
+{
+    // if it has never been set, we assume it's m_nmarkers
+    if (m_nsites == FLAGLONG) return m_nmarkers;
+    return m_nsites;
+
+} // GetNsites
+
+//------------------------------------------------------------------------------------
+
+long int Locus::GetOffset() const
+{
+    return m_offset;
+
+} // GetOffset
+
+//------------------------------------------------------------------------------------
+
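+// Report marker positions in the user's input coordinate system: stored positions include
+// the regional map position, so convert back by subtracting it and adding the
+// user-supplied offset.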
+LongVec1d Locus::GetUserMarkerLocations() const
+{
+    LongVec1d userpos(m_positions);
+    transform(userpos.begin(), userpos.end(), userpos.begin(),
+              bind2nd(plus<long int>(), m_offset - m_regionalmapposition));
+    return userpos;
+} // GetUserMarkerLocations
+
+//------------------------------------------------------------------------------------
+// Once everything is ready, make this Locus into a fully functional one containing likelihood cells for its tips.
+
+void Locus::Setup(const IndVec& individuals)
+{
+    m_tipcells.clear();
+
+    m_protoCell = m_pDatatype->CreateDLCell(*this);
+    unsigned long int tip;
+    for (tip = 0; tip < m_tipdata.size(); ++tip)
+    {
+        if (m_tipdata[tip].m_nodata)
+        {
+            //The data is stored in the haplotypes, not this tip.
+            vector<LocusCell> cellsbymarkers;
+            //Find which individual codes for this tip.
+            for (unsigned long int ind = 0; ind < individuals.size(); ind++)
+            {
+                if (m_tipdata[tip].individual == individuals[ind].GetId())
+                {
+                    for (long int marker = 0; marker < GetNmarkers(); marker++)
+                    {
+                        vector<LocusCell> cells = individuals[ind].GetLocusCellsFor(GetName(), marker);
+                        assert(static_cast<long int>(cells.size()) > m_tipdata[tip].m_hap);
+                        cellsbymarkers.push_back(cells[m_tipdata[tip].m_hap]);
+                    }
+                    continue;
+                }
+            }
+            LocusCell onecell(cellsbymarkers);
+            m_tipcells.push_back(onecell);
+        }
+        else
+        {
+            m_tipcells.push_back(m_pDatatype->CreateInitializedDLCell(*this, m_tipdata[tip].data));
+        }
+    }
+    m_pDLCalculator = DLCalc_ptr(m_pDatatype->CreateDLCalculator(*this));
+
+} // Setup
+
+//------------------------------------------------------------------------------------
+// took away clone of src since it should be uniquely generated
+// for each locus by Registry::InstallDataModels
+
+void Locus::SetDataModelOnce(DataModel_ptr src)
+{
+    if (src.get() != NULL) m_pDatamodel = src;
+    else m_pDatamodel.reset();
+
+} // SetDataModelOnce
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetAnalysisType(mloc_type type)
+{
+    m_type = type;
+    switch(type)
+    {
+        case mloc_mapjump:
+        case mloc_mapfloat:
+            m_movable = true;
+            break;
+        case mloc_data:
+            m_movable = false;
+            break;
+        case mloc_partition:
+            assert(false);
+            throw implementation_error("You shouldn't be able to set the analysis type to 'partition' yet.");
+            break;
+            //LS DEBUG MAPPING:  We need to throw it away here or something.
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetAllowedRange(rangeset rs, long int regoffset)
+{
+    //rs comes in here on the global scale.
+    rangeset offsetrs;
+    for (rangesetiter rpair = rs.begin(); rpair != rs.end(); rpair++)
+    {
+        long int low = rpair->first - regoffset;
+        long int high = rpair->second - regoffset;
+        offsetrs.insert(make_pair(low, high));
+    }
+
+    m_allowedrange = offsetrs;
+}
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetEmptyTipData(vector<TipData> td)
+{
+    for (unsigned long int tip = 0; tip < td.size(); tip++)
+    {
+        td[tip].data.clear();
+        td[tip].m_nodata = true;
+    }
+    m_tipdata = td;
+}
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetVariableRange(rangeset rs)
+{
+    m_variablerange = rs;
+}
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetDataType(DataType_ptr src)
+{
+    m_pDatatype = src; // shallow copy!
+} // SetDataType
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetNmarkers(long int n)
+{
+    if (m_nmarkers == FLAGLONG)
+    {
+        m_nmarkers = n;
+    }
+    else
+    {
+        if (m_nmarkers != n)
+        {
+            data_error e("Inconsistent number of markers");
+            throw e;
+        }
+    }
+} // SetNmarkers
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetGlobalMapPosition(long int site)
+{
+#if 0
+    if (site == 0)
+    {
+        throw data_error("Assuming the biologist's convention of the nonexistence of site zero,"
+                         " we assume that the position left of site 1 is site -1."
+                         "  As such, you may not set the map position of any segment to '0'.");
+    }
+#endif
+    m_globalmapposition = site;
+} // Setglobalmapposition
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetRegionalMapPosition(long int site)
+{
+    transform(m_positions.begin(), m_positions.end(), m_positions.begin(),
+              bind2nd(minus<long int>(), m_regionalmapposition));
+
+    if(IsMoving())
+    {
+        long int movement = site - m_regionalmapposition;
+        SetGlobalMapPosition(m_globalmapposition + movement);
+    }
+
+    m_regionalmapposition = site;
+    transform(m_positions.begin(), m_positions.end(), m_positions.begin(),
+              bind2nd(plus<long int>(), m_regionalmapposition));
+
+} // SetRegionalMapPosition
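+// Worked example (illustrative values): with m_regionalmapposition = 10,
+// m_positions = {12, 15} and a new site of 40, the positions are first rebased
+// to {2, 5} and then shifted to {42, 45}; if the locus is moving,
+// m_globalmapposition is advanced by the same 30-site movement.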
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetOffset(long int val)
+{
+    m_offset = val;
+} // SetOffset
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetPositions()
+{
+    m_defaultlocations = true;
+    m_positions.clear();
+    m_positions.reserve(m_nmarkers);  // for speed
+    long int i;
+    for (i = 0; i < m_nmarkers; ++i)
+    {
+        m_positions.push_back(i + m_regionalmapposition);
+    }
+
+} // SetPositions
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetPositions(const LongVec1d& pos)
+{
+    m_defaultlocations = false;
+    m_positions = pos;
+    transform(m_positions.begin(), m_positions.end(), m_positions.begin(),
+              bind2nd(plus<long int>(), m_regionalmapposition));
+
+} // SetPositions
+
+//------------------------------------------------------------------------------------
+
+LongVec1d Locus::CalcNVariableMarkers() const
+{
+    LongVec1d nvarmarkers;
+
+    const DataPack& dpack = registry.GetDataPack();
+    long int xpart, nxparts = dpack.GetNCrossPartitions();
+
+    for (xpart = 0; xpart < nxparts; ++xpart)
+    {
+        nvarmarkers.push_back(CalcNVariableMarkers(dpack.GetTipId(xpart)));
+    }
+
+    return nvarmarkers;
+} // CalcNVariableMarkers()
+
+//------------------------------------------------------------------------------------
+
+long int Locus::CalcNVariableMarkers(tipidtype xpart) const
+{
+    long int nvarmarkers = 0;
+
+    const StringVec2d data = GetCrossPartitionGeneticData(xpart);
+    if (!data.empty()) nvarmarkers = GetDataTypePtr()->CalcNVarMarkers(data);
+
+    return nvarmarkers;
+
+} // CalcNVariableMarkers(tipidtype xpart)
+
+//------------------------------------------------------------------------------------
+
+vector<TipData> Locus::GetPopulationTipData(const string& popname) const
+{
+    vector<TipData> popdata;
+    vector<TipData>::const_iterator tip;
+    for(tip = m_tipdata.begin(); tip != m_tipdata.end(); ++tip)
+        if (tip->IsInPopulation(popname)) popdata.push_back(*tip);
+
+    return popdata;
+
+} // GetPopulationTipData
+
+//------------------------------------------------------------------------------------
+
+StringVec2d Locus::GetCrossPartitionGeneticData(tipidtype xpart) const
+{
+    StringVec2d data;
+    vector<TipData>::const_iterator tip = GetTipData().begin();
+    for( ; tip != GetTipData().end(); ++tip)
+        if (tip->IsInCrossPartition(xpart)) data.push_back(tip->data);
+
+    return data;
+
+} // GetCrossPartitionGeneticData
+
+//------------------------------------------------------------------------------------
+
+StringVec3d Locus::GetPartitionGeneticData(force_type partname) const
+{
+    assert(partname == force_MIG || partname == force_DISEASE || partname == force_DIVMIG);
+
+    StringVec3d data(registry.GetDataPack().GetNPartitionsByForceType(partname));
+    vector<TipData>::const_iterator tip = GetTipData().begin();
+    for( ; tip != GetTipData().end(); ++tip)
+        data[tip->GetPartition(partname)].push_back(tip->data);
+
+    return data;
+
+} // GetPartitionGeneticData
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Locus::GetMarkerDataWithLabels(const IndVec& individuals) const
+{
+    long int width = max(static_cast<long int>(GetName().size()), GetNmarkers()+5);
+    StringVec1d labeleddata(1,MakeCentered(GetName(),width));
+    vector<TipData>::const_iterator tip = GetTipData().begin();
+    if (tip->m_nodata)
+    {
+        //We need to iterate over the individuals instead
+        //LS DEBUG:  this is a pretty fragile check for this situation.
+        for (long int marker = 0; marker < m_nmarkers; marker++)
+        {
+            for (unsigned long int ind = 0; ind < individuals.size(); ind++)
+            {
+                string label = MakeJustified(individuals[ind].GetName(), -9);
+                string data = individuals[ind].GetMarkerDataFor(GetName(), marker);
+                labeleddata.push_back(label + " " + data);
+            }
+            if (m_nmarkers > 1)
+            {
+                labeleddata.push_back("");
+            }
+        }
+    }
+    else
+    {
+        for( ; tip != GetTipData().end(); ++tip)
+        {
+            string label = MakeJustified(tip->label,-9);
+            string data = tip->GetFormattedData(m_pDatatype->GetDelimiter());
+            labeleddata.push_back(label+ " " + data);
+        }
+    }
+    return labeleddata;
+
+} // GetMarkerDataWithLabels
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d Locus::CountNNucleotides() const
+{
+    DoubleVec1d count(BASES, 0L);
+
+    if (!m_pDatatype->IsNucleotideData()) return count;
+
+    vector<TipData>::const_iterator tip = GetTipData().begin();
+    for( ; tip != GetTipData().end(); ++tip)
+    {
+        StringVec1d data = tip->data;
+
+        StringVec1d::const_iterator sit;
+        for (sit = data.begin(); sit != data.end(); ++sit)
+        {
+            // we can't use locus' inherent datamodel here because this code may
+            // be called in the menu, where the locus may not have a datamodel yet!
+            DoubleVec1d site = NucModel::StaticDataToLikes(*sit,GetPerBaseErrorRate());
+            double zero(0.0);
+            double total(accumulate(site.begin(),site.end(),zero));
+            transform(site.begin(),site.end(),site.begin(),
+                      bind2nd(divides<double>(),total));
+
+            assert(site.size() == count.size());
+
+            transform(count.begin(),count.end(),site.begin(),
+                      count.begin(),plus<double>());
+        }
+    }
+    return count;
+} // CountNNucleotides
+
+//------------------------------------------------------------------------------------
+
+bool Locus::MultiSampleIndividuals() const
+{
+    set<long int> individuals; //a unique list
+    for (unsigned long int ind = 0; ind < m_tipdata.size(); ind++)
+    {
+        long int individual = m_tipdata[ind].individual;
+        if (individuals.find(individual) != individuals.end()) return true;
+        individuals.insert(individual);
+    }
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+long int Locus::GetNTips(tipidtype xpart) const
+{
+    long int count = 0;
+    vector<TipData>::const_iterator tip = GetTipData().begin();
+    for( ; tip != GetTipData().end(); ++tip)
+        if (tip->IsInCrossPartition(xpart)) ++count;
+
+    return count;
+} // GetNTips(tipidtype xpart)
+
+//------------------------------------------------------------------------------------
+
+double Locus::CalculateDataLikelihood(Tree& tree, bool moving) const
+{
+    return m_pDLCalculator->Calculate(tree, *this, moving);
+} // CalculateDataLikelihood
+
+//------------------------------------------------------------------------------------
+
+void Locus::AddUniqueNamesTo(set<string>& popnames) const
+{
+    vector<TipData>::const_iterator tip = GetTipData().begin();
+    for( ; tip != GetTipData().end(); ++tip)
+        popnames.insert(tip->m_popname); // Only new names added because popnames is a std::set.
+
+} // AddUniqueNamesTo
+
+//------------------------------------------------------------------------------------
+
+void Locus::SetNewMapPositionIfMoving()
+{
+    assert (m_simulate);
+    if (IsMoving())
+    {
+        //Pick a site to actually live
+        long int nsites = CountSites(m_allowedrange);
+        do {
+            m_truesite = registry.GetRandom().Long(nsites);
+        } while (m_allowedrange != AddPairToRange(make_pair(m_truesite, m_truesite + 1), m_allowedrange));
+        SetRegionalMapPosition(m_truesite);
+    }
+}
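+// The do-while above is rejection sampling: a candidate site is accepted only
+// when adding it to m_allowedrange leaves the rangeset unchanged, i.e. when
+// the candidate already lies within the allowed range.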
+
+//------------------------------------------------------------------------------------
+
+void Locus::SimulateData(Tree& tree, long int nsites)
+{
+    assert(m_simulate);
+    ClearVariability();
+    long int ntries = 0;
+    while (IsNighInvariant() && ++ntries<5000)
+    {
+        m_pDLCalculator->SimulateData(tree, *this);
+    }
+    if (ntries >= 5000)
+    {
+        registry.GetRunReport().ReportNormal("Gave up trying to simulate non-invariant data for segment "
+                                             + GetName() + ".");
+    }
+} // SimulateData
+
+//------------------------------------------------------------------------------------
+
+void Locus::CopyDataFrom(Locus& original, Tree& tree)
+{
+    m_pDLCalculator->CopyDataFrom(*this, original, tree);
+}
+
+//------------------------------------------------------------------------------------
+
+void Locus::MakePhenotypesFor(IndVec& individuals)
+{
+    for (size_t ind = 0; ind<individuals.size(); ind++)
+    {
+        for (long int marker = 0; marker<m_nmarkers; marker++)
+        {
+            StringVec1d alleles = individuals[ind].GetAllelesFromDLs(m_index, marker, IsMoving(), m_pDatamodel);
+            //LS DEBUG
+            //cerr << "Original haplotypes:  " << ToString(alleles) << endl;
+            if (m_phenotypes.AnyDefinedPhenotypes())
+            {
+                individuals[ind].SetHaplotypes(GetName(), marker, m_phenotypes.ChooseHaplotypes(alleles));
+            }
+            else
+            {
+                Haplotypes haps(individuals[ind].GetHaplotypesFor(GetName(),marker), true);
+                haps.AddHaplotype(alleles, 1.0);
+                individuals[ind].SetHaplotypes(GetName(), marker, haps);
+                //LS DEBUG
+                //cerr << "Final haplotypes:  " << ToString(haps.GetAlleles()) << endl;
+                // individuals[ind].PrintHaplotypesFor(GetName(), marker);
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Locus::SaveState(DoubleVec1d& state, long int marker, string label)
+{
+    //First, copy over our own dlcell:
+    for (size_t tip = 0; tip < m_tipdata.size(); tip++)
+    {
+        if (m_tipdata[tip].label == label)
+        {
+            m_tipcells[tip].SetAllCategoriesTo(state, marker);
+        }
+    }
+
+    //Now add it to the 'variability' list.
+    map<long int, DoubleVec1d>::iterator freqs = m_variability.find(marker);
+    if (freqs == m_variability.end())
+    {
+        m_variability.insert(make_pair(marker, state));
+        return;
+    }
+    //Not new, so add it to the old
+    transform(state.begin(),
+              state.end(),
+              freqs->second.begin(),
+              freqs->second.begin(),
+              plus<double>());
+}
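+// Example of the accumulation above (illustrative values): if m_variability
+// already holds {2, 1, 0, 0} for this marker and the incoming state is
+// {0, 1, 0, 0}, the stored frequencies become {2, 2, 0, 0}.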
+
+//------------------------------------------------------------------------------------
+
+void Locus::RandomizeHalf(Tree& tree, bool swath)
+{
+    m_unknownrange.clear();
+    //The idea here is that we take a random quarter-sized swath out of the first
+// half, and another quarter-sized swath out of the second half.
+
+    if (swath)
+    {
+        //Pick a swath out of the first half, and another swath out of the
+        // second half.
+        long int quarter = static_cast<long int>(m_nsites/4);
+        long int left = registry.GetRandom().Long(quarter);
+        m_unknownrange.insert(make_pair(left, left+quarter+1));
+        left = registry.GetRandom().Long(quarter);
+        m_unknownrange.insert(make_pair(left+(2*quarter), left + (3*quarter) + 1));
+    }
+    else
+    {
+        //The 'every other site' version:
+        for (long int left = 0; left < m_nsites; left = left+2)
+        {
+            m_unknownrange.insert(make_pair(left, left+1));
+        }
+    }
+    m_pDLCalculator->Randomize(*this, m_unknownrange, tree);
+
+    //Now we need to clear the variable sites out of the unknown range
+    m_variablerange = RemoveRangeFromRange(m_unknownrange, m_variablerange);
+}
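+// Worked example of the swath case (illustrative values): with m_nsites = 100,
+// quarter = 25; a first draw of left = 10 inserts the pair (10, 36) into
+// m_unknownrange, and a second draw of left = 5 inserts (55, 81) for the
+// second half.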
+
+//------------------------------------------------------------------------------------
+
+bool Locus::SiteInLocus(long int site) const
+{
+    // Is the given site in this locus?
+    return (m_regionalmapposition <= site && site < m_regionalmapposition + m_nsites);
+} // SiteInLocus
+
+//------------------------------------------------------------------------------------
+
+long int Locus::SiteToMarker(long int site) const
+{
+    assert(SiteInLocus(site));
+    for (long int i = 0; i < m_nmarkers; i++)
+    {
+        if (m_positions[i] == site)
+        {
+            return i;
+        }
+    }
+    return FLAGLONG;
+}
+
+//------------------------------------------------------------------------------------
+
+pair<long int, long int> Locus::GetSiteSpan() const
+{
+    return make_pair(m_regionalmapposition, m_regionalmapposition + m_nsites);
+} // GetSiteSpan
+
+//------------------------------------------------------------------------------------
+
+pair<long int, long int> Locus::GetGlobalScaleSiteSpan() const
+{
+    return make_pair(m_globalmapposition, m_globalmapposition + m_nsites);
+} // GetGlobalScaleSiteSpan
+
+//------------------------------------------------------------------------------------
+
+bool Locus::IsMovable() const
+{
+    //We might want to change this if split into phase 1/phase 2
+    return m_movable;
+}
+
+//------------------------------------------------------------------------------------
+
+bool Locus::IsMoving() const
+{
+    switch (GetAnalysisType())
+    {
+        case mloc_mapjump:
+        case mloc_mapfloat:
+            return true;
+        case mloc_data:
+            return false;
+        case mloc_partition:
+            assert(false);
+            throw implementation_error("Shouldn't be checking this for a partition segment.");
+    }
+    throw implementation_error("Uncaught analysis type for segment.");
+}
+
+//------------------------------------------------------------------------------------
+
+double Locus::GetPerBaseErrorRate() const
+// EWFIX.CODESMELL -- horrible, horrible code smell
+{
+    const DataModel_ptr p = GetDataModel();
+    if(p == NULL) return defaults::per_base_error_rate;
+
+    // Query the locus's existing model directly; a clone taken here would never be freed.
+    NucModel * nm = dynamic_cast<NucModel*>(p.get());
+    if(nm != NULL) return nm->GetPerBaseErrorRate();
+
+    return defaults::per_base_error_rate;
+}
+
+//------------------------------------------------------------------------------------
+
+void Locus::RemovePartitionFromTipDatas(force_type forcename)
+{
+    vector<TipData>::iterator tip = GetTipData().begin();
+    for( ; tip != GetTipData().end(); ++tip)
+        tip->RemovePartition(forcename);
+
+} // RemovePartitionFromTipDatas
+
+//------------------------------------------------------------------------------------
+
+bool Locus::IsValidLocus(string& errorString) const
+{
+    unsigned long int nmark = GetNmarkers();  // to avoid comparison warning
+    if (nmark == 0)
+    {
+        errorString = "No data in segment " + GetName();
+        return false;
+    }
+
+    if (nmark != GetMarkerLocations().size())
+    {
+        errorString = "The number of markers doesn't match their positions in segment " + GetName();
+        return false;
+    }
+
+    if (m_nsites < static_cast<long int>(nmark))
+    {
+        errorString = "Number of markers exceeds sequence length in segment " + GetName();
+        return false;
+    }
+
+    vector<long int> sortedpositions = GetMarkerLocations();
+    sort(sortedpositions.begin(), sortedpositions.end());
+    if (sortedpositions != GetMarkerLocations())
+    {
+        errorString = "Positions out of order in segment " + GetName();
+        return false;
+    }
+
+#if 0 // Dead code
+    // There should no longer be a need to validate the data
+    // model since they are constructed to be valid and the
+    // participating members don't change.
+    if (m_pDatamodel.get() != NULL && !m_pDatamodel->IsValidDataModel())
+    {
+        errorString = "Invalid datamodel in segment " + m_name;
+        return false;
+    }
+#endif // Dead code
+
+    // We needn't validate datatype as it has no state.
+    return true;
+
+} // IsValidLocus
+
+//------------------------------------------------------------------------------------
+
+set<pair<double, long int> > Locus::MakeOrderedSites(long int regoffset) const
+{
+    assert(m_map.size() > 0);
+
+    set<pair<double, long int> > orderedsites;
+    for (unsigned long int site = 0; site < m_map.size(); ++site)
+    {
+        long int place = site + regoffset;
+        orderedsites.insert(make_pair(m_map[site], place));
+    }
+    return orderedsites;
+}
+
+//------------------------------------------------------------------------------------
+
+rangeset Locus::GetBestSites(set<pair<double, long int> > orderedsites) const
+{
+    rangeset bestsites;
+
+    for (set<pair<double, long int> >::reverse_iterator sitepair = orderedsites.rbegin();
+         sitepair != orderedsites.rend();
+         ++sitepair)
+    {
+        rangepair thissite = make_pair(sitepair->second, sitepair->second + 1);
+        if (sitepair->first == orderedsites.rbegin()->first)
+        {
+            bestsites = AddPairToRange(thissite, bestsites);
+        }
+    }
+
+    return bestsites;
+}
+
+//------------------------------------------------------------------------------------
+
+rangeset Locus::GetTopSites(set<pair<double, long int> > orderedsites, double percLimit) const
+{
+    rangeset topsites;
+
+    double total = 0;
+    for (set<pair<double, long int> >::reverse_iterator sitepair = orderedsites.rbegin();
+         sitepair != orderedsites.rend();
+         ++sitepair)
+    {
+        rangepair thissite = make_pair(sitepair->second, sitepair->second + 1);
+        if (total < percLimit)
+        {
+            topsites = AddPairToRange(thissite, topsites);
+        }
+        total += sitepair->first;
+        //We add afterwards so that the site that pushes us over the edge still gets
+        // included in the appropriate range.
+    }
+
+    return topsites;
+}
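+// Worked example (illustrative values, assuming the per-site likelihood weights
+// sum to 1): with weights 0.40, 0.30, 0.20 and 0.10 visited best-first and
+// percLimit = 0.5, the 0.40 and 0.30 sites are included; the running total is
+// checked before each addition, so the site that crosses the limit is kept.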
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Locus::ReportMappingInfo(long int regoffset, bool isshort) const
+{
+    set<pair<double, long int> > orderedsites = MakeOrderedSites(regoffset);
+
+    rangeset bestsites = GetBestSites(orderedsites);
+    rangeset topfivepercent = GetTopSites(orderedsites, 0.05);
+    rangeset topfiftypercent = GetTopSites(orderedsites, 0.5);
+    rangeset topninetyfivepercent = GetTopSites(orderedsites, 0.95);
+
+    // EWFIX -- 2010-09-02 -- don't remove until checked
+#if 0
+    double total = 0;
+    for (set<pair<double, long int> >::reverse_iterator sitepair = orderedsites.rbegin();
+         sitepair != orderedsites.rend();
+         ++sitepair)
+    {
+        rangepair thissite = make_pair(sitepair->second, sitepair->second+1);
+        if (sitepair->first == orderedsites.rbegin()->first)
+        {
+            bestsites = AddPairToRange(thissite, bestsites);
+        }
+        if (total < .05)
+        {
+            topfivepercent = AddPairToRange(thissite, topfivepercent);
+        }
+        if (total < .5)
+        {
+            topfiftypercent = AddPairToRange(thissite, topfiftypercent);
+        }
+        if (total < .95)
+        {
+            topninetyfivepercent = AddPairToRange(thissite, topninetyfivepercent);
+        }
+        total += sitepair->first;
+        //We add afterwards so that the site that pushes us over the edge still gets
+        // included in the appropriate range.
+    }
+#endif
+
+    StringVec1d report;
+    string msg = "Most likely site(s) for " + GetName() + ":  "
+        + ToStringUserUnits(bestsites) + ".  Relative data likelihood = "
+        + ToString(orderedsites.rbegin()->first);
+    report.push_back(msg);
+    if (isshort)
+    {
+        return report;
+    }
+    msg = "The top 5% of all sites in this region:  " + ToStringUserUnits(topfivepercent);
+    report.push_back(msg);
+    msg = "The top 50% of all sites in this region:  " + ToStringUserUnits(topfiftypercent);
+    report.push_back(msg);
+    msg = "The top 95% of all sites in this region:  " + ToStringUserUnits(topninetyfivepercent);
+    report.push_back(msg);
+    msg = "You have a total of " + ToString(CountSites(topninetyfivepercent)) +
+        " sites in your 95% range.";
+    report.push_back(msg);
+    if (m_truesite != FLAGLONG)
+    {
+        rangepair truesite = make_pair(m_truesite, m_truesite + 1);
+        msg = "The true site (" + ToStringUserUnits(truesite) + ") ";
+        if (topninetyfivepercent == AddPairToRange(truesite, topninetyfivepercent))
+        {
+            msg += "was";
+        }
+        else
+        {
+            msg += "was not";
+        }
+        msg += " included in the top 95%.";
+        report.push_back(msg);
+
+#if 0  // LS DEBUG SIM  Hard-coded information output
+        rangeset unknownrange = registry.GetDataPack().GetRegion(0).GetLocus(0).GetUnknownRange();
+        msg = "This site was in the ";
+        if (unknownrange == AddPairToRange(truesite, unknownrange))
+        {
+            msg += "known";
+        }
+        else
+        {
+            msg += "unknown";
+        }
+        msg += " part of the segment.";
+        report.push_back(msg);
+#endif
+    }
+    if (m_variability.size() > 0 && IsMoving())
+    {
+        report.push_back("Data variability:");
+        for (map<long int, DoubleVec1d>::const_iterator marker = m_variability.begin();
+             marker != m_variability.end(); marker++)
+        {
+            for (unsigned long int allele = 0; allele < marker->second.size(); allele++)
+            {
+                msg = "Position " + ToString(marker->first + 1) + ", "
+                    + "Allele " + ToString(allele+1) + ":  "
+                    + ToString(marker->second[allele]);
+                report.push_back(msg);
+            }
+        }
+    }
+    return report;
+}
+
+//------------------------------------------------------------------------------------
+
+bool Locus::IsDuplicateTipName(const string& newname) const
+{
+    vector<TipData>::const_iterator tip;
+    for(tip = m_tipdata.begin(); tip != m_tipdata.end(); ++tip)
+        if (tip->label == newname) return true;
+
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Locus::MakeTraitXML(long int nspaces, long int regoffset) const
+{
+    StringVec1d xmllines;
+    string line = MakeIndent(MakeTag(xmlstr::XML_TAG_TRAIT), nspaces);
+    xmllines.push_back(line);
+    nspaces += INDENT_DEPTH;
+
+    line = MakeTag(xmlstr::XML_TAG_NAME) + " " + GetName() + " " +
+        MakeCloseTag(xmlstr::XML_TAG_NAME);
+    xmllines.push_back(MakeIndent(line, nspaces));
+
+    line = MakeTag(xmlstr::XML_TAG_ANALYSIS) + " "
+        + ToXMLString(GetAnalysisType())
+        + " " + MakeCloseTag(xmlstr::XML_TAG_ANALYSIS);
+    xmllines.push_back(MakeIndent(line, nspaces));
+
+    line = MakeTag(xmlstr::XML_TAG_POSSIBLE_LOCATIONS);
+    xmllines.push_back(MakeIndent(line, nspaces));
+
+    nspaces += INDENT_DEPTH;
+    for (rangeset::iterator range = m_allowedrange.begin();
+         range != m_allowedrange.end(); range++)
+    {
+        long int start = (*range).first + regoffset;
+        long int end   = (*range).second + regoffset - 1;
+
+        line = MakeTag(xmlstr::XML_TAG_RANGE);
+        xmllines.push_back(MakeIndent(line, nspaces));
+
+        nspaces += INDENT_DEPTH;
+        line = MakeTag(xmlstr::XML_TAG_START) + " " + ToString(start)
+            + " " + MakeCloseTag(xmlstr::XML_TAG_START);
+        xmllines.push_back(MakeIndent(line, nspaces));
+        line = MakeTag(xmlstr::XML_TAG_END) + " " + ToString(end)
+            + " " + MakeCloseTag(xmlstr::XML_TAG_END);
+        xmllines.push_back(MakeIndent(line, nspaces));
+        nspaces -= INDENT_DEPTH;
+
+        line = MakeCloseTag(xmlstr::XML_TAG_RANGE);
+        xmllines.push_back(MakeIndent(line, nspaces));
+    }
+    nspaces -= INDENT_DEPTH;
+    line = MakeCloseTag(xmlstr::XML_TAG_POSSIBLE_LOCATIONS);
+    xmllines.push_back(MakeIndent(line, nspaces));
+
+    StringVec1d dlmodelxml(GetDataModel()->ToXML(nspaces));
+    xmllines.insert(xmllines.end(),dlmodelxml.begin(),dlmodelxml.end());
+
+    StringVec1d phenotypexml(m_phenotypes.GetPhenotypesXML(nspaces));
+    xmllines.insert(xmllines.end(),phenotypexml.begin(),phenotypexml.end());
+
+    nspaces -= INDENT_DEPTH;
+    line = MakeCloseTag(xmlstr::XML_TAG_TRAIT);
+    xmllines.push_back(MakeIndent(line, nspaces));
+
+    return xmllines;
+}
+
+//------------------------------------------------------------------------------------
+//The general rule for IsNighInvariant is that if any marker is *not* 'nigh
+// invariant' (false), the locus as a whole is also not.  If all markers are
+// indeed 'nigh invariant' (true), so is the locus as a whole.
+
+bool Locus::IsNighInvariant() const
+{
+    if (m_variability.size() == 0)
+    {
+        return true;
+    }
+    for (map<long int, DoubleVec1d>::const_iterator marker = m_variability.begin();
+         marker != m_variability.end(); marker++)
+    {
+        if (!(IsNighInvariant(marker->first))) return false;
+    }
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+// The rule for IsNighInvariant (in its current 11-28-05 form) is that if no
+//  more than two tips at this marker carry anything other than the majority
+//  allele, the data is 'nigh invariant' (true).  If at least three tips carry
+//  a non-majority allele, the data is not 'nigh invariant' (false).
+
+bool Locus::IsNighInvariant(long int marker) const
+{
+    unsigned long int ntips = m_tipcells.size();
+
+    map<long int, DoubleVec1d>::const_iterator freqs = m_variability.find(marker);
+    if (freqs == m_variability.end()) return false;
+    double maxfreq = 0;
+    for (unsigned long int allele = 0; allele < freqs->second.size(); allele++)
+    {
+        maxfreq = max(maxfreq, freqs->second[allele]);
+    }
+    long int mindiff = min(3L, static_cast<long int>(ntips) - 1);
+    if (maxfreq <= ntips - mindiff)
+    {
+        return false;
+    }
+    return true;
+}
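+// Worked example (illustrative values): with ntips = 10, allele counts of
+// {8, 2} give maxfreq = 8 and mindiff = 3, so 8 <= 10 - 3 is false and the
+// marker counts as nigh invariant; counts of {7, 3} give 7 <= 7, so it does not.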
+
+//------------------------------------------------------------------------------------
+
+bool Locus::IsCompletelyInvariant(long int marker) const
+{
+    unsigned long int ntips = m_tipcells.size();
+
+    map<long int, DoubleVec1d>::const_iterator freqs = m_variability.find(marker);
+    if (freqs == m_variability.end()) return false;
+    for (unsigned long int allele = 0; allele < freqs->second.size(); allele++)
+    {
+        if (0 < freqs->second[allele] && freqs->second[allele] < ntips)
+        {
+            return false;
+        }
+    }
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+long int Locus::ChooseVariableSiteFrom(rangeset rset)
+{
+    LongVec1d variables;
+    for (rangeset::iterator rit = rset.begin(); rit != rset.end(); rit++)
+    {
+        for (long int site = rit->first; site<rit->second; ++site)
+        {
+            if (SiteInLocus(site))
+            {
+                if (!IsNighInvariant(SiteToMarker(site)))
+                {
+                    variables.push_back(site);
+                }
+            }
+        }
+    }
+    if (variables.size() == 0)
+    {
+        //No variable sites!
+        return FLAGLONG;
+    }
+    return variables[registry.GetRandom().Long(variables.size())];
+}
+
+//------------------------------------------------------------------------------------
+
+rangeset Locus::GetVariableRange()
+{
+    if (m_variablerange.size() == 0)
+    {
+        m_variablerange = CalculateVariableRange();
+    }
+    return m_variablerange;
+}
+
+//------------------------------------------------------------------------------------
+
+rangeset Locus::CalculateVariableRange() const
+{
+    rangeset variables;
+    for (long int site = 0; site < m_nsites; ++site)
+    {
+        if (!IsNighInvariant(SiteToMarker(site)))
+        {
+            variables = AddPairToRange(make_pair(site, site+1), variables);
+        }
+    }
+    return variables;
+
+}
+
+//------------------------------------------------------------------------------------
+
+rangeset Locus::CalculateCompleteVariableRange() const
+{
+    rangeset variables;
+    for (long int site = 0; site < m_nsites; ++site)
+    {
+        if (!IsCompletelyInvariant(SiteToMarker(site)))
+        {
+            variables = AddPairToRange(make_pair(site, site+1), variables);
+        }
+    }
+    return variables;
+}
+
+//------------------------------------------------------------------------------------
+
+rangeset Locus::GetVariableAndUnknownRange() const
+{
+    return Union(m_variablerange, m_unknownrange);
+}
+
+//------------------------------------------------------------------------------------
+
+long int Locus::GetVariabilityOfUnknownRange() const
+{
+    long int numvariable = 0;
+    for (rangeset::iterator range = m_unknownrange.begin(); range != m_unknownrange.end(); range++)
+    {
+        for (long int site = range->first; site < range->second; ++site)
+        {
+            if (!IsNighInvariant(SiteToMarker(site)))
+            {
+                numvariable++;
+            }
+        }
+    }
+    return numvariable;
+}
+
+//------------------------------------------------------------------------------------
+
+long int Locus::GetVariabilityOfKnownRange() const
+{
+    return CountSites(m_variablerange);
+}
+
+//------------------------------------------------------------------------------------
+
+rangeset Locus::GetKnownRange() const
+{
+    rangeset retset;
+    retset.insert(make_pair(0, m_nsites));
+    retset = RemoveRangeFromRange(m_unknownrange, retset);
+    return retset;
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Locus::CreateDataModelReport(string regionname) const
+{
+    StringVec1d out = m_pDatamodel->CreateDataModelReport();
+    StringVec1d head;
+    head.push_back("Parameters of a " + m_pDatamodel->GetDataModelName()
+                   + " model for the " + GetName() + " segment of the "
+                   + regionname + " region");
+    head.insert(head.end(), out.begin(), out.end());
+    return head;
+}
+
+//------------------------------------------------------------------------------------
+
+void Locus::WriteMapping(string regname, long int regoffset) const
+{
+    ofstream mapfile;
+    mapfile.precision(10);
+    UserParameters& userparams = registry.GetUserParameters();
+    string fname = userparams.GetMapFilePrefix() + "_" + GetName() + ".txt";
+    mapfile.open(fname.c_str(), ios::out );
+    userparams.AddMapFileName(fname);
+
+    mapfile << "Mapping results for " + GetName() + " from the region \"" + regname + "\".\n";
+
+    switch (GetAnalysisType())
+    {
+        case mloc_mapjump:
+            mapfile << "The analysis for this trait was performed by allowing the location of "
+                "the trait marker to move from place to place as trees were created.\n";
+            break;
+        case mloc_mapfloat:
+            mapfile << "The analysis for this trait was performed by collecting trees, then calculating the "
+                "data likelihood of the trait marker at all allowed sites on those trees, and then averaging.\n";
+            break;
+        case mloc_data:
+        case mloc_partition:
+            assert(false); //These loci should not be in the moving locus vector.
+            return;
+    }
+
+    StringVec1d mapinfo = ReportMappingInfo(regoffset);
+    for (size_t i = 0; i < mapinfo.size(); i++)
+    {
+        mapfile << mapinfo[i] + "\n";
+    }
+    mapfile << "\nSite\tData likelihood\tRaw likelihood\n";
+
+    DoubleVec1d results = GetMappingInfo();
+    DoubleVec1d rawresults = GetRawMappingInfo();
+    StringVec1d resultsStrings(results.size(), "-");
+    StringVec1d rawresultsStrings(rawresults.size(), "-");
+    rangeset validrange = GetAllowedRange();
+    for (rangeset::iterator range=validrange.begin();
+         range != validrange.end(); range++)
+    {
+        for (long int site = range->first; site<range->second; ++site)
+        {
+            resultsStrings[site] = Pretty(results[site], 10);
+            rawresultsStrings[site] = Pretty(rawresults[site], 10);
+        }
+    }
+    for (size_t site = 0; site < results.size(); site += 1)
+    {
+        long int place = ToNoZeroesIfNeeded(static_cast<long int>(site) + regoffset);
+        mapfile << ToString(place) + "\t" + resultsStrings[site] + "\t" + rawresultsStrings[site] + "\n";
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Locus::PrintVariability()
+{
+    for (map<long int, DoubleVec1d>::const_iterator marker = m_variability.begin();
+         marker != m_variability.end();
+         marker++)
+    {
+        string msg = "Position " + ToString(marker->first + 1) + ": ";
+        for (unsigned long int allele = 0; allele < marker->second.size(); allele++)
+        {
+            msg += ToString(marker->second[allele]) + ", ";
+        }
+        cerr << msg << endl;
+    }
+    cerr << "Overall, the variable sites are:  " << ToString(GetVariableRange()) << endl;
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Locus::PrintOnesAndZeroesForVariableSites()
+{
+    rangeset variability = GetVariableRange();
+    long int newl = 249;
+    cerr << "Variability: ";
+    for (long int site = 0; site < m_nsites; ++site)
+    {
+        if (variability == AddPairToRange(make_pair(site, site + 1), variability))
+        {
+            cerr << "1 ";
+        }
+        else
+        {
+            cerr << "0 ";
+        }
+        if (site == newl)
+        {
+            cerr << endl << "Variability: ";
+            newl += 250;
+        }
+    }
+    cerr << endl;
+}
+
+//____________________________________________________________________________________
diff --git a/src/datalike/locus.h b/src/datalike/locus.h
new file mode 100644
index 0000000..f1cd07c
--- /dev/null
+++ b/src/datalike/locus.h
@@ -0,0 +1,226 @@
+// $Id: locus.h,v 1.52 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/***************************************************************
+ The Locus class contains data information for one locus.
+
+ The LocusVec is a managed container of Locus objects which knows
+ that some represent "fixed" and others represent "floating" loci.
+
+ NB  This code distinguishes between "markers" and "sites".  A marker
+ is a site for which we have data.  In SNP data, for example, every base
+ pair is a site, but only the SNPs are markers.  Data likelihoods are
+ calculated on markers; recombination probabilities are calculated on
+ sites (links, actually).  Please keep these straight!
+
+ Locus written by Mary Kuhner 2002/07/24
+ 2004/09/15 Moved TipData class to its own file--Mary
+ 2004/09/15 Merged in functionality of class LocusLike--Mary
+ 2004/10/05 Added managed container class LocusVec--Mary
+****************************************************************/
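+
+// Example of the marker/site distinction (illustrative): a 1000-site stretch of
+// SNP data with variable positions at sites 12, 345 and 600 has 1000 sites but
+// only 3 markers; data likelihoods run over the 3 markers, while recombination
+// works on the 999 links between adjacent sites.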
+
+#ifndef LOCUS_H
+#define LOCUS_H
+
+#include <cassert>                      // May be needed for inline definitions.
+#include <map>
+#include <string>
+#include <vector>
+
+#include "types.h"
+#include "vectorx.h"
+#include "dlmodel.h"
+#include "dlcalc.h"
+#include "locuscell.h"
+#include "phenotypes.h"
+#include "tipdata.h"
+#include "ui_vars_traitmodels.h"
+
+//------------------------------------------------------------------------------------
+
+class Individual;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class Locus
+{
+    typedef std::map<force_type,std::string> tipidtype;
+
+  private:
+    long            m_index;            // position in vector
+    string          m_name;
+    long            m_nmarkers;
+    long            m_nsites;
+    long            m_regionalmapposition; // numbered from start of region
+    long            m_globalmapposition; // numbered in user's coordinate system
+    long            m_offset;
+    bool            m_movable;          // If possible, this should move to phase-1 only.
+    mloc_type       m_type;             // And this should be phase-2.
+    bool            m_defaultlocations; // phase-1
+    LongVec1d       m_positions;        // dim: markers
+    DataType_ptr    m_pDatatype;
+    DataModel_ptr   m_pDatamodel;
+    vector<TipData> m_tipdata;          // dim: tips (LS DEBUG:  phase 1)
+
+    LocusCell       m_protoCell;
+    DLCalc_ptr      m_pDLCalculator;
+    vector<LocusCell> m_tipcells;       // dim: tips  (LS DEBUG:  phase 2)
+    rangeset        m_allowedrange;
+    rangeset        m_variablerange;    // LS DEBUG SIM:  probably don't need later.
+    rangeset        m_unknownrange;     // LS DEBUG SIM:  probably don't need later.
+
+    DoubleVec1d     m_map;
+    DoubleVec1d     m_rawmap;
+
+    // For simulation purposes.
+    bool            m_simulate;
+    long            m_truesite;
+    std::map<long,DoubleVec1d> m_variability;
+    Phenotypes      m_phenotypes;       //Phase 2 (during phase 1, it lives elsewhere)
+
+    // Helper function for CalcNVariableMarkers().
+    long            CalcNVariableMarkers(tipidtype xpart) const;
+
+  public:
+
+    // Construction/destruction.
+    // We accept dtor, copy ctor and op=, though with some doubts about the latter two.
+    // Apparently this class is copied only to put it into a vector and the original is discarded,
+    // so sharing datatype and datamodel is not a problem.
+    Locus(long ind, bool movable, string name);
+
+    // Getters.
+    long          GetIndex()          const { return m_index; };
+    string        GetName()           const;
+    long          GetNmarkers()       const { return m_nmarkers; };
+    long          GetNsites()         const;
+    long     GetGlobalMapPosition()   const { return m_globalmapposition; };
+    long     GetRegionalMapPosition() const { return m_regionalmapposition; };
+    long          GetOffset()         const;
+    LongVec1d     GetMarkerLocations()     const { return m_positions; };
+    LongVec1d     GetUserMarkerLocations() const;
+    data_type     GetDataType()       const { return m_pDatatype->GetType(); };
+    DataType_ptr  GetDataTypePtr()    const { return m_pDatatype; };
+    DataModel_ptr GetDataModel()      const { return m_pDatamodel; };
+    double        GetMuRate()         const { return m_pDatamodel->GetRelMuRate(); };
+    DLCalc_ptr    GetDLCalc()         const { return m_pDLCalculator; };
+    LocusCell     GetProtoCell()      const { return m_protoCell; };
+    vector<LocusCell> GetTipCells()   const { return m_tipcells; };
+    vector<TipData>& GetTipData()           { return m_tipdata; };
+    const vector<TipData>& GetTipData() const { return m_tipdata; };
+    vector<TipData> GetPopulationTipData(const std::string& popname) const;
+    StringVec2d   GetCrossPartitionGeneticData(tipidtype xpart) const;
+    StringVec3d   GetPartitionGeneticData(force_type partname) const;
+    StringVec1d   GetMarkerDataWithLabels(const std::vector<Individual>&) const;// dim: ntips
+    long          GetNTips()          const { return m_tipdata.size(); };
+    long          GetNTips(tipidtype xpart) const;
+    rangeset      GetAllowedRange() const {return m_allowedrange;};
+    DoubleVec1d   GetMappingInfo() const {return m_map;};
+    DoubleVec1d   GetRawMappingInfo() const {return m_rawmap;};
+    bool          GetShouldSimulate() const {return m_simulate;};
+
+    // Helper for F84 Model base freqs setting, called by Region::CountNBases().
+    DoubleVec1d   CountNNucleotides() const;
+
+    bool          MultiSampleIndividuals() const;
+    bool          SiteInLocus(long site) const;
+    long          SiteToMarker(long site) const;
+    std::pair<long, long> GetSiteSpan() const;
+    std::pair<long, long> GetGlobalScaleSiteSpan() const;
+    bool          IsMovable() const;    //Phase 1
+    bool          IsMoving() const;     //Phase 2
+    bool          IsUsingDefaultLocations() const {return m_defaultlocations;};
+    mloc_type     GetAnalysisType() const {return m_type;};
+    double        GetPerBaseErrorRate() const;
+
+    // Setters.
+    void SetIndex(long index) {assert(m_movable); m_index = index;};
+    void SetName(string newname)           { m_name = newname; };
+    void SetNmarkers(long val);         // throws!
+    void SetNsites(long val)               { m_nsites = val; };
+    void SetGlobalMapPosition(long site);
+    void SetRegionalMapPosition(long site);
+    void SetOffset(long val);
+    void SetPositions(const LongVec1d& pos);
+    void SetPositions();                // set to default
+    void SetDataType(DataType_ptr dt);
+    void SetDataModelOnce(DataModel_ptr dm);
+    void SetTipData(const TipData& td)     { m_tipdata.push_back(td); };
+    void SetEmptyTipData(vector<TipData> td);
+    void SetTrueSite(long site)     { m_truesite = site; };
+    void SetAllowedRange(rangeset rs, long regoffset);
+    void SetMappingInfo(DoubleVec1d map) {m_map = map;};
+    void SetRawMappingInfo(DoubleVec1d rawmap) {m_rawmap = rawmap;};
+    void SetAnalysisType(mloc_type type);
+    void SetShouldSimulate(bool sim) {m_simulate = sim;};
+
+    void SetVariableRange(rangeset rs);
+    void SetPhenotypes(Phenotypes p) {m_phenotypes = p;};
+
+    // Workers.
+    void            Setup(const vector<Individual>& individuals);
+    LongVec1d       CalcNVariableMarkers() const; // dim: xparts
+    double          CalculateDataLikelihood(Tree& tree, bool moving) const;
+    void            AddUniqueNamesTo(std::set<string>& popnames) const;
+
+    // Helper function for Region::RemovePartitionFromLoci.
+    void            RemovePartitionFromTipDatas(force_type forcename);
+
+    // Validators.
+    bool            IsValidLocus(string& errorString) const;
+    StringVec1d     ReportMappingInfo(long regoffset, bool isshort=false) const;
+    bool            IsDuplicateTipName(const string& newname) const;
+
+    // XML.
+    StringVec1d MakeTraitXML(long nspaces, long regoffset) const;
+
+    // Simulation.
+    void SetNewMapPositionIfMoving();
+    void SimulateData(Tree& tree, long nsites);
+    void CopyDataFrom(Locus& original, Tree& tree);
+    void MakePhenotypesFor(IndVec& individuals);
+    void SaveState(DoubleVec1d& state, long marker, string label);
+    void RandomizeHalf(Tree& tree, bool swath);
+    void ClearVariability() {m_variability.clear();};
+
+    // Helper functions for simulations.
+    bool            IsNighInvariant() const;
+    bool            IsNighInvariant(long marker) const;
+    bool            IsCompletelyInvariant(long marker) const;
+    long            ChooseVariableSiteFrom(rangeset rset);
+    rangeset        GetVariableRange();
+    rangeset        CalculateVariableRange() const;
+    rangeset        CalculateCompleteVariableRange() const;
+    rangeset        GetVariableAndUnknownRange() const;
+    long            GetVariabilityOfUnknownRange() const;
+    long            GetVariabilityOfKnownRange() const;
+    rangeset        GetKnownRange() const;
+    void            ClearUnknownRange() {m_unknownrange.clear();};
+    StringVec1d     CreateDataModelReport(string regionname) const;
+    void            WriteMapping(string regname, long regoffset) const;
+
+#if 0  // LS DEBUG SIM  Hard-coded information output
+    rangeset        GetUnknownRange() const {return m_unknownrange;};
+#endif
+
+    // Debugging functions.
+    void            PrintVariability();
+    void            PrintOnesAndZeroesForVariableSites();
+
+    // For XML reporting.
+    std::set<std::pair<double, long int> >  MakeOrderedSites(long int regoffset) const;
+    rangeset        GetTopSites (std::set<std::pair<double, long int> > orderedsites, double percLimit) const;
+    rangeset        GetBestSites(std::set<std::pair<double, long int> > orderedsites) const;
+};
+
+#endif // LOCUS_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/locuscell.cpp b/src/datalike/locuscell.cpp
new file mode 100644
index 0000000..2f12302
--- /dev/null
+++ b/src/datalike/locuscell.cpp
@@ -0,0 +1,112 @@
+// $Id: locuscell.cpp,v 1.13 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include "locuscell.h"
+
+//------------------------------------------------------------------------------------
+
+LocusCell::LocusCell(const vector<Cell_ptr>& src)
+{
+    DeepCopyCells(src);
+} // LocusCell ctor
+
+//------------------------------------------------------------------------------------
+
+LocusCell::LocusCell(const LocusCell& src)
+{
+    DeepCopyCells(src.dlcells);
+} // LocusCell copy ctor
+
+//------------------------------------------------------------------------------------
+
+LocusCell::LocusCell(const vector<LocusCell>& src)
+{
+    //For use when constructing a LocusCell from haplotypes, which come
+    // in a marker-by-marker vector, and need to be concatenated.
+    vector<Cell_ptr> src_ptrs;
+    for (unsigned long marker=0; marker<src.size(); marker++)
+    {
+        assert(src.size() == 1);
+        assert(src[marker].size() == 1);  // Each per-marker LocusCell holds exactly one cell.
+    }
+    DeepCopyCells(src_ptrs);
+} // LocusCell haplotype-concatenation ctor
+
+//------------------------------------------------------------------------------------
+
+LocusCell& LocusCell::operator=(const LocusCell& src)
+{
+    DeepCopyCells(src.dlcells);
+    return *this;
+} // operator=
+
+//------------------------------------------------------------------------------------
+
+LocusCell& LocusCell::operator+=(const LocusCell& src)
+{
+    assert(dlcells.size() == src.dlcells.size());
+    for (size_t cell=0; cell<dlcells.size(); cell++)
+    {
+        (*dlcells[cell]).AddTo(src.dlcells[cell]);
+    }
+    return *this;
+} // operator+=
+
+//------------------------------------------------------------------------------------
+
+LocusCell& LocusCell::operator*=(double mult)
+{
+    for (size_t cell=0; cell<dlcells.size(); cell++)
+    {
+        (*dlcells[cell]).MultiplyBy(mult);
+    }
+    return *this;
+} // operator*=
+
+//------------------------------------------------------------------------------------
+
+bool LocusCell::operator==(const LocusCell& src) const
+{
+    unsigned long cell;
+    for (cell = 0; cell < dlcells.size(); ++cell)
+    {
+        if (dlcells[cell]->DiffersFrom(src.dlcells[cell]) != FLAGLONG)
+            return false;
+    }
+    return true;
+} // operator==
+
+//------------------------------------------------------------------------------------
+
+void LocusCell::DeepCopyCells(const vector<Cell_ptr>& src)
+{
+    dlcells.clear();
+    unsigned long cell;
+    for (cell = 0; cell < src.size(); ++cell)
+    {
+        dlcells.push_back(Cell_ptr(src[cell]->Copy()));
+    }
+
+} // DeepCopyCells
+
+//------------------------------------------------------------------------------------
+
+bool LocusCell::DiffersFrom(const LocusCell& other, long marker) const
+{
+    return !dlcells[0]->IsSameAs(other.dlcells[0],marker);
+} // DiffersFrom
+
+void LocusCell::SetAllCategoriesTo(DoubleVec1d& state, long marker)
+{
+    dlcells[0]->SetAllCategoriesTo(state, marker);
+}
+
+//____________________________________________________________________________________
diff --git a/src/datalike/locuscell.h b/src/datalike/locuscell.h
new file mode 100644
index 0000000..59ed3ad
--- /dev/null
+++ b/src/datalike/locuscell.h
@@ -0,0 +1,62 @@
+// $Id: locuscell.h,v 1.9 2011/03/07 06:08:49 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*************************************************************************
+ This class is a managed container of Cells belonging to a single
+ locus.  It needs to be managed because Cells generally have to be
+ deep copied.
+
+ Written by Mary Kuhner
+*************************************************************************/
+
+#ifndef LOCUSCELL_H
+#define LOCUSCELL_H
+
+#include <cassert>  // May be needed for inline definitions.
+#include <vector>
+#include "dlcell.h"
+
+//------------------------------------------------------------------------------------
+
+class LocusCell
+{
+  private:
+    std::vector<Cell_ptr> dlcells;
+
+    void DeepCopyCells(const std::vector<Cell_ptr>& src);
+
+  public:
+    LocusCell()                                   {};
+    LocusCell(const std::vector<Cell_ptr>& src);
+    LocusCell(const LocusCell& src);
+    LocusCell(const vector<LocusCell>& src);
+    ~LocusCell()                                  {};
+
+    LocusCell&     operator=(const LocusCell& src);
+    LocusCell&     operator+=(const LocusCell& src);
+    LocusCell&     operator*=(double mult);
+    bool           operator==(const LocusCell& src) const;
+    unsigned long  size() const             { return dlcells.size(); };
+    void           clear()                  { dlcells.clear(); };
+
+    Cell_ptr       operator[](long ind)
+    { assert(static_cast<unsigned long>(ind) < dlcells.size()); return dlcells[ind]; };
+
+    const Cell_ptr operator[](long ind) const
+    { assert(static_cast<unsigned long>(ind) < dlcells.size()); return dlcells[ind]; };
+
+    bool           DiffersFrom(const LocusCell& other, long marker) const;
+    void           SetAllCategoriesTo(DoubleVec1d& state, long marker);
+};
+
+#endif // LOCUSCELL_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/phenotypes.cpp b/src/datalike/phenotypes.cpp
new file mode 100644
index 0000000..bddb596
--- /dev/null
+++ b/src/datalike/phenotypes.cpp
@@ -0,0 +1,203 @@
+// $Id: phenotypes.cpp,v 1.5 2011/03/07 06:08:49 bobgian Exp $
+
+/*
+  Copyright 2006  Lucian Smith, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "errhandling.h"
+#include "locuscell.h" //Have to include this to use Haplotypes
+#include "mathx.h"
+#include "phenotypes.h"
+#include "registry.h"
+#include "stringx.h"
+#include "vectorx.h"
+#include "xml_strings.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+typedef map<multiset<string>, pair<StringVec1d, DoubleVec1d> >::iterator PhenMapIter;
+typedef map<multiset<string>, pair<StringVec1d, DoubleVec1d> >::const_iterator PhenMapConstIter;
+
+//------------------------------------------------------------------------------------
+
+Phenotypes::Phenotypes(long regnum, string lname)
+    : m_regionnum(regnum),
+      m_locusname(lname),
+      m_phenomap(),
+      m_hapsmap()
+{
+}
+
+//Needed a blank copy in the locus--we replace it if we have a real one.
+//
+//Boy howdy does this all need to be refactored.  Whew, it stinks.
+Phenotypes::Phenotypes(string lname)
+    : m_regionnum(FLAGLONG),
+      m_locusname(lname),
+      m_phenomap(),
+      m_hapsmap()
+{
+}
+
+void Phenotypes::AddPhenotype(const StringVec1d& alleles, string name, double penetrance)
+{
+    assert (m_regionnum != FLAGLONG);
+    multiset<string> alleleSet = VecToSet(alleles);
+    PhenMapIter genotype = m_phenomap.find(alleleSet);
+    if (genotype == m_phenomap.end())
+    {
+        StringVec1d names;
+        DoubleVec1d penetrances;
+        m_phenomap.insert(make_pair(alleleSet, make_pair(names, penetrances)));
+        genotype = m_phenomap.find(alleleSet);
+        assert(genotype != m_phenomap.end());
+    }
+    (*genotype).second.first.push_back(name);
+    (*genotype).second.second.push_back(penetrance);
+
+}
+
+Haplotypes Phenotypes::ChooseHaplotypes(const StringVec1d& alleles)
+{
+    multiset<string> alleleSet = VecToSet(alleles);
+    PhenMapIter genotype = m_phenomap.find(alleleSet);
+    if (genotype == m_phenomap.end())
+    {
+        string msg = "Unable to find any phenotypes for the set of alleles \"";
+        for (size_t allele = 0; allele<alleles.size(); allele++)
+        {
+            msg += " " + alleles[allele];
+        }
+        msg += " \".";
+        throw data_error(msg);
+    }
+    DoubleVec1d penetrances = (*genotype).second.second; //for convenience
+    if (penetrances.size() == 1)
+    {
+        return GetHaplotypes((*genotype).second.first[0]);
+    }
+    double rand = registry.GetRandom().Float();
+    double sum = 0;
+    for (size_t which=0; which < penetrances.size(); which++)
+    {
+        sum += penetrances[which];
+        if (rand < sum)
+        {
+            return GetHaplotypes((*genotype).second.first[which]);
+        }
+    }
+    //Just in case
+    return GetHaplotypes(*(*genotype).second.first.rbegin());
+}
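+// Worked example (illustrative names and values): if the genotype {A, a} maps
+// to phenotypes ("affected", 0.8) and ("unaffected", 0.2), a uniform draw of
+// 0.55 falls below the running sum of 0.8, so the haplotypes registered for
+// "affected" are returned.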
+
+Haplotypes Phenotypes::GetHaplotypes(string name)
+{
+    if (m_hapsmap.size() == 0)
+    {
+        MakeHaplotypes();
+    }
+    map<string, Haplotypes>::iterator hap = m_hapsmap.find(name);
+    if (hap == m_hapsmap.end())
+    {
+        throw data_error("Unable to find a set of haplotypes for the phenotype named " + name + ".");
+    }
+    return (*hap).second;
+}
+
+StringVec1d Phenotypes::GetPhenotypesXML(long nspaces) const
+{
+    StringVec1d xmllines;
+    if (m_phenomap.size() == 0)
+    {
+        return xmllines;
+    }
+    string line = MakeIndent(MakeTag(xmlstr::XML_TAG_PHENOTYPES), nspaces);
+    xmllines.push_back(line);
+    nspaces += INDENT_DEPTH;
+
+    for (PhenMapConstIter genotype=m_phenomap.begin(); genotype != m_phenomap.end(); genotype++)
+    {
+        line = MakeIndent(MakeTag(xmlstr::XML_TAG_GENOTYPE), nspaces);
+        xmllines.push_back(line);
+        nspaces += INDENT_DEPTH;
+
+        line = MakeTag(xmlstr::XML_TAG_ALLELES) + " "
+            + ToString(MultisetElemToString((*genotype).first))
+            + MakeCloseTag(xmlstr::XML_TAG_ALLELES);
+        xmllines.push_back(MakeIndent(line, nspaces));
+
+        for (size_t phenotype=0; phenotype<(*genotype).second.second.size(); phenotype++)
+        {
+            line = MakeIndent(MakeTag(xmlstr::XML_TAG_PHENOTYPE), nspaces);
+            xmllines.push_back(line);
+            nspaces += INDENT_DEPTH;
+
+            line = MakeTag(xmlstr::XML_TAG_PHENOTYPE_NAME) + " "
+                + (*genotype).second.first[phenotype] + " "
+                + MakeCloseTag(xmlstr::XML_TAG_PHENOTYPE_NAME);
+            xmllines.push_back(MakeIndent(line, nspaces));
+
+            line = MakeTag(xmlstr::XML_TAG_PENETRANCE) + " "
+                + ToString((*genotype).second.second[phenotype]) + " "
+                + MakeCloseTag(xmlstr::XML_TAG_PENETRANCE);
+            xmllines.push_back(MakeIndent(line, nspaces));
+
+            nspaces -= INDENT_DEPTH;
+            line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_PHENOTYPE), nspaces);
+            xmllines.push_back(line);
+        }
+
+        nspaces -= INDENT_DEPTH;
+        line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_GENOTYPE), nspaces);
+        xmllines.push_back(line);
+    }
+    nspaces -= INDENT_DEPTH;
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_PHENOTYPES), nspaces);
+    xmllines.push_back(line);
+    return xmllines;
+}
+
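+// Convert m_phenomap into m_hapsmap:  each genotype's penetrances are first
+// rescaled to sum to one, then every (genotype, phenotype) pair is added to
+// the Haplotypes object for that phenotype name, creating it if necessary.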
+void Phenotypes::MakeHaplotypes()
+{
+    for(PhenMapIter genotype=m_phenomap.begin(); genotype != m_phenomap.end(); genotype++)
+    {
+        ScaleToSumToOne((*genotype).second.second);
+        assert((*genotype).second.first.size() == (*genotype).second.second.size());
+        for (size_t phenNum=0; phenNum<(*genotype).second.second.size(); phenNum++)
+        {
+            multiset<string> alleles = (*genotype).first;
+            string phenName = (*genotype).second.first[phenNum];
+            double penetrance = (*genotype).second.second[phenNum];
+
+            map<string, Haplotypes>::iterator hap = m_hapsmap.find(phenName);
+            if (hap == m_hapsmap.end())
+            {
+                Haplotypes newhaps(m_regionnum, m_locusname);
+                m_hapsmap.insert(make_pair(phenName, newhaps));
+                hap = m_hapsmap.find(phenName);
+                assert(hap != m_hapsmap.end());
+            }
+            (*hap).second.AddHaplotype(alleles, penetrance);
+        }
+    }
+}
+
+multiset<string> Phenotypes::VecToSet(StringVec1d vec)
+{
+    multiset<string> retset;
+    for (size_t str=0; str<vec.size(); str++)
+    {
+        retset.insert(vec[str]);
+    }
+    return retset;
+}
+
+//____________________________________________________________________________________
diff --git a/src/datalike/phenotypes.h b/src/datalike/phenotypes.h
new file mode 100644
index 0000000..53980a1
--- /dev/null
+++ b/src/datalike/phenotypes.h
@@ -0,0 +1,64 @@
+// $Id: phenotypes.h,v 1.5 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2006  Lucian Smith, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef PHENOTYPES_H
+#define PHENOTYPES_H
+
+#include <string>
+#include <vector>
+#include <set>
+#include <map>
+
+#include "haplotypes.h"
+
+class Phenotypes
+{
+  private:
+    Phenotypes(); //undefined
+    //The parent.  Needed to give to the haplotypes to make DLCells.
+    long m_regionnum;
+    string m_locusname;
+
+    // m_phenomap is a map of a set of alleles (as strings) to a vector of
+    //  phenotype names and a vector of phenotype penetrances.  This is stored
+    //  as a pair of vectors instead of a vector of pairs so that we can take
+    //  the vector of penetrances and scale it to sum to one before using it
+    //  to create actual haplotypes.
+    //
+    // So, to sum up:  map<alleles, pair<NameVector, PenetranceVector> >
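+    //
+    // For illustration only (hypothetical allele and phenotype names): if
+    // AddPhenotype() is called twice for the allele pair {A, a}, once with
+    // ("affected", 0.8) and once with ("unaffected", 0.2), this map holds a
+    // single entry keyed on the multiset {A, a} whose value is the pair
+    // ({"affected", "unaffected"}, {0.8, 0.2}).  MakeHaplotypes() later
+    // rescales each penetrance vector to sum to one.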
+    std::map<std::multiset<std::string>, std::pair<std::vector<std::string>, std::vector<double> > > m_phenomap;
+
+    // m_hapsmap is a map of phenotype names to a Haplotypes object.  When we
+    // find out that an individual has a particular phenotype, we give it
+    // the Haplotypes object (this will normally happen due to simulation)
+    std::map<std::string, Haplotypes> m_hapsmap;
+
+    //Private function to convert the m_phenomap into m_hapsmap.
+    void MakeHaplotypes();
+    std::multiset<std::string> VecToSet(StringVec1d vec);
+
+  public:
+    Phenotypes(long regionnum, string lname);
+    Phenotypes(string lname); //A blank copy.
+    ~Phenotypes() {};
+    //We accept the default for:
+    //Phenotypes& operator=(const Phenotypes& src);
+    //Phenotypes(const Phenotypes& src);
+
+    void AddPhenotype(const StringVec1d& alleles, string name, double penetrance);
+    Haplotypes ChooseHaplotypes(const StringVec1d& alleles);
+    Haplotypes GetHaplotypes(string phenotypeName);
+    StringVec1d GetPhenotypesXML(long nspaces) const;
+    bool AnyDefinedPhenotypes() const {return (m_phenomap.size()>0);};
+};
+
+#endif // PHENOTYPES_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/region.cpp b/src/datalike/region.cpp
new file mode 100644
index 0000000..06171da
--- /dev/null
+++ b/src/datalike/region.cpp
@@ -0,0 +1,1299 @@
+// $Id: region.cpp,v 1.93 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <fstream>                      // for Region::WriteFlucFile
+#include <functional>                   // for Region::CountNBases()
+#include <iostream>                     // jmdbg
+#include <numeric>                      // for std::accumulate
+
+#include "argtree.h"
+#include "branch.h"
+#include "collector.h"
+#include "constants.h"
+#include "datatype.h"
+#include "dlmodel.h"
+#include "errhandling.h"
+#include "force.h"                      // for Region::CreateTree() for recombination && localpartitionforce combos
+#include "mathx.h"
+#include "region.h"
+#include "registry.h"
+#include "runreport.h"
+#include "stringx.h"                    // for MakeTag()/MakeCloseTag() in Region::ToXML()
+#include "toxml.h"                      // for SampleXML/IndividualXML/PopulationXML in Region::MakePopXML()
+#include "tree.h"
+#include "ui_constants.h"               // for uiconst::GLOBAL_ID
+#include "ui_vars_forces.h"             // for getLegalForce
+#include "xml_strings.h"                // for Region::ToXML()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+bool Region::RecombinationCanBeEstimated() const
+{
+    long loc;
+    long nvar(0);
+    // we only have primitives to get a list by xpart; we just sum it
+    for(loc = 0; loc < GetNloci(); ++loc) {
+       LongVec1d nvarmarkers = m_loci[loc].CalcNVariableMarkers();
+       nvar += std::accumulate(nvarmarkers.begin(),nvarmarkers.end(),0L);
+    }
+    if (nvar > 1) return true;
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+bool Region::MultiSampleIndividuals() const
+{
+    return m_loci[0].MultiSampleIndividuals();
+}
+
+//------------------------------------------------------------------------------------
+
+Individual& Region::GetIndividual(long n)
+{
+    assert(0 <= n && n < static_cast<long>(m_individuals.size()));
+    return m_individuals[n];
+} // GetIndividual non-const
+
+bool Region::ValidLocus(long locus) const
+{
+    if (locus == uiconst::GLOBAL_ID) return true;
+    return locus >= 0 && locus < static_cast<long>(m_loci.size());
+}
+
+bool Region::ValidMovingLocus(long locus) const
+{
+    return locus >= 0 && locus < static_cast<long>(m_movingloci.size());
+}
+
+//------------------------------------------------------------------------------------
+
+void Region::AddLocus(bool movable, string name)
+{
+    Locus locus(m_loci.size(), movable, name);
+    m_loci.push_back(locus);
+} // AddLocus
+
+//------------------------------------------------------------------------------------
+
+void Region::InitializeRegionalMapPositionsUsingGlobalMapPositions()
+{
+    vector<long> gmaps;
+    vector<Locus>::iterator locus;
+    // first do the normal loci
+    for(locus = m_loci.begin(); locus != m_loci.end(); ++locus)
+    {
+        gmaps.push_back(locus->GetGlobalMapPosition());
+    }
+    long regionstart(*min_element(gmaps.begin(),gmaps.end()));
+
+    for(locus = m_loci.begin(); locus != m_loci.end(); ++locus)
+    {
+        long newregionalpos(locus->GetGlobalMapPosition());
+        newregionalpos -= regionstart;
+        locus->SetRegionalMapPosition(newregionalpos);
+    }
+
+    // now do the moving loci
+    if (!m_movingloci.empty())          // don't try if there are none!
+    {
+        gmaps.clear();
+        for(locus = m_movingloci.begin(); locus != m_movingloci.end(); ++locus)
+        {
+            gmaps.push_back(locus->GetGlobalMapPosition());
+        }
+        regionstart = (*min_element(gmaps.begin(),gmaps.end()));
+
+        for(locus = m_movingloci.begin(); locus != m_movingloci.end(); ++locus)
+        {
+            long newregionalpos(locus->GetGlobalMapPosition());
+            newregionalpos -= regionstart;
+            locus->SetRegionalMapPosition(newregionalpos);
+        }
+    }
+
+}
+
+//------------------------------------------------------------------------------------
+
+void Region::SetupAndMoveAllLoci()
+{
+    for (unsigned long ind=0; ind<m_individuals.size(); ind++)
+    {
+        m_individuals[ind].RandomizeAllHaplotypes();
+    }
+
+    MakeAllMovableLociMove();
+
+    // Set up the correspondence between regional and global positions
+    // (This must be done before Setup, since it's needed to make
+    // a healthy DLCalculator object).
+    InitializeRegionalMapPositionsUsingGlobalMapPositions();
+
+    // Setup the non-moving loci
+    for (unsigned long loc = 0; loc < m_loci.size(); ++loc)
+    {
+        m_loci[loc].Setup(m_individuals);
+    }
+
+    // Setup the moving loci
+    for (unsigned long mloc=0; mloc<m_movingloci.size(); ++mloc)
+    {
+        m_movingloci[mloc].Setup(m_individuals);
+        m_movingloci[mloc].SetIndex(mloc);
+    }
+
+}
+
+//------------------------------------------------------------------------------------
+
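+// Partition the loci:  every locus flagged movable is removed from m_loci
+// and appended to m_movingloci; fixed loci stay put.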
+void Region::MakeAllMovableLociMove()
+{
+    for (vector<Locus>::iterator locus = m_loci.begin();
+         locus != m_loci.end();)
+    {
+        if (locus->IsMovable())
+        {
+            m_movingloci.push_back(*locus);
+            locus = m_loci.erase(locus);
+        }
+        else
+        {
+            ++locus;
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Region::RevertMovingLoci()
+{
+    for (vector<Locus>::iterator locus = m_movingloci.begin(); locus != m_movingloci.end(); ++locus)
+    {
+        m_loci.push_back(*locus);
+    }
+    m_movingloci.clear();
+}
+
+//------------------------------------------------------------------------------------
+
+void Region::SetNmarkers(long locus, long n)
+{
+    assert(ValidLocus(locus));
+    m_loci[locus].SetNmarkers(n);
+} // Region::SetNmarkers
+
+//------------------------------------------------------------------------------------
+
+void Region::SetGlobalMapPosition(long locus, long n)
+{
+    assert(ValidLocus(locus));
+    m_loci[locus].SetGlobalMapPosition(n);
+} // Region::SetGlobalMapPosition
+
+//------------------------------------------------------------------------------------
+
+void Region::SetName(long locus, string name)
+{
+    assert(ValidLocus(locus));
+    m_loci[locus].SetName(name);
+} // Region::SetName
+
+//------------------------------------------------------------------------------------
+
+void Region::SetOffset(long locus, long n)
+{
+    assert(ValidLocus(locus));
+    m_loci[locus].SetOffset(n);
+} // Region::SetOffset
+
+//------------------------------------------------------------------------------------
+
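+// Forward user-supplied marker positions to the locus, after checking that
+// they are all non-negative and mutually distinct.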
+void Region::SetPositions(long locus, const LongVec1d& pos)
+{
+    assert(ValidLocus(locus));
+    unsigned long index;
+    for(index = 0; index < pos.size(); index++)
+    {
+        if(pos[index] < 0)
+        {
+            data_error e("Marker location must be >= 0");
+            throw e;
+        }
+    }
+    LongVec1d newpos(pos);
+    sort(newpos.begin(), newpos.end());
+    LongVec1d::iterator it = unique(newpos.begin(), newpos.end());
+    if (it != newpos.end())
+    {
+        // unique removed some elements, so they were not all unique!
+        data_error e("Duplicate locations in marker location list");
+        throw e;
+    }
+
+    m_loci[locus].SetPositions(pos);
+} // Region::SetPositions
+
+//------------------------------------------------------------------------------------
+
+void Region::SetPositions(long locus)
+{
+    assert(ValidLocus(locus));
+    m_loci[locus].SetPositions();
+} // Region::SetPositions default form
+
+//------------------------------------------------------------------------------------
+
+void Region::SetNsites(long locus, long n)
+{
+    assert(ValidLocus(locus));
+    if(n < 1)
+    {
+        assert(false);
+        data_error e("Number of sites in region must be > 0");
+        throw e;
+    }
+    m_loci[locus].SetNsites(n);
+} // Region::SetNsites
+
+//------------------------------------------------------------------------------------
+
+void Region::SetDataType(long locus, const DataType_ptr dt)
+{
+    assert(ValidLocus(locus));
+    m_loci[locus].SetDataType(dt);
+
+} // SetDataType
+
+//------------------------------------------------------------------------------------
+
+void Region::SetTipData(long locus, const TipData& td)
+{
+    assert(ValidLocus(locus));
+    m_loci[locus].SetTipData(td);
+} // SetTipData
+
+//------------------------------------------------------------------------------------
+
+void Region::SetPhaseMarkers(long indnum, LongVec2d& phasesites)
+{
+    assert(static_cast<size_t>(indnum) < m_individuals.size());
+    LongVec2d allphasemarkers;
+    for (size_t locus = 0; locus < phasesites.size(); ++locus)
+    {
+        LongVec1d phasemarkers;
+        for (size_t sitenum = 0; sitenum<phasesites[locus].size(); ++sitenum)
+        {
+            long site = phasesites[locus][sitenum];
+            long marker = m_loci[locus].SiteToMarker(site);
+            if (marker==FLAGLONG)
+            {
+                throw data_error("Site "
+                                 + ToString(site + GetOffset(locus))
+                                 + " in locus "
+                                 + ToString(locus+1)
+                                 + " does not have a marker associated with it,"
+                                 " and may therefore not be set 'phase unknown'.");
+            }
+            phasemarkers.push_back(marker);
+        }
+        allphasemarkers.push_back(phasemarkers);
+    }
+
+    m_individuals[indnum].SetPhaseMarkers(allphasemarkers);
+    m_individuals[indnum].SetPhaseSites(phasesites);
+}
+
+//------------------------------------------------------------------------------------
+
+LongVec1d Region::CalcNVariableMarkers() const
+{
+    LongVec1d total(m_loci[0].CalcNVariableMarkers());
+    vector<Locus>::const_iterator loc;
+    for(loc = m_loci.begin(), ++loc; loc != m_loci.end(); ++loc)
+    {
+        transform(total.begin(),total.end(),
+                  loc->CalcNVariableMarkers().begin(),total.begin(),
+                  plus<long>());
+    }
+    return total;
+
+} // Region::CalcNVariableMarkers
+
+//------------------------------------------------------------------------------------
+
+bool Region::HasLocus(string lname) const
+{
+    for (size_t locus = 0; locus < m_loci.size(); ++locus)
+    {
+        if (m_loci[locus].GetName() == lname) return true;
+    }
+    for (size_t locus = 0; locus < m_movingloci.size(); ++locus)
+    {
+        if (m_movingloci[locus].GetName() == lname) return true;
+    }
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+long Region::GetNumAllLoci() const
+{
+    return (m_loci.size() + m_movingloci.size());
+}
+
+//------------------------------------------------------------------------------------
+
+long Region::GetNumFixedLoci() const
+{
+    long numFixed = 0;
+    for (size_t locus = 0; locus < m_loci.size(); ++locus)
+    {
+        if ( ! (m_loci[locus].IsMovable())) ++numFixed;
+    }
+    return numFixed;
+}
+
+//------------------------------------------------------------------------------------
+
+long Region::GetLocusIndex(string lname) const
+{
+    for (size_t locus = 0; locus < m_loci.size(); ++locus)
+    {
+        if (m_loci[locus].GetName() == lname) return locus;
+    }
+    if (HasLocus(lname))
+    {
+        assert(false);
+        throw implementation_error("GetLocusIndex should only be used in phase one.");
+    }
+    return FLAGLONG;
+}
+
+//------------------------------------------------------------------------------------
+
+const Locus& Region::GetLocus(string lname) const
+{
+    for (size_t locus = 0; locus < m_movingloci.size(); ++locus)
+    {
+        if (m_movingloci[locus].GetName() == lname) return m_movingloci[locus];
+    }
+    for (size_t locus = 0; locus < m_loci.size(); ++locus)
+    {
+        if (m_loci[locus].GetName() == lname) return m_loci[locus];
+    }
+    throw implementation_error("GetLocus(name) called on a nonexistent locus.");
+}
+
+//------------------------------------------------------------------------------------
+// Note that this number includes sites within all Loci in the region plus sites in all inter-locus
+// areas, for which recombination must be modeled even though we have no data for these sites.
+//
+// RSGFIXUP:  Range::s_numRegionSites is computed here.  It seems to be the same value as Tree::m_totalSites.
+// Either merge the two variables or guarantee that they track each other (or test with ASSERT that they do).
+
+long Region::GetNumSites() const
+{
+    //LS DEBUG MAPPING  This assumes that there is no space to the right of the right-most, non-moving locus.
+    long nsites = 0;
+
+    for (size_t locus = 0; locus < m_loci.size(); ++locus)
+    {
+        if (!m_loci[locus].IsMovable())
+        {
+            nsites = max(nsites, m_loci[locus].GetRegionalMapPosition() + m_loci[locus].GetNsites());
+        }
+    }
+    return nsites;
+}
+
+//------------------------------------------------------------------------------------
+
+rangepair Region::GetSiteSpan() const
+{
+    //We want the labels of the first and last included site.
+    long first = m_loci[0].GetGlobalMapPosition();
+    long last = first + m_loci[0].GetNsites() - 1;
+
+    for (size_t locus = 1; locus < m_loci.size(); ++locus)
+    {
+        if (!m_loci[locus].IsMovable())
+        {
+            long position1 = m_loci[locus].GetGlobalMapPosition();
+            long position2 = position1 + m_loci[locus].GetNsites();
+            first = min(first, position1);
+            last = max(last, position2);
+        }
+    }
+    return make_pair(first, last);
+}
+
+//------------------------------------------------------------------------------------
+
+bool Region::HasUserTree() const
+{
+    if (m_usertree->Exists())
+    {
+        return true;
+    }
+    else if (m_argedges.size() > 0)
+    {
+        return true;
+    }
+    else
+    {
+        return false;
+    }
+} // HasUserTree
+
+//------------------------------------------------------------------------------------
+
+void Region::MakeUserTree(Tree* treetips)
+{
+    if (m_usertree->Exists())
+    {
+        m_usertree->ToLamarcTree(*treetips);
+    }
+    else if (m_argedges.size() > 0)
+    {
+        ARGTree* argtree = new ARGTree();
+        argtree->ToLamarcTree(*treetips, m_argedges);
+    }
+} // Region::MakeUserTree
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Region::ToXML(unsigned long nspaces) const
+{
+    StringVec1d xmllines;
+    string line = MakeIndent(MakeTagWithName(xmlstr::XML_TAG_REGION,GetRegionName()),nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+    vector<Locus>::const_iterator loc;
+    for(loc = m_loci.begin(); loc != m_loci.end(); ++loc)
+    {
+        StringVec1d dlmodelxml(loc->GetDataModel()->ToXML(nspaces));
+        xmllines.insert(xmllines.end(),dlmodelxml.begin(),dlmodelxml.end());
+    }
+
+    string mytag = MakeTag(xmlstr::XML_TAG_EFFECTIVE_POPSIZE);
+    line = MakeIndent(mytag,nspaces) + ToString(m_effpopsize) +
+        MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+#if 0  // JDEBUG -- if (user_specified_tree)
+    line = MakeIndent(MakeTag(xmlstr::XML_TAG_TREE),nspaces)
+        xmllines.push_back(line);
+    nspaces += INDENT_DEPTH;
+    NewickConverter nc;
+    string outtree = nc.LamarcTreeToNewickString(a_lamarc_tree_reference);
+    OR
+        NewickTree nt(stored_user_newick_tree);
+    string outtree = nt.Need_To_Write_Dump_Of_Newick_Format_Function();
+    line = MakeIndent(outtree,nspaces);
+    xmllines.push_back(line);
+    nspaces -= INDENT_DEPTH;
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_TREE),nspaces)
+        xmllines.push_back(line);
+#endif // 0
+
+    line = MakeIndent(MakeTag(xmlstr::XML_TAG_SPACING),nspaces);
+    xmllines.push_back(line);
+    nspaces += INDENT_DEPTH;
+
+    unsigned long locus, nloci(m_loci.size());
+    for(locus = 0; locus < nloci; ++locus)
+    {
+        line = MakeIndent(MakeTagWithName(xmlstr::XML_TAG_BLOCK,m_loci[locus].GetName()),nspaces);
+        xmllines.push_back(line);
+        nspaces += INDENT_DEPTH;
+        mytag = MakeTag(xmlstr::XML_TAG_MAP_POSITION);
+        line = MakeIndent(mytag,nspaces) + ToString(GetGlobalMapPosition(locus)) + MakeCloseTag(mytag);
+        xmllines.push_back(line);
+        mytag = MakeTag(xmlstr::XML_TAG_LENGTH);
+        line = MakeIndent(mytag,nspaces) + ToString(GetLocusNsites(locus)) + MakeCloseTag(mytag);
+        xmllines.push_back(line);
+        if (!m_loci[locus].IsUsingDefaultLocations())
+        {
+            mytag = MakeTag(xmlstr::XML_TAG_LOCATIONS);
+            line = MakeIndent(mytag,nspaces) + ToString(GetUserMarkerLocations(locus))
+                + MakeCloseTag(mytag);
+            xmllines.push_back(line);
+        }
+        mytag = MakeTag(xmlstr::XML_TAG_OFFSET);
+        line = MakeIndent(mytag,nspaces) + ToString(GetOffset(locus)) + MakeCloseTag(mytag);
+        xmllines.push_back(line);
+        nspaces -= INDENT_DEPTH;
+        line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_BLOCK),nspaces);
+        xmllines.push_back(line);
+    }
+
+    nspaces -= INDENT_DEPTH;
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_SPACING),nspaces);
+    xmllines.push_back(line);
+
+    StringVec1d traits(MakeTraitsXML(nspaces));
+    xmllines.insert(xmllines.end(), traits.begin(), traits.end());
+
+    nspaces -= INDENT_DEPTH;
+    //cerr << "in Region::ToXML calling Region::ToXML" << endl; //jmdbg
+    StringVec1d populations(MakePopXML(nspaces));
+    xmllines.insert(xmllines.end(),populations.begin(),populations.end());
+
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_REGION),nspaces);
+    xmllines.push_back(line);
+
+    return xmllines;
+} // Region::ToXML
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Region::MakeTraitsXML(unsigned long nspaces) const
+{
+    StringVec1d finalxml;
+    StringVec1d xmllines;
+
+    long regoffset = GetSiteSpan().first;
+    for (size_t mlnum = 0; mlnum < m_movingloci.size(); ++mlnum)
+    {
+        nspaces += INDENT_DEPTH;
+        StringVec1d locxml = m_movingloci[mlnum].MakeTraitXML(nspaces, regoffset);
+        nspaces -= INDENT_DEPTH;
+        xmllines.insert(xmllines.end(), locxml.begin(), locxml.end());
+    }
+    if (xmllines.size() > 0)
+    {
+        string line = MakeIndent(MakeTag(xmlstr::XML_TAG_TRAITS),nspaces);
+        finalxml.push_back(line);
+        finalxml.insert(finalxml.end(), xmllines.begin(), xmllines.end());
+        line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_TRAITS),nspaces);
+        finalxml.push_back(line);
+    }
+    return finalxml;
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Region::MakePopXML(unsigned long nspaces) const
+{
+    StringVec1d xmllines;
+
+    vector<PopulationXML> pops(MakeVectorOfPopulationXML());
+    //cerr << "in Region::MakePopXML pops.size: " << pops.size() << endl; //jmdbg
+
+    vector<PopulationXML>::iterator population;
+    for(population = pops.begin(); population != pops.end(); ++population)
+    {
+        StringVec1d pxml(population->ToXML(nspaces));
+        xmllines.insert(xmllines.end(),pxml.begin(),pxml.end());
+    }
+
+    return xmllines;
+
+} // MakePopXML
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d Region::GetMuRatios() const
+{
+    DoubleVec1d ratios;
+    vector<Locus>::const_iterator loc = m_loci.begin();
+    for( ; loc != m_loci.end(); ++loc)
+    {
+        ratios.push_back(loc->GetDataModel()->GetRelMuRate());
+    }
+    return ratios;
+} // Region::GetMuRatios
+
+//------------------------------------------------------------------------------------
+
+long Region::GetNXTips(long xpart) const
+{
+    map<force_type,string> tipid = registry.GetDataPack().GetTipId(xpart);
+    return m_loci[0].GetNTips(tipid);
+}
+
+//------------------------------------------------------------------------------------
+
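+// Accumulate the per-base nucleotide counts over all loci in m_loci,
+// returning a vector of length BASES.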
+DoubleVec1d Region::CountNBases() const
+{
+    DoubleVec1d nbases(BASES, 0L);
+    vector<Locus>::const_iterator locit;
+    for(locit = m_loci.begin(); locit != m_loci.end(); ++locit)
+    {
+        DoubleVec1d nnucs(locit->CountNNucleotides());
+        transform(nbases.begin(),nbases.end(),nnucs.begin(),
+                  nbases.begin(),plus<double>());
+    }
+
+    return nbases;
+
+} // Region::CountNBases
+
+//------------------------------------------------------------------------------------
+
+void Region::AddUniqueNamesTo(set<string>& popnames) const
+{
+    vector<Locus>::const_iterator locit;
+    for(locit = m_loci.begin(); locit != m_loci.end(); ++locit)
+    {
+        locit->AddUniqueNamesTo(popnames);
+    }
+
+} // Region::AddUniqueNamesTo
+
+//------------------------------------------------------------------------------------
+
+long Region::GetLocusNsites(long locus) const
+{
+    assert(ValidLocus(locus));
+    return m_loci[locus].GetNsites();
+} // Region::GetLocusNsites
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Region::GetAllLociNames() const
+{
+    StringVec1d lnames;
+    vector<Locus>::const_iterator locit;
+    for(locit = m_loci.begin(); locit != m_loci.end(); ++locit)
+        lnames.push_back(locit->GetName());
+    for(locit = m_movingloci.begin(); locit != m_movingloci.end(); ++locit)
+        lnames.push_back(locit->GetName());
+
+    return lnames;
+
+} // Region::GetAllLociNames
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Region::GetAllLociDataTypes() const
+{
+    StringVec1d ltypes;
+    vector<Locus>::const_iterator locit;
+    for(locit = m_loci.begin(); locit != m_loci.end(); ++locit)
+        ltypes.push_back(ToString(locit->GetDataType()));
+    for(locit = m_movingloci.begin(); locit != m_movingloci.end(); ++locit)
+        ltypes.push_back(ToString(locit->GetDataType()));
+
+    return ltypes;
+
+} // Region::GetAllLociDataTypes
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Region::GetAllLociMuRates() const
+{
+    StringVec1d lrates;
+    vector<Locus>::const_iterator locit;
+    for(locit = m_loci.begin(); locit != m_loci.end(); ++locit)
+        lrates.push_back(ToString(locit->GetMuRate()));
+    for(locit = m_movingloci.begin(); locit != m_movingloci.end(); ++locit)
+        lrates.push_back(ToString(locit->GetMuRate()));
+
+    return lrates;
+
+} // Region::GetAllLociMuRates
+
+//------------------------------------------------------------------------------------
+
+// MDEBUG This routine is on the wrong class, and pokes way too
+// much into Tree's innards.  If we replace use of the Prototype
+// pattern with Factory we can get a lot of this into the
+// Tree ctor instead, where it belongs
+
+Tree* Region::CreateTree()
+{
+    // Create the Tree.
+    Tree* tree = registry.GetProtoTree().MakeStump();
+
+    // This function cannot be made const because of these two calls:
+    tree->SetLocusVec(&m_loci);
+    tree->SetSnpPanelFlag(GetSnpPanel());
+
+    const ForceSummary& fs(registry.GetForceSummary());
+    if (fs.CheckForce(force_REC))
+    {
+        // If recombination is on, the tree needs the moving locus vector (even if it's empty).
+        dynamic_cast<RecTree*>(tree)->SetMovingLocusVec(&m_movingloci);
+    }
+
+    // We now rely on this happening before tip construction, which it does.
+    rangeset traitmutationsites;
+    if (fs.GetNLocalPartitionForces() != 0)
+    {
+        // Fill up the tree's m_alwaysactive rangeset.
+        const ForceVec lpforces(fs.GetLocalPartitionForces());
+        ForceVec::const_iterator lpforce;
+        for(lpforce = lpforces.begin(); lpforce != lpforces.end(); ++lpforce)
+        {
+            const LocalPartitionForce* lpf(dynamic_cast<const LocalPartitionForce*>(*lpforce));
+            assert(lpf);
+            rangepair localsite(lpf->GetLocalSite(), lpf->GetLocalSite() + 1);
+            traitmutationsites = AddPairToRange(localsite, traitmutationsites);
+        }
+    }
+
+    // Rewrite of individual setup code to use the region's already existing
+    // individual vector, Region::individuals -- Jon 2001/11/08
+    //
+    // NB:  This code modified *copies* of the individual and tipdata
+    // information, leaving the originals untouched.  I have changed
+    // it to modify the originals.  Mary 2002/8/20
+    //
+    // NBB: Cannot modify the originals!  CreateTree() is called once for
+    // each different temperature, and needs a copy for the branch pointers.
+
+    IndVec individuals = m_individuals;
+
+    // We used to pair sequences into individuals if it could be done, for
+    // haplotyping purposes.  However, this can be accomplished easily by
+    // the converter, and it was messing stuff up with the new haplotyping
+    // of traits code, so it's now gone!  Expunged!  Brutally eviscerated!
+
+    long ntips = GetNTips();
+
+    // Collect the initialized LocusCells for each tip.
+    vector<vector<LocusCell> > tipcells(ntips);
+    for (size_t locus = 0; locus < m_loci.size(); ++locus)
+    {
+        vector<LocusCell> locuscells = m_loci[locus].GetTipCells();
+        for (long tip = 0; tip < ntips; ++tip)
+        {
+            tipcells[tip].push_back(locuscells[tip]);
+        }
+    }
+    vector<vector<LocusCell> > movingtipcells(ntips);
+    for (size_t mlocus = 0; mlocus < m_movingloci.size(); ++mlocus)
+    {
+        vector<LocusCell> movinglocuscells = m_movingloci[mlocus].GetTipCells();
+        for (long tip = 0; tip < ntips; ++tip)
+        {
+            movingtipcells[tip].push_back(movinglocuscells[tip]);
+        }
+    }
+
+    // The locus' TipData is needed here (tip_id) only for non-
+    // locus-specific attributes (label and membership).
+
+    const vector<TipData>& tip_id = m_loci[0].GetTipData();
+
+    for (long tip = 0; tip < ntips; ++tip)
+    {
+        // create the tip and put the cells into the tip
+        TBranch_ptr pTip;
+        if (m_snppanel)
+        {
+            // has panels
+            pTip = tree->CreateTip(tip_id[tip], tipcells[tip], movingtipcells[tip], traitmutationsites, m_loci);
+        }
+        else
+        {
+            // no panels
+            pTip = tree->CreateTip(tip_id[tip], tipcells[tip], movingtipcells[tip], traitmutationsites);
+        }
+
+        long ind = tip_id[tip].individual;
+        if (ind == FLAGLONG)
+        {
+            // FLAGLONG means "not part of an individual"
+            //LS NOTE:  You should never hit this since the XML reader requires
+            // all samples to be included in an 'individual' tag.
+            assert(false);
+            continue;
+        }
+        IndVec::iterator indiv;
+        for(indiv = individuals.begin(); indiv != individuals.end(); ++indiv)
+        {
+            if (indiv->GetId() == ind)
+            {
+                indiv->AddTip(pTip);
+                break;
+            }
+        }
+    }
+
+    PruneSamePhaseUnknownSites(individuals);
+
+    tree->SetIndividualsWithTips(individuals);
+
+    tree->SetupAliases(m_loci);
+
+    return tree;
+
+} // Region::CreateTree
+
+//------------------------------------------------------------------------------------
+
+bool Region::AnyPhaseUnknownSites() const
+{
+    for (IndVec::const_iterator indiv = m_individuals.begin();
+         indiv != m_individuals.end(); ++indiv)
+    {
+        if (indiv->AnyPhaseUnknownSites()) return true;
+    }
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+void Region::PruneSamePhaseUnknownSites(IndVec& indivs) const
+{
+    IndVec::iterator ind;
+    for(ind = indivs.begin(); ind != indivs.end(); ++ind)
+        ind->PruneSamePhaseUnknownSites();
+
+} // Region::PruneSamePhaseUnknownSites
+
+//------------------------------------------------------------------------------------
+
+void Region::RemovePartitionFromLoci(force_type forcename)
+{
+    vector<Locus>::iterator locus;
+    for(locus = m_loci.begin(); locus != m_loci.end(); ++locus)
+        locus->RemovePartitionFromTipDatas(forcename);
+
+} // Region::RemovePartitionFromLoci
+
+//------------------------------------------------------------------------------------
+
+void Region::CopyTipDataForLocus(const string& lname)
+{
+    long lnum = GetLocusIndex(lname);
+    if (lnum == 0)
+    {
+        throw data_error("We do not currently support haplotype data as the only data about a region.  Haplotype"
+                         " information should be used for mapping onto sequenced or otherwise known segments.");
+    }
+    m_loci[lnum].SetEmptyTipData(m_loci[0].GetTipData());
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec2d Region::GetMarkerDataWithLabels() const
+{
+    StringVec2d labeleddata;
+    vector<Locus>::const_iterator locit;
+    for(locit = m_loci.begin(); locit != m_loci.end(); ++locit)
+        labeleddata.push_back(locit->GetMarkerDataWithLabels(m_individuals));
+    for(locit = m_movingloci.begin(); locit != m_movingloci.end(); ++locit)
+        labeleddata.push_back(locit->GetMarkerDataWithLabels(m_individuals));
+
+    return labeleddata;
+
+} // Region::GetMarkerDataWithLabels
+
+//------------------------------------------------------------------------------------
+
+bool Region::CanHaplotype() const
+{
+    // we can haplotype if there are individuals with multiple sequences and
+    // if there are phase unknown sites.
+    return (AnyPhaseUnknownSites() && MultiSampleIndividuals());
+} // CanHaplotype
+
+bool Region::AnyMapping() const
+{
+    return (m_movingloci.size() > 0);
+}
+
+bool Region::AnyJumpingAnalyses() const
+{
+    for (size_t locus = 0; locus < m_movingloci.size(); ++locus)
+    {
+        if (m_movingloci[locus].GetAnalysisType() == mloc_mapjump) return true;
+    }
+    return false;
+}
+
+bool Region::AnySimulatedLoci() const
+{
+    for (size_t locus = 0; locus < m_loci.size(); ++locus)
+    {
+        if (m_loci[locus].GetShouldSimulate()) return true;
+    }
+    for (size_t locus = 0; locus < m_movingloci.size(); ++locus)
+    {
+        if (m_movingloci[locus].GetShouldSimulate()) return true;
+    }
+    return false;
+}
+
+bool Region::AnySNPDataWithDefaultLocations() const
+{
+    for (size_t locus = 0; locus < m_loci.size(); ++locus)
+    {
+        if ((m_loci[locus].GetDataType() == dtype_SNP) && (m_loci[locus].IsUsingDefaultLocations()))
+        {
+            return true;
+        }
+    }
+    //Don't check the moving loci because that would just be weird.  Moving SNPs?  What?
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+bool Region::IsValidRegion(string & errorString) const
+{
+    size_t i;
+    vector<pair<long,long> > spans;
+
+    // check validity of individual loci, and store their spans
+    for (i = 0; i < m_loci.size(); ++i)
+    {
+        if (!m_loci[i].IsValidLocus(errorString)) return false;
+        if (!m_loci[i].IsMovable())
+        {
+            spans.push_back(m_loci[i].GetGlobalScaleSiteSpan());
+        }
+    }
+
+    // check that locus spans do not overlap
+    sort(spans.begin(), spans.end());
+
+    for (i = 1; i < spans.size(); ++i)
+    {
+        if (spans[i].first < spans[i-1].second)
+        {
+            errorString = "Overlapping segments in region " + m_regionname;
+            return false;
+        }
+    }
+
+    // validate individuals
+    for (i = 0; i < m_individuals.size(); ++i)
+    {
+        if (!m_individuals[i].IsValidIndividual())
+        {
+            errorString = "Invalid individuals in region " + m_regionname;
+            return false;
+        }
+    }
+    return true;
+
+} // Region::IsValidRegion
+
+//------------------------------------------------------------------------------------
+
+bool Region::IsDuplicateTipName(const string& newname) const
+{
+    size_t i;
+    for(i = 0; i < m_loci.size(); ++i)
+    {
+        if (m_loci[i].IsDuplicateTipName(newname)) return true;
+    }
+
+    return false;
+
+} // Region::IsDuplicateTipName
+
+//------------------------------------------------------------------------------------
+
+void Region::SaveMappingInfo(MapCollector* mapcoll)
+{
+    DoubleVec2d maps = mapcoll->GetMapSummary();
+    assert(maps.size() == m_movingloci.size());
+
+    for (size_t locus = 0; locus < m_movingloci.size(); ++locus)
+    {
+        m_movingloci[locus].SetRawMappingInfo(maps[locus]);
+        ScaleToSumToOne(maps[locus]);
+        m_movingloci[locus].SetMappingInfo(maps[locus]);
+    }
+    //LS NOTE SIM: If I want to resurrect the old code here that tried
+    // to deal with simulating sites which we then left 'unsequenced' in the
+    // middle of 'sequenced' sites, it was here as of 3/31/06.  But I deleted
+    // it as part of our code cleanup, and it didn't really work anyway.
+    //
+    // Note that if it's added back in, it would also need to be used in the
+    // other SaveMappingInfo function, below.
+}
+
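+// Multiple-replicate version:  combine the per-replicate map summaries into
+// a weighted average (weights = SafeExp(logweights)), then store the raw and
+// normalized results in each moving locus, as in the single-replicate
+// version above.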
+void Region::SaveMappingInfo(vector<MapCollector*> mapcolls, DoubleVec1d logweights)
+{
+    if (m_movingloci.size() == 0) return;
+    DoubleVec1d weights = SafeExp(logweights);
+    size_t numreps = mapcolls.size();
+    assert(numreps > 0);
+    DoubleVec2d averagedmaps = (*mapcolls[0]).GetMapSummary();
+    for (size_t map=0; map<averagedmaps.size(); map++)
+    {
+        //multiply by the correct weight (for averaging)
+        transform(averagedmaps[map].begin(),
+                  averagedmaps[map].end(),
+                  averagedmaps[map].begin(),
+                  bind2nd(multiplies<double>(),weights[0]));
+    }
+    for (size_t rep=1; rep<numreps; rep++)
+    {
+        DoubleVec2d repmaps = (*mapcolls[rep]).GetMapSummary();
+        for (size_t map=0; map<repmaps.size(); map++)
+        {
+            //Multiply by the appropriate weight
+            transform(repmaps[map].begin(),
+                      repmaps[map].end(),
+                      repmaps[map].begin(),
+                      bind2nd(multiplies<double>(),weights[rep]));
+            //sum
+            transform(repmaps[map].begin(),
+                      repmaps[map].end(),
+                      averagedmaps[map].begin(),
+                      averagedmaps[map].begin(),
+                      plus<double>());
+        }
+    }
+    assert(averagedmaps.size() == m_movingloci.size());
+    for (size_t locus = 0; locus < m_movingloci.size(); ++locus)
+    {
+        m_movingloci[locus].SetRawMappingInfo(averagedmaps[locus]);
+        ScaleToSumToOne(averagedmaps[locus]);
+        m_movingloci[locus].SetMappingInfo(averagedmaps[locus]);
+    }
+
+}
+
+void Region::ReportMappingInfo()
+{
+    long regoffset = GetSiteSpan().first;
+    for (size_t locus = 0; locus < m_movingloci.size(); ++locus)
+    {
+        registry.GetRunReport().ReportNormal(m_movingloci[locus].ReportMappingInfo(regoffset));
+    }
+    for (size_t locus = 0; locus < m_loci.size(); ++locus)
+    {
+        if (m_loci[locus].GetShouldSimulate())
+        {
+            string msg = ToString(CountSites(m_loci[locus].CalculateCompleteVariableRange()))
+                + " variable sites simulated in segment "
+                + ToString(m_loci[locus].GetName()) + ", "
+                + ToString(CountSites(m_loci[locus].CalculateVariableRange()))
+                + " of which had a minor allele frequency of at least three.";
+            registry.GetRunReport().ReportNormal(msg);
+
+#if 0  // LS DEBUG SIM:  Reporting tool for the whole 'known/unknown' thing:
+            msg = ToString(m_loci[locus].GetVariabilityOfUnknownRange()) +
+                " variable sites in the 'unknown' section of the segment.";
+            registry.GetRunReport().ReportNormal(msg);
+#endif // 0
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec2d Region::CreateAllDataModelReports() const
+{
+    StringVec2d reports;
+    for (size_t lnum = 0; lnum < m_loci.size(); ++lnum)
+    {
+        reports.push_back(m_loci[lnum].CreateDataModelReport(m_regionname));
+    }
+    for (size_t lnum = 0; lnum < m_movingloci.size(); ++lnum)
+    {
+        reports.push_back(m_movingloci[lnum].CreateDataModelReport(m_regionname));
+    }
+    return reports;
+}
+
+//------------------------------------------------------------------------------------
+
+void Region::WriteAnyMapping() const
+{
+    for (size_t lnum = 0; lnum < m_movingloci.size(); ++lnum)
+    {
+        m_movingloci[lnum].WriteMapping(GetRegionName(), GetSiteSpan().first);
+    }
+}
+
+//------------------------------------------------------------------------------------
+// WriteFlucFile is used only when JSIM is defined (as of 3/30/06 --LS)
+
+void Region::WriteFlucFile(const string& outfilename, bool onlyregion) const
+{
+    long locus(0L);
+
+    ofstream ofile;
+    if (onlyregion) ofile.open(outfilename.c_str(),ios::out | ios::trunc);
+    else ofile.open(outfilename.c_str(),ios::out | ios::app);
+
+    vector<TipData> tips(GetAllTipData(locus));
+
+    if (onlyregion) ofile << 1 << endl;
+    ofile << tips.size() << " " << GetNmarkers(locus) << endl;
+
+    vector<TipData>::iterator tit;
+    for(tit = tips.begin(); tit != tips.end(); ++tit)
+    {
+        string dlm("");
+        ofile << MakeJustified(tit->label, 10L) << tit->GetFormattedData(dlm) << endl;
+    }
+
+    ofile.close();
+
+} // Region::WriteFlucFile
+
+//------------------------------------------------------------------------------------
+//
+// These various XML writing functions are used when JSIM is defined (and only
+// then) --3/30/06, LS
+//
+// This simulation function does not handle spacing or multiple loci correctly
+
+void Region::WritePopulationXMLFiles() const
+{
+    ofstream ofile;
+    StringVec2d populations(MakeByPopXML(0L));
+
+    long pop, npops(populations.size());
+    for(pop = 0; pop < npops; ++pop)
+    {
+        string fname(GetRegionName());
+        fname += "pop" + ToString(pop);
+        ofile.open(fname.c_str(), ios::out | ios::trunc);
+        StringVec1d::iterator plines;
+        for(plines = populations[pop].begin(); plines != populations[pop].end();
+            ++plines)
+            ofile << *plines << endl;
+        ofile.close();
+
+        fname = GetRegionName() + "popall";
+        ofile.open(fname.c_str(), ios::out | ios::app);
+        for(plines = populations[pop].begin(), ++plines;
+            plines != populations[pop].end(); ++plines)
+            ofile << *plines << endl;
+        ofile.close();
+    }
+
+}
+
+//------------------------------------------------------------------------------------
+
+void Region::WriteToXMLFileUsing(ofstream& ofile, StringVec1d& region_contents) const
+{
+    ofile << MakeTagWithName(xmlstr::XML_TAG_REGION,GetRegionName()) << endl;
+
+    StringVec1d::iterator cline;
+    for(cline = region_contents.begin(); cline != region_contents.end(); ++cline)
+    {
+        ofile << *cline << endl;
+    }
+
+    ofile << MakeCloseTag(xmlstr::XML_TAG_REGION) << endl;
+}
+
+//------------------------------------------------------------------------------------
+
+void Region::WriteToXMLFileUsing(ofstream& ofile, StringVec2d& region_contents,
+                                 bool produceonepop) const
+{
+    ofile << MakeTagWithName(xmlstr::XML_TAG_REGION,GetRegionName()) << endl;
+
+    StringVec2d::iterator pop;
+    for(pop = region_contents.begin(); pop != region_contents.end(); ++pop)
+    {
+        StringVec1d::iterator cline;
+        for(cline = pop->begin(); cline != pop->end(); ++cline)
+        {
+            if (produceonepop)
+            {
+                // skip the population start xml tags, unless we're doing the
+                // first population
+                if (pop != region_contents.begin() && cline == pop->begin())
+                {
+                    continue;
+                }
+                // Skip the end population tag; we'll manually add one below.
+                // (Comparing strings here is slow; comparing iterators would
+                // be nicer, but --(pop->end()) and similar may not be
+                // supported.)
+                if (*cline == pop->back())
+                {
+                    continue;
+                }
+            }
+            ofile << *cline << endl;
+        }
+        if (produceonepop)
+        {
+            ofile << MakeCloseTag(xmlstr::XML_TAG_POPULATION) << endl;
+        }
+    }
+
+    ofile << MakeCloseTag(xmlstr::XML_TAG_REGION) << endl;
+}
+
+//------------------------------------------------------------------------------------
+// RSGNOTE:  If DIVMIG is on, we must record more info (EPOCH times, which pops diverge, etc.) than with MIG (and no DIV).
+// (DIVMIG, DIVERGENCE, EPOCH) all travel together.
+// MIG and (DIVMIG, DIVERGENCE, EPOCH) are mutually contradictory.  If MIG is present, none of the other three can be.
+// If any one of (DIVMIG, DIVERGENCE, EPOCH) is present, so must be the other two, and MIG cannot be.
+
+vector<PopulationXML> Region::MakeVectorOfPopulationXML() const
+{
+    force_type migforce;
+    unsigned long npops;
+
+    // NB:  If neither force_MIG nor force_DIVMIG is present, this code goes through
+    // defaults (in GetAllPartitionNames()) which result in a single population.
+
+    if (registry.GetForceSummary().CheckForce(force_MIG))
+    {
+        migforce = force_MIG;
+    }
+    else
+    {
+        migforce = force_DIVMIG;
+    }
+
+    StringVec1d popnames(registry.GetDataPack().GetAllPartitionNames(migforce));
+    vector<PopulationXML> pops;
+
+    npops = popnames.size();
+    if (migforce == force_DIVMIG)
+    {                                   // We only want entries for modern populations.
+        npops = (npops + 1) / 2;        // Truncation is deliberate.
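+        // Illustration (assuming a strictly binary divergence history): with
+        // k modern populations there are k-1 ancestral ones, so the list has
+        // 2k-1 names and ((2k-1)+1)/2 recovers the k modern entries.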
+    }
+
+    for(size_t pindex = 0; pindex < npops; pindex++)
+    {
+        pops.push_back(PopulationXML(*this,popnames[pindex]));
+    }
+
+    return pops;
+
+} // MakeVectorOfPopulationXML
+
+//------------------------------------------------------------------------------------
+
+StringVec2d Region::MakeByPopXML(unsigned long nspaces) const
+{
+    StringVec2d xmllines;
+
+    vector<PopulationXML> pops(MakeVectorOfPopulationXML());
+
+    vector<PopulationXML>::iterator population;
+    for(population = pops.begin(); population != pops.end(); ++population)
+    {
+        StringVec1d pxml(population->ToXML(nspaces));
+        xmllines.push_back(pxml);
+    }
+
+    return xmllines;
+
+} // MakeByPopXML
+
+//____________________________________________________________________________________
diff --git a/src/datalike/region.h b/src/datalike/region.h
new file mode 100644
index 0000000..851f890
--- /dev/null
+++ b/src/datalike/region.h
@@ -0,0 +1,224 @@
+// $Id: region.h,v 1.54 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// Region--a storage class containing information that applies to the
+//    genetic data of that region (regional quick theta estimate, number
+//    of tips, region name).
+//
+//    In addition, Region serves as the bridge between the data and the
+//    tree via Region::CreateTree().  CreateTree() copies the prototype
+//    tree and finishes tree construction (creates the tips and individuals,
+//    sets the site ranges) and enables data likelihood calculation (creates
+//    the data likelihood calculator).  It returns an owning pointer to
+//    the newly created tree.
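+//
+//    A minimal usage sketch (the caller owns, and must eventually delete,
+//    the returned tree):
+//        Tree* tree = someRegion.CreateTree();
+//        ... use the tree ...
+//        delete tree;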
+//
+// Written by Jim Sloan, heavily revised by Jon Yamato
+// 2002/01/03 changed Tipdata::data to a vector for generality--Mary Kuhner
+// 2002/07/25 split out Locus class -- Mary Kuhner
+
+#ifndef REGION_H
+#define REGION_H
+
+#include <cassert>                      // May be needed for inline definitions.
+#include <string>
+#include <vector>
+
+#include "argtree.h"
+#include "constants.h"
+#include "forceparam.h"                 // for return type of GetScalars()
+#include "individual.h"                 // for IndVec typedef
+#include "locus.h"                      // for TipData and for Locus member
+#include "newick.h"                     // for UserTree member, m_usertree;
+#include "toxml.h"                      // for SampleXML/IndividualXML/PopulationXML in Region::MakePopXML()
+#include "types.h"
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+class Tree;
+class MapCollector;
+
+//------------------------------------------------------------------------------------
+
+class Region
+{
+  private:
+
+    Region(const Region& src);             // not defined
+    Region& operator=(const Region& src);  // not defined
+
+    IndVec             m_individuals;
+    vector<Locus>      m_loci;
+    vector<Locus>      m_movingloci;       // either 'floating' or 'jumping'.
+    string             m_regionname;
+    long               m_id;               // region number
+    UserTree*          m_usertree;         // we own this! - this is the Newick tree if read in
+    double             m_effpopsize;
+    std::set<long int> m_ploidies;         // Phase 1 only.
+    bool               m_snppanel;
+
+    // utility functions
+    bool         MultiSampleIndividuals() const;
+    Individual&  GetIndividual(long n);
+    bool         ValidLocus(long locus) const;
+    bool         ValidMovingLocus(long locus) const;
+    StringVec1d  MakeTraitsXML(unsigned long nspaces) const;
+    StringVec1d  MakePopXML(unsigned long nspaces) const;
+
+  public:
+
+    bool         RecombinationCanBeEstimated() const;
+    Region(std::string rname) : m_regionname(rname), m_usertree(new NullUserTree()),
+                                m_effpopsize(1.0) {};
+    ~Region()    { delete m_usertree; };
+
+    // Add a new Locus
+    void AddLocus(bool movable=false, string name="");
+
+    // ARGtree storage for phase 2
+    vector<ARGEdge> m_argedges;      // ARG tree, public so parsetreetodata can mess with it
+    void AddARGEdge(ARGEdge edge){m_argedges.push_back(edge);};
+
+    //Save ploidy (used for trait stuff)
+    void AddPloidy(long ploid) {m_ploidies.insert(ploid);};
+
+    // Access
+    void AddIndividual(const Individual& newind) {m_individuals.push_back(newind);};
+    void SetID(long newid)                       {m_id = newid; };
+    void SetUserTree(UserTree* utree)            {delete m_usertree; m_usertree = utree;};
+    void SetSnpPanel(bool val)                   {m_snppanel = val;};
+    bool GetSnpPanel()                           {return m_snppanel;};
+
+    //Phase 1 to Phase 2 functions:
+    void SetEffectivePopSize(double newsize)     {m_effpopsize = newsize; };
+    void InitializeRegionalMapPositionsUsingGlobalMapPositions();
+    void SetupAndMoveAllLoci();
+    void MakeAllMovableLociMove();
+
+    //Phase 2 to Phase 1 function:
+    void RevertMovingLoci();
+
+    // Forwarders to Locus class; used by xml.cpp
+    void SetNmarkers(long locus, long n);
+    void SetGlobalMapPosition(long locus, long n);
+    void SetName(long locus, std::string name);
+    void SetOffset(long locus, long n);
+    void SetPositions(long locus, const LongVec1d& pos);
+    void SetPositions(long locus); // set to defaults
+    void SetNsites(long locus, long numsites);
+    void SetDataType(long locus, const DataType_ptr dtype);
+    void SetTipData(long locus, const TipData& td);
+
+    //Forwarder to individuals; used by parsetreetodata.cpp
+    void SetPhaseMarkers(long indnum, LongVec2d& phases);
+
+    // Potentially expensive Forces::QuickCalc() helper functions
+    LongVec1d    CalcNVariableMarkers() const;  // dim: by xpart
+
+    //Getter functions
+    long         GetID()               const {return m_id;};
+    double       GetEffectivePopSize() const {return m_effpopsize;};
+    long         GetNIndividuals()     const {return m_individuals.size();};
+    const Individual& GetIndividual(long n) const {return m_individuals[n];};
+    string       GetRegionName()       const {return m_regionname;};
+    Locus&       GetLocus(long locus)        {assert(ValidLocus(locus)); return m_loci[locus]; };
+    const Locus& GetLocus(long locus)  const {assert(ValidLocus(locus)); return m_loci[locus]; };
+    const Locus& GetMovingLocus(long mloc) const {assert(ValidMovingLocus(mloc)); return m_movingloci[mloc];}
+    const IndVec& GetIndividuals() const {return m_individuals;};
+    bool         HasLocus(string lname)const;
+    const Locus& GetLocus(string lname)const;
+    long         GetLocusIndex(string lname) const;
+    long         GetNloci()            const {return m_loci.size(); };
+    long         GetNumAllLoci()const;
+    long         GetNumFixedLoci()const;
+    long         GetNumMovingLoci()    const {return m_movingloci.size();};
+    long         GetNumSites() const;
+    rangepair    GetSiteSpan() const;
+    void         MakeUserTree(Tree* treetips);
+    bool         HasUserTree() const;
+    StringVec1d  ToXML(unsigned long nspaces) const;
+    DoubleVec1d  GetMuRatios()         const;
+    std::set<long int> GetPloidies() const {return m_ploidies;};
+
+    // Forwarders to Locus class
+    long         GetNmarkers(long locus)    const {return m_loci[locus].GetNmarkers();};
+    long         GetGlobalMapPosition(long locus) const {return m_loci[locus].GetGlobalMapPosition();};
+    long         GetOffset(long locus)      const {return m_loci[locus].GetOffset();};
+    LongVec1d    GetUserMarkerLocations(long locus)   const {return m_loci[locus].GetUserMarkerLocations();};
+    long         GetLocusNsites(long locus)      const;
+    StringVec1d  GetAllLociNames()   const;
+    StringVec1d  GetAllLociDataTypes() const;
+    StringVec1d  GetAllLociMuRates() const;
+    DataModel_ptr GetLocusDataModel(long locus)  const {return m_loci[locus].GetDataModel(); };
+    long         GetNTips()                 const {return m_loci[0].GetNTips();};
+    long         GetNXTips(long xpart)      const;
+    vector<TipData> GetAllTipData(long locus)   const {return m_loci[locus].GetTipData();};
+    DoubleVec1d  CountNBases() const;
+    void         AddUniqueNamesTo(std::set<string>& popnames) const;
+
+    // Factory function
+    Tree*        CreateTree();
+
+    // Genotypic-data support functions
+    bool         CanHaplotype() const;
+    void         PruneSamePhaseUnknownSites(IndVec& indivs) const;
+    bool         AnyPhaseUnknownSites() const;
+
+    bool         AnyMapping() const;
+    bool         AnyJumpingAnalyses() const;
+    bool         AnySimulatedLoci() const;
+
+    bool         AnySNPDataWithDefaultLocations() const;
+
+    // Helper function for original use in DataPack::RemoveUneededPartitions
+    void         RemovePartitionFromLoci(force_type);
+    void         CopyTipDataForLocus(const string& lname);
+
+    // Helper function for ReportPage(DataPage::Show, echoing data)
+    StringVec2d  GetMarkerDataWithLabels() const;  // dim: locus X tip
+
+    // Validity checking
+    bool         IsValidRegion(string & errorString) const;
+    // helper for parsetreetodata
+    bool         IsDuplicateTipName(const string& newname) const;
+
+    //We save the results of mapping in the appropriate loci.
+    void SaveMappingInfo(MapCollector* mapcoll);
+    void SaveMappingInfo(std::vector<MapCollector*> mapcolls,
+                         DoubleVec1d logweights);
+    void ReportMappingInfo();
+    StringVec2d CreateAllDataModelReports() const;
+
+    //Write out mapping files
+    void         WriteAnyMapping() const;
+
+    // These are power-user functions, generally used by our lab in writing
+    // simulations; they are not called in normal execution.  They are only
+    // used when JSIM is defined:
+
+    // Write a Fluctuate format file.
+    void         WriteFlucFile(const string& outfilename, bool onlyregion) const;
+    // Break up a multi-population data set into single-population ones
+    StringVec2d  MakeByPopXML(unsigned long nspaces) const;
+    void         WritePopulationXMLFiles() const;
+    void         WriteToXMLFileUsing(std::ofstream& ofile, StringVec1d& region_contents) const;
+    void         WriteToXMLFileUsing(std::ofstream& ofile, StringVec2d& region_contents, bool produceonepop) const;
+
+    // Helpers for XML infile creation.
+    // vector<ARGNode> GetARGNodes() {return m_argnodes;};
+
+  private:
+    std::vector<PopulationXML> MakeVectorOfPopulationXML() const;
+
+};
+
+#endif // REGION_H
+
+//____________________________________________________________________________________
diff --git a/src/datalike/tipdata.cpp b/src/datalike/tipdata.cpp
new file mode 100644
index 0000000..a107455
--- /dev/null
+++ b/src/datalike/tipdata.cpp
@@ -0,0 +1,159 @@
+// $Id: tipdata.cpp,v 1.14 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <functional>                   // for Locus::CountNNucleotides()
+
+#include "tipdata.h"
+#include "constants.h"
+#include "registry.h"
+#include "dlcalc.h"
+#include "dlmodel.h"
+#include "mathx.h"                      // for IsEven
+#include "force.h"                      // for TipData::GetBranchPartitions()
+#include "xml_strings.h"                // for TipData::ToXML()
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+TipData::TipData()
+    : partitions(),
+      individual(FLAGLONG),
+      m_locus(FLAGLONG),
+      m_hap(FLAGLONG),
+      m_nodata(false),
+      label(""),
+      m_popname(""),
+      data()
+{
+    // intentionally blank
+} // TipData::TipData
+
+//------------------------------------------------------------------------------------
+
+void TipData::Clear()
+{
+    partitions.erase(partitions.begin(),partitions.end());
+    individual = FLAGLONG;
+    m_locus = FLAGLONG;
+    m_hap = FLAGLONG;
+    data.clear();
+
+} // Clear
+
+//------------------------------------------------------------------------------------
+
+bool TipData::BelongsTo(long ind) const
+{
+    return (individual == ind);
+} // BelongsTo
+
+//------------------------------------------------------------------------------------
+
+bool TipData::IsInPopulation(const string& popname) const
+{
+    return (m_popname == popname);
+} // IsInPopulation
+
+//------------------------------------------------------------------------------------
+
+string TipData::GetFormattedData(const string& dlm) const
+{
+    string result;
+
+    unsigned long i;
+    for (i = 0; i < data.size(); ++i)
+    {
+        result += data[i] + dlm;
+    }
+
+    return result;
+
+} // GetFormattedData
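+
+// Example (illustrative, not from the original source): with data == {"A", "C", "G"} and
+// dlm == " ", the returned string is "A C G " -- the delimiter is appended after every
+// marker, including the last one.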
+
+//------------------------------------------------------------------------------------
+
+long TipData::GetPartition(force_type partname) const
+{
+    string partitionname = partitions.find(partname)->second;
+    return registry.GetDataPack().GetPartitionNumber(partname,partitionname);
+} // TipData::GetPartition
+
+//------------------------------------------------------------------------------------
+
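+// Clarifying note (not in the original source): the returned vector is indexed by each
+// partition force's part index and holds this tip's partition number under that force;
+// with migration as the only partition force it therefore has length 1.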
+LongVec1d TipData::GetBranchPartitions() const
+{
+    LongVec1d parts(partitions.size(),FLAGLONG);
+    const ForceSummary& forcesum = registry.GetForceSummary();
+    const DataPack& datapack = registry.GetDataPack();
+    map<force_type,string>::const_iterator pits;
+    for(pits = partitions.begin(); pits != partitions.end(); ++pits)
+    {
+        force_type forcename = pits->first;
+        string partname = pits->second;
+
+        PartitionForce* pforce = dynamic_cast<PartitionForce*>
+            (*forcesum.GetForceByTag(forcename));
+
+        parts[pforce->GetPartIndex()] =
+            datapack.GetPartitionNumber(forcename,partname);
+    }
+
+    return parts;
+
+} // TipData::GetBranchPartitions
+
+//------------------------------------------------------------------------------------
+
+void TipData::RemovePartition(force_type forcename)
+{
+    if (partitions.erase(forcename) == 0)
+        assert(false); // tried to erase a partition that wasn't there!
+
+} // TipData::RemovePartition
+
+//------------------------------------------------------------------------------------
+
+void TipData::AddPartition(pair<force_type,string> newpart)
+{
+    partitions.insert(newpart);
+} // TipData::AddPartition
+
+//------------------------------------------------------------------------------------
+
+bool TipData::IsInCrossPartition(map<force_type,string> xpart) const
+{
+    return (xpart == partitions);
+} // TipData::IsInCrossPartition
+
+//------------------------------------------------------------------------------------
+
+data_source TipData::GetDataSource() const
+{
+    return m_source;
+}
+
+//------------------------------------------------------------------------------------
+
+void TipData::SetDataSource(const string tag)
+{
+    if (CaselessStrCmp(tag, lamarcstrings::PANEL))
+    {
+        m_source = dsource_panel;
+    }
+    else
+    {
+        m_source = dsource_study;
+    }
+} // TipData::SetDataSource
+
+//____________________________________________________________________________________
diff --git a/src/datalike/tipdata.h b/src/datalike/tipdata.h
new file mode 100644
index 0000000..9bfc9dc
--- /dev/null
+++ b/src/datalike/tipdata.h
@@ -0,0 +1,80 @@
+// $Id: tipdata.h,v 1.9 2011/03/07 06:08:49 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/***************************************************************
+ The TipData class holds the linked genetic data for a tip in a tree of
+ haplotypes.  It also stores, for the tip, its partition membership,
+ individual membership, and name.
+
+ In general, TipData is a helper class for Locus that stores much of
+ the tip-specific information in a manner that is useful for tree generation
+ and rearrangement.  Each locus owns a container of TipData objects, which
+ correspond to the tips in the tree that "belong" to that particular locus.
+
+ TipData written by Jim Sloan, revised by Jon Yamato
+ 2002/01/03 changed Tipdata::data to a vector for generality--Mary Kuhner
+ 2004/09/15 split TipData out into its own file--Mary Kuhner
+****************************************************************/
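+
+/* Illustrative sketch (not part of the original header): a single tip might carry
+   label == "sample_01", individual == 0, m_popname == "PopA",
+   partitions == { force_MIG -> "PopA" }, and data == {"A", "C", "G", "T"}
+   (one string per marker), with m_locus and m_hap recording which locus and
+   haplotype of that individual the tip represents. */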
+
+#ifndef TIPDATA_H
+#define TIPDATA_H
+
+#include <string>
+#include <vector>
+#include <map>
+#include "constants.h"
+#include "types.h"
+#include "vectorx.h"
+#include "datatype.h"    // for DataType_ptr
+#include "defaults.h"    // for force_type
+#include "toxml.h"       // for SampleXML construction in TipData::ToXML()
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class TipData
+{
+  public:
+    std::map<force_type,std::string> partitions; // forcename & partitionname
+    long individual;
+    long m_locus;
+    long m_hap;
+    bool m_nodata;         // Flag for loci where the data is all in haplotypes
+    std::string label;     // sample name
+    std::string m_popname; // we need to keep this because of migration's special
+    // status in the (current) xml
+    StringVec1d data;      // one string per marker
+    data_source m_source;  // source of data
+
+    data_source GetDataSource() const;
+    void SetDataSource(const string tag);
+
+    TipData();
+    // we accept the default copy-ctor and operator=
+
+    void Clear(); // return tipdata to newly constructed form
+    // used by xmlreader, DataFile::DoSamples()
+
+    bool BelongsTo(long ind) const;
+    bool IsInPopulation(const string& popname) const;
+
+    std::string GetFormattedData(const std::string& dlm) const;
+    long GetPartition(force_type partname) const;
+    LongVec1d GetBranchPartitions() const;
+
+    void RemovePartition(force_type);
+    void AddPartition(std::pair<force_type,std::string> newpart);
+
+    bool IsInCrossPartition(std::map<force_type,std::string> xpart) const;
+};
+
+#endif // TIPDATA_H
+
+//____________________________________________________________________________________
diff --git a/src/force/epoch.cpp b/src/force/epoch.cpp
new file mode 100644
index 0000000..c66c947
--- /dev/null
+++ b/src/force/epoch.cpp
@@ -0,0 +1,26 @@
+// $Id: epoch.cpp,v 1.4 2011/04/23 02:02:48 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+//------------------------------------------------------------------------------------
+
+#include "epoch.h"
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+Epoch::Epoch(const LongVec1d& here, const LongVec1d& departing, long arriving)
+    : m_here(here),
+      m_departing(departing),
+      m_arriving(arriving)
+{
+    // deliberately blank
+} // Epoch ctor
+
+//____________________________________________________________________________________
diff --git a/src/force/epoch.h b/src/force/epoch.h
new file mode 100644
index 0000000..1ebffab
--- /dev/null
+++ b/src/force/epoch.h
@@ -0,0 +1,39 @@
+// $Id: epoch.h,v 1.3 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+//------------------------------------------------------------------------------------
+
+#ifndef EPOCH_H
+#define EPOCH_H
+
+#include <vector>
+
+//------------------------------------------------------------------------------------
+
+class Epoch
+{
+  public:
+    Epoch(const std::vector<long>& here, const std::vector<long>& departing, long arriving);
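+    // Illustrative reading (inferred from EpochEvent::DoEvent, not stated here): an Epoch
+    // with here == {0,1}, departing == {0,1}, arriving == 2 describes an interval in which
+    // populations 0 and 1 exist; at the epoch boundary, going back in time, lineages depart
+    // populations 0 and 1 and continue in population 2.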
+
+    const std::vector<long>& PopulationsHere() const { return m_here; };
+    const std::vector<long>& Departing() const { return m_departing; };
+    long Arriving() const { return m_arriving; };
+
+  private:
+    std::vector<long> m_here;
+    std::vector<long> m_departing;
+    long m_arriving;
+};
+
+//------------------------------------------------------------------------------------
+
+#endif
+
+//____________________________________________________________________________________
diff --git a/src/force/event.cpp b/src/force/event.cpp
new file mode 100644
index 0000000..c0e5806
--- /dev/null
+++ b/src/force/event.cpp
@@ -0,0 +1,1182 @@
+// $Id: event.cpp,v 1.65 2013/10/25 17:00:52 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <algorithm>
+#include <cassert>
+#include <functional>                   // for divides<> and less<> used in ThrowIfSubPopSizeTiny
+
+#include "local_build.h"
+#include "dynatracer.h"                 // Defines some debugging macros.
+
+#include "arranger.h"
+#include "branch.h"
+#include "epoch.h"                      // for EpochEvent
+#include "event.h"
+#include "fc_status.h"                  // for DoEvent()
+#include "forcesummary.h"
+#include "mathx.h"
+#include "range.h"                      // For Link-related typedefs and constants.
+#include "registry.h"
+#include "shared_ptr.hpp"
+#include "stringx.h"
+#include "timemanager.h"                // for use in Picktime for coals and disease
+#include "tree.h"
+#include "vectorx.h"                    // for Contains()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+typedef boost::shared_ptr<RBranch> RBranch_ptr;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Event::Event(const ForceSummary& fs)
+    : m_pOwningArranger(NULL),
+      m_allparams(fs.GetStartParameters())
+{
+    // intentionally blank
+} // Event ctor
+
+//------------------------------------------------------------------------------------
+
+bool Event::Done() const
+{
+    // As far as this event knows, we are Done if there are no more active lineages.
+    if (m_pOwningArranger->ActiveSize() == 0) return true;
+    else return false;
+} // Done
+
+//------------------------------------------------------------------------------------
+
+Event::Event(const Event& src)
+    : m_pOwningArranger(src.m_pOwningArranger),
+      m_maxEvents(src.m_maxEvents),
+      m_pindex(src.m_pindex),
+      m_thetas(src.m_thetas),
+      m_growths(src.m_growths),
+      m_allparams(src.m_allparams)
+{
+    // intentionally blank
+} // Event copy ctor
+
+//------------------------------------------------------------------------------------
+
+void Event::InstallParameters(const ForceParameters& starts)
+{
+    // MCHECK--we don't need these cached separately!
+    m_thetas = starts.GetRegionalThetas();
+    m_growths = starts.GetGrowthRates();
+    m_allparams = starts;
+} // Event::InstallParameters
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+XPartitionEvent::XPartitionEvent(const ForceSummary& fs)
+    : Event(fs),
+      m_nxparts(registry.GetDataPack().GetNCrossPartitions())
+{
+    // deliberately blank
+} // XPartitionEvent ctor
+
+//------------------------------------------------------------------------------------
+
+void XPartitionEvent::ThrowIfSubPopSizeTiny(double eventT) const
+{
+    DoubleVec1d popsizes(m_pOwningArranger->GetTree()->XpartThetasAtT(eventT, m_allparams));
+    transform(popsizes.begin(), popsizes.end(), popsizes.begin(), bind2nd(divides<double>(),defaults::minMuRate));
+    if (find_if(popsizes.begin(), popsizes.end(), bind2nd(less<double>(), 1.0)) != popsizes.end())
+    {
+        string estring("XPartitionEvent::ThrowIfSubPopSizeTiny:  ");
+        estring += "Tried to have a coal event at " + ToString(eventT);
+        tinypopulation_error e(estring);
+        throw e;
+    }
+} // XPartitionEvent::ThrowIfSubPopSizeTiny
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+CoalEvent::CoalEvent(const ForceSummary& fs, bool isactive)
+    : XPartitionEvent(fs),
+      m_chosenxpart(FLAGLONG),
+      m_isactive(isactive)
+{
+    // Set up base-class fields whose initialization is deferred to this subclass.
+    m_maxEvents = fs.GetMaxEvents(force_COAL);
+    m_pindex = FLAGLONG;                // We use cross partitions, not partitions.
+} // CoalEvent constructor
+
+//------------------------------------------------------------------------------------
+
+void CoalEvent::InstallParameters(const ForceParameters& starts)
+{
+    Event::InstallParameters(starts);
+} // InstallParameters
+
+//------------------------------------------------------------------------------------
+
+void CoalEvent::DoEvent(double eventT, FC_Status& fcstatus)
+{
+    ThrowIfSubPopSizeTiny(eventT);
+    // Pick two active branches from the population at random.
+    double randomweight = m_pOwningArranger->randomSource->Float();
+    Branch_ptr branch1 = m_pOwningArranger->m_activelist.RemoveBranch(force_COAL, m_chosenxpart, randomweight);
+
+    randomweight = m_pOwningArranger->randomSource->Float();
+    Branch_ptr branch2;
+    if (m_isactive)
+    {
+        branch2 = m_pOwningArranger->m_activelist.RemoveBranch(force_COAL, m_chosenxpart, randomweight);
+    }
+    else
+    {
+        branch2 = m_pOwningArranger->m_inactivelist.RemoveBranch(force_COAL, m_chosenxpart, randomweight);
+    }
+
+    rangeset fcsites;
+
+#if FINAL_COALESCENCE_ON
+    rangeset decrsites(Intersection(branch1->GetLiveSites(), branch2->GetLiveSites()));
+    fcstatus.Decrement_FC_Counts(decrsites);
+    fcsites = fcstatus.Coalesced_Sites();
+#endif
+
+    assert(branch1->HasSamePartitionsAs(branch2));
+
+    if (m_isactive)
+    {
+        Branch_ptr newbranch = m_pOwningArranger->GetTree()->CoalesceActive(eventT, branch1, branch2, fcsites);
+        m_pOwningArranger->m_activelist.Append(newbranch);
+    }
+    else
+    {
+        Branch_ptr newbranch = m_pOwningArranger->GetTree()->CoalesceInactive(eventT, branch1, branch2, fcsites);
+        m_pOwningArranger->m_inactivelist.Collate(newbranch);
+    }
+
+} // CoalEvent::DoEvent
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+ActiveCoal::ActiveCoal(const ForceSummary& fs)
+    : CoalEvent(fs, true),
+      m_invTheta()
+{
+    // intentionally blank
+} // ActiveCoal constructor
+
+//------------------------------------------------------------------------------------
+
+Event* ActiveCoal::Clone() const
+{
+    ActiveCoal* event = new ActiveCoal(*this);
+    return event;
+} // ActiveCoal::Clone
+
+//------------------------------------------------------------------------------------
+
+void ActiveCoal::InstallParameters(const ForceParameters& starts)
+{
+    CoalEvent::InstallParameters(starts);
+
+    m_invTheta = starts.GetRegionalThetas();
+
+    // store 1/Theta
+    transform(m_invTheta.begin(), m_invTheta.end(),
+              m_invTheta.begin(),
+              bind1st(divides<double>(),1.0));
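+
+    // For illustration: regional thetas {0.01, 0.02} become m_invTheta == {100.0, 50.0}.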
+} // InstallParameters
+
+//------------------------------------------------------------------------------------
+
+double ActiveCoal::PickTime(double starttime, double maxtime)
+{
+    double result = m_pOwningArranger->m_tree->GetTimeManager()->
+        TimeOfActiveCoal(starttime, m_pOwningArranger->m_xactives, m_allparams, m_chosenxpart, maxtime);
+
+    if (result < maxtime) return result;
+    else return FLAGDOUBLE;
+} // ActiveCoal::PickTime
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+InactiveCoal::InactiveCoal(const ForceSummary& fs)
+    : CoalEvent(fs, false),
+      m_inv2Theta()
+{
+    // intentionally blank
+} // InactiveCoal constructor
+
+//------------------------------------------------------------------------------------
+
+Event* InactiveCoal::Clone() const
+{
+    InactiveCoal* event = new InactiveCoal(*this);
+    return event;
+} // InactiveCoal::Clone
+
+//------------------------------------------------------------------------------------
+
+void InactiveCoal::InstallParameters(const ForceParameters& starts)
+{
+    CoalEvent::InstallParameters(starts);
+
+    // We store 2/Theta for speed
+    m_inv2Theta = starts.GetRegionalThetas();
+
+    // store 2/Theta
+    transform(m_inv2Theta.begin(), m_inv2Theta.end(),
+              m_inv2Theta.begin(),
+              bind1st(divides<double>(),2.0));
+} // InstallParameters
+
+//------------------------------------------------------------------------------------
+
+double InactiveCoal::PickTime(double starttime, double maxtime)
+{
+    double result =
+        m_pOwningArranger->m_tree->GetTimeManager()->TimeOfInactiveCoal(starttime, m_pOwningArranger->m_xactives,
+                                                                        m_pOwningArranger->m_xinactives,
+                                                                        m_allparams, m_chosenxpart, maxtime);
+    if (result < maxtime) return result;
+    else return FLAGDOUBLE;
+} // InactiveCoal::PickTime
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+PartitionEvent::PartitionEvent(const ForceSummary& fs)
+    : Event(fs)
+{
+    // intentionally blank
+} // PartitionEvent ctor
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+MigEvent::MigEvent(const ForceSummary& fs)
+    : PartitionEvent(fs),
+      m_rescaledMigRates(),
+      m_immigrationRates(),
+      m_frompop(FLAGLONG),
+      m_topop(FLAGLONG)
+{
+    m_nparts = registry.GetDataPack().GetNPartitionsByForceType(force_MIG);
+    m_forcetype = force_MIG;
+    m_maxEvents = fs.GetMaxEvents(force_MIG);
+    m_pindex = fs.GetPartIndex(force_MIG);
+} // MigEvent ctor
+
+//------------------------------------------------------------------------------------
+
+Event* MigEvent::Clone() const
+{
+    MigEvent* event = new MigEvent(*this);
+    return event;
+} // MigEvent::Clone
+
+//------------------------------------------------------------------------------------
+
+void MigEvent::InstallParameters(const ForceParameters& starts)
+{
+    Event::InstallParameters(starts);
+
+    // pre-compute migration rate tables
+    ComputeCumulativeRates(starts);
+} // InstallParameters
+
+//------------------------------------------------------------------------------------
+
+void MigEvent::ComputeCumulativeRates(const ForceParameters& starts)
+{
+    vector<vector<double> > migRates = starts.GetRegional2dRates(force_MIG);
+    vector<double> indRate;
+
+    m_rescaledMigRates.clear();
+    m_immigrationRates.clear();
+
+    double totalMig;
+    long i, j;
+
+    // Computes cumulative migration rates in "m_rescaledMigRates"
+    // and total immigration into a population in "m_immigrationRates".
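+    //
+    // Worked example (illustrative): if row i of migRates is {0.0, 0.3, 0.1}, the cumulative
+    // rates are {0.0, 0.3, 0.4} and totalMig is 0.4; after normalization,
+    // m_rescaledMigRates[i] == {0.0, 0.75, 1.0} and m_immigrationRates[i] == 0.4.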
+
+    for (i = 0; i < m_nparts; ++i)
+    {
+        totalMig = 0.0;
+        indRate.clear();
+        for (j = 0; j < m_nparts; ++j)
+        {
+            totalMig += migRates[i][j];
+            indRate.push_back(totalMig);            // cumulative rate
+        }
+
+        // Normalize cumulative rates to total.
+        if (totalMig > 0)
+        {
+            for (j = 0; j < m_nparts; ++j)
+            {
+                indRate[j] /= totalMig;
+            }
+        }
+
+        m_rescaledMigRates.push_back(indRate);      // cumulative rates
+        m_immigrationRates.push_back(totalMig);     // total immigration
+    }
+} // ComputeCumulativeRates
+
+//------------------------------------------------------------------------------------
+
+double MigEvent::PickTime(double starttime, double maxtime)
+{
+    // Computes timestamp of event by computing "delta t" and adding starttime.
+
+    // Note:  The expectation value of this "delta t" is 1/(M*k), where M and k
+    // are the immigration rate and number of active lineages of the population
+    // which yields the smallest "delta t."
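+    // Concretely, the loop below draws, for each population i with k_i active lineages,
+    // delta_t_i = -log(U) / (m_immigrationRates[i] * k_i) with U uniform on (0,1); the
+    // smallest delta_t_i, plus starttime, becomes the candidate event time.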
+    map<double, long> times;
+    double newtime;
+    long i;
+
+    for (i = 0; i < m_nparts; ++i)
+    {
+        if (m_pOwningArranger->m_pactives[m_pindex][i] > 0)
+        {
+            newtime = -log (m_pOwningArranger->randomSource->Float()) /
+                (m_immigrationRates[i] * m_pOwningArranger->m_pactives[m_pindex][i]);
+            times.insert(make_pair(newtime, i));
+        }
+    }
+
+    if (times.empty())
+    {
+        // No event is possible.
+        m_frompop = FLAGLONG;
+        m_topop = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+    else
+    {
+        // The first map entry is the smallest, and thus chosen, time.
+        map<double, long>::const_iterator mapit = times.begin();
+        m_frompop = (*mapit).second;
+        double randomweight = m_pOwningArranger->randomSource->Float();
+        for (m_topop = 0; m_rescaledMigRates[m_frompop][m_topop] < randomweight; ++m_topop)
+        {};
+        double result = (*mapit).first + starttime;
+        if (result < maxtime) return result;
+        else return FLAGDOUBLE;
+    }
+
+} // MigEvent::PickTime
+
+//------------------------------------------------------------------------------------
+
+void MigEvent::DoEvent(double eventT, FC_Status&)
+{
+    assert(m_frompop >= 0 && m_topop >= 0);
+
+    // Pick an active branch from the population at random.
+    double randomweight = m_pOwningArranger->randomSource->Float();
+    Branch_ptr active = m_pOwningArranger->m_activelist.RemovePartitionBranch(force_MIG, m_frompop, randomweight);
+
+    Branch_ptr newbranch = m_pOwningArranger->GetTree()->Migrate(eventT, m_topop, m_maxEvents, active);
+    m_pOwningArranger->m_activelist.Append(newbranch);
+
+} // MigEvent::DoEvent
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DivMigEvent::DivMigEvent(const ForceSummary& fs)
+    : PartitionEvent(fs),
+      m_rescaledMigRates(),
+      m_immigrationRates(),
+      m_frompop(FLAGLONG),
+      m_topop(FLAGLONG)
+{
+    m_nparts = registry.GetDataPack().GetNPartitionsByForceType(force_DIVMIG);
+    m_forcetype = force_DIVMIG;
+    m_maxEvents = fs.GetMaxEvents(force_DIVMIG);
+    m_pindex = fs.GetPartIndex(force_DIVMIG);
+    m_epochptr = fs.GetEpochs();
+    m_currentepoch = 0;
+
+} // DivMigEvent ctor
+
+//------------------------------------------------------------------------------------
+
+Event* DivMigEvent::Clone() const
+{
+    DivMigEvent* event = new DivMigEvent(*this);
+    return event;
+} // DivMigEvent::Clone
+
+//------------------------------------------------------------------------------------
+
+void DivMigEvent::InstallParameters(const ForceParameters& starts)
+{
+    Event::InstallParameters(starts);
+
+    // Pre-compute migration rate tables.
+    ComputeCumulativeRates();
+
+    // Add 0 as time of first epoch.
+    m_epochtimes = m_allparams.GetEpochTimes();
+    m_epochtimes.insert(m_epochtimes.begin(), 0.0);
+
+} // InstallParameters
+
+//------------------------------------------------------------------------------------
+
+void DivMigEvent::ComputeCumulativeRates()
+{
+    vector<vector<double> > migRates = m_allparams.GetRegional2dRates(force_DIVMIG);
+    vector<double> indRate;
+
+    vector<long> pops = (*m_epochptr)[m_currentepoch].PopulationsHere();
+    m_rescaledMigRates.clear();
+    m_immigrationRates.clear();
+
+    double totalMig;
+    long i, j;
+
+    // Computes cumulative migration rates in "m_rescaledMigRates"
+    // and total immigration into a population in "m_immigrationRates".
+    for (i = 0; i < m_nparts; ++i)
+    {
+        totalMig = 0.0;
+        indRate.clear();
+        for (j = 0; j < m_nparts; ++j)
+        {
+            if (Contains(pops,i) && Contains(pops,j)) totalMig += migRates[i][j];
+            indRate.push_back(totalMig);            // cumulative rate
+        }
+
+        // Normalize cumulative rates to total.
+        if (totalMig > 0)
+        {
+            for (j = 0; j < m_nparts; ++j)
+            {
+                indRate[j] /= totalMig;
+            }
+        }
+
+        m_rescaledMigRates.push_back(indRate);      // cumulative rates
+        m_immigrationRates.push_back(totalMig);     // total immigration
+    }
+} // ComputeCumulativeRates
+
+//------------------------------------------------------------------------------------
+
+double DivMigEvent::PickTime(double starttime, double maxtime)
+{
+    // Computes timestamp of event by computing "delta t" and adding starttime.
+
+    // Note:  The expectation value of this "delta t" is 1/(M*k), where M and k are the immigration
+    // rate and number of active lineages of the population which yields the smallest "delta t."
+
+    map<double, long> times;
+    double newtime;
+    long i;
+
+    // Find epoch that contains starttime.
+    unsigned long epochno;
+    for (epochno = 1; epochno < m_epochtimes.size(); ++epochno)
+    {
+        // If this never triggers, we get the right answer by fallthrough.
+        if (starttime < m_epochtimes[epochno]) break;
+    }
+    --epochno;  // We found the epoch past the one we wanted.
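+    // Example (illustrative): with m_epochtimes == {0.0, t1, t2} and t1 <= starttime < t2,
+    // the loop breaks at epochno == 2 and the decrement leaves epochno == 1, the epoch that
+    // begins at t1; if starttime lies past the last boundary, fallthrough leaves epochno
+    // pointing at the final epoch.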
+
+    vector<long> activepops = (*m_epochptr)[epochno].PopulationsHere();
+    if (epochno != static_cast<unsigned long>(m_currentepoch))
+    {
+        // We are in a new epoch and need new rates.
+        m_currentepoch = epochno;
+        ComputeCumulativeRates();
+    }
+    for (i = 0; i < m_nparts; ++i)
+    {
+        if (m_pOwningArranger->m_pactives[m_pindex][i] > 0 && m_immigrationRates[i] > 0)
+        {
+            newtime = -log (m_pOwningArranger->randomSource->Float()) /
+                (m_immigrationRates[i] * m_pOwningArranger->m_pactives[m_pindex][i]);
+            times.insert(make_pair(newtime, i));
+        }
+    }
+
+    if (times.empty())
+    {
+        m_frompop = FLAGLONG;           // No event is possible.
+        m_topop = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+    else
+    {
+        // The first map entry is the smallest, and thus chosen, time.
+        map<double, long>::const_iterator mapit = times.begin();
+        m_frompop = (*mapit).second;
+        double randomweight = m_pOwningArranger->randomSource->Float();
+        for (m_topop = 0; m_rescaledMigRates[m_frompop][m_topop] < randomweight; ++m_topop)
+        {};
+        double result = (*mapit).first + starttime;
+        if (result < maxtime) return result;
+        else return FLAGDOUBLE;
+    }
+} // DivMigEvent::PickTime
+
+//------------------------------------------------------------------------------------
+
+void DivMigEvent::DoEvent(double eventT, FC_Status&)
+{
+    assert(m_frompop >= 0 && m_topop >= 0);
+
+    // Pick an active branch from the population at random.
+    double randomweight = m_pOwningArranger->randomSource->Float();
+    Branch_ptr active = m_pOwningArranger->m_activelist.RemovePartitionBranch(force_DIVMIG, m_frompop, randomweight);
+
+    Branch_ptr newbranch = m_pOwningArranger->GetTree()->Migrate(eventT, m_topop, m_maxEvents, active);
+    m_pOwningArranger->m_activelist.Append(newbranch);
+
+} // DivMigEvent::DoEvent
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DiseaseEvent::DiseaseEvent(const ForceSummary& fs)
+    : PartitionEvent(fs),
+      m_startdis(FLAGLONG),
+      m_enddis(FLAGLONG)
+{
+    m_nparts = registry.GetDataPack().GetNPartitionsByForceType(force_DISEASE);
+    m_forcetype = force_DISEASE;
+    m_maxEvents = fs.GetMaxEvents(force_DISEASE);
+    m_pindex = fs.GetPartIndex(force_DISEASE);
+
+} // DiseaseEvent ctor
+
+//------------------------------------------------------------------------------------
+
+Event* DiseaseEvent::Clone() const
+{
+    DiseaseEvent* event = new DiseaseEvent(*this);
+    return event;
+
+} // DiseaseEvent::Clone
+
+//------------------------------------------------------------------------------------
+
+double DiseaseEvent::PickTime(double starttime, double maxtime)
+{
+    // Sets member variables m_startdis and m_enddis as a side effect!
+    double result = m_pOwningArranger->m_tree->GetTimeManager()->
+        TimeOfTraitMutation(starttime, m_pOwningArranger->m_pactives[m_pindex],
+                            m_allparams, m_startdis, m_enddis, maxtime);
+
+    if (result < maxtime) return result;
+    else return FLAGDOUBLE;
+
+} // DiseaseEvent::PickTime
+
+//------------------------------------------------------------------------------------
+
+void DiseaseEvent::DoEvent(double eventT, FC_Status&)
+{
+    assert(m_startdis >= 0 && m_enddis >= 0);
+
+    // Pick an active branch from the disease category at random.
+    double randomweight = m_pOwningArranger->randomSource->Float();
+    Branch_ptr active = m_pOwningArranger->m_activelist.RemovePartitionBranch(force_DISEASE, m_startdis, randomweight);
+
+    Branch_ptr newbranch = m_pOwningArranger->GetTree()->DiseaseMutate(eventT, m_enddis, m_maxEvents, active);
+    m_pOwningArranger->m_activelist.Append(newbranch);
+
+} // DiseaseEvent::DoEvent
+
+//------------------------------------------------------------------------------------
+
+void DiseaseEvent::InstallParameters(const ForceParameters& starts)
+{
+    Event::InstallParameters(starts);
+} // DiseaseEvent::InstallParameters
+
+//------------------------------------------------------------------------------------
+
+ActiveRec::ActiveRec(const ForceSummary& fs)
+    : Event(fs),
+      m_recrate(defaults::recombinationRate),
+      m_onPartitionForces()
+{
+    m_maxEvents = fs.GetMaxEvents(force_REC);
+    m_pindex = FLAGLONG;                // We don't use partitions...yet!
+
+} // ActiveRec ctor
+
+//------------------------------------------------------------------------------------
+
+Event* ActiveRec::Clone() const
+{
+    ActiveRec* event = new ActiveRec(*this);
+    return event;
+
+} // ActiveRec::Clone
+
+//------------------------------------------------------------------------------------
+
+void ActiveRec::InstallParameters(const ForceParameters& starts)
+{
+    Event::InstallParameters(starts);
+
+    assert(starts.GetRecRates().size() == 1); // Only one recombination rate!
+    m_recrate = starts.GetRecRates()[0];
+
+} // InstallParameters
+
+//------------------------------------------------------------------------------------
+
+double ActiveRec::PickTime(double starttime, double maxtime)
+{
+    // Computes the timestamp of the end of the interval by means of
+    // computing "delta t" and adding it to tstart.
+
+    // Note:  The expectation value of "delta t" is 1/(r*nA), where r and nA are the recombination rate
+    // and active Link recweights for the population which yields the smallest value of "delta t".
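+    // Concretely, the draw below is delta_t = -log(U) / (m_recrate * W), where U is uniform
+    // on (0,1) and W is the tree's current targetable Link weight; the event time is
+    // starttime + delta_t, accepted only if it falls before maxtime.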
+
+    RecTree * tr = dynamic_cast<RecTree *>(m_pOwningArranger->GetTree());
+
+#if 1
+    DebugAssert2(!(tr->GetCurTargetLinkweight() > ZERO && m_pOwningArranger->ActiveSize() == 0),
+                 tr->GetCurTargetLinkweight(),
+                 m_pOwningArranger->ActiveSize());
+#else // Equivalent to DebugAssert2 above, in case it is removed later.
+    assert(!(tr->GetCurTargetLinkweight() > ZERO && m_pOwningArranger->ActiveSize() == 0));
+#endif
+
+    // Active sites without lineages.
+#if 1
+    DebugAssert4(tr->GetCurTargetLinkweight() == m_pOwningArranger->m_activelist.AccumulatedCurTargetLinkweight(),
+                 tr->GetNewTargetLinkweight(),
+                 m_pOwningArranger->m_inactivelist.AccumulatedNewTargetLinkweight(),
+                 tr->GetCurTargetLinkweight(),
+                 m_pOwningArranger->m_activelist.AccumulatedCurTargetLinkweight());
+#else // Equivalent to DebugAssert4 above, in case it is removed later.
+    assert(tr->GetCurTargetLinkweight() == m_pOwningArranger->m_activelist.AccumulatedCurTargetLinkweight());
+#endif
+
+    if (tr->GetCurTargetLinkweight() > ZERO)
+    {
+        double retval = -log(m_pOwningArranger->randomSource->Float()) / (m_recrate * tr->GetCurTargetLinkweight());
+        double result = retval + starttime;
+        if (result < maxtime) return result;
+    }
+
+    return FLAGDOUBLE;
+
+} // ActiveRec::PickTime
+
+//------------------------------------------------------------------------------------
+
+void ActiveRec::DoEvent(double eventT, FC_Status& fcstatus)
+{
+    RecTree * tr = dynamic_cast<RecTree *>(m_pOwningArranger->GetTree());
+
+    // Pick an active branch and a recombination point (Littlelink) at random.
+#ifdef RUN_BIGLINKS
+    // 0.0 < randomweight < <Linkweight_of_all_targetable_links_on_all_branches> (but arbitrarily close to ends).
+    Linkweight randomweight = m_pOwningArranger->randomSource->Float() * tr->GetCurTargetLinkweight();
+#else
+    // 0 <= randomweight < GetCurTargetLinkweight (ie, random proportion of all targetable links over all branches).
+    Linkweight randomweight = m_pOwningArranger->randomSource->Long(tr->GetCurTargetLinkweight());
+#endif
+
+    Branch_ptr active;
+    Branchiter brit;
+    for (brit = m_pOwningArranger->m_activelist.BeginBranch(); ; ++brit)
+    {
+        active = *brit;
+        Linkweight recweight = active->GetRangePtr()->GetCurTargetLinkweight();
+        if (recweight > randomweight)
+        {
+            // Due to decrement below, what remains in "randomweight" is the random weight threshold
+            // compared to total weight of all targetable links on the CHOSEN branch only.
+            break;                      // Found it!
+        }
+        randomweight -= recweight;
+    }
+
+    // Get (and test) a pointer to the selected branch's RecRange.
+    RecRange * recrange_ptr(dynamic_cast<RecRange *>(active->GetRangePtr()));
+    assert(recrange_ptr);
+
+    // Find potential recombination points.
+    // Currently targetable Links on the selected branch.
+    linkrangeset curTargetLinks(recrange_ptr->GetCurTargetLinks());
+
+    // Assert that the selected branch does potentially contain a recombination point.
+    assert( ! curTargetLinks.empty() );
+
+#ifdef RUN_BIGLINKS
+    //
+    // Initially just a Littlelink index; later converted to index marking Littlelink in middle of chosen Biglink.
+    long int recpoint(FLAGLONG);        // Initialize to "Recpoint Not Found" indication so we can test later.
+    BiglinkVectormap biglink_vectormap(RecRange::GetBiglinkVectormap());
+    //
+    // Scan across each LINKRANGEPAIR (representing a set of Biglinks) in the LINKRANGESET of targetable Biglinks.
+    linkrangeset::const_iterator rit;
+    for (rit = curTargetLinks.begin(); rit != curTargetLinks.end(); ++rit)
+    {
+        unsigned long int limit(biglink_vectormap.size());
+        unsigned long int lo_idx(rit->first);    // Index of (included) lower Biglink in Map.
+        unsigned long int hi_idx(rit->second);   // Index of (excluded) upper Biglink in Map.
+        //
+#if 1
+        // Since "lo_idx" and "hi_idx" are UNSIGNED, "foo < limit" is also equivalent "foo >= 0".
+        DebugAssert3((lo_idx < limit) && (hi_idx <= limit), lo_idx, hi_idx, limit);
+#else // Equivalent to DebugAssert3 above, in case it is removed later.
+        assert((lo_idx < limit) && (hi_idx <= limit));
+#endif
+        //
+        // Now scan across the Biglinks included in the LINKRANGEPAIR denoted by iterator "rit".
+        // Note that this FOR loop will work for weight summation (finding the recombination point)
+        // whether the targetable links happen to be contiguous or not.  In THIS function, they
+        // happen always to be contiguous.
+        bool done(false);
+        for (unsigned long int idx = lo_idx ; idx < hi_idx ; ++idx)
+        {
+            BiglinkMapitem biglink = biglink_vectormap[idx];
+            long int s1 = biglink.GetBiglinkLowerLittlelink();   // (Included) lower Littlelink index.
+            long int s2 = biglink.GetBiglinkUpperLittlelink();   // (Excluded) upper Littlelink index.
+            if ((s1 + static_cast<long int>(randomweight)) < s2) // Found it!
+            {
+                recpoint = (s1 + s2) / 2; // Compute the midpoint of this Biglink.
+                done = true;
+                break;
+            }
+            // Decrement "randomweight" by weight of current Biglink.  This is because we are scanning
+            // across TARGETABLE Biglinks only, not across ALL Biglinks.  The weight in our comparison
+            // threshold is a random proportion of the total weight of targetable Biglinks only.
+            randomweight -= biglink.GetBiglinkWeight();
+        }
+        if (done) break;
+    }
+    //
+#else // RUN_BIGLINKS
+    //
+    // Littlelink version is simple (compared to equiv code in InactiveRec::DoEvent)
+    // because the targetable links are assumed always to be contiguous.
+    long int recpoint = curTargetLinks.begin()->first;
+    recpoint += randomweight;
+    //
+#endif // RUN_BIGLINKS
+
+    m_pOwningArranger->m_activelist.Remove(brit);
+
+    // Pick a partition at random if we have a local partition force (i.e. Disease) on and active at this region.
+    FPartMap fparts;
+    if (!m_onPartitionForces.empty())
+    {
+        for (vector<force_type>::iterator spart = m_onPartitionForces.begin();
+             spart != m_onPartitionForces.end(); ++spart)
+        {
+            if (IsLocalPartForce(*spart))
+            {
+                DoubleVec1d allSizes = m_pOwningArranger->GetTree()->PartitionThetasAtT(eventT,*spart,m_allparams);
+                long randomPart = ChooseRandomFromWeights(allSizes);
+                fparts.insert(make_pair(*spart, randomPart));
+            }
+        }
+    }
+
+    rangeset fcsites;
+#if FINAL_COALESCENCE_ON
+    fcsites = fcstatus.Coalesced_Sites();
+#endif
+
+    branchpair newbranches = tr->RecombineActive(eventT, m_maxEvents, fparts, active, recpoint, fcsites, true);
+
+    m_pOwningArranger->m_activelist.Append(newbranches.first);
+    m_pOwningArranger->m_activelist.Append(newbranches.second);
+    assert(newbranches.first->PartitionsConsistentWith(active));
+    assert(newbranches.second->PartitionsConsistentWith(active));
+
+} // ActiveRec::DoEvent
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+InactiveRec::InactiveRec(const ForceSummary& fs)
+    : Event(fs),
+      m_recrate(defaults::recombinationRate)
+{
+    m_maxEvents = fs.GetMaxEvents(force_REC);
+    m_pindex = FLAGLONG;                // We don't use partitions...yet!
+
+} // InactiveRec ctor
+
+//------------------------------------------------------------------------------------
+
+Event* InactiveRec::Clone() const
+{
+    InactiveRec* event = new InactiveRec(*this);
+    return event;
+
+} // InactiveRec::Clone
+
+//------------------------------------------------------------------------------------
+
+void InactiveRec::InstallParameters(const ForceParameters& starts)
+{
+    Event::InstallParameters(starts);
+
+    assert(starts.GetRecRates().size() == 1); // Only one recombination rate!
+    m_recrate = starts.GetRecRates()[0];
+
+} // InstallParameters
+
+//------------------------------------------------------------------------------------
+
+double InactiveRec::PickTime(double starttime, double maxtime)
+{
+    // Computes the timestamp of the end of the interval by means of computing "delta t" and adding it to tstart.
+
+    // Note:  The expectation value of "delta t" is 1/(r*nO), where r and nO are the recombination
+    // rate and active Link weight for the population which yields the smallest value of "delta t".
+
+    RecTree* tr = dynamic_cast<RecTree*>(m_pOwningArranger->GetTree());
+    Linkweight recweight = tr->GetNewTargetLinkweight();
+
+#if 1
+    DebugAssert4(tr->GetNewTargetLinkweight() == m_pOwningArranger->m_inactivelist.AccumulatedNewTargetLinkweight(),
+                 tr->GetNewTargetLinkweight(),
+                 m_pOwningArranger->m_inactivelist.AccumulatedNewTargetLinkweight(),
+                 tr->GetCurTargetLinkweight(),
+                 m_pOwningArranger->m_inactivelist.AccumulatedCurTargetLinkweight());
+#else // Equivalent to DebugAssert4 above, in case it is removed later.
+    assert(recweight == m_pOwningArranger->m_inactivelist.AccumulatedNewTargetLinkweight());
+#endif
+
+    if (recweight > ZERO)
+    {
+        double retval = -log(m_pOwningArranger->randomSource->Float()) / (m_recrate * recweight);
+        double result = retval + starttime;
+        if (result < maxtime) return result;
+    }
+
+    return FLAGDOUBLE;
+
+} // InactiveRec::PickTime
+
+//------------------------------------------------------------------------------------
+
+void InactiveRec::DoEvent(double eventT, FC_Status& fcstatus)
+{
+    RecTree* tr = dynamic_cast<RecTree*>(m_pOwningArranger->GetTree());
+
+    // Pick a partition at random if we have a local partition force (i.e. Disease) on and active at this region.
+    FPartMap fparts;
+    if (!m_onPartitionForces.empty())
+    {
+        for (vector<force_type>::iterator spart = m_onPartitionForces.begin();
+             spart != m_onPartitionForces.end(); ++spart)
+        {
+            if (IsLocalPartForce(*spart))
+            {
+                DoubleVec1d allSizes = m_pOwningArranger->GetTree()->PartitionThetasAtT(eventT,*spart,m_allparams);
+                long randomPart = ChooseRandomFromWeights(allSizes);
+                fparts.insert(make_pair(*spart, randomPart));
+            }
+        }
+    }
+
+    // Assert that the set of possible branches does potentially contain a recombination point.
+#if 1
+    DebugAssert4(tr->GetNewTargetLinkweight() == m_pOwningArranger->m_inactivelist.AccumulatedNewTargetLinkweight(),
+                 tr->GetNewTargetLinkweight(),
+                 m_pOwningArranger->m_inactivelist.AccumulatedNewTargetLinkweight(),
+                 tr->GetCurTargetLinkweight(),
+                 m_pOwningArranger->m_inactivelist.AccumulatedCurTargetLinkweight());
+#else // Equivalent to DebugAssert4 above, in case it is removed later.
+    assert(tr->GetNewTargetLinkweight() == m_pOwningArranger->m_inactivelist.AccumulatedNewTargetLinkweight());
+#endif
+
+#ifdef RUN_BIGLINKS
+    //
+    Branch_ptr branch;                  // Pick an open branch at random.
+    // 0.0 < randomweight < <Linkweight_of_all_targetable_links_on_all_branches> (but arbitrarily close to ends).
+    Linkweight randomweight = m_pOwningArranger->randomSource->Float() * tr->GetNewTargetLinkweight();
+    //
+#else // RUN_BIGLINKS
+    //
+    Branch_ptr branch;                  // Pick an open branch at random.
+    // 0 <= randomweight < GetNewTargetLinkweight (ie, random proportion of targetable links over all branches).
+    Linkweight randomweight = m_pOwningArranger->randomSource->Long(tr->GetNewTargetLinkweight());
+    //
+#endif // RUN_BIGLINKS
+
+    Branchiter brit;
+    for (brit = m_pOwningArranger->m_inactivelist.BeginBranch(); ; ++brit)
+    {
+        assert (brit != m_pOwningArranger->m_inactivelist.EndBranch());
+        branch = *brit;
+
+        Linkweight recweight = branch->GetRangePtr()->GetNewTargetLinkweight();
+        if (recweight > randomweight)
+        {
+            // Due to decrement below, what remains in "randomweight" is the random weight threshold
+            // compared to total weight of all targetable links on the chosen branch only.
+            break;                      // Found it!
+        }
+        randomweight -= recweight;
+    }
+
+    // At this point "randomweight" contains the accumulated weight (for Littlelinks: link count, counting from zero)
+    // along the selected branch to the desired recombination point on the branch (var "branch") selected just above.
+    // Assert that the selected branch does potentially contain a recombination point.
+    assert(branch != Branch::NONBRANCH);
+
+    m_pOwningArranger->m_inactivelist.Remove(brit);
+
+    // Find the recombination point (Littlelink in the RecRange on the selected branch).
+    long int recpoint(FLAGLONG);        // Initialize to "Recpoint Not Found" indication so we can test later.
+
+    // Newly targetable Links on the selected branch.
+    linkrangeset newTargetLinks = branch->GetRangePtr()->GetNewTargetLinks();
+
+    // Assert that the selected branch does potentially contain a recombination point.
+    assert(! newTargetLinks.empty());
+
+#ifdef RUN_BIGLINKS
+    BiglinkVectormap biglink_vectormap(RecRange::GetBiglinkVectormap());
+#endif
+
+    // Scan across each LINKRANGEPAIR (representing a set of Links) in the LINKRANGESET of targetable Links.  Note
+    // that "Links" means "Biglinks" or "Littlelinks" depending on appropriate model (which one is "#ifdef"ed in).
+    linkrangeset::const_iterator rit;
+    for (rit = newTargetLinks.begin(); rit != newTargetLinks.end(); ++rit)
+    {
+#ifdef RUN_BIGLINKS
+        //
+        unsigned long int limit(biglink_vectormap.size());
+        unsigned long int lo_idx(rit->first);    // Index of (included) lower Biglink in Map.
+        unsigned long int hi_idx(rit->second);   // Index of (excluded) upper Biglink in Map.
+        //
+#if 1
+        // Since "lo_idx" and "hi_idx" are UNSIGNED, "foo < limit" is also equivalent "foo >= 0".
+        DebugAssert3((lo_idx < limit) && (hi_idx <= limit), lo_idx, hi_idx, limit);
+#else // Equivalent to DebugAssert3 above, in case it is removed later.
+        assert((lo_idx < limit) && (hi_idx <= limit));
+#endif
+        //
+        // Now scan across the Biglinks included in the LINKRANGEPAIR denoted by iterator "rit".
+        // Note that this FOR loop will work for weight summation (finding the recombination point)
+        // whether the targetable links happen to be contiguous or not.  In THIS function, they
+        // explicitly might NOT be contiguous.
+        bool done(false);
+        for (unsigned long int idx = lo_idx ; idx < hi_idx ; ++idx)
+        {
+            BiglinkMapitem biglink = biglink_vectormap[idx];
+            long int s1 = biglink.GetBiglinkLowerLittlelink();   // (Included) lower Littlelink index.
+            long int s2 = biglink.GetBiglinkUpperLittlelink();   // (Excluded) upper Littlelink index.
+            if ((s1 + static_cast<long int>(randomweight)) < s2) // Found it!
+            {
+                recpoint = (s1 + s2) / 2; // Compute the midpoint of this Biglink.
+                done = true;
+                break;
+            }
+            // Decrement "randomweight" by weight of current Biglink.  This is because we are scanning
+            // across TARGETABLE Biglinks only, not across ALL Biglinks.  The weight in our comparison
+            // threshold is a random proportion of the total weight of targetable Biglinks only.
+            randomweight -= biglink.GetBiglinkWeight();
+        }
+        if (done) break;
+        //
+#else   // RUN_BIGLINKS
+        //
+        // Littlelink version is more complicated (compared to equiv code in ActiveRec::DoEvent)
+        // because the targetable links CANNOT be assumed always to be contiguous.  Thus we must
+        // use the same "updating RANDOMWEIGHT" hack for both Biglinks and Littlelinks.
+        long int s1 = rit->first;       // (Included) lower Littlelink index.
+        long int s2 = rit->second;      // (Excluded) upper Littlelink index.
+        recpoint = s1 + randomweight;
+        if (recpoint < s2)              // Found it!
+        {
+            break;
+        }
+        // Decrement "randomweight" by weight of current Link.
+        // NOTE: We will have to change this handling of Link weights (doubles) as Littlelink counts
+        // (long ints) when we add capability to represent variable recombination probability with location.
+        randomweight -= (s2 - s1);
+        //
+#endif  //  RUN_BIGLINKS
+    }
+
+    // Assert that the selected branch actually contains a recombination point.
+    assert(recpoint != FLAGLONG);
+
+#if FINAL_COALESCENCE_ON
+    branchpair branches = tr->RecombineInactive(eventT, m_maxEvents, fparts, branch, recpoint, fcstatus.Coalesced_Sites());
+#else
+    rangeset emptyset;
+    branchpair branches = tr->RecombineInactive(eventT, m_maxEvents, fparts, branch, recpoint, emptyset);
+#endif
+
+    m_pOwningArranger->m_inactivelist.Collate(branches.first);
+    m_pOwningArranger->m_activelist.Append(branches.second);
+    assert(branches.first->PartitionsConsistentWith(branch));
+    assert(branches.second->PartitionsConsistentWith(branch));
+
+} // InactiveRec::DoEvent
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+EpochEvent::EpochEvent(const ForceSummary& fs)
+    : Event(fs),
+      m_epochptr(fs.GetEpochs())
+{
+    m_maxEvents = fs.GetMaxEvents(force_DIVERGENCE);
+    m_pindex = FLAGLONG;
+}
+
+//------------------------------------------------------------------------------------
+
+double EpochEvent::PickTime(double starttime, double maxtime)
+{
+    unsigned long epochno;
+
+    // We never pick the first epoch as it starts at time 0.
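+    // Example (illustrative): with m_epochtimes == {0.0, 0.01, 0.05} and starttime == 0.02,
+    // the next boundary is 0.05, returned only if 0.05 <= maxtime (otherwise FLAGDOUBLE).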
+    for (epochno = 1; epochno < m_epochtimes.size(); ++epochno)
+    {
+        if (starttime < m_epochtimes[epochno])
+        {
+            if (m_epochtimes[epochno] <= maxtime) return m_epochtimes[epochno];
+            else return FLAGDOUBLE;
+        }
+    }
+    return FLAGDOUBLE;
+
+} // EpochEvent::PickTime
+
+//------------------------------------------------------------------------------------
+
+void EpochEvent::DoEvent(double eventT, FC_Status&)
+{
+    // Find the appropriate Epoch.
+    unsigned long i;
+    long epno = FLAGLONG;
+    for (i = 1; i < m_epochtimes.size(); ++i)
+    {
+        if (m_epochtimes[i] == eventT) epno = i;
+    }
+
+    assert(epno != FLAGLONG);           // Failed to find the epoch?
+    const Epoch& epoch = (*m_epochptr)[epno];
+
+    Branchiter branch;
+    vector<Branchiter> removebranches;
+
+    // Make a collection of branches to be replaced.
+    for (branch = m_pOwningArranger->m_activelist.BeginBranch();
+         branch != m_pOwningArranger->m_activelist.EndBranch();
+         ++branch)
+    {
+        // if branch is in epoch.m_departing
+        if (find(epoch.Departing().begin(), epoch.Departing().end(),
+                 (*branch)->GetPartition(force_DIVMIG)) != epoch.Departing().end())
+        {
+            removebranches.push_back(branch);
+        }
+    }
+
+    // Remove all of them, replacing with EBranches.
+    for (i = 0; i < removebranches.size(); ++i)
+    {
+        // Obtain a Branch_ptr to the offending branch (avoids iterator invalidation!).
+        Branch_ptr badbranch = *(removebranches[i]);
+
+        // Remove it from activelist.
+        m_pOwningArranger->m_activelist.Remove(removebranches[i]);
+
+        // Attach an EBranch to it indicating m_epochptr[epno].arriving
+        // (this call puts the EBranch into the TimeList and updates tree counts).
+        Branch_ptr newbranch = m_pOwningArranger->GetTree()->
+            TransitionEpoch(eventT, epoch.Arriving(), m_maxEvents, badbranch);
+
+        // put EBranch in activelist
+        m_pOwningArranger->m_activelist.Append(newbranch);
+    }
+
+} // EpochEvent::DoEvent
+
+//------------------------------------------------------------------------------------
+
+void EpochEvent::InstallParameters(const ForceParameters& starts)
+{
+    Event::InstallParameters(starts);
+    m_epochtimes = starts.GetEpochTimes();
+    m_epochtimes.insert(m_epochtimes.begin(), 0.0);
+
+} // InstallParameters
+
+//------------------------------------------------------------------------------------
+
+Event* EpochEvent::Clone() const
+{
+    EpochEvent* event = new EpochEvent(*this);
+    return event;
+
+} // EpochEvent::Clone
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+string ToString(const Event &e)
+{
+    switch (e.Type())
+    {
+        case activeCoalEvent:
+            return "activeCoalEvent";
+            break;
+        case inactiveCoalEvent:
+            return "inactiveCoalEvent";
+            break;
+        case migEvent:
+            return "migEvent";
+            break;
+        case diseaseEvent:
+            return "diseaseEvent";
+            break;
+        case activeRecEvent:
+            return "activeRecEvent";
+            break;
+        case inactiveRecEvent:
+            return "inactiveRecEvent";
+            break;
+        case epochEvent:
+            return "epochEvent";
+            break;
+    }
+
+    assert(false);                      // Unhandled case.
+    return "";
+}
+
+//____________________________________________________________________________________
diff --git a/src/force/event.h b/src/force/event.h
new file mode 100644
index 0000000..d216855
--- /dev/null
+++ b/src/force/event.h
@@ -0,0 +1,348 @@
+// $Id: event.h,v 1.37 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef EVENT_H
+#define EVENT_H
+
+#include <vector>
+#include "vectorx.h"
+#include "defaults.h"
+#include "forceparam.h"
+#include "tree.h" // for branchpair typedef
+#include "epoch.h"
+
+class ForceSummary;
+class ResimArranger;
+class Branch;
+class TimeList;
+class FC_Status;
+class Epoch;
+
+enum event_class {activeCoalEvent, inactiveCoalEvent, migEvent, diseaseEvent,
+                  activeRecEvent, inactiveRecEvent, epochEvent};
+
+/***************************************************************
+  This class contains the expertise needed to handle one type of "event" in the rearrangement process.
+
+  Events are things such as a migration, a recombination on an active lineage, a recombination on an
+  inactive lineage, etc.  The PickTime() routine calculates the time of the next such event.  The DoEvent()
+  routine calls Tree code needed to actually implement the event.  The Done() routine indicates whether
+  resimulation needs to continue; only if all events are Done() will resimulation terminate before
+  the end of the tree is reached.  The TiesAllowed() routine says whether the event is allowed to occur
+  exactly at the end of the time interval under consideration.
+
+  The class is polymorphic on event type, which is related to Force, but more than one event type
+  can be associated with the same Force (for example, the two types of recombination).
+
+  Events must be initialized with the parameters of the current chain via a call to
+  InstallParameters() before they are used.
+
+  Written by Mary Kuhner
+  March 2002--revised to enable Growth force
+
+  December 2003--revised to move parameters up to ArrangerVec class to enable BayesArranger.  Mary
+
+  November 2004--revised to split DoEvent into DoEvent (which picks all the random stuff) and FinishEvent
+    (which uses the stuff chosen by DoEvent).  DoEventThatMatches written to parallel the new DoEvent
+    so it could make non-random choices.  --Lucian
+
+  March 2006--removed DoEventThatMatches due to Branch_ptr refactor and subsequent realization that
+    TreeSizeArranger need not erase the tree!
+
+  September 2010--added EpochEvent for divergence, deleted FinishEvent
+
+***************************************************************/
+
+//------------------------------------------------------------------------------------
+
+class Event
+{
+  public:
+    Event(const ForceSummary& fs);
+    virtual        ~Event() {};
+
+    virtual double PickTime(double, double) = 0;
+    virtual void   DoEvent(double eventT, FC_Status& fcstatus) = 0;
+    virtual void   InstallParameters(const ForceParameters& starts);
+    virtual bool   Done() const;
+    virtual Event* Clone() const = 0;
+    virtual event_class Type() const = 0;   // RTTI
+    void           SetArranger(ResimArranger& newowner) { m_pOwningArranger = &newowner; };
+    virtual bool   IsInactive() { return false; };
+    virtual bool   IsEpoch() const { return false; };
+    virtual bool   TiesAllowed() const { return false; };
+
+  protected:
+    Event(const Event& src);
+    ResimArranger* m_pOwningArranger;   // points back to owning arranger
+
+    // these are all set by InstallParameters()
+    long           m_maxEvents;         // maximum for this force
+    long           m_pindex;            // index into a standard partition force
+
+    // Ordered, per-event vectors of theta and growth values, used by
+    // m_CalcSizeAtTime and elsewhere (empty is OK).
+    DoubleVec1d    m_thetas, m_growths;
+    ForceParameters m_allparams;        // general parameter interface for use with m_CalcSizeAtTime
+
+  private:
+    Event& operator=(const Event& src); // not defined
+    Event();                            // not defined
+}; // Event class
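+
+// Illustrative sketch (not part of this interface) of the driving loop the
+// class comment above describes; the names "events", "curtime", "endtime",
+// and "winner" are hypothetical:
+//
+//     for (each Event* e in events)  e->InstallParameters(startparams);
+//     loop over time intervals:
+//         each Event proposes a time via PickTime(curtime, endtime);
+//         the earliest valid proposal wins and its DoEvent(t, fcstatus) runs
+//         (TiesAllowed() governs proposals exactly at the interval end);
+//         resimulation may stop early only once every Event reports Done().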
+
+//------------------------------------------------------------------------------------
+
+class XPartitionEvent : public Event
+{
+  public:
+    XPartitionEvent(const ForceSummary& fs);
+    virtual       ~XPartitionEvent() {};
+    // we accept default copy ctor; def ctor and op= disabled in base class
+
+  protected:
+    long m_nxparts;
+
+    virtual void  ThrowIfSubPopSizeTiny(double eventT) const;
+}; // XPartition Event
+
+//------------------------------------------------------------------------------------
+
+class CoalEvent : public XPartitionEvent
+{
+  public:
+    CoalEvent(const ForceSummary& fs, bool isactive);
+    virtual        ~CoalEvent() {};
+    // we accept default copy ctor; def ctor and op= disabled in base class
+
+    virtual void   DoEvent(double eventT, FC_Status& fcstatus);
+    virtual void   InstallParameters(const ForceParameters& starts);
+    virtual bool   IsInactive() { return (!m_isactive); };
+
+  protected:
+    long m_chosenxpart;
+    const bool m_isactive;
+}; // CoalEvent class
+
+//------------------------------------------------------------------------------------
+
+class ActiveCoal : public CoalEvent
+{
+  public:
+    ActiveCoal(const ForceSummary& fs);
+    virtual        ~ActiveCoal() {};
+    // we accept default copy ctor; def ctor and op= disabled in base class
+
+    virtual double PickTime(double starttime, double maxtime);
+    virtual void   InstallParameters(const ForceParameters& starts);
+    virtual Event* Clone() const;
+    virtual event_class Type() const { return activeCoalEvent; };   // RTTI
+
+  private:                              // these are set up by InstallParameters
+    vector<double> m_invTheta;          // precomputed 1/Theta array
+}; // ActiveCoal Event
+
+class InactiveCoal : public CoalEvent
+{
+  public:
+    InactiveCoal(const ForceSummary& fs);
+    virtual        ~InactiveCoal() {};
+    // we accept default copy ctor; def ctor and op= disabled in base class
+
+    virtual double PickTime(double starttime, double maxtime);
+    virtual void   InstallParameters(const ForceParameters& starts);
+    virtual Event* Clone() const;
+    // RTTI
+    virtual event_class Type() const { return inactiveCoalEvent; };
+
+  private:                              // these are set up by InstallParameters
+    vector<double> m_inv2Theta;         // precomputed 2/Theta array
+}; // InactiveCoal Event
+
+//------------------------------------------------------------------------------------
+
+class PartitionEvent : public Event
+{
+  public:
+    PartitionEvent(const ForceSummary& fs);
+    virtual       ~PartitionEvent() {};
+    // we accept default copy ctor; def ctor and op= disabled in base class
+
+  protected:
+    long m_nparts;
+
+    force_type m_forcetype;
+
+}; // Partition Event
+
+//------------------------------------------------------------------------------------
+
+class MigEvent : public PartitionEvent
+{
+  public:
+    MigEvent(const ForceSummary& fs);
+    virtual        ~MigEvent() {};
+    // we accept default copy ctor; def ctor and op= disabled in base class
+
+    virtual double PickTime(double starttime, double maxtime);
+    virtual void   DoEvent(double eventT, FC_Status& fcstatus);
+    virtual void   InstallParameters(const ForceParameters& starts);
+    virtual Event* Clone() const;
+    virtual event_class Type() const { return migEvent; };   // RTTI
+
+  private:                              // these are set up by InstallParameters
+    DoubleVec2d    m_rescaledMigRates;  // rescaled parameters
+    DoubleVec1d    m_immigrationRates;  // combined over source populations
+    long           m_frompop;
+    long           m_topop;
+
+    void           ComputeCumulativeRates(const ForceParameters& starts);
+
+}; // MigEvent
+
+//------------------------------------------------------------------------------------
+
+class DivMigEvent : public PartitionEvent
+{
+  public:
+    DivMigEvent(const ForceSummary& fs);
+    virtual        ~DivMigEvent() {};
+    // we accept default copy ctor; def ctor and op= disabled in base class
+
+    virtual double PickTime(double starttime, double maxtime);
+    virtual void   DoEvent(double eventT, FC_Status& fcstatus);
+    virtual void   InstallParameters(const ForceParameters& starts);
+    virtual Event* Clone() const;
+    virtual event_class Type() const { return migEvent; };   // RTTI
+
+  private:                              // these are set up by InstallParameters
+    DoubleVec2d    m_rescaledMigRates;  // rescaled parameters
+    DoubleVec1d    m_immigrationRates;  // combined over source populations
+    long           m_frompop;
+    long           m_topop;
+    const std::vector<Epoch>* m_epochptr; // Epoch structure
+    long           m_currentepoch;        // scratchpad for current epoch
+    std::vector<double> m_epochtimes;     // epoch times, starting from 0
+
+    void           ComputeCumulativeRates();
+
+}; // DivMigEvent
+
+//------------------------------------------------------------------------------------
+
+class DiseaseEvent : public PartitionEvent
+{
+    // All member variables are initialized by InstallParameters().
+
+  protected:
+    long m_startdis;
+    long m_enddis;
+
+  public:
+    DiseaseEvent(const ForceSummary& fs);
+    virtual        ~DiseaseEvent() {};
+    // we accept default copy ctor; def ctor and op= disabled in base class
+
+    virtual Event* Clone() const;
+    virtual double PickTime(double starttime, double maxtime);
+    virtual void   DoEvent(double eventT, FC_Status& fcstatus);
+    virtual void   InstallParameters(const ForceParameters& starts);
+    virtual event_class Type() const { return diseaseEvent; };   // RTTI
+}; // DiseaseEvent
+
+//------------------------------------------------------------------------------------
+
+class ActiveRec : public Event
+{
+  public:
+    ActiveRec(const ForceSummary& fs);
+    virtual        ~ActiveRec() {};
+    // we accept default copy ctor; def ctor and op= disabled in base class
+
+    virtual double PickTime(double starttime, double maxtime);
+    virtual void   DoEvent(double eventT, FC_Status& fcstatus);
+    virtual void   InstallParameters(const ForceParameters& starts);
+    virtual Event* Clone() const;
+    virtual event_class Type() const { return activeRecEvent; };   // RTTI
+
+    void   AddSizeForce(force_type fname) { m_onPartitionForces.push_back(fname); };
+
+  private:                              // these are set up by InstallParameters
+    double  m_recrate;                  // recombination rate
+
+    // m_onPartitionForces is filled by Force::ModifyEvents
+    vector<force_type> m_onPartitionForces;
+}; // ActiveRec Event
+
+//------------------------------------------------------------------------------------
+
+class InactiveRec : public Event
+{
+  public:
+    InactiveRec(const ForceSummary& fs);
+    virtual        ~InactiveRec() {};
+
+    virtual double PickTime(double starttime, double maxtime);
+    virtual void   DoEvent(double eventT, FC_Status& fcstatus);
+    virtual void   InstallParameters(const ForceParameters& starts);
+    virtual Event* Clone() const;
+    // RTTI
+    virtual event_class Type() const { return inactiveRecEvent; };
+
+    void   AddSizeForce(force_type fname)
+    { m_onPartitionForces.push_back(fname); };
+    virtual bool  IsInactive() { return true; };
+
+    // In the absence of a more correct algorithm for detecting upcoming
+    // InactiveRecombinations, we presume that resimulation must continue
+    // to the bottom of the tree.  NB:  RecTree::Prune and ResimArranger rely
+    // on this continuation of the rearrangement to the bottom.  DenovoArranger
+    // does not, due to lack of any structure below the active face.
+
+    virtual bool   Done() const           { return false; };
+
+  private:                              // these are set up by InstallParameters
+    double  m_recrate;                  // recombination rate
+
+    // m_onPartitionForces is filled by Force::ModifyEvents
+    vector<force_type> m_onPartitionForces;
+}; // InactiveRec Event
+
+//------------------------------------------------------------------------------------
+
+class EpochEvent : public Event
+{
+  public:
+    EpochEvent(const ForceSummary& fs);
+    virtual ~EpochEvent() {};
+
+    virtual double PickTime(double starttime, double maxtime);
+    virtual void DoEvent(double eventT, FC_Status& fcstatus);
+    virtual void InstallParameters(const ForceParameters& starts);
+    virtual Event* Clone() const;
+    // RTTI
+    virtual event_class Type() const { return epochEvent; };
+    virtual bool IsEpoch() const { return true; };
+    virtual bool TiesAllowed() const { return true; };
+
+  private:
+    const std::vector<Epoch>* m_epochptr;
+    std::vector<double> m_epochtimes;
+};
+
+//------------------------------------------------------------------------------------
+
+// Helper function.
+std::string ToString(const Event &e);
+
+//------------------------------------------------------------------------------------
+
+#endif // EVENT_H
+
+//____________________________________________________________________________________
diff --git a/src/force/force.cpp b/src/force/force.cpp
new file mode 100644
index 0000000..d79133c
--- /dev/null
+++ b/src/force/force.cpp
@@ -0,0 +1,1940 @@
+// $Id: force.cpp,v 1.79 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <algorithm>                    // for stl::remove
+#include <cassert>
+#include <cmath>
+#include <numeric>                      // for stl::accumulate
+
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+
+#include "chainpack.h"
+#include "constants.h"
+#include "datapack.h"
+#include "datatype.h"
+#include "defaults.h"
+#include "event.h"
+#include "force.h"
+#include "forcesummary.h"
+#include "mathx.h"
+#include "plforces.h"
+#include "region.h"
+#include "registry.h"
+#include "runreport.h"
+#include "stringx.h"
+#include "summary.h"
+#include "ui_vars.h"
+#include "ui_strings.h"
+#include "userparam.h"
+#include "xml_strings.h"                // for Force::ToXML() and PartitionForce::GetXMLStatusTag()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+Force::Force(       force_type          tag,
+                    string              name,
+                    string              fullname,
+                    string              paramname,
+                    string              fullparamname,
+                    vector<Parameter>   parameters,
+                    long                maxevents,
+                    double              defvalue,
+                    double              maxvalue,
+                    double              minvalue,
+                    double              maximizer_maxvalue,
+                    double              maximizer_minvalue,
+                    double              highval,
+                    double              lowval,
+                    double              highmult,
+                    double              lowmult,
+                    vector<ParamGroup>  identgroups,
+                    vector<ParamGroup>  multgroups,
+                    const DoubleVec1d&  paramgroupmults,
+                    const UIVarsPrior&  prior)
+    :   m_tag(tag),
+        m_name(name),
+        m_fullname(fullname),
+        m_paramname(paramname),
+        m_fullparamname(fullparamname),
+        m_parameters(parameters),
+        m_maxevents(maxevents),
+        m_defvalue(defvalue),
+        m_maxvalue(maxvalue),
+        m_minvalue(minvalue),
+        m_maximizer_maxvalue(maximizer_maxvalue),
+        m_maximizer_minvalue(maximizer_minvalue),
+        m_highval(highval),
+        m_lowval(lowval),
+        m_highmult(highmult),
+        m_lowmult(lowmult),
+        m_paramgroupmultipliers(paramgroupmults),
+        m_plforceptr(NULL),
+        m_identgroups(identgroups),
+        m_multgroups(multgroups),
+        m_defaultPrior(prior)
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d Force::PopParameters(DoubleVec1d& valuelist)
+{
+    // chew off as many values from the front of "valuelist"
+    // as this force has parameters, and return them.
+    // The passed vector is reduced in size.
+    DoubleVec1d tempvec;
+    DoubleVec1d::iterator start = valuelist.begin();
+    DoubleVec1d::iterator end = start + m_parameters.size();
+
+    tempvec.insert(tempvec.end(), start, end);
+    valuelist.erase(start, end);
+    return tempvec;
+
+} // PopParameters
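+
+// Illustrative (hypothetical) usage: given one flat vector holding the values
+// for several forces in order, each force peels off its own share and the
+// vector shrinks accordingly:
+//
+//     DoubleVec1d vals = ...;                              // thetas, then mig rates
+//     DoubleVec1d thetas = coalforce.PopParameters(vals);  // vals now holds only mig rates
+//     DoubleVec1d migs   = migforce.PopParameters(vals);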
+
+//------------------------------------------------------------------------------------
+
+vector<proftype> Force::GetProfileTypes() const
+{
+    vector<proftype> profiles;
+
+    vector<Parameter>::const_iterator param = m_parameters.begin();
+    for ( ; param != m_parameters.end(); ++param)
+        profiles.push_back(param->GetProfileType());
+
+    return profiles;
+
+} // GetProfileTypes
+
+//------------------------------------------------------------------------------------
+
+vector<ParamStatus> Force::GetParamstatuses() const
+{
+    vector<ParamStatus> pstats;
+
+    vector<Parameter>::const_iterator param = m_parameters.begin();
+    for ( ; param != m_parameters.end(); ++param)
+        pstats.push_back(param->GetStatus());
+
+    return pstats;
+
+} // GetParamstatuses
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Force::MakeStartParamReport() const
+{
+    StringVec1d rpt;
+    verbosity_type verbose = registry.GetUserParameters().GetVerbosity();
+
+    if (verbose == NORMAL || verbose == VERBOSE)
+    {
+        string line = m_fullparamname;
+        MethodTypeVec1d meth = registry.GetForceSummary().GetMethods(GetTag());
+        DoubleVec1d start = registry.GetForceSummary().GetStartParameters().
+            GetGlobalParametersByTag(GetTag());
+        if (m_parameters.size() > 1UL)
+        {
+            line += " (USR = user, WAT = watterson, FST = FST)";
+            rpt.push_back(line);
+            line = "Population";
+            unsigned long param;
+            for(param = 0; param < m_parameters.size(); ++param)
+            {
+                line += " "+indexToKey(param)+" "+ToString(meth[param],false); // false => short version of name
+                line += " "+Pretty(start[param],12);
+                rpt.push_back(line);
+                line.assign(10,' ');
+            }
+        }
+        else
+        {
+            line += " (" + ToString(meth[0],false) + ")"; // ToString(method_type,false) gives short version of name
+            line = MakeJustified(line,-25);
+            line += MakeJustified(Pretty(start[0]),25);
+            rpt.push_back(line);
+        }
+    }
+
+    return(rpt);
+} // Force::MakeStartParamReport
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Force::MakeChainParamReport(const ChainOut& chout) const
+{
+    ForceParameters fparam = chout.GetEstimates();
+    return(Tabulate(fparam.GetGlobalParametersByTag(GetTag()), 8));
+} // Force::MakeChainParamReport
+
+//------------------------------------------------------------------------------------
+
+bool Force::HasNoVaryingParameters() const
+{
+    vector<Parameter>::const_iterator it;
+    for (it = m_parameters.begin(); it != m_parameters.end(); ++it)
+    {
+        if (it->IsVariable()) return false;
+    }
+    return true;
+
+} // Force::HasNoVaryingParameters
+
+//------------------------------------------------------------------------------------
+
+DoubleVec2d Force::GetMles() const
+{
+    assert(m_parameters.size() != 0);   // if there are no Parameters yet it's too early to call this!
+
+    vector<Parameter>::const_iterator it;
+    DoubleVec2d result;
+    DoubleVec1d empty;
+
+    for (it = m_parameters.begin(); it != m_parameters.end(); ++it)
+    {
+        if (it->IsValidParameter()) result.push_back(it->GetRegionMLEs());
+        else result.push_back(empty);
+    }
+    return result;
+
+} // GetMles
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d Force::GetPopmles() const
+{
+    assert(m_parameters.size() != 0);   // if there are no Parameters yet it's too early to call this!
+
+    vector<Parameter>::const_iterator it;
+    DoubleVec1d result;
+
+    for (it = m_parameters.begin(); it != m_parameters.end(); ++it)
+    {
+        if (it->IsValidParameter()) result.push_back(it->GetOverallMLE());
+        else result.push_back(0.0);
+    }
+    return result;
+
+} // GetPopmles
+
+//------------------------------------------------------------------------------------
+
+MethodTypeVec1d Force::GetMethods() const
+{
+    assert(m_parameters.size() != 0);   // if there are no Parameters yet it's too early to call this!
+
+    vector<Parameter>::const_iterator it;
+    MethodTypeVec1d result;
+
+    for (it = m_parameters.begin(); it != m_parameters.end(); ++it)
+    {
+        result.push_back(it->GetMethod());
+    }
+    return result;
+
+} // GetMethods
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Force::GetAllParamNames() const
+{
+    StringVec1d names;
+    vector<Parameter>::const_iterator param;
+    for(param = m_parameters.begin(); param != m_parameters.end(); ++param)
+        if (param->IsValidParameter())
+            names.push_back(param->GetName());
+        else
+        {
+            string emptystring("");
+            names.push_back(emptystring);
+        }
+
+    return names;
+
+} // GetAllParamNames
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d Force::CreateVectorOfParametersWithValue(double val) const
+{
+    DoubleVec1d pvec(m_parameters.size(),val);
+
+    return pvec;
+} // CreateVectorOfParametersWithValue
+
+//------------------------------------------------------------------------------------
+
+proftype Force::SummarizeProfTypes() const
+{
+    vector<Parameter>::const_iterator param;
+    for(param = m_parameters.begin(); param != m_parameters.end(); ++param)
+        if (param->GetProfileType() != profile_NONE)
+            return param->GetProfileType();
+
+    return profile_NONE;
+
+} // SummarizeProfTypes
+
+//------------------------------------------------------------------------------------
+// GetIdenticalGroupedParams is used in the Forcesummary constructor.
+
+ULongVec2d Force::GetIdenticalGroupedParams() const
+{
+    ULongVec2d groups;
+    for (unsigned long gnum = 0; gnum < m_identgroups.size(); ++gnum)
+    {
+        if (m_identgroups[gnum].first.Status() == pstat_identical_head)
+        {
+            ULongVec1d onegroup;
+            for (unsigned long pnum = 0; pnum < m_identgroups[gnum].second.size(); ++pnum)
+            {
+                onegroup.push_back(m_parameters[m_identgroups[gnum].second[pnum]].GetParamVecIndex());
+            }
+            groups.push_back(onegroup);
+        }
+    }
+    return groups;
+}
+
+//------------------------------------------------------------------------------------
+// GetMultiplicativeGroupedParams is used in the Forcesummary constructor.
+
+ULongVec2d Force::GetMultiplicativeGroupedParams() const
+{
+    ULongVec2d groups;
+    for (unsigned long gnum = 0; gnum < m_multgroups.size(); ++gnum)
+    {
+        if (m_multgroups[gnum].first.Status() == pstat_multiplicative_head)
+        {
+            ULongVec1d onegroup;
+            for (unsigned long pnum = 0; pnum < m_multgroups[gnum].second.size(); ++pnum)
+            {
+                onegroup.push_back(m_parameters [m_multgroups[gnum].second[pnum]].GetParamVecIndex());
+            }
+            groups.push_back(onegroup);
+        }
+    }
+    return groups;
+}
+
+//------------------------------------------------------------------------------------
+// GetParamGroupMultipliers is used in the Forcesummary constructor.
+
+DoubleVec1d Force::GetParamGroupMultipliers() const
+{
+    return m_paramgroupmultipliers;
+}
+
+//------------------------------------------------------------------------------------
+
+string Force::MakeOpeningTag(unsigned long nspaces) const
+{
+    return MakeIndent(MakeTag(GetXMLName()),nspaces);
+}
+
+StringVec1d Force::ToXML(unsigned long nspaces) const
+{
+    StringVec1d xmllines;
+    string line = MakeOpeningTag(nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+    string mytag(MakeTag(xmlstr::XML_TAG_START_VALUES));
+    const ForceParameters& fp = registry.GetForceSummary().GetStartParameters();
+    DoubleVec1d params = fp.GetGlobalParametersByTag(GetTag());
+    line = MakeIndent(mytag,nspaces) + ToString(params,7) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_METHOD);
+    line = MakeIndent(mytag,nspaces) + ToString(GetMethods()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    if (m_tag != force_GROW && m_tag != force_COAL)
+    {
+        mytag = MakeTag(xmlstr::XML_TAG_MAX_EVENTS);
+        line = MakeIndent(mytag,nspaces) + ToString(GetMaxEvents()) + MakeCloseTag(mytag);
+        xmllines.push_back(line);
+    }
+    mytag = MakeTag(xmlstr::XML_TAG_PROFILES);
+    line = MakeIndent(mytag,nspaces) + " " + ToString(GetProfileTypes()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_CONSTRAINTS);
+    line = MakeIndent(mytag,nspaces) + " " + ToString(GetParamstatuses()) + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+
+    for (unsigned long gnum = 0; gnum < m_identgroups.size(); ++gnum)
+    {
+        mytag = MakeTagWithConstraint(xmlstr::XML_TAG_GROUP, ToString(m_identgroups[gnum].first.Status()));
+        LongVec1d indexes = m_identgroups[gnum].second;
+        for (unsigned long i=0; i<indexes.size(); i++)
+            indexes[i]++;
+        line = MakeIndent(mytag, nspaces) + ToString(indexes) + " " + MakeCloseTag(xmlstr::XML_TAG_GROUP);
+        xmllines.push_back(line);
+    }
+
+    // MFIX  need a similar code block for multiplicative groups, once the UI format is settled
+
+    //Write out the default prior
+    mytag = MakeTagWithType(xmlstr::XML_TAG_PRIOR, ToString(m_defaultPrior.GetPriorType()));
+    line = MakeIndent(mytag, nspaces);
+    xmllines.push_back(line);
+    nspaces += INDENT_DEPTH;
+
+    mytag = MakeTag(xmlstr::XML_TAG_PARAMINDEX);
+    line = MakeIndent(mytag, nspaces) + " " + ToString(uistr::allStr) + " " + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_PRIORLOWERBOUND);
+    line = MakeIndent(mytag, nspaces) + " " + ToString(m_defaultPrior.GetLowerBound()) + " " + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+    mytag = MakeTag(xmlstr::XML_TAG_PRIORUPPERBOUND);
+    line = MakeIndent(mytag, nspaces) + " " + ToString(m_defaultPrior.GetUpperBound()) + " " + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    mytag = MakeTag(xmlstr::XML_TAG_RELATIVE_SAMPLE_RATE);
+    line = MakeIndent(mytag, nspaces) + " " + ToString(m_defaultPrior.GetSamplingRate()) + " " + MakeCloseTag(mytag);
+    xmllines.push_back(line);
+#endif
+
+    nspaces -= INDENT_DEPTH;
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_PRIOR), nspaces);
+    xmllines.push_back(line);
+
+    //Write out any overridden priors
+    for (unsigned long pnum = 0; pnum < m_parameters.size(); ++pnum)
+    {
+        if (m_parameters[pnum].IsValidParameter())
+        {
+            Prior thisPrior = m_parameters[pnum].GetPrior();
+            if (!(thisPrior == m_defaultPrior))
+            {
+                mytag = MakeTagWithType(xmlstr::XML_TAG_PRIOR, ToString(thisPrior.GetPriorType()));
+                line = MakeIndent(mytag, nspaces);
+                xmllines.push_back(line);
+                nspaces += INDENT_DEPTH;
+
+                mytag = MakeTag(xmlstr::XML_TAG_PARAMINDEX);
+                line = MakeIndent(mytag, nspaces) + " " + ToString(pnum + 1) + " " + MakeCloseTag(mytag);
+                xmllines.push_back(line);
+                mytag = MakeTag(xmlstr::XML_TAG_PRIORLOWERBOUND);
+                line = MakeIndent(mytag, nspaces) + " " + ToString(thisPrior.GetLowerBound()) + " " + MakeCloseTag(mytag);
+                xmllines.push_back(line);
+                mytag = MakeTag(xmlstr::XML_TAG_PRIORUPPERBOUND);
+                line = MakeIndent(mytag, nspaces) + " " + ToString(thisPrior.GetUpperBound()) + " " + MakeCloseTag(mytag);
+                xmllines.push_back(line);
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+                mytag = MakeTag(xmlstr::XML_TAG_RELATIVE_SAMPLE_RATE);
+                line = MakeIndent(mytag, nspaces) + " " + ToString(thisPrior.GetSamplingRate()) + " " + MakeCloseTag(mytag);
+                xmllines.push_back(line);
+#endif
+
+                nspaces -= INDENT_DEPTH;
+                line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_PRIOR), nspaces);
+                xmllines.push_back(line);
+
+            }
+        }
+    }
+
+    nspaces -= INDENT_DEPTH;
+    line = MakeIndent(MakeCloseTag(GetXMLName()),nspaces);
+    xmllines.push_back(line);
+    return xmllines;
+} // ToXML
+
+//------------------------------------------------------------------------------------
+
+bool Force::IsAMember(const LongVec1d& m1, const LongVec1d& m2) const
+{
+    return m1 == m2;
+} // IsAMember
+
+//------------------------------------------------------------------------------------
+
+deque<bool> Force::UseCalculatedValues() const
+{
+    deque<bool> howtouse;
+
+    vector<Parameter>::const_iterator it;
+    for (it = m_parameters.begin(); it != m_parameters.end(); ++it)
+    {
+        howtouse.push_back(it->GetMethod() != method_USER && it->GetMethod() != method_PROGRAMDEFAULT);
+    }
+
+    return howtouse;
+} // Force::UseCalculatedValues
+
+//------------------------------------------------------------------------------------
+
+double Force::Truncate(double target) const
+{
+    if (target > m_maxvalue)
+        return m_maxvalue;
+    if (target < m_minvalue)
+        return m_minvalue;
+    return target;
+
+} // Force::Truncate
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d Force::RetrieveGlobalParameters(const ForceParameters& fp) const
+{
+    return fp.GetGlobalParametersByTag(GetTag());
+} // Force::RetrieveGlobalParameters
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d Force::RetrieveRegionalParameters(const ForceParameters& fp) const
+{
+    return fp.GetRegionalParametersByTag(GetTag());
+} // Force::RetrieveRegionalParameters
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d Force::RetrieveGlobalLogParameters(const ForceParameters& fp) const
+{
+    DoubleVec1d params(fp.GetGlobalParametersByTag(GetTag()));
+    DoubleVec1d result(params.size(), 0.0);
+    LogVec0(params, result);
+    return result;
+} // Force::RetrieveGlobalLogParameters
+
+DoubleVec1d Force::RetrieveRegionalLogParameters(const ForceParameters& fp) const
+{
+    DoubleVec1d params(fp.GetRegionalParametersByTag(GetTag()));
+    DoubleVec1d result(params.size(), 0.0);
+    LogVec0(params, result);
+    return result;
+} // Force::RetrieveRegionalLogParameters
+
+//------------------------------------------------------------------------------------
+
+unsigned long Force::FindOrdinalPosition(long cannonicalpos) const
+{
+    assert(cannonicalpos >= m_plforceptr->GetStart());
+    return cannonicalpos - m_plforceptr->GetStart();
+} // Force::FindOrdinalPosition
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+CoalForce::CoalForce(vector<Parameter> parameters,
+                     long maxevents,
+                     bool withGrowth,
+                     bool withLogisticSelection,
+                     vector<ParamGroup> identgroups,
+                     vector<ParamGroup> multgroups,
+                     const DoubleVec1d& paramgroupmults,
+                     const UIVarsPrior& prior)
+    : Force(    force_COAL,
+                "Coal",
+                "Coalescence",
+                "Theta",
+                "Theta",
+                parameters,
+                maxevents,
+                defaults::theta,
+                defaults::maxTheta,
+                defaults::minTheta,
+                defaults::maximization_maxTheta,
+                defaults::maximization_minTheta,
+                defaults::highvalTheta,
+                defaults::lowvalTheta,
+                defaults::highmultTheta,
+                defaults::lowmultTheta,
+                identgroups,
+                multgroups,
+                paramgroupmults,
+                prior)
+{
+    m_axisname.push_back(string("Population"));
+    m_axisname.push_back(string("Region"));
+
+    // Setting up the coalescePL object as a pointer that will be used later by the PostLike::GetPLfunction().
+    if(withGrowth)
+    {
+        m_plforceptr = new CoalesceGrowPL(parameters.size());
+    }
+    else if (withLogisticSelection)
+    {
+        m_plforceptr = new CoalesceLogisticSelectionPL(parameters.size());
+    }
+    else
+    {
+        m_plforceptr = new CoalescePL (parameters.size());
+    }
+} // CoalForce constructor
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> CoalForce::MakeEvents(const ForceSummary& fs) const
+{
+    vector<Event*> tempvec;
+    tempvec.push_back(new ActiveCoal(fs));
+    tempvec.push_back(new InactiveCoal(fs));
+    return tempvec;
+} // MakeEvents
+
+//------------------------------------------------------------------------------------
+
+Summary* CoalForce::CreateSummary(IntervalData& interval, bool shortness) const
+{
+    return new CoalSummary(interval, shortness);
+} // CreateSummary
+
+//------------------------------------------------------------------------------------
+
+long CoalForce::ReportDimensionality() const
+{
+    if(m_parameters.size() == 1)
+    {
+        return 1L;
+    }
+    return 2L;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+RegionGammaForce::RegionGammaForce(vector<Parameter> parameters,
+                                   vector<ParamGroup> identgroups,
+                                   vector<ParamGroup> multgroups,
+                                   const DoubleVec1d& paramgroupmults,
+                                   const UIVarsPrior& prior)
+    : Force(    force_REGION_GAMMA,
+                "RegionGamma",
+                "Background mutation rate gamma-distributed over regions",
+                "Alpha",
+                "Scaled shape parameter (\"alpha\")",
+                parameters,
+                0,
+                defaults::gammaOverRegions,
+                defaults::maxGammaOverRegions,
+                defaults::minGammaOverRegions,
+                defaults::maximization_maxGammaOverRegions,
+                defaults::maximization_minGammaOverRegions,
+                defaults::highvalGammaOverRegions,
+                defaults::lowvalGammaOverRegions,
+                defaults::highmultGammaOverRegions,
+                defaults::lowmultGammaOverRegions,
+                identgroups,
+                multgroups,
+                paramgroupmults,
+                prior)  // ignored
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> RegionGammaForce::MakeEvents(const ForceSummary& fs) const
+{
+    vector<Event*> tempvec;             // Yes this returns an empty vector.
+    return tempvec;
+
+} // RegionGammaForce::MakeEvents()
+
+//------------------------------------------------------------------------------------
+
+long RegionGammaForce::ReportDimensionality() const
+{
+    return 1L;
+}
+
+//------------------------------------------------------------------------------------
+
+Summary* RegionGammaForce::CreateSummary(IntervalData& interval, bool shortness) const
+{
+    return NULL;                        // Yes, we have no summary of our own.
+
+} // RegionGammaForce::CreateSummary
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+MigForce::MigForce(vector<Parameter> parameters,
+                   long maxevents,
+                   long npop,
+                   vector<ParamGroup> identgroups,
+                   vector<ParamGroup> multgroups,
+                   const DoubleVec1d& paramgroupmults,
+                   const UIVarsPrior& prior)
+    : PartitionForce(
+        force_MIG,
+        "Mig",
+        "Migration",
+        "Mig",
+        "Migration Rate",
+        parameters,
+        maxevents,
+        defaults::migration,
+        defaults::maxMigRate,
+        defaults::minMigRate,
+        defaults::maximization_maxMigRate,
+        defaults::maximization_minMigRate,
+        defaults::highvalMig,
+        defaults::lowvalMig,
+        defaults::highmultMig,
+        defaults::lowmultMig,
+        npop,
+        identgroups,
+        multgroups,
+        paramgroupmults,
+        prior)
+{
+    m_axisname.push_back(string("Population"));
+    m_axisname.push_back(string("Region"));
+
+    // Setting up the MigratePL object as a pointer that will be used later by the
+    // PostLike::GetPLfunction()
+    m_plforceptr = new MigratePL (npop);
+
+} // MigForce constructor
+
+//------------------------------------------------------------------------------------
+
+string MigForce::GetXMLStatusTag() const
+{
+    return string("");
+} // GetXMLStatusTag
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> MigForce::MakeEvents(const ForceSummary& fs) const
+{
+    vector<Event*> tempvec;
+    tempvec.push_back(new MigEvent(fs));
+    return tempvec;
+} // MakeEvents
+
+//------------------------------------------------------------------------------------
+
+Summary* MigForce::CreateSummary(IntervalData& interval, bool shortness) const
+{
+    return new MigSummary(interval, shortness);
+} // CreateSummary
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DivMigForce::DivMigForce(vector<Parameter> parameters,
+                         long maxevents,
+                         long npop,
+                         vector<ParamGroup> identgroups,
+                         vector<ParamGroup> multgroups,
+                         const DoubleVec1d& paramgroupmults,
+                         const UIVarsPrior& prior)
+    : PartitionForce(
+        force_DIVMIG,
+
+#if 0                                   // RSGNOTE: Was this.
+        "DivMig",
+        "DivergenceMigration",
+        "DivMig",
+        "Divergence Migration Rate",
+#else                                   // RSGNOTE: Changed to this.
+        "Mig",                          // Is this correct?  Seems odd in a force called "DivMigForce".
+        "Migration",                    // These are the arguments to MigForce.
+        "Mig",
+        "Migration Rate",
+#endif
+
+        parameters,
+        maxevents,
+        defaults::migration,
+        defaults::maxMigRate,
+        defaults::minMigRate,
+        defaults::maximization_maxMigRate,
+        defaults::maximization_minMigRate,
+        defaults::highvalMig,
+        defaults::lowvalMig,
+        defaults::highmultMig,
+        defaults::lowmultMig,
+        npop,
+        identgroups,
+        multgroups,
+        paramgroupmults,
+        prior)
+{
+    m_axisname.push_back(string("Population"));
+    m_axisname.push_back(string("Region"));
+
+    // Setting up the DivMigPL object as a pointer that will be used later by the
+    // PostLike::GetPLfunction()
+    m_plforceptr = new DivMigPL (npop);
+
+} // DivMigForce constructor
+
+//------------------------------------------------------------------------------------
+
+string DivMigForce::GetXMLStatusTag() const
+{
+    return string("");
+} // GetXMLStatusTag
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> DivMigForce::MakeEvents(const ForceSummary& fs) const
+{
+    vector<Event*> tempvec;
+    tempvec.push_back(new DivMigEvent(fs));
+    return tempvec;
+} // MakeEvents
+
+//------------------------------------------------------------------------------------
+
+Summary* DivMigForce::CreateSummary(IntervalData& interval, bool shortness) const
+{
+    return new DivMigSummary(interval, shortness);
+} // CreateSummary
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+RecForce::RecForce(vector<Parameter> parameters,
+                   long maxevents,
+                   vector<ParamGroup> identgroups,
+                   vector<ParamGroup> multgroups,
+                   const DoubleVec1d& paramgroupmults,
+                   const UIVarsPrior& prior )
+    : Force(    force_REC,
+                "Rec",
+                "Recombination",
+                "Rec",
+                "Recombination Rate",
+                parameters,
+                maxevents,
+                defaults::recombinationRate,
+                defaults::maxRecRate,
+                defaults::minRecRate,
+                defaults::maximization_maxRecRate,
+                defaults::maximization_minRecRate,
+                defaults::highvalRec,
+                defaults::lowvalRec,
+                defaults::highmultRec,
+                defaults::lowmultRec,
+                identgroups,
+                multgroups,
+                paramgroupmults,
+                prior)
+{
+    m_axisname.push_back(string(""));
+    m_axisname.push_back(string("Region"));
+
+    // Setting up the RecombinePL object as a pointer that will be used later by the
+    // PostLike::GetPLfunction()
+    m_plforceptr = new RecombinePL (0);
+
+} // RecForce constructor
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> RecForce::MakeEvents(const ForceSummary& fs) const
+{
+    vector<Event*> tempvec;
+    tempvec.push_back(new ActiveRec(fs));
+    tempvec.push_back(new InactiveRec(fs));
+    return tempvec;
+} // MakeEvents
+
+//------------------------------------------------------------------------------------
+
+Summary* RecForce::CreateSummary(IntervalData& interval, bool shortness) const
+{
+    // as far as we know, RecSummaries are always short.
+    // NB: JDEBUG/MDEBUG? Not in the presence of growth/selection plus disease!
+    return new RecSummary(interval, shortness);
+} // CreateSummary
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+PartitionForce::PartitionForce(
+    force_type          tag,
+    string              name,
+    string              fullname,
+    string              paramname,
+    string              fullparamname,
+    vector<Parameter>   parameters,
+    long                maxevents,
+    double              defvalue,
+    double              maxvalue,
+    double              minvalue,
+    double              maximizer_maxvalue,
+    double              maximizer_minvalue,
+    double              highval,
+    double              lowval,
+    double              highmult,
+    double              lowmult,
+    long                npartitions,
+    vector<ParamGroup>  identgroups,
+    vector<ParamGroup>  multgroups,
+    const DoubleVec1d&  paramgroupmults,
+    const UIVarsPrior&  prior)
+    : Force(        tag,
+                    name,
+                    fullname,
+                    paramname,
+                    fullparamname,
+                    parameters,
+                    maxevents,
+                    defvalue,
+                    maxvalue,
+                    minvalue,
+                    maximizer_maxvalue,
+                    maximizer_minvalue,
+                    highval,
+                    lowval,
+                    highmult,
+                    lowmult,
+                    identgroups,
+                    multgroups,
+                    paramgroupmults,
+                    prior),
+      m_npartitions(npartitions)
+{
+    // Intentionally blank: as an abstract class (it has pure-virtual methods),
+    // this ctor simply passes its arguments through to the Force base class.
+} // PartitionForce::ctor
+
+//------------------------------------------------------------------------------------
+
+long PartitionForce::ChoosePartition(long origpart, long chosenpart, bool islow, long midpoint) const
+{
+    return origpart;
+} // PartitionForce::ChoosePartition
+
+//------------------------------------------------------------------------------------
+
+void PartitionForce::ModifyEvents(const ForceSummary& fs, vector<Event*>& events) const
+{
+    vector<Event*>::iterator it;
+    for (it = events.begin(); it != events.end(); ++it)
+    {
+
+        if ((*it)->Type() == activeRecEvent)
+            dynamic_cast<ActiveRec*>(*it)->AddSizeForce(m_tag);
+
+        if ((*it)->Type() == inactiveRecEvent)
+            dynamic_cast<InactiveRec*>(*it)->AddSizeForce(m_tag);
+
+    }
+
+} // PartitionForce::ModifyEvents
+
+//------------------------------------------------------------------------------------
+
+bool PartitionForce::SetPartIndex(long value)
+{
+    m_partindex = value;
+    return true;
+} // PartitionForce::SetPartIndex
+
+//------------------------------------------------------------------------------------
+
+bool PartitionForce::IsAMember(const LongVec1d& m1, const LongVec1d& m2) const
+{
+    return m1[m_partindex] == m2[m_partindex];
+} // PartitionForce::IsAMember
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d PartitionForce::CreateVectorOfParametersWithValue(double val) const
+{
+    DoubleVec1d pvec(m_parameters.size());
+    DoubleVec1d::iterator part(pvec.begin());
+    unsigned long part1, part2, nparts(GetNPartitions());
+
+    assert(m_parameters.size() == nparts*nparts);
+
+    for(part1 = 0; part1 < nparts && part != pvec.end(); ++part1)
+        for(part2 = 0; part2 < nparts; ++part2, ++part)
+            if (part1 == part2) *part = 0.0;
+            else *part = val;
+
+    return pvec;
+
+} // PartitionForce::CreateVectorOfParametersWithValue
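+
+// Worked example (hypothetical values): with 3 partitions and val = 1.0 the
+// returned row-major vector is {0,1,1, 1,0,1, 1,1,0}; every off-diagonal
+// entry gets val, and each diagonal entry (a partition "to itself") stays 0.0.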
+
+//------------------------------------------------------------------------------------
+
+StringVec1d PartitionForce::MakeStartParamReport() const
+{
+    StringVec1d rpt;
+    verbosity_type verbose = registry.GetUserParameters().GetVerbosity();
+
+    if (verbose == NORMAL || verbose == VERBOSE)
+    {
+        string line = m_fullparamname+" (USR = user, FST = FST)";
+        rpt.push_back(line);
+        MethodTypeVec2d meth = registry.GetForceSummary().Get2DMethods(GetTag());
+        DoubleVec2d start = registry.GetForceSummary().GetStartParameters().GetGlobal2dRates(GetTag());
+        long nparts = registry.GetDataPack().GetNPartitionsByForceType(GetTag());
+        long colwidth1 = 9, colwidth2 = 14;
+        long totlength = nparts * colwidth2 + colwidth1;
+        line = MakeCentered("from",totlength);
+        rpt.push_back(line);
+        line = MakeJustified("Pop",colwidth1);
+        long part;
+        for(part = 0; part < nparts; ++part)
+            line += MakeCentered(indexToKey(part),colwidth2);
+        rpt.push_back(line);
+        line = MakeJustified("into 1",colwidth1);
+        for(part = 0; part < nparts; ++part)
+        {
+            long ipart;
+            for(ipart = 0; ipart < nparts; ++ipart)
+            {
+                string name = ToString(meth[part][ipart], false); // false to get short name
+                if (part == ipart) name = "-";
+                if (name == "-") line += MakeCentered(name,colwidth2);
+                else line += MakeCentered(name + " " + Pretty(start[part][ipart], colwidth1), colwidth2);
+            }
+            rpt.push_back(line);
+            line.erase();
+            line = MakeJustified(ToString(part+2),colwidth1);
+        }
+    }
+
+    return(rpt);
+} // PartitionForce::MakeStartParamReport
+
+//------------------------------------------------------------------------------------
+
+StringVec1d PartitionForce::MakeChainParamReport(const ChainOut& chout) const
+{
+    return(Tabulate(chout.GetEstimates().GetGlobal2dRates(GetTag()), 8));
+} // PartitionForce::MakeChainParamReport
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d PartitionForce::SumXPartsToParts(const DoubleVec1d& xparts) const
+{
+    DoubleVec1d parts(m_npartitions, 0.0);
+    LongVec1d nparts(registry.GetDataPack().GetAllNPartitions());
+
+    LongVec1d indicator(registry.GetDataPack().GetNPartitionForces(), 0L);
+
+    DoubleVec1d::const_iterator xpart;
+    for(xpart = xparts.begin(); xpart != xparts.end(); ++xpart)
+    {
+        parts[indicator[m_partindex]] += *xpart;
+
+        long part;
+        for(part = nparts.size() - 1; part >= 0; --part)
+        {
+            ++indicator[part];
+            if (indicator[part] < nparts[part]) break;
+            indicator[part] = 0;
+        }
+    }
+
+    return parts;
+} // PartitionForce::SumXPartsToParts
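+
+// Worked example (hypothetical sizes): with two partition forces of sizes
+// {2, 3}, the cross-partition list has 6 entries visited as (0,0), (0,1),
+// (0,2), (1,0), (1,1), (1,2); the "indicator" vector acts like an odometer
+// whose rightmost digit turns fastest, and each entry is accumulated into
+// parts[] under this force's own index within that tuple.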
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d PartitionForce::SumXPartsToParts(const DoubleVec1d& xparts, const DoubleVec1d& growths, double etime) const
+{
+    DoubleVec1d parts(m_npartitions, 0.0);
+    LongVec1d nparts(registry.GetDataPack().GetAllNPartitions());
+
+    LongVec1d indicator(registry.GetDataPack().GetNPartitionForces(), 0L);
+
+    long xpart, nxparts = xparts.size();
+    for(xpart = 0; xpart < nxparts; ++xpart)
+    {
+        parts[indicator[m_partindex]] += xparts[xpart] * exp(-growths[xpart]*etime);
+
+        long part;
+        for(part = nparts.size() - 1; part >= 0; --part)
+        {
+            ++indicator[part];
+            if (indicator[part] < nparts[part]) break;
+            indicator[part] = 0;
+        }
+    }
+
+    return parts;
+} // PartitionForce::SumXPartsToParts
+
+//------------------------------------------------------------------------------------
+
+string PartitionForce::MakeStatusXML(const string& mystatus) const
+{
+    return(GetXMLStatusTag() + mystatus + MakeCloseTag(GetXMLStatusTag()));
+} // PartitionForce::MakeStatusXML
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+LocalPartitionForce::LocalPartitionForce(
+    force_type          tag,
+    string              name,
+    string              fullname,
+    string              paramname,
+    string              fullparamname,
+    vector<Parameter>   parameters,
+    long                maxevents,
+    double              defvalue,
+    double              maxvalue,
+    double              minvalue,
+    double              maximizer_maxvalue,
+    double              maximizer_minvalue,
+    double              highval,
+    double              lowval,
+    double              highmult,
+    double              lowmult,
+    long                npartitions,
+    long                localsite,
+    vector<ParamGroup>  identgroups,
+    vector<ParamGroup>  multgroups,
+    const DoubleVec1d&  paramgroupmults,
+    const UIVarsPrior&  prior)
+    : PartitionForce(
+        tag,
+        name,
+        fullname,
+        paramname,
+        fullparamname,
+        parameters,
+        maxevents,
+        defvalue,
+        maxvalue,
+        minvalue,
+        maximizer_maxvalue,
+        maximizer_minvalue,
+        highval,
+        lowval,
+        highmult,
+        lowmult,
+        npartitions,
+        identgroups,
+        multgroups,
+        paramgroupmults,
+        prior),
+      m_localsite(localsite)
+{
+    // deliberately blank
+} // LocalPartitionForce ctor
+
+//------------------------------------------------------------------------------------
+
+StringVec1d LocalPartitionForce::ToXML(unsigned long nspaces) const
+{
+    StringVec1d xmllines(Force::ToXML(nspaces));
+
+    string mytag(MakeTag(xmlstr::XML_TAG_DISEASELOCATION));
+    nspaces += INDENT_DEPTH;
+    string line = MakeIndent(mytag,nspaces) + ToString(GetLocalSite()) + MakeCloseTag(mytag);
+
+    // insert location information just before close tag
+    StringVec1d::iterator it = xmllines.end();
+    --it;
+    xmllines.insert(it,line);
+
+    return xmllines;
+
+} // LocalPartitionForce::ToXML
+
+//------------------------------------------------------------------------------------
+
+long LocalPartitionForce::ChoosePartition(long origpart, long chosenpart, bool islow, long midpoint) const
+{
+    // "midpoint" is a recombination Littlelink (the one at center of recombination Biglink, if Biglinks enabled).
+    if (!((m_localsite <= midpoint && islow) || (m_localsite > midpoint && !islow)))
+    {
+        // If the disease marker is NOT present on our branch ...
+        return chosenpart;
+    }
+    else
+    {
+        return origpart;
+    }
+
+} // LocalPartitionForce::ChoosePartition
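+
+// Worked example (hypothetical values): with the disease site at position 50
+// and a recombination midpoint of 100, the branch carrying the low sites
+// (islow == true) still holds the site and so keeps origpart, while the
+// branch carrying the high sites takes the newly chosen partition, chosenpart.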
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+GrowthForce::GrowthForce(vector<Parameter> parameters,
+                         long maxevents,
+                         vector<ParamGroup> identgroups,
+                         vector<ParamGroup> multgroups,
+                         const DoubleVec1d& paramgroupmults,
+                         const UIVarsPrior& prior )
+    : Force(    force_GROW,
+                "Grow",
+                "Growth",
+                "Growth",
+                "GrowthRate",
+                parameters,
+                maxevents,
+                defaults::growth,
+                defaults::maxGrowRate,
+                defaults::minGrowRate,
+                defaults::maximization_maxGrowRate,
+                defaults::maximization_minGrowRate,
+                defaults::highvalGrowth,
+                defaults::lowvalGrowth,
+                defaults::highmultGrowth,
+                defaults::lowmultGrowth,
+                identgroups,
+                multgroups,
+                paramgroupmults,
+                prior)
+{
+    m_axisname.push_back(string("Population"));
+    m_axisname.push_back(string("Region"));
+
+    // Set up the GrowPL object as a pointer that will be used later by
+    // PostLike::GetPLfunction().
+    m_plforceptr = new GrowPL (parameters.size());
+
+} // GrowthForce::ctor
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> GrowthForce::MakeEvents(const ForceSummary& fs) const
+{
+    vector<Event*> tempvec;             // Yes this returns an empty vector.
+    return tempvec;
+
+} // GrowthForce::MakeEvents()
+
+//------------------------------------------------------------------------------------
+
+Summary* GrowthForce::CreateSummary(IntervalData& interval, bool shortness) const
+{
+    return NULL;                        // Yes, we have no summary of our own.
+
+} // CreateSummary
+
+//------------------------------------------------------------------------------------
+
+void GrowthForce::ModifyEvents(const ForceSummary& fs, vector<Event*>& events) const
+{
+    // this is now a no-op
+} // GrowthForce::ModifyEvents()
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d GrowthForce::RetrieveGlobalLogParameters(const ForceParameters& fp) const
+{
+    DoubleVec1d params(fp.GetGlobalParametersByTag(GetTag()));
+
+    return params;
+
+} // GrowthForce::RetrieveGlobalLogParameters
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d GrowthForce::RetrieveRegionalLogParameters(const ForceParameters& fp) const
+{
+    DoubleVec1d params(fp.GetRegionalParametersByTag(GetTag()));
+
+    return params;
+
+} // GrowthForce::RetrieveRegionalLogParameters
+
+//------------------------------------------------------------------------------------
+
+long GrowthForce::ReportDimensionality() const
+{
+    if(m_parameters.size() == 1)
+    {
+        return 1L;
+    }
+    return 2L;
+}
+
+//------------------------------------------------------------------------------------
+
+string GrowthForce::MakeOpeningTag(unsigned long nspaces) const
+{
+    return MakeIndent(MakeTagWithType(GetXMLName(),ToString(growth_CURVE)), nspaces);
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+LogisticSelectionForce::LogisticSelectionForce(vector<Parameter> parameters,
+                                               long maxevents,
+                                               long paramvecindex,
+                                               vector<ParamGroup> identgroups,
+                                               vector<ParamGroup> multgroups,
+                                               const DoubleVec1d& paramgroupmults,
+                                               const UIVarsPrior& prior )
+    : Force(    force_LOGISTICSELECTION,
+                "LSelect",
+                "Logistic Selection",
+                "LSelectCoeff",
+                "Logistic Selection Coefficient",
+                parameters,
+                maxevents,
+                defaults::logisticSelection,
+                defaults::maxLSelectCoeff,
+                defaults::minLSelectCoeff,
+                defaults::maximization_maxLSelectCoeff,
+                defaults::maximization_minLSelectCoeff,
+                defaults::highvalLSelect,
+                defaults::lowvalLSelect,
+                defaults::highmultLSelect,
+                defaults::lowmultLSelect,
+                identgroups,
+                multgroups,
+                paramgroupmults,
+                prior)
+{
+    m_axisname.push_back(string("Population"));
+    m_axisname.push_back(string("Region"));
+
+    long numPartitionForces = registry.GetDataPack().GetNPartitionForces();
+
+    if (1 != numPartitionForces)
+    {
+        string msg = "LogisticSelectionForce constructor, detected ";
+        msg += ToString(numPartitionForces) + " partition forces; ";
+        msg += "currently, only one such force type is allowed.";
+        throw implementation_error(msg);
+    }
+    long numCrossPartitions = registry.GetDataPack().GetNCrossPartitions();
+    if (2 != numCrossPartitions)
+    {
+        string msg = "LogisticSelectionForce constructor, detected ";
+        msg += ToString(numCrossPartitions) + " populations; ";
+        msg += "must have exactly 2, one for the favored allele, ";
+        msg += "one for the disfavored allele.";
+        throw implementation_error(msg);
+    }
+    // Set up the LogisticSelectionPL object as a pointer that will be used later by
+    // PostLike::GetPLfunction().
+    m_plforceptr = new LogisticSelectionPL(paramvecindex);
+
+} // LogisticSelectionForce::ctor
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> LogisticSelectionForce::MakeEvents(const ForceSummary& fs) const
+{
+    vector<Event*> tempvec;             // Yes this returns an empty vector.
+    return tempvec;
+
+} // LogisticSelectionForce::MakeEvents()
+
+//------------------------------------------------------------------------------------
+
+Summary* LogisticSelectionForce::CreateSummary(IntervalData& interval, bool shortness) const
+{
+    return NULL;                        // Yes, we have no summary of our own.
+
+} // CreateSummary
+
+//------------------------------------------------------------------------------------
+
+void LogisticSelectionForce::ModifyEvents(const ForceSummary& fs, vector<Event*>& events) const
+{
+    // this is now a no-op
+} // LogisticSelectionForce::ModifyEvents()
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d LogisticSelectionForce::RetrieveGlobalLogParameters(const ForceParameters& fp) const
+{
+    DoubleVec1d params(fp.GetGlobalParametersByTag(GetTag()));
+
+    return params;
+
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d LogisticSelectionForce::RetrieveRegionalLogParameters(const ForceParameters& fp) const
+{
+    DoubleVec1d params(fp.GetRegionalParametersByTag(GetTag()));
+
+    return params;
+
+}
+
+//------------------------------------------------------------------------------------
+
+long LogisticSelectionForce::ReportDimensionality() const
+{
+    if(m_parameters.size() == 1)
+    {
+        return 1L;
+    }
+    return 2L;
+}
+
+//------------------------------------------------------------------------------------
+
+string LogisticSelectionForce::MakeOpeningTag(unsigned long nspaces) const
+{
+    return MakeIndent(MakeTag(GetXMLName()), nspaces);
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DiseaseForce::DiseaseForce(
+    vector<Parameter> parameters,
+    long maxevents,
+    bool hasLogisticSelection,
+    long nstates,
+    long localsite,
+    vector<ParamGroup> identgroups,
+    vector<ParamGroup> multgroups,
+    const DoubleVec1d& paramgroupmults,
+    const UIVarsPrior& prior)
+    : LocalPartitionForce(
+        force_DISEASE,
+        "Disease",
+        "Disease Status",
+        "Disease MuRate",
+        "Disease Mutation Rate",
+        parameters,
+        maxevents,
+        defaults::disease,
+        defaults::maxDiseaseRate,
+        defaults::minDiseaseRate,
+        defaults::maximization_maxDiseaseRate,
+        defaults::maximization_minDiseaseRate,
+        defaults::highvalDisease,
+        defaults::lowvalDisease,
+        defaults::highmultDisease,
+        defaults::lowmultDisease,
+        nstates,
+        localsite,
+        identgroups,
+        multgroups,
+        paramgroupmults,
+        prior)
+{
+    m_axisname.push_back(string("Population"));
+    m_axisname.push_back(string("Region"));
+
+    // Set up the disease PL object (DiseasePL or DiseaseLogisticSelectionPL) as a pointer
+    // that will be used later by PostLike::GetPLfunction().
+    if (hasLogisticSelection)
+        m_plforceptr = new DiseaseLogisticSelectionPL(nstates);
+    else
+        m_plforceptr = new DiseasePL(nstates);
+    assert(parameters.size()==static_cast<DoubleVec1d::size_type>(nstates*nstates));
+
+} // DiseaseForce::ctor
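+
+// For example, with nstates == 2 (disease present vs. absent), the assert in
+// the ctor above requires 2 * 2 == 4 disease mutation-rate parameters.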
+
+//------------------------------------------------------------------------------------
+
+string DiseaseForce::GetXMLStatusTag() const
+{
+    return MakeTag(xmlstr::XML_TAG_DISEASESTATUS);
+} // DiseaseForce::GetXMLStatusTag
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> DiseaseForce::MakeEvents(const ForceSummary& fs) const
+{
+    vector<Event*> tempvec;
+    tempvec.push_back(new DiseaseEvent(fs));
+    return tempvec;
+} // MakeEvents
+
+//------------------------------------------------------------------------------------
+
+Summary* DiseaseForce::CreateSummary(IntervalData& interval, bool shortness) const
+{
+    return new DiseaseSummary(interval, shortness);
+} // DiseaseForce::CreateSummary
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+StickForce::StickForce(
+    force_type         tag,
+    string             name,
+    string             fullname,
+    string             paramname,
+    string             fullparamname,
+    vector<Parameter>  parameters,
+    long               maxevents,
+    double             defvalue,
+    double             maxvalue,
+    double             minvalue,
+    double             maximizer_maxvalue,
+    double             maximizer_minvalue,
+    double             highval,
+    double             lowval,
+    double             highmult,
+    double             lowmult,
+    vector<ParamGroup> identgroups,
+    vector<ParamGroup> multgroups,
+    const DoubleVec1d& paramgroupmults,
+    const UIVarsPrior& prior)
+    : Force(tag,name,fullname,paramname,fullparamname,parameters,
+            maxevents,defvalue,maxvalue,minvalue,maximizer_maxvalue,
+            maximizer_minvalue,highval,lowval,highmult,
+            lowmult,identgroups,multgroups,paramgroupmults,prior),
+      m_percentchange(defaults::perThetaChange)
+{
+    // intentionally blank
+    // as an abstract base class, the StickForce ctor just delegates to the Force ctor
+} // StickForce::ctor
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+StickExpGrowForce::StickExpGrowForce(
+    vector<Parameter> parameters,
+    long maxevents,
+    vector<ParamGroup> identgroups,
+    vector<ParamGroup> multgroups,
+    const DoubleVec1d& paramgroupmults,
+    const UIVarsPrior& prior)
+    : StickForce(
+        force_EXPGROWSTICK,
+        "StickExpGrow",
+        "StickExpGrow Status",
+        "StickExpGrow MuRate",
+        "StickExpGrow Mutation Rate",
+        parameters,
+        maxevents,
+        defaults::growth,
+        defaults::maxGrowRate,
+        defaults::minGrowRate,
+        defaults::maximization_maxGrowRate,
+        defaults::maximization_minGrowRate,
+        defaults::highvalGrowth,
+        defaults::lowvalGrowth,
+        defaults::highmultGrowth,
+        defaults::lowmultGrowth,
+        identgroups,
+        multgroups,
+        paramgroupmults,
+        prior),
+      m_negln(log(1.0+m_percentchange)),
+      m_posln(log(1.0-m_percentchange))
+
+{
+    m_axisname.push_back(string("Population"));
+    m_axisname.push_back(string("Region"));
+    // Set up the normal GrowPL object as a pointer that will be used later by
+    // PostLike::GetPLfunction().
+    m_plforceptr = new GrowPL (parameters.size());
+
+} // StickExpGrowForce::ctor
+
+//------------------------------------------------------------------------------------
+
+string StickExpGrowForce::MakeOpeningTag(unsigned long nspaces) const
+{
+    return MakeIndent(MakeTagWithType(GetXMLName(), xmlstr::XML_ATTRVALUE_STICKEXP),nspaces);
+}
+
+//------------------------------------------------------------------------------------
+
+string StickExpGrowForce::GetXMLName() const
+{
+    return xmlstr::XML_TAG_GROWTH;
+}
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> StickExpGrowForce::MakeEvents(const ForceSummary& fs) const
+{
+    vector<Event*> tempvec;             // Yes this returns an empty vector.
+    return tempvec;
+}
+
+//------------------------------------------------------------------------------------
+
+void StickExpGrowForce::ModifyEvents(const ForceSummary& fs, vector<Event*>& events) const
+{
+    // this is now a no-op
+} // StickExpGrowForce::ModifyEvents()
+
+//------------------------------------------------------------------------------------
+
+Summary* StickExpGrowForce::CreateSummary(IntervalData& interval, bool shortness) const
+{
+    return NULL;                        // Yes, we have no summary of our own.
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d StickExpGrowForce::RetrieveGlobalLogParameters(const ForceParameters& fp) const
+{
+    DoubleVec1d params(fp.GetGlobalParametersByTag(GetTag()));
+    return params;
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d StickExpGrowForce::RetrieveRegionalLogParameters(const ForceParameters& fp) const
+{
+    DoubleVec1d params(fp.GetRegionalParametersByTag(GetTag()));
+    return params;
+}
+
+//------------------------------------------------------------------------------------
+
+long StickExpGrowForce::ReportDimensionality() const
+{
+    if (m_parameters.size() == 1)
+    {
+        return 1L;
+    }
+    return 2L;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+StickSelectForce::StickSelectForce(
+    vector<Parameter> parameters,
+    long maxevents,
+    vector<ParamGroup> identgroups,
+    vector<ParamGroup> multgroups,
+    const DoubleVec1d& paramgroupmults,
+    const UIVarsPrior& prior)
+    : StickForce(
+        force_LOGSELECTSTICK,
+        "StickSelect",
+        "StickSelect Status",
+        "StickSelect MuRate",
+        "StickSelect Mutation Rate",
+        parameters,
+        maxevents,
+        defaults::logisticSelection,
+        defaults::maxLSelectCoeff,
+        defaults::minLSelectCoeff,
+        defaults::maximization_maxLSelectCoeff,
+        defaults::maximization_minLSelectCoeff,
+        defaults::highvalLSelect,
+        defaults::lowvalLSelect,
+        defaults::highmultLSelect,
+        defaults::lowmultLSelect,
+        identgroups,
+        multgroups,
+        paramgroupmults,
+        prior)
+
+{
+    m_axisname.push_back(string("Population"));
+    m_axisname.push_back(string("Region"));
+    const DataPack& dp(registry.GetDataPack());
+
+    long numpforces(dp.GetNPartitionForces());
+    if (numpforces != 1)
+    {
+        string msg = "StickLogisticSelectionForce constructor, detected ";
+        msg += ToString(numpforces) + " partition forces; ";
+        msg += "currently, only one such force type is allowed.";
+        throw implementation_error(msg);
+    }
+    long numcpforces(dp.GetNCrossPartitions());
+    if (numcpforces != 2)
+    {
+        string msg = "StickLogisticSelectionForce constructor, detected ";
+        msg += ToString(numcpforces) + " populations; ";
+        msg += "must have exactly 2, one for the favored allele, ";
+        msg += "one for the disfavored allele.";
+        throw implementation_error(msg);
+    }
+
+    // The ForceSummary object does not necessarily exist when we call this ctor in
+    // Registry::InstallForcesAllOverThePlace().  Also, we need special-case code when putting
+    // PLForces into the PostLike objects, so we defer construction of the PLForces object until then.
+    // m_plforceptr = new StickSelectPL (registry.GetForceSummary());
+
+} // StickSelectForce::ctor
+
+//------------------------------------------------------------------------------------
+
+string StickSelectForce::GetXMLName() const
+{
+    return xmlstr::XML_TAG_STOCHASTICSELECTION;
+}
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> StickSelectForce::MakeEvents(const ForceSummary& fs) const
+{
+    vector<Event*> tempvec;             // Yes this returns an empty vector.
+    return tempvec;
+}
+
+//------------------------------------------------------------------------------------
+
+string StickSelectForce::MakeOpeningTag(unsigned long nspaces) const
+{
+    return MakeIndent(MakeTagWithType(GetXMLName(), xmlstr::XML_ATTRVALUE_STICK),nspaces);
+}
+
+//------------------------------------------------------------------------------------
+
+Summary* StickSelectForce::CreateSummary(IntervalData& interval, bool shortness) const
+{
+    return NULL;                        // Yes, we have no summary of our own.
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d StickSelectForce::RetrieveGlobalLogParameters(const ForceParameters& fp) const
+{
+    DoubleVec1d params(fp.GetRegionalParametersByTag(GetTag()));
+    return params;
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d StickSelectForce::RetrieveRegionalLogParameters(const ForceParameters& fp) const
+{
+    DoubleVec1d params(fp.GetRegionalParametersByTag(GetTag()));
+    return params;
+}
+
+//------------------------------------------------------------------------------------
+
+long StickSelectForce::ReportDimensionality() const
+{
+    if(m_parameters.size() == 1) return 1L;
+    else return 2L;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DivForce::DivForce(vector<Parameter> parameters, long int maxEvents,
+                   vector<ParamGroup> identgroups,
+                   vector<ParamGroup> multgroups,
+                   const DoubleVec1d& paramgroupmults,
+                   const UIVarsPrior& prior,
+                   const vector<vector<string> >& newpops,
+                   const vector<string>& ancestors,
+                   const DataPack& dpack)
+    : Force(force_DIVERGENCE,
+            "Div",
+            "Divergence",
+            "Epoch",
+            "EpochTime",
+            parameters,
+            defaults::epochEvents,
+            defaults::epochtime,
+            defaults::maxEpoch,
+            defaults::minEpoch,
+            defaults::maximization_maxEpoch,
+            defaults::maximization_minEpoch,
+            defaults::highvalEpoch,
+            defaults::lowvalEpoch,
+            defaults::highmultEpoch,
+            defaults::lowmultEpoch,
+            identgroups,
+            multgroups,
+            paramgroupmults,
+            prior)
+{
+    // Build the Epochs here.
+
+    // Create initial list of populations.
+    LongVec1d pops_here;
+    for (long pop = 0; pop < dpack.GetNumTipPopulations(); ++pop)
+        pops_here.push_back(pop);
+
+    // The first Epoch has no child or ancestor info, just pops_here.
+    vector<long> fakeancestors;
+    Epoch firstepoch(pops_here, fakeancestors, FLAGLONG);
+    m_epochs.push_back(firstepoch);
+
+    // The remaining Epochs
+    for (StringVec2d::size_type ep = 0; ep < newpops.size(); ++ep)
+    {
+        LongVec1d children;
+        for (StringVec1d::size_type pop = 0; pop < newpops[ep].size(); ++pop)
+        {
+            children.push_back(dpack.GetPartitionNumber(force_DIVMIG,newpops[ep][pop]));
+        }
+        long ancestornum(dpack.GetPartitionNumber(force_DIVMIG,ancestors[ep]));
+        // fix up the current populations list for the boundary changes
+        for (LongVec1d::const_iterator searchpop = children.begin();
+             searchpop != children.end(); ++searchpop)
+        {
+            pops_here.erase(remove(pops_here.begin(),pops_here.end(),*searchpop), pops_here.end());
+        }
+        pops_here.push_back(ancestornum);
+
+        // Make the epoch.
+        Epoch epoch(pops_here,children,ancestornum);
+        m_epochs.push_back(epoch);
+    }
+
+    m_axisname.push_back(string("Population"));
+    m_axisname.push_back(string("Region"));
+
+    // NB:  Even though these parameters don't factor into P(G|param), we make
+    // a PLForce to keep the maximizer happy--it expects all parameters to
+    // correspond to a PLForce.
+    m_plforceptr = new DivPL(m_epochs.size() - 1);
+}
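+
+// Illustrative example of the Epoch construction above: with three tip
+// populations {0, 1, 2} and a single divergence in which populations "A"
+// (partition 0) and "B" (partition 1) merge into ancestor "AB" (partition 3,
+// say; the actual number comes from DataPack::GetPartitionNumber()), the
+// ctor builds two Epochs:
+//   epoch 0: pops_here = {0, 1, 2}, no children, ancestor = FLAGLONG
+//   epoch 1: pops_here = {2, 3},    children = {0, 1}, ancestor = 3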
+
+//------------------------------------------------------------------------------------
+
+long DivForce::ReportDimensionality() const
+{
+    if(m_parameters.size() == 1)
+    {
+        return 1L;
+    }
+    return 2L;
+}
+
+//------------------------------------------------------------------------------------
+
+Summary* DivForce::CreateSummary(IntervalData& interval, bool shortness) const
+{
+    return new EpochSummary(interval, shortness); // Apparently we do have a Summary!
+    // It must be used to mark epoch boundary lines in the tree.
+} // CreateSummary
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> DivForce::MakeEvents(const ForceSummary& fs) const
+{
+    vector<Event*> myevents;
+    myevents.push_back(new EpochEvent(fs));
+    return myevents;
+} // MakeEvents
+
+//------------------------------------------------------------------------------------
+
+StringVec1d DivForce::ToXML(unsigned long nspaces) const
+{
+    StringVec1d xmllines(Force::ToXML(nspaces));
+    StringVec1d epochblock;
+
+    StringVec1d popsList = registry.GetDataPack().GetAllPartitionNames(force_DIVMIG);
+    string poptree(MakeTag(xmlstr::XML_TAG_POPTREE));
+
+    nspaces += INDENT_DEPTH;
+    // population-tree
+    string line = MakeIndent(poptree, nspaces);
+    epochblock.push_back(line);
+    unsigned long epoch;
+    // we do not write XML for the first epoch
+    for (epoch = 1; epoch < m_epochs.size(); ++epoch)
+    {
+        // epoch-boundary
+        string boundary(MakeTag(xmlstr::XML_TAG_EPOCH_BOUNDARY));
+        nspaces += INDENT_DEPTH;
+        line = MakeIndent(boundary, nspaces);
+        epochblock.push_back(line);
+
+        // new-populations
+        string newpop(MakeTag(xmlstr::XML_TAG_NEWPOP));
+        nspaces += INDENT_DEPTH;
+        line = MakeIndent(newpop,nspaces);
+
+        unsigned long pop;
+        for (pop = 0; pop < m_epochs[epoch].Departing().size(); ++pop)
+        {
+            long epochIndex = m_epochs[epoch].Departing()[pop];
+            string popName = popsList[epochIndex];
+            line += " " + popName;
+        }
+
+        line += " " + MakeCloseTag(newpop);
+        epochblock.push_back(line);
+
+        // ancestor
+        string ancestor(MakeTag(xmlstr::XML_TAG_ANCESTOR));
+        long ancestorIndex = m_epochs[epoch].Arriving();
+        line = MakeIndent(ancestor,nspaces) + " " + popsList[ancestorIndex] + " " + MakeCloseTag(ancestor);
+        epochblock.push_back(line);
+        nspaces -= INDENT_DEPTH;
+
+        // end epoch-boundary
+        line = MakeIndent(MakeCloseTag(boundary),nspaces);
+        epochblock.push_back(line);
+    }
+
+    //  end population-tree
+    nspaces -= INDENT_DEPTH;
+    line = MakeIndent(MakeCloseTag(poptree),nspaces);
+    epochblock.push_back(line);
+
+    // add to existing array just before end
+    // very inefficient but I doubt this matters
+    unsigned long i;
+    for (i = 0; i < epochblock.size(); ++i)
+    {
+        StringVec1d::iterator it = xmllines.end();
+        --it;
+        xmllines.insert(it,epochblock[i]);
+    }
+
+    return xmllines;
+} // DivForce::ToXML
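+
+// Sketch of the population-tree block written above; tag names are taken from
+// the comments here, while the exact spellings come from the xmlstr:: constants:
+//   <population-tree>
+//     <epoch-boundary>
+//       <new-populations> A B </new-populations>
+//       <ancestor> AB </ancestor>
+//     </epoch-boundary>
+//   </population-tree>
+// One epoch-boundary block is written for every epoch after the first.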
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+StringVec1d Tabulate(double params, long width=7)
+{
+    StringVec1d str;
+    str.push_back(Pretty(params,width));
+    return(str);
+} // Tabulate(double)
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Tabulate(const DoubleVec1d& params, long width=7)
+{
+    StringVec1d str;
+    long i;
+    for (i = 0; i < (long)params.size(); ++i) str.push_back(Pretty(params[i],width));
+    return(str);
+} // Tabulate(DoubleVec1d)
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Tabulate(const DoubleVec2d& params, long width=7)
+{
+    StringVec1d str;
+    long i;
+    long j;
+    for (i = 0; i < static_cast<long>(params.size()); ++i)
+    {
+        string tempstr = "";
+        for (j = 0; j < static_cast<long>(params[0].size()); ++j)
+        {
+            if (j != 0) tempstr += " ";
+            if (j != i) tempstr += Pretty(params[i][j],width);
+            else
+            {
+                long k;
+                for (k = 0; k < width; ++k) tempstr += "-";
+            }
+            tempstr += " ";
+        }
+        str.push_back(tempstr);
+    }
+    return(str);
+} // Tabulate(DoubleVec2d)
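+
+// Illustrative output for a 2x2 matrix with the default width of 7: diagonal
+// entries are replaced by a run of seven '-' characters, e.g.
+//   -------  0.12345
+//   0.54321  -------
+// (exact number formatting depends on Pretty()).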
+
+//____________________________________________________________________________________
diff --git a/src/force/force.h b/src/force/force.h
new file mode 100644
index 0000000..a0892e6
--- /dev/null
+++ b/src/force/force.h
@@ -0,0 +1,778 @@
+// $Id: force.h,v 1.9 2002/07/30 01:44:41 mkkuhner Exp
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef FORCE_H
+#define FORCE_H
+
+#include <deque>
+#include <string>
+#include <utility>                      // std::pair
+#include <vector>
+
+#include "constants.h"
+#include "parameter.h"                  // for Parameter member object
+#include "plforces.h"                   // for PLForces classes
+#include "stringx.h"
+#include "types.h"
+#include "vectorx.h"
+#include "xml_strings.h"                // for string constants in GetXMLName()
+#include "epoch.h"
+#include "paramstat.h"
+
+// in .cpp
+// #include "forcesummary.h"   for forcesummary getter functions
+// #include "event.h"          to set up Event vectors in arrangers
+
+//------------------------------------------------------------------------------------
+
+class Event;
+class Registry;
+class ChainPack;
+class ChainOut;
+class DataPack;
+class ForceSummary;
+class ForceParameters;
+class Summary;
+class IntervalData;
+class Locus;
+class Region;
+class UIVarsPrior;
+
+/******************************************************************
+ The Force class defines a polymorphic object which embodies
+ information about one of the evolutionary forces active in the
+ program.  It has four main functions:
+
+ (1)  To indicate, by its presence in the ForceSummary, that the
+ force is active.
+
+ (2)  To manage the parameters associated with its force (for
+ example, the theta values associated with Coalescence).  It maintains
+ a vector of Parameter objects for this purpose.
+
+ (3)  To provide information to the rest of the program about
+ its force (quickestimate, name, dimensionality, table headers, etc.)
+
+ (4)  To act as a factory for the Event objects (representing concepts
+ such as "a coalescence of two active lineages") used by the Arrangers,
+ and for the Summary objects (representing presence of a force) used
+ by the TreeSummary.
+
+ A fifth main function, handling force-specific math, may be
+ added when we integrate the posterior-likelihood module.
+
+ The only place Force objects normally exist is inside the
+ (singular) ForceSummary object, which owns them.
+
+ Forces are only created after both data reading and user input
+ (menu) are completed.
+
+ Forces are, by design, neither copyable nor assignable.
+
+ Written by Mary Kuhner and Jon Yamato
+ * 04/04/02 Peter Beerli added PLforces objects
+ * 11/14/02 Peter Beerli made Profile-modifier force specific
+ * 6/9/10 Mary Kuhner and Jon Yamato added DivForce and DivMigForce
+ ******************************************************************/
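+
+// Illustrative sketch of the factory role (4) described above, using only the
+// interface declared below (caller names here are hypothetical):
+//
+//     // an Arranger obtains, and then owns, the force's Events
+//     std::vector<Event*> events = someforce->MakeEvents(forcesummary);
+//
+//     // a TreeSummary obtains a Summary recording the force's presence
+//     Summary* summary = someforce->CreateSummary(intervaldata, shortness);
+//
+// Both functions are pure virtual in Force and are implemented by each
+// concrete subclass (CoalForce, MigForce, DiseaseForce, etc.).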
+
+class Force
+{
+  private:
+    // these are undefined because we should not be copying or
+    // assigning Forces. The registry creates one force of each
+    // used type, and puts them in the ForceSummary object.
+    Force();                                // undefined
+    Force(const Force& src);                // undefined
+    Force&     operator=(const Force& src); // undefined
+
+  protected:
+
+    force_type  m_tag;                  // identifying tag of force
+    string      m_name;                 // concise name of the force
+    string      m_fullname;             // full name of the force
+    string      m_paramname;            // concise name of the parameter type
+    string      m_fullparamname;        // full name of the parameter type
+
+    std::vector<Parameter> m_parameters; // all parameters related to this force
+
+    long m_maxevents;                   // maximum "events" of this force allowed per tree
+    double m_defvalue;                  // default value of parameters for this force
+    double m_maxvalue;                  // maximum allowed value for each initial param estimate
+    double m_minvalue;                  // minimum allowed value for each initial param estimate
+    double m_maximizer_maxvalue;        // maximum value allowed during maximization
+    double m_maximizer_minvalue;        // minimum value allowed during maximization
+
+    double m_highval;                   // a reasonable upper guess for the force.
+    double m_lowval;                    // a reasonable lower guess for the force.
+    double m_highmult;                  // The stepsize up for high guesses.
+    double m_lowmult;                   // The stepsize down for low guesses.
+
+    StringVec1d m_axisname;             // name of each table axis
+
+    DoubleVec1d m_paramgroupmultipliers; // dimension: one entry per param group; each multiplier
+    // is the ratio of the first ("head") param over the other param.
+    // NB: assumes there are never more than 2 parameters in a multiplicative group!
+    // If we ever need more than 2, this needs to be redimensioned.
+
+    // PLForces-related object containing the functions that are called by
+    // point() and wait() in the post-likelihood calculation
+    PLForces * m_plforceptr;            // holds force object; OWNING POINTER
+
+    // When parameters are constrained to be equal to one another
+    // that information lives here:
+    std::vector<ParamGroup> m_identgroups;
+
+    // When parameters are constrained to be multiples of one another
+    // that information lives here:
+    std::vector<ParamGroup> m_multgroups;
+
+    // This variable is solely for the XML writer.  Hmmm.
+    Prior m_defaultPrior;
+
+    // Utility function used by InsertParameters
+    DoubleVec1d PopParameters(DoubleVec1d& parameters);
+
+    // used by Force::ToXML()
+    virtual std::string MakeOpeningTag(unsigned long nspaces) const;
+    std::vector<proftype> GetProfileTypes() const;
+    std::vector<ParamStatus> GetParamstatuses() const;
+
+  public:
+
+    Force(   force_type          tag,
+             string              name,
+             string              fullname,
+             string              paramname,
+             string              fullparamname,
+             std::vector<Parameter>   parameters,
+             long int            maxevents,
+             double              defvalue,
+             double              maxvalue,
+             double              minvalue,
+             double              maximizer_maxvalue,
+             double              maximizer_minvalue,
+             double              highval,
+             double              lowval,
+             double              highmult,
+             double              lowmult,
+             std::vector<ParamGroup> identgroups,
+             std::vector<ParamGroup> multgroups,
+             const DoubleVec1d&      paramgroupmults,
+             const UIVarsPrior&      prior);
+
+    virtual ~Force() { delete m_plforceptr; };
+
+    // Report writing functions -- these could be implemented at base level
+    // except for MigrationParameter
+    virtual StringVec1d    MakeStartParamReport() const;
+    virtual StringVec1d    MakeChainParamReport(const ChainOut& chout) const;
+    virtual bool           HasNoVaryingParameters() const;
+
+    // RTTI for use by ForceSummary::GetPartitionForces()
+    virtual bool           IsPartitionForce() const { return false; };
+    // RTTI for use by ForceSummary::GetSelectedSites()
+    virtual bool           IsLocalPartitionForce() const { return false; };
+    virtual bool           IsSexualForce() const    { return false; };
+    // RTTI for use by ForceSummary::UsingStick()
+    virtual bool           IsStickForce() const     { return false; };
+
+    // Getters
+    force_type     GetTag()           const { return m_tag; };
+    string         GetFullName()      const { return m_fullname; };
+    string         GetFullparamname() const { return m_fullparamname; };
+    string         GetShortparamname()const { return m_paramname; };
+    virtual string         GetClassName()     const { return "Class"; };
+    virtual string         GetXMLName()       const = 0;
+    virtual long int       ReportDimensionality() const = 0;
+    StringVec1d    GetAxisname()      const { return m_axisname; };
+    DoubleVec2d    GetMles()          const;
+    DoubleVec1d    GetPopmles()       const;
+
+    long int       GetMaxEvents()     const { return m_maxevents; };
+    double         GetDefaultValue()  const { return m_defvalue; };
+    MethodTypeVec1d GetMethods()      const;
+    long int       GetNParams()       const { return m_parameters.size(); };
+
+    StringVec1d    GetAllParamNames() const;
+    double GetHighVal() const  { return m_highval; };
+    double GetLowVal() const   { return m_lowval; };
+    double GetHighMult() const { return m_highmult; };
+    double GetLowMult() const  { return m_lowmult; };
+    double GetMaximizerMaxVal() const { return m_maximizer_maxvalue; };
+    double GetMaximizerMinVal() const { return m_maximizer_minvalue; };
+
+    std::vector<Parameter>& GetParameters()  { return m_parameters; };
+    const std::vector<Parameter>& GetParameters() const { return m_parameters; };
+
+    // used by ForceParameters::SetAllTo(), polymorphic in PartitionForce
+    virtual DoubleVec1d CreateVectorOfParametersWithValue(double val) const;
+
+    // Returns a pointer to the PLForces object that holds the force-specific
+    // wait() and point() functions.
+    PLForces * GetPLForceFunction() { return m_plforceptr; };
+
+    // SummarizeProfTypes--used by ReportPage::SetupColhdr()
+    proftype       SummarizeProfTypes() const;
+
+    // Handle grouping of parameters constrained to be related to one another
+    ULongVec2d     GetIdenticalGroupedParams() const;
+    ULongVec2d     GetMultiplicativeGroupedParams() const;
+    DoubleVec1d    GetParamGroupMultipliers() const;
+
+    virtual StringVec1d    ToXML(unsigned long int nspaces) const;
+
+    // Setters
+    // No setters for intrinsic class qualities like name, as these
+    // are built into the subclasses.
+    // SetParameters is a helper function for
+    // ForceSummary::SetAllParameters() used in ParamVector dtor.
+    void        SetParameters(const std::vector<Parameter>& src)
+    { m_parameters = src; };
+    // true if value is set, false otherwise
+    virtual bool        SetPartIndex(long int) { return false; };
+
+    // used by Branch::IsAMember()
+    virtual bool        IsAMember(const LongVec1d& m1, const LongVec1d& m2) const;
+
+    // used by ForceSummary::SummarizeData()
+    std::deque<bool> UseCalculatedValues() const;
+    virtual double      Truncate(double target) const;
+
+    // generally interface into a ForceParameters object
+    virtual DoubleVec1d RetrieveGlobalParameters(const ForceParameters& fp) const;
+    virtual DoubleVec1d RetrieveRegionalParameters(const ForceParameters& fp) const;
+    virtual DoubleVec1d RetrieveGlobalLogParameters(const ForceParameters& fp) const;
+    virtual DoubleVec1d RetrieveRegionalLogParameters(const ForceParameters& fp) const;
+    // FindOrdinalPosition used by BayesArranger::Rearrange
+    unsigned long int FindOrdinalPosition(long int cannonicalorder) const;
+
+    // The following functions manage the Events associated with a particular
+    // force.  Events are owned by the recipient (an Arranger).
+    virtual std::vector<Event*> MakeEvents(const ForceSummary& fs) const = 0;
+    virtual void           ModifyEvents(const ForceSummary& fs,
+                                        std::vector<Event*>&) const {};
+
+    // The following function is used in creating the partition holder in each branch.
+    virtual long int      GetNPartitions() const { return 0; };
+
+    // The following is a factory function to make Summary objects for the TreeSummary.
+    virtual Summary* CreateSummary(IntervalData& interval, bool shortness) const = 0;
+}; // Force
+
+//------------------------------------------------------------------------------------
+
+class RecForce : public Force
+{
+  private:
+    RecForce();                               // unimplemented to block call
+    RecForce(const RecForce& src);            // unimplemented to block call
+    RecForce& operator=(const RecForce& src); // unimplemented to block call
+
+  public:
+    RecForce ( std::vector<Parameter>  parameters,
+               long int                maxEvents,
+               std::vector<ParamGroup> identgroups,
+               std::vector<ParamGroup> multgroups,
+               const DoubleVec1d&      paramgroupmults,
+               const UIVarsPrior&      prior);
+    virtual                ~RecForce() {};
+
+    virtual bool           IsSexualForce() const { return true; };
+    virtual string         GetXMLName() const { return xmlstr::XML_TAG_RECOMBINATION; };
+    virtual std::vector<Event*> MakeEvents(const ForceSummary& fs) const;
+    virtual Summary*       CreateSummary(IntervalData& interval, bool shortness) const;
+    virtual long int       ReportDimensionality() const { return 1L; };
+
+}; // RecForce
+
+//------------------------------------------------------------------------------------
+
+class CoalForce : public Force
+{
+  private:
+    CoalForce();                                // unimplemented to block call
+    CoalForce(const CoalForce& src);            // unimplemented to block call
+    CoalForce& operator=(const CoalForce& src); // unimplemented to block call
+
+  public:
+    CoalForce( std::vector<Parameter>   parameters,
+               long int            maxEvents,
+               bool                withGrowth,
+               bool                withLogisticSelection,
+               std::vector<ParamGroup> identgroups,
+               std::vector<ParamGroup> multgroups,
+               const DoubleVec1d&      paramgroupmults,
+               const UIVarsPrior&      prior);
+
+    virtual                ~CoalForce() {};
+
+    virtual string         GetXMLName() const { return xmlstr::XML_TAG_COALESCENCE; };
+    virtual std::vector<Event*> MakeEvents(const ForceSummary & fs) const;
+    virtual Summary*       CreateSummary(IntervalData& interval, bool shortness) const;
+    virtual long int       ReportDimensionality() const;
+
+}; // CoalForce
+
+//------------------------------------------------------------------------------------
+
+class GrowthForce : public Force
+{
+  private:
+    GrowthForce();                                  // unimplemented to block call
+    GrowthForce(const GrowthForce& src);            // unimplemented to block call
+    GrowthForce& operator=(const GrowthForce& src); // unimplemented to block call
+
+  protected:
+    virtual std::string MakeOpeningTag(unsigned long int nspaces) const;
+  public:
+    GrowthForce ( std::vector<Parameter>  parameters,
+                  long int                maxEvents,
+                  std::vector<ParamGroup> identgroups,
+                  std::vector<ParamGroup> multgroups,
+                  const DoubleVec1d&      paramgroupmults,
+                  const UIVarsPrior&      prior);
+    virtual              ~GrowthForce() {};
+
+    virtual string         GetXMLName() const { return xmlstr::XML_TAG_GROWTH; };
+    virtual std::vector<Event*> MakeEvents(const ForceSummary& fs)   const;
+    virtual void           ModifyEvents(const ForceSummary& fs, std::vector<Event*>& events) const;
+
+    virtual Summary*       CreateSummary(IntervalData& interval, bool shortness) const;
+
+    virtual DoubleVec1d    RetrieveGlobalLogParameters(const ForceParameters& fp) const;
+    virtual DoubleVec1d    RetrieveRegionalLogParameters(const ForceParameters& fp) const;
+
+    virtual long int       ReportDimensionality() const;
+
+}; // GrowthForce
+
+//------------------------------------------------------------------------------------
+
+class LogisticSelectionForce : public Force
+{
+  private:
+    LogisticSelectionForce();                                             // unimplemented to block call
+    LogisticSelectionForce(const LogisticSelectionForce& src);            // unimplemented to block call
+    LogisticSelectionForce& operator=(const LogisticSelectionForce& src); // unimplemented to block call
+
+  protected:
+    virtual std::string MakeOpeningTag(unsigned long int nspaces) const;
+  public:
+    LogisticSelectionForce(std::vector<Parameter>  parameters,
+                           long int                maxEvents,
+                           long int                paramvecindex,
+                           std::vector<ParamGroup> identgroups,
+                           std::vector<ParamGroup> multgroups,
+                           const DoubleVec1d&      paramgroupmults,
+                           const UIVarsPrior&      prior);
+    virtual              ~LogisticSelectionForce() {};
+
+    virtual string         GetXMLName() const { return xmlstr::XML_TAG_LOGISTICSELECTION; };
+    virtual std::vector<Event*> MakeEvents(const ForceSummary& fs)   const;
+    virtual void           ModifyEvents(const ForceSummary& fs, std::vector<Event *> & events) const;
+
+    virtual Summary *      CreateSummary(IntervalData& interval, bool shortness) const;
+
+    virtual DoubleVec1d    RetrieveGlobalLogParameters(const ForceParameters & fp) const;
+    virtual DoubleVec1d    RetrieveRegionalLogParameters(const ForceParameters & fp) const;
+
+    virtual long int       ReportDimensionality() const;
+
+}; // LogisticSelectionForce
+
+//------------------------------------------------------------------------------------
+
+class RegionGammaForce : public Force
+{
+  private:
+    RegionGammaForce();                            // unimplemented to block call
+    RegionGammaForce(const RegionGammaForce& src); // unimplemented to block call
+
+  public:
+    RegionGammaForce(std::vector<Parameter>  parameters,
+                     std::vector<ParamGroup> identgroups,
+                     std::vector<ParamGroup> multgroups,
+                     const DoubleVec1d&      paramgroupmults,
+                     const UIVarsPrior&      prior);
+    virtual ~RegionGammaForce() {};
+
+    virtual string          GetXMLName() const { return xmlstr::XML_TAG_REGION_GAMMA; };
+    virtual std::vector<Event *> MakeEvents(const ForceSummary & fs) const;
+    virtual Summary *       CreateSummary(IntervalData& interval, bool shortness) const;
+    virtual long int        ReportDimensionality() const;
+
+}; // RegionGammaForce
+
+//------------------------------------------------------------------------------------
+
+class PartitionForce : public Force
+{
+    PartitionForce();                                     // unimplemented to block call
+    PartitionForce(const PartitionForce& src);            // unimplemented to block call
+    PartitionForce& operator=(const PartitionForce& src); // unimplemented to block call
+  protected:
+    long int     m_npartitions;
+    long int     m_partindex;   // what is my index in a branch's partitionvector?
+
+    virtual std::string  GetXMLStatusTag() const = 0;
+
+  public:
+    PartitionForce( force_type          tag,
+                    string              name,
+                    string              fullname,
+                    string              paramname,
+                    string              fullparamname,
+                    std::vector<Parameter>   parameters,
+                    long int            maxevents,
+                    double              defvalue,
+                    double              maxvalue,
+                    double              minvalue,
+                    double              maximizer_maxvalue,
+                    double              maximizer_minvalue,
+                    double              highval,
+                    double              lowval,
+                    double              highmult,
+                    double              lowmult,
+                    long                npartitions,
+                    std::vector<ParamGroup> identgroups,
+                    std::vector<ParamGroup> multgroups,
+                    const DoubleVec1d&      paramgroupmults,
+                    const UIVarsPrior&      prior);
+    virtual              ~PartitionForce() {};
+
+    // RTTI for use by ForceSummary::GetPartitionForces()
+    virtual bool         IsPartitionForce() const { return true; };
+    virtual long int     ChoosePartition(long int origpart, long int chosenpart, bool islow, long int midpoint) const;
+
+    virtual void         ModifyEvents(const ForceSummary& fs, std::vector<Event*>& events) const;
+
+    virtual StringVec1d  MakeStartParamReport() const;
+    virtual StringVec1d  MakeChainParamReport(const ChainOut& chout) const;
+
+    // used by ForceSummary::SummarizeData()
+    virtual bool         SetPartIndex(long int val);
+
+    virtual long int     GetNPartitions() const { return m_npartitions; };
+    virtual long int     GetPartIndex() const { return m_partindex; };
+
+    virtual bool         IsAMember(const LongVec1d& m1, const LongVec1d& m2) const;
+
+    virtual DoubleVec1d  CreateVectorOfParametersWithValue(double val) const;
+
+    virtual DoubleVec1d  SumXPartsToParts(const DoubleVec1d& xparts) const;
+    virtual DoubleVec1d  SumXPartsToParts(const DoubleVec1d& xparts, const DoubleVec1d& growths, double etime) const;
+
+    // used by SampleXML::ToXML()
+    virtual std::string  MakeStatusXML(const std::string& mystatus) const;
+
+}; // PartitionForce
+
+//------------------------------------------------------------------------------------
+
+class LocalPartitionForce : public PartitionForce
+{
+  private:
+    LocalPartitionForce();                                          // undefined
+    LocalPartitionForce(const LocalPartitionForce& src);            // undefined
+    LocalPartitionForce& operator=(const LocalPartitionForce& src); // undefined
+
+  protected:
+    long int m_localsite;               // the site that the force is dependent on
+
+  public:
+    LocalPartitionForce(    force_type          tag,
+                            string              name,
+                            string              fullname,
+                            string              paramname,
+                            string              fullparamname,
+                            std::vector<Parameter>   parameters,
+                            long int            maxevents,
+                            double              defvalue,
+                            double              maxvalue,
+                            double              minvalue,
+                            double              maximizer_maxvalue,
+                            double              maximizer_minvalue,
+                            double              highval,
+                            double              lowval,
+                            double              highmult,
+                            double              lowmult,
+                            long                npartitions,
+                            long                localsite,
+                            std::vector<ParamGroup> identgroups,
+                            std::vector<ParamGroup> multgroups,
+                            const DoubleVec1d&      paramgroupmults,
+                            const UIVarsPrior&      prior);
+    virtual              ~LocalPartitionForce() {};
+    virtual bool         IsLocalPartitionForce() const { return true; };
+    virtual long int     ChoosePartition(long int origpart, long int chosenpart, bool islow, long int midpoint) const;
+
+    // getter used by ToXML
+    long int     GetLocalSite() const { return m_localsite; };
+    virtual StringVec1d  ToXML(unsigned long int nspaces) const;
+
+}; // LocalPartitionForce
+
+//------------------------------------------------------------------------------------
+
+class MigForce : public PartitionForce
+{
+  private:
+    MigForce();                                // undefined
+    MigForce(const MigForce& src);             // undefined
+    MigForce& operator=(const MigForce& src);  // undefined
+
+  protected:
+    virtual std::string    GetXMLStatusTag() const;
+
+  public:
+    MigForce ( std::vector<Parameter>   parameters,
+               long                maxEvents,
+               long                numPops,
+               std::vector<ParamGroup> identgroups,
+               std::vector<ParamGroup> multgroups,
+               const DoubleVec1d&      paramgroupmults,
+               const UIVarsPrior&      prior);
+    virtual                ~MigForce() {};
+    virtual string         GetClassName() const { return "Population"; };
+
+    virtual string         GetXMLName() const { return xmlstr::XML_TAG_MIGRATION; };
+    virtual std::vector<Event*> MakeEvents(const ForceSummary& fs)   const;
+    virtual Summary*       CreateSummary(IntervalData& interval, bool shortness) const;
+    virtual std::string    MakeStatusXML(const std::string& mystatus) const { return string(""); };
+    virtual long int       ReportDimensionality() const { return 3L; };
+
+}; // MigForce
+
+//------------------------------------------------------------------------------------
+
+class DivMigForce : public PartitionForce
+// Migration in the presence of divergence; should never exist unless
+// DivForce also exists
+{
+  private:
+    DivMigForce();                                  // undefined
+    DivMigForce(const DivMigForce& src);            // undefined
+    DivMigForce& operator=(const DivMigForce& src); // undefined
+
+  protected:
+    virtual std::string    GetXMLStatusTag() const;
+
+  public:
+    DivMigForce ( std::vector<Parameter>   parameters,
+                  long                maxEvents,
+                  long                numPops,
+                  std::vector<ParamGroup> identgroups,
+                  std::vector<ParamGroup> multgroups,
+                  const DoubleVec1d&      paramgroupmults,
+                  const UIVarsPrior&      prior);
+    virtual                ~DivMigForce() {};
+    virtual string         GetClassName() const { return "Population"; };
+
+    virtual string         GetXMLName() const { return xmlstr::XML_TAG_DIVMIG; };
+    virtual std::vector<Event*> MakeEvents(const ForceSummary& fs)   const;
+    virtual Summary*       CreateSummary(IntervalData& interval, bool shortness) const;
+    virtual std::string    MakeStatusXML(const std::string& mystatus) const { return string(""); };
+    virtual long int       ReportDimensionality() const { return 3L; };
+
+}; // DivMigForce
+
+//------------------------------------------------------------------------------------
+
+class DiseaseForce : public LocalPartitionForce
+{
+  private:
+    // there exists no quickcalculator equivalent for disease
+    DiseaseForce();                                   // undefined
+    DiseaseForce(const DiseaseForce& src);            // undefined
+    DiseaseForce& operator=(const DiseaseForce& src); // undefined
+
+  protected:
+    virtual std::string  GetXMLStatusTag() const;
+
+  public:
+    DiseaseForce(const DataPack& dpack, const UIVars& uivars);
+    DiseaseForce(std::vector<Parameter>   parameters,
+                 long int                 maxEvents,
+                 bool                     hasLogisticSelection,
+                 long int                 numDiseaseStates,
+                 long int                 localsite,
+                 std::vector<ParamGroup>  identgroups,
+                 std::vector<ParamGroup>  multgroups,
+                 const DoubleVec1d&       paramgroupmults,
+                 const UIVarsPrior&       prior);
+    virtual ~DiseaseForce() {};
+
+    virtual string          GetXMLName() const { return xmlstr::XML_TAG_DISEASE; };
+    virtual std::vector<Event *> MakeEvents(const ForceSummary & fs) const;
+
+    virtual Summary *       CreateSummary(IntervalData& interval, bool shortness) const;
+    virtual long int        ReportDimensionality() const { return 3L; };
+
+}; // DiseaseForce
+
+//------------------------------------------------------------------------------------
+
+class StickForce : public Force
+{
+  private:
+    // the default ctor, copy ctor, and operator= are all
+    // deliberately undefined to block call
+    StickForce();
+    StickForce(const StickForce& src);
+    StickForce& operator=(const StickForce& src);
+
+  protected:
+    double m_percentchange;
+    StickForce (force_type         tag,
+                string             name,
+                string             fullname,
+                string             paramname,
+                string             fullparamname,
+                std::vector<Parameter>  parameters,
+                long int           maxevents,
+                double             defvalue,
+                double             maxvalue,
+                double             minvalue,
+                double             maximizer_maxvalue,
+                double             maximizer_minvalue,
+                double             highval,
+                double             lowval,
+                double             highmult,
+                double             lowmult,
+                std::vector<ParamGroup> identgroups,
+                std::vector<ParamGroup> multgroups,
+                const DoubleVec1d&      paramgroupmults,
+                const UIVarsPrior&      prior);
+
+  public:
+    virtual        ~StickForce() {};
+    virtual bool   IsStickForce() const     { return true; };
+};
+
+//------------------------------------------------------------------------------------
+
+class StickExpGrowForce : public StickForce
+{
+  private:
+    // the default ctor, copy ctor, and operator= are all
+    // deliberately undefined to block call
+    StickExpGrowForce();
+    StickExpGrowForce(const StickExpGrowForce& src);
+    StickExpGrowForce& operator=(const StickExpGrowForce& src);
+
+    const double m_negln, m_posln;
+
+  protected:
+
+    virtual std::string MakeOpeningTag(unsigned long int nspaces) const;
+
+  public:
+    StickExpGrowForce  ( std::vector<Parameter> parameters,
+                         long int          maxEvents,
+                         std::vector<ParamGroup> identgroups,
+                         std::vector<ParamGroup> multgroups,
+                         const DoubleVec1d&      paramgroupmults,
+                         const UIVarsPrior&      prior);
+    virtual             ~StickExpGrowForce() {};
+
+    virtual string         GetXMLName() const;
+    virtual std::vector<Event*> MakeEvents(const ForceSummary& fs) const;
+    virtual void           ModifyEvents(const ForceSummary& fs, std::vector<Event*>& events) const;
+    virtual Summary*       CreateSummary(IntervalData& interval, bool shortness) const;
+
+    virtual DoubleVec1d    RetrieveGlobalLogParameters
+    (const ForceParameters& fp) const;
+    virtual DoubleVec1d    RetrieveRegionalLogParameters
+    (const ForceParameters& fp) const;
+
+    virtual long int       ReportDimensionality() const;
+};
+
+//------------------------------------------------------------------------------------
+
+class StickSelectForce : public StickForce
+{
+  private:
+    // The default ctor, copy ctor, and operator= are all deliberately undefined to block call.
+    StickSelectForce();
+    StickSelectForce(const StickSelectForce& src);
+    StickSelectForce& operator=(const StickSelectForce& src);
+
+  protected:
+
+    virtual std::string MakeOpeningTag(unsigned long int nspaces) const;
+
+  public:
+    StickSelectForce  ( std::vector<Parameter> parameters,
+                        long int          maxEvents,
+                        std::vector<ParamGroup> identgroups,
+                        std::vector<ParamGroup> multgroups,
+                        const DoubleVec1d&      paramgroupmults,
+                        const UIVarsPrior&      prior);
+    virtual             ~StickSelectForce() {};
+
+    virtual string         GetXMLName() const;
+    virtual long int       ReportDimensionality() const;
+    virtual Summary *       CreateSummary(IntervalData& interval, bool shortness) const;
+
+    virtual std::vector<Event*> MakeEvents(const ForceSummary& fs) const;
+
+    virtual DoubleVec1d    RetrieveGlobalLogParameters
+    (const ForceParameters& fp) const;
+    virtual DoubleVec1d    RetrieveRegionalLogParameters
+    (const ForceParameters& fp) const;
+};
+
+//------------------------------------------------------------------------------------
+
+class DivForce : public Force
+{
+  private:
+    // the default ctor, copy ctor, and operator= are all
+    // deliberately undefined to block call
+    DivForce();
+    DivForce(const DivForce& src);
+    DivForce& operator=(const DivForce& src);
+
+    std::vector<Epoch> m_epochs;
+
+  public:
+    DivForce           ( std::vector<Parameter> parameters,
+                         long int          maxEvents,
+                         std::vector<ParamGroup> identgroups,
+                         std::vector<ParamGroup> multgroups,
+                         const DoubleVec1d&      paramgroupmults,
+                         const UIVarsPrior&      prior,
+                         const std::vector<std::vector<std::string> >& newpops,
+                         const std::vector<std::string>& ancestors,
+                         const DataPack& dpack);
+    virtual             ~DivForce() {};
+
+    virtual string         GetXMLName() const { return xmlstr::XML_TAG_DIVERGENCE; };
+    virtual long int       ReportDimensionality() const;
+    virtual Summary *       CreateSummary(IntervalData& interval, bool shortness) const;
+
+    virtual std::vector<Event*> MakeEvents(const ForceSummary& fs) const;
+    const std::vector<Epoch>* GetEpochs() const { return &m_epochs; };
+    virtual StringVec1d    ToXML(unsigned long int nspaces) const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// These free functions pretty-print parameter collections of various dimensionalities (scalar, vector, and matrix)
+
+StringVec1d Tabulate(double params, long int width);
+StringVec1d Tabulate(const DoubleVec1d & params, long int width);
+StringVec1d Tabulate(const DoubleVec2d & params, long int width);
+
+#endif // FORCE_H
+
+//____________________________________________________________________________________
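The Tabulate() overloads declared just above take a scalar, a 1-D vector, or a 2-D matrix of parameter values plus a column width, and return the formatted lines as a StringVec1d. As a rough illustration only (not the LAMARC implementation), a minimal standalone sketch of the 2-D case might look like the following; the typedefs and the setw-based formatting are assumptions made purely for the sketch.

    // Standalone sketch only -- not the LAMARC Tabulate() implementation.
    // The typedefs stand in for the ones defined in vectorx.h.
    #include <cstddef>
    #include <iomanip>
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    typedef std::vector<double>      DoubleVec1d;
    typedef std::vector<DoubleVec1d> DoubleVec2d;
    typedef std::vector<std::string> StringVec1d;

    // Format each row of a 2-D matrix into fixed-width columns, one string per row.
    StringVec1d TabulateSketch(const DoubleVec2d& params, long width)
    {
        StringVec1d lines;
        for (std::size_t row = 0; row < params.size(); ++row)
        {
            std::ostringstream line;
            for (std::size_t col = 0; col < params[row].size(); ++col)
                line << std::setw(static_cast<int>(width)) << params[row][col];
            lines.push_back(line.str());
        }
        return lines;
    }

    int main()
    {
        DoubleVec2d m(2, DoubleVec1d(2, 0.0));
        m[0][1] = 1.5;                      // off-diagonal entries
        m[1][0] = 2.25;
        StringVec1d out = TabulateSketch(m, 10);
        for (std::size_t i = 0; i < out.size(); ++i)
            std::cout << out[i] << "\n";
        return 0;
    }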
diff --git a/src/force/forceparam.cpp b/src/force/forceparam.cpp
new file mode 100644
index 0000000..e1ee703
--- /dev/null
+++ b/src/force/forceparam.cpp
@@ -0,0 +1,749 @@
+// $Id: forceparam.cpp,v 1.42 2013/11/07 22:46:06 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>
+
+#include "forceparam.h"
+#include "forcesummary.h"
+#include "force.h"
+#include "mathx.h"
+#include "region.h"
+#include "sumfilehandler.h"
+#include "vectorx.h"                    // for LongSquareRootOfLong
+#include "xmlsum_strings.h"             // for xml sumfile handling
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+ForceParameters::ForceParameters(long region)
+    : m_region(region),
+      m_space(known_region),
+      m_forceTags(registry.GetForceSummary().GetForceTags()),
+      m_forceSizes(registry.GetForceSummary().GetForceSizes()),
+      m_epochptr(registry.GetForceSummary().GetEpochs())
+{
+    assert (region >= 0);
+}
+
+//------------------------------------------------------------------------------------
+
+ForceParameters::ForceParameters(param_space space)
+    : m_region(FLAGLONG),
+      m_space(space),
+      m_forceTags(registry.GetForceSummary().GetForceTags()),
+      m_forceSizes(registry.GetForceSummary().GetForceSizes())
+{
+    assert (m_space != known_region); //can be either global or unknown
+}
+
+//------------------------------------------------------------------------------------
+//Need this constructor for the registry, when we don't have a forcesummary yet.
+
+ForceParameters::ForceParameters(param_space space, ForceTypeVec1d types, LongVec1d sizes)
+    : m_region(FLAGLONG),
+      m_space(space),
+      m_forceTags(types),
+      m_forceSizes(sizes)
+{
+    assert (m_space != known_region);   // Can be either global or unknown.
+}
+
+//------------------------------------------------------------------------------------
+
+ForceParameters::ForceParameters(const ForceParameters &fp, long region)
+    : m_region(region),
+      m_space(known_region),
+      m_forceTags(fp.m_forceTags),
+      m_forceSizes(fp.m_forceSizes)
+{
+    assert (region >= 0);
+    switch (fp.GetParamSpace())
+    {
+        case unknown_region:
+            CopyRegionalMembers(fp);
+            FillGlobalParamsFromRegionalParams();
+            break;
+        case known_region:
+            if (fp.m_region != region)
+            {
+                assert(false); //Why are we assigning fps for different regions to each other?
+            }
+            //else fall through to:
+        case global_region:
+            CopyGlobalMembers(fp);
+            FillRegionalParamsFromGlobalParams();
+            break;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::CopyGlobalMembers(const ForceParameters& fp)
+{
+    m_globalThetas = fp.m_globalThetas;
+    m_migrates = fp.m_migrates;
+    m_recrates = fp.m_recrates;
+    m_growths  = fp.m_growths;
+    m_diseases = fp.m_diseases;
+    m_logisticSelectionCoefficient = fp.m_logisticSelectionCoefficient;
+    m_epochtimes = fp.m_epochtimes;
+} // ForceParameters::CopyGlobalMembers
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::CopyRegionalMembers(const ForceParameters& fp)
+{
+    m_regionalThetas = fp.m_regionalThetas;
+    m_migrates = fp.m_migrates;
+    m_recrates = fp.m_recrates;
+    m_growths  = fp.m_growths;
+    m_diseases = fp.m_diseases;
+    m_logisticSelectionCoefficient = fp.m_logisticSelectionCoefficient;
+    m_epochtimes = fp.m_epochtimes;
+} // ForceParameters::CopyRegionalMembers
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetGlobalParametersByTag(force_type tag, const DoubleVec1d& v)
+{
+    assert (m_space != unknown_region);
+    string msg = "ForceParameters::SetGlobalParametersByTag() received tag ";
+    switch (tag)
+    {
+        case force_COAL:
+            SetGlobalThetas(v);
+            return;
+        case force_MIG:
+        case force_DIVMIG:
+            SetMigRates(v);
+            return;
+        case force_REC:
+            SetRecRates(v);
+            return;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            SetGrowthRates(v);
+            return;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            SetLogisticSelectionCoefficient(v);
+            return;
+        case force_DISEASE:
+            SetDiseaseRates(v);
+            return;
+        case force_DIVERGENCE:
+            SetEpochTimes(v);
+            return;
+        case force_REGION_GAMMA:
+            msg += "\"" + ToString(tag) + ",\" which should never happen. (This is a ";
+            msg += "pseudo-force that\'s only treated as a force within certain ";
+            msg += "contexts of the program.)";
+            throw implementation_error(msg);
+        case force_NONE:
+            msg += "\"" + ToString(tag) + ",\" which should never happen.";
+            throw implementation_error(msg);
+    }
+
+    msg += "\"" + ToString(tag) + ",\" but it does not know how to use it.";
+    throw implementation_error(msg);
+} // SetGlobalParametersByTag
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetRegionalParametersByTag(force_type tag, const DoubleVec1d& v)
+{
+    assert (m_space != global_region);
+    string msg = "ForceParameters::SetRegionalParametersByTag() received tag ";
+    switch (tag)
+    {
+        case force_COAL:
+            SetRegionalThetas(v);
+            return;
+        case force_MIG:
+        case force_DIVMIG:
+            SetMigRates(v);
+            return;
+        case force_REC:
+            SetRecRates(v);
+            return;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            SetGrowthRates(v);
+            return;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            SetLogisticSelectionCoefficient(v);
+            return;
+        case force_DISEASE:
+            SetDiseaseRates(v);
+            return;
+        case force_DIVERGENCE:
+            SetEpochTimes(v);
+            return;
+        case force_REGION_GAMMA:
+            msg += "\"" + ToString(tag) + ",\" which should never happen. (This is a ";
+            msg += "pseudo-force that\'s only treated as a force within certain ";
+            msg += "contexts of the program.)";
+            throw implementation_error(msg);
+        case force_NONE:
+            msg += "\"" + ToString(tag) + ",\" which should never happen.";
+            throw implementation_error(msg);
+    }
+
+    msg += "\"" + ToString(tag) + ",\" but it does not know how to use it.";
+    throw implementation_error(msg);
+} // SetRegionalParametersByTag
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetGlobalThetas(const DoubleVec1d& v)
+{
+    assert(m_space != unknown_region);
+    m_globalThetas = v;
+    if (m_space == known_region)
+    {
+        FillRegionalParamsFromGlobalParams();
+    }
+} // SetGlobalThetas
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetRegionalThetas(const DoubleVec1d& v)
+{
+    assert(m_space != global_region);
+    m_regionalThetas = v;
+    if (m_space == known_region)
+    {
+        FillGlobalParamsFromRegionalParams();
+    }
+} // SetRegionalThetas
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetMigRates(const DoubleVec1d& v)
+{
+    long rowsize = LongSquareRootOfLong(v.size());
+
+    // put the given values into migrates, but never put a
+    // non-zero value into the diagonal entries!
+
+    long i, j;
+    long index = 0;
+    m_migrates.clear();
+    for (i = 0; i < rowsize; ++i)
+    {
+        for (j = 0; j < rowsize; ++j)
+        {
+            if (i == j)
+            {
+                m_migrates.push_back(0.0);
+                assert(v[index] == 0.0);
+            }
+            else m_migrates.push_back(v[index]);
+            ++index;
+        }
+    }
+} // SetMigRates
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetRecRates(const DoubleVec1d& v)
+{
+    assert(static_cast<long> (v.size()) == 1); // Program supports only 1 recombination rate.
+    m_recrates = v;
+    //LS NOTE:  if this gets split into SetRegionalRecRates and SetGlobalRecRates
+    // we will need to also fill the appropriate parallel vector if m_space is
+    // known_region (see SetGlobalThetas)
+} // SetRecRates
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetGrowthRates(const DoubleVec1d& v)
+{
+    m_growths = v;
+} // SetGrowthRates
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetLogisticSelectionCoefficient(const DoubleVec1d& v)
+{
+    if (1 != v.size())
+    {
+        string msg = "ForceParameters::SetLogisticSelectionCoefficient() received ";
+        msg += "a vector of " + ToString(v.size()) + " elements.  This vector must ";
+        msg += "contain exactly one element, corresponding to the selection coefficient.";
+        throw implementation_error(msg);
+    }
+
+    m_logisticSelectionCoefficient = v;
+} // SetLogisticSelectionCoefficient
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetDiseaseRates(const DoubleVec1d& v)
+{
+    long rowsize = LongSquareRootOfLong(v.size());
+
+    // put the given values into disease rates, but never put a
+    // non-zero value into the diagonal entries!
+
+    long i, j;
+    long index = 0;
+    m_diseases.clear();
+    for (i = 0; i < rowsize; ++i)
+    {
+        for (j = 0; j < rowsize; ++j)
+        {
+            if (i == j)
+            {
+                m_diseases.push_back(0.0);
+                assert(v[index] == 0.0);
+            }
+            else
+            {
+                m_diseases.push_back(v[index]);
+            }
+            ++index;
+        }
+    }
+} // SetDiseaseRates
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetEpochTimes(const DoubleVec1d& v)
+{
+    m_epochtimes = v;
+}
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetGlobalParameters(const DoubleVec1d& v)
+{
+    assert(m_space != unknown_region);
+    SetParameters(v, true);
+} // SetGlobalParameters
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::SetRegionalParameters(const DoubleVec1d& v)
+{
+    assert(m_space != global_region);
+    SetParameters(v, false);
+} // SetRegionalParameters
+
+//------------------------------------------------------------------------------------
+// SetParameters is private; use the two above functions.
+
+void ForceParameters::SetParameters(const DoubleVec1d& v, bool isGlobal)
+{
+    for (unsigned long fnum = 0, pnum = 0; fnum < m_forceTags.size(); fnum++)
+    {
+        DoubleVec1d oneForceVec;
+        for (long fpnum = 0; fpnum < m_forceSizes[fnum]; pnum++, fpnum++)
+        {
+            assert (v.size() > static_cast<unsigned long>(pnum));
+            oneForceVec.push_back(v[pnum]);
+        }
+        if (isGlobal)
+        {
+            SetGlobalParametersByTag(m_forceTags[fnum], oneForceVec);
+        }
+        else
+        {
+            SetRegionalParametersByTag(m_forceTags[fnum], oneForceVec);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+const DoubleVec1d& ForceParameters::GetGlobalThetas()  const
+{
+    assert(m_space != unknown_region);
+    return m_globalThetas;
+}
+
+//------------------------------------------------------------------------------------
+
+const DoubleVec1d& ForceParameters::GetRegionalThetas() const
+{
+    assert(m_space != global_region);
+    return m_regionalThetas;
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ForceParameters::GetGlobalLogParameters() const
+{
+    assert(m_space != unknown_region);
+    return GetLogParameters(true);
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ForceParameters::GetRegionalLogParameters() const
+{
+    assert(m_space != global_region);
+    return GetLogParameters(false);
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ForceParameters::GetOnlyDiseaseRates() const
+{
+    DoubleVec1d returnvec;
+    long nallparams(m_diseases.size());
+    long ncontigparams(sqrt(nallparams));
+    long ind(1); // skip the first element as it is always invalid
+    while (ind < nallparams)
+    {
+        long startind(ind);
+        for(; ind < startind+ncontigparams; ++ind)
+            returnvec.push_back(m_diseases[ind]);
+        ++ind; // there's always 1 invalid param between sets of valid
+        // params
+    }
+    assert(static_cast<long>(returnvec.size()) == ncontigparams * (ncontigparams-1));
+    return returnvec;
+}
+
+//------------------------------------------------------------------------------------
+
+//GetLogParameters is private; use the above two calls in code.
+DoubleVec1d ForceParameters::GetLogParameters(bool isGlobal) const
+{
+    DoubleVec1d tempvec;
+    DoubleVec1d resultvec;
+
+    for (unsigned long fnum=0; fnum < m_forceTags.size(); fnum++)
+    {
+        if (isGlobal)
+        {
+            tempvec = GetGlobalParametersByTag(m_forceTags[fnum]);
+        }
+        else
+        {
+            tempvec = GetRegionalParametersByTag(m_forceTags[fnum]);
+        }
+        if (m_forceTags[fnum] != force_GROW)
+        {
+            LogVec0(tempvec, tempvec);
+        }
+        resultvec.insert(resultvec.end(), tempvec.begin(), tempvec.end());
+    }
+
+    return resultvec;
+} // GetLogParameters
+
+//------------------------------------------------------------------------------------
+
+double ForceParameters::GetRegionalLogParameter(long pnum) const
+{
+    DoubleVec1d tempvec = GetRegionalParameters();
+    const ParamVector paramvec(true);
+    if (paramvec[pnum].IsForce(force_GROW))
+    {
+        return tempvec[pnum];
+    }
+    else
+    {
+        return SafeLog(tempvec[pnum]);
+    }
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ForceParameters::GetGlobalParameters() const
+{
+    assert(m_space != unknown_region);
+    return GetParameters(true);
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ForceParameters::GetRegionalParameters() const
+{
+    assert(m_space != global_region);
+    return GetParameters(false);
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ForceParameters::GetParameters(bool isGlobal) const
+{
+    DoubleVec1d tempvec;
+    DoubleVec1d resultvec;
+
+    for (unsigned long fnum=0; fnum < m_forceTags.size(); fnum++)
+    {
+        if (isGlobal)
+        {
+            tempvec = GetGlobalParametersByTag(m_forceTags[fnum]);
+        }
+        else
+        {
+            tempvec = GetRegionalParametersByTag(m_forceTags[fnum]);
+        }
+        resultvec.insert(resultvec.end(), tempvec.begin(), tempvec.end());
+    }
+
+    return resultvec;
+} // GetParameters
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+const DoubleVec1d& ForceParameters::GetRegionalParametersByTag(force_type tag) const
+{
+    assert(m_space != global_region);
+    string msg = "ForceParameters::GetRegionalParametersByTag() received tag ";
+    switch (tag)
+    {
+        case force_COAL:
+            return GetRegionalThetas();
+        case force_MIG:
+        case force_DIVMIG:
+            return GetMigRates();
+        case force_REC:
+            return GetRecRates();
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            return GetGrowthRates();
+        case force_DISEASE:
+            return GetDiseaseRates();
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            return GetLogisticSelectionCoefficient();
+        case force_DIVERGENCE:
+            return GetEpochTimes();
+        case force_REGION_GAMMA:
+            msg += "\"" + ToString(tag) + ",\" which should never happen. (This is a ";
+            msg += "pseudo-force that\'s only treated as a force within certain ";
+            msg += "contexts of the program.)";
+            throw implementation_error(msg);
+        case force_NONE:
+            msg += "\"" + ToString(tag) + ",\" which should never happen.";
+            throw implementation_error(msg);
+    }
+
+    msg += "\"" + ToString(tag) + ",\" but it does not know how to use it.";
+    throw implementation_error(msg);
+} // GetRegionalParametersByTag
+
+//------------------------------------------------------------------------------------
+
+const DoubleVec1d& ForceParameters::GetGlobalParametersByTag(force_type tag) const
+{
+    assert(m_space != unknown_region);
+    string msg = "ForceParameters::GetGlobalParametersByTag() received tag ";
+    switch (tag)
+    {
+        case force_COAL:
+            return GetGlobalThetas();
+        case force_MIG:
+        case force_DIVMIG:
+            return GetMigRates();
+        case force_REC:
+            return GetRecRates();
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            return GetGrowthRates();
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            return GetLogisticSelectionCoefficient();
+        case force_DISEASE:
+            return GetDiseaseRates();
+        case force_DIVERGENCE:
+            return GetEpochTimes();
+        case force_REGION_GAMMA:
+            msg += "\"" + ToString(tag) + ",\" which should never happen. (This is a ";
+            msg += "pseudo-force that\'s only treated as a force within certain ";
+            msg += "contexts of the program.)";
+            throw implementation_error(msg);
+        case force_NONE:
+            msg += "\"" + ToString(tag) + ",\" which should never happen.";
+            throw implementation_error(msg);
+    }
+
+    msg += "\"" + ToString(tag) + ",\" but it does not know how to use it.";
+    throw implementation_error(msg);
+} // GetGlobalParametersByTag
+
+//------------------------------------------------------------------------------------
+
+DoubleVec2d ForceParameters::GetGlobal2dRates(force_type tag) const
+{
+    assert(tag == force_DISEASE || tag == force_MIG || tag == force_DIVMIG);
+    return SquareOffVector(GetGlobalParametersByTag(tag));
+} // GetGlobal2dRates
+
+//------------------------------------------------------------------------------------
+
+DoubleVec2d ForceParameters::GetRegional2dRates(force_type tag) const
+{
+    assert(tag == force_DISEASE || tag == force_MIG || tag == force_DIVMIG);
+    return SquareOffVector(GetRegionalParametersByTag(tag));
+} // GetRegional2dRates
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::FillRegionalParamsFromGlobalParams()
+{
+    if (m_region == FLAGLONG)
+    {
+        assert(false);
+        throw implementation_error("Unable to convert global parameters to regional parameters"
+                                   " because we don't have a region to convert to.");
+    }
+    double effPopSize = registry.GetDataPack().GetRegion(m_region).GetEffectivePopSize();
+
+    m_regionalThetas = m_globalThetas;
+    transform(m_globalThetas.begin(),
+              m_globalThetas.end(),
+              m_regionalThetas.begin(),
+              bind2nd(multiplies<double>(),effPopSize));
+}
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::FillGlobalParamsFromRegionalParams()
+{
+    if (m_region == FLAGLONG)
+    {
+        assert(false);
+        throw implementation_error("Unable to convert regional parameters to global parameters"
+                                   " because we don't know what region we're converting from.");
+    }
+
+    double effPopSize = registry.GetDataPack().GetRegion(m_region).GetEffectivePopSize();
+
+    m_globalThetas = m_regionalThetas;
+    transform(m_regionalThetas.begin(),
+              m_regionalThetas.end(),
+              m_globalThetas.begin(),
+              bind2nd(divides<double>(),effPopSize));
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ForceParameters::GetRegionalScalars() const
+{
+    DoubleVec1d resultvec;
+    for (unsigned long force=0; force<m_forceTags.size(); force++)
+    {
+        double val = 1.0;
+        if (m_forceTags[force] == force_COAL)
+        {
+            val = registry.GetDataPack().GetRegion(m_region).GetEffectivePopSize();
+        }
+        if (force_REGION_GAMMA == m_forceTags[force])
+            continue; // should never hit this, but let's put it here for safety
+        DoubleVec1d tempvec(m_forceSizes[force], val);
+        resultvec.insert(resultvec.end(), tempvec.begin(), tempvec.end());
+    }
+
+    return resultvec;
+} // GetRegionalScalars
+
+//------------------------------------------------------------------------------------
+
+void ForceParameters::WriteForceParameters( ofstream& sumout, long numtabs) const
+{
+    string tabs(numtabs, '\t');
+    if ( sumout.is_open() )
+    {
+        vector<double> vd;
+
+        sumout << tabs << xmlsum::ESTIMATES_START << endl;
+
+        vd = GetGlobalThetas();
+        if (vd.size() > 0)
+        {
+            sumout << tabs << "\t" << xmlsum::THETAS_START << " ";
+            SumFileHandler::WriteVec1D( sumout, vd );
+            sumout << xmlsum::THETAS_END << endl;
+        }
+
+        vd = GetMigRates();
+        if (vd.size() > 0)
+        {
+            string startstring(xmlsum::MIGRATES_START),
+                   endstring(xmlsum::MIGRATES_END);
+            if (GetEpochTimes().size() > 0) {
+               startstring = xmlsum::DIVMIGRATES_START;
+               endstring = xmlsum::DIVMIGRATES_END;
+            }
+            sumout << tabs << "\t" << startstring << " ";
+            SumFileHandler::WriteVec1D( sumout, vd );
+            sumout << endstring << endl;
+        }
+
+        vd = GetRecRates();
+        if (vd.size() > 0)
+        {
+            sumout << tabs << "\t" << xmlsum::RECRATES_START << " ";
+            SumFileHandler::WriteVec1D( sumout, vd );
+            sumout << xmlsum::RECRATES_END << endl;
+        }
+        vd = GetGrowthRates();
+        if (vd.size() > 0)
+        {
+            sumout << tabs << "\t" << xmlsum::GROWTHRATES_START << " ";
+            SumFileHandler::WriteVec1D( sumout, vd );
+            sumout << xmlsum::GROWTHRATES_END << endl;
+        }
+        vd = GetLogisticSelectionCoefficient();
+        if (vd.size() > 0)
+        {
+            sumout << tabs << "\t" << xmlsum::LOGISTICSELECTION_START << " ";
+            SumFileHandler::WriteVec1D( sumout, vd );
+            sumout << xmlsum::LOGISTICSELECTION_END << endl;
+        }
+        vd = GetDiseaseRates();
+        if (vd.size() > 0)
+        {
+            sumout << tabs << "\t" << xmlsum::DISEASERATES_START << " ";
+            SumFileHandler::WriteVec1D( sumout, vd );
+            sumout << xmlsum::DISEASERATES_END << endl;
+        }
+        vd = GetEpochTimes();
+        if (vd.size() > 0)
+        {
+            sumout << tabs << "\t" << xmlsum::EPOCHTIMES_START << " ";
+            SumFileHandler::WriteVec1D( sumout, vd );
+            sumout << xmlsum::EPOCHTIMES_END << endl;
+        }
+
+        if (global_region == m_space)
+        {
+            const RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+            if (pRegionGammaInfo)
+            {
+                vd.clear();
+                vd.push_back(pRegionGammaInfo->GetMLE());
+                sumout << tabs << "\t" << xmlsum::GAMMAOVERREGIONS_START << " ";
+                SumFileHandler::WriteVec1D( sumout, vd );
+                sumout << xmlsum::GAMMAOVERREGIONS_END << endl;
+            }
+        }
+        sumout << tabs << xmlsum::ESTIMATES_END << endl;
+    }
+    else
+        SumFileHandler::HandleSumOutFileFormatError("WriteForceParameters");
+} // WriteForceParameters
+
+//____________________________________________________________________________________
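SetMigRates(), SetDiseaseRates(), and GetOnlyDiseaseRates() above all rely on the same convention: an N x N rate matrix is stored as a flattened, row-major vector whose diagonal entries are forced to zero, leaving N*(N-1) meaningful off-diagonal rates. A minimal standalone sketch of that convention, using plain std::vector<double> in place of the vectorx.h typedefs and illustrative helper names (these are not LAMARC functions):

    // Standalone sketch only -- illustrative helpers, not LAMARC functions.
    #include <cassert>
    #include <cstddef>
    #include <vector>

    typedef std::vector<double> DoubleVec1d;   // stand-in for the vectorx.h typedef

    // Copy a flattened N x N rate matrix, forcing every diagonal entry to zero
    // (the invariant SetMigRates() and SetDiseaseRates() maintain).
    DoubleVec1d CopyWithZeroDiagonal(const DoubleVec1d& v, std::size_t n)
    {
        assert(v.size() == n * n);
        DoubleVec1d flat(v);
        for (std::size_t i = 0; i < n; ++i)
            flat[i * n + i] = 0.0;
        return flat;
    }

    // Keep only the N*(N-1) off-diagonal rates, as GetOnlyDiseaseRates() does.
    DoubleVec1d ExtractOffDiagonal(const DoubleVec1d& flat, std::size_t n)
    {
        assert(flat.size() == n * n);
        DoubleVec1d offdiag;
        for (std::size_t i = 0; i < n; ++i)
            for (std::size_t j = 0; j < n; ++j)
                if (i != j) offdiag.push_back(flat[i * n + j]);
        return offdiag;
    }

    int main()
    {
        // A 2 x 2 matrix flattened row-major: {m00, m01, m10, m11}.
        DoubleVec1d v;
        v.push_back(0.0);  v.push_back(0.5);
        v.push_back(1.25); v.push_back(0.0);

        DoubleVec1d flat    = CopyWithZeroDiagonal(v, 2);
        DoubleVec1d offdiag = ExtractOffDiagonal(flat, 2);  // {0.5, 1.25}
        assert(offdiag.size() == 2 * (2 - 1));
        return 0;
    }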
diff --git a/src/force/forceparam.h b/src/force/forceparam.h
new file mode 100644
index 0000000..beb47bb
--- /dev/null
+++ b/src/force/forceparam.h
@@ -0,0 +1,171 @@
+// $Id: forceparam.h,v 1.28 2011/03/07 06:08:49 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/********************************************************************
+ ForceParameters is a collection class used for internal communication
+ throughout Lamarc.
+
+ ForceParameters is a package of information about the values of
+ all force parameters (for all forces--it is not polymorphic).
+ Any routine which wishes to pass or return a bundle of force
+ parameters should use this class.  It is normally passed by
+ value.
+
+ Internally, the parameters are kept in separate vectors, but the class
+ can present them as one big vector using the registry's ForceSummary
+ object.
+
+ Written by Jim Sloan, revised by Mary Kuhner, further revised by Lucian Smith
+
+ Jon Yamato:
+    added SetToMeanOf(vector<forceparameters>)
+********************************************************************/
+/* There are three 'modes' a ForceParameters object can come in.  In the
+   'fully aware' version, it holds values for all the parameters, and it
+   knows what those parameters should be for a particular genomic region.
+   In this case, m_space is known_region.  All the parameter vectors
+   will be filled with values in this case, and it can safely return either
+   regional or global values.
+
+   There are two 'partially aware' versions of the object: one that knows
+   only the global parameter values, and one that knows only a single
+   region's parameter values without knowing which region that is.  In the
+   former case, m_space is global_region, and in the latter, m_space is
+   unknown_region.  The starting values, for example, are global_region,
+   since they must be used for every region, and the likelihood maximizer
+   works with unknown_region objects, since it simply operates on whatever
+   parameters it is handed and is never told which region they belong to.
+
+   If you have either of the partially aware versions and want a fully aware
+   version, you construct a new ForceParameters object and tell it which
+   region you want:
+
+   ForceParameters aware_fp(global_fp, reg#);
+   ForceParameters aware_fp(unknown_fp, reg#);
+
+   This constructor copies the appropriate vectors, then looks up the scaling
+   factor it needs to fill in the other vectors.
+   -Lucian
+*/
+
+#ifndef FORCEPARAM_H
+#define FORCEPARAM_H
+
+#include <vector>
+#include "vectorx.h"
+#include "constants.h"
+#include "defaults.h"
+
+class ForceSummary;
+class Epoch;
+
+enum param_space {global_region, known_region, unknown_region};
+
+class ForceParameters
+{
+  private:
+    ForceParameters(); //undefined
+
+    // the parameter vectors.  recrates is a vector of one element, for
+    // consistency of interface.
+    DoubleVec1d m_globalThetas;
+    DoubleVec1d m_regionalThetas;
+    DoubleVec1d m_migrates;
+    DoubleVec1d m_diseases;
+    DoubleVec1d m_recrates;
+    DoubleVec1d m_growths;
+    // one-element vector, for consistency
+    DoubleVec1d m_logisticSelectionCoefficient;
+    DoubleVec1d m_epochtimes;
+
+    long m_region; //Either the appropriate region or FLAGLONG for global.
+    param_space m_space;
+
+    vector<force_type> m_forceTags;
+    vector<long>       m_forceSizes;
+
+    // non-owning pointer to Epoch information, null if none exists
+    const std::vector<Epoch>* m_epochptr;
+
+    void CopyGlobalMembers(const ForceParameters& src);
+    void CopyRegionalMembers(const ForceParameters& src);
+
+    DoubleVec1d GetLogParameters(bool isGlobal) const;
+    DoubleVec1d GetParameters(bool isGlobal)    const;
+
+    void SetParameters(const DoubleVec1d& v, bool isGlobal);
+
+    void FillGlobalParamsFromRegionalParams();
+    void FillRegionalParamsFromGlobalParams();
+
+  public:
+    // ForceParameters(const ForceParameters& src); //we accept the default
+    // ForceParameters& operator=(const ForceParameters& src);
+    ForceParameters(long region);
+    ForceParameters(param_space space);
+    ForceParameters(param_space space,
+                    ForceTypeVec1d types, LongVec1d sizes);
+    ForceParameters(const ForceParameters& src, long region);
+    // If we ever end up creating one of these things in global space but
+    // don't know it, we would need a converter from unknown_region to global:
+    // ForceParameters(const ForceParameters& src, param_space space);
+
+    // Setters
+    void  SetGlobalThetas  (const DoubleVec1d& v);
+    void  SetRegionalThetas(const DoubleVec1d& v);
+    void  SetMigRates(const DoubleVec1d& v);
+    void  SetDiseaseRates(const DoubleVec1d& v);
+    void  SetRecRates(const DoubleVec1d& v);
+    void  SetGrowthRates(const DoubleVec1d& v);
+    void  SetLogisticSelectionCoefficient(const DoubleVec1d& v);
+    void  SetEpochTimes(const DoubleVec1d& v);
+    void  SetGlobalParameters(const DoubleVec1d& v);
+    void  SetRegionalParameters(const DoubleVec1d& v);
+    void  SetGlobalParametersByTag(force_type tag, const DoubleVec1d& v);
+    void  SetRegionalParametersByTag(force_type tag, const DoubleVec1d& v);
+
+    // Getters
+    // erynes 2004/01/22: made these return by constant reference
+    // to improve speed.  This might become inappropriate as the
+    // code base evolves.
+    param_space        GetParamSpace()    const { return m_space; };
+    const DoubleVec1d& GetGlobalThetas()  const;
+    const DoubleVec1d& GetRegionalThetas()const;
+    const DoubleVec1d& GetMigRates()      const { return m_migrates; };
+    const DoubleVec1d& GetDiseaseRates()  const { return m_diseases; };
+    const DoubleVec1d& GetRecRates()      const { return m_recrates; };
+    const DoubleVec1d& GetGrowthRates()   const { return m_growths;  };
+    const DoubleVec1d& GetLogisticSelectionCoefficient() const
+    { return m_logisticSelectionCoefficient;  };
+    const DoubleVec1d& GetEpochTimes()    const { return m_epochtimes; };
+    const DoubleVec1d& GetGlobalParametersByTag  (force_type tag)    const;
+    const DoubleVec1d& GetRegionalParametersByTag(force_type tag)    const;
+    double GetRegionalLogParameter(long pnum) const;
+    const std::vector<Epoch>* GetEpochs() const { return m_epochptr; };
+
+    // only pull the valid (in the paramvec sense) disease rates
+    DoubleVec1d GetOnlyDiseaseRates() const;
+
+    // GetLogParameters() is used by the BayesArranger
+    DoubleVec1d GetGlobalLogParameters()           const;
+    DoubleVec1d GetRegionalLogParameters()         const;
+    DoubleVec1d GetGlobalParameters()              const;
+    DoubleVec1d GetRegionalParameters()            const;
+    DoubleVec2d GetGlobal2dRates(force_type tag)   const;
+    DoubleVec2d GetRegional2dRates(force_type tag) const;
+
+    DoubleVec1d GetRegionalScalars() const;
+
+    void WriteForceParameters(std::ofstream& out, long numtabs) const;
+};
+
+#endif // FORCEPARAM_H
+
+//____________________________________________________________________________________
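The conversion constructor described in the comment block above (ForceParameters aware_fp(global_fp, region)) turns a 'partially aware' object into a 'fully aware' one by copying the known vectors and rescaling the Thetas by the region's effective population size, as FillRegionalParamsFromGlobalParams() and FillGlobalParamsFromRegionalParams() do in forceparam.cpp. Below is a minimal standalone mimic of just that rescaling step; it is not the real class (which needs the registry and a ForceSummary), and the ThetaScaler name and the effective-population-size value are hypothetical.

    // Standalone mimic only -- not the real ForceParameters class.
    #include <cassert>
    #include <cstddef>
    #include <vector>

    typedef std::vector<double> DoubleVec1d;   // stand-in for the vectorx.h typedef

    // Illustrative stand-in: scale Thetas between global and regional spaces
    // by a region's effective population size, mirroring the
    // FillRegionalParamsFromGlobalParams()/FillGlobalParamsFromRegionalParams() pair.
    struct ThetaScaler
    {
        explicit ThetaScaler(double effPopSize) : m_effPopSize(effPopSize) {}

        DoubleVec1d GlobalToRegional(const DoubleVec1d& globalThetas) const
        {
            DoubleVec1d regional(globalThetas);
            for (std::size_t i = 0; i < regional.size(); ++i)
                regional[i] *= m_effPopSize;   // multiply, as in FillRegional...
            return regional;
        }

        DoubleVec1d RegionalToGlobal(const DoubleVec1d& regionalThetas) const
        {
            DoubleVec1d global(regionalThetas);
            for (std::size_t i = 0; i < global.size(); ++i)
                global[i] /= m_effPopSize;     // divide, as in FillGlobal...
            return global;
        }

        double m_effPopSize;
    };

    int main()
    {
        ThetaScaler scaler(0.25);              // hypothetical effective pop size
        DoubleVec1d globalThetas(2, 0.01);
        DoubleVec1d regionalThetas = scaler.GlobalToRegional(globalThetas);
        // Scaling by a power of two is exact, so the round trip reproduces the input.
        assert(scaler.RegionalToGlobal(regionalThetas) == globalThetas);
        return 0;
    }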
diff --git a/src/force/forcesummary.cpp b/src/force/forcesummary.cpp
new file mode 100644
index 0000000..777e975
--- /dev/null
+++ b/src/force/forcesummary.cpp
@@ -0,0 +1,1094 @@
+// $Id: forcesummary.cpp,v 1.63 2013/11/07 22:46:06 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <cmath>
+
+#include "chainout.h"
+#include "datapack.h"                   // for access to Region functions in SummarizeData()
+#include "force.h"                      // to create Force objects
+#include "forceparam.h"
+#include "forcesummary.h"
+#include "plotstat.h"
+#include "region.h"
+#include "runreport.h"
+#include "summary.h"
+#include "tree.h"                       //to create Tree objects
+#include "ui_vars.h"
+#include "vector_constants.h"
+#include "xml_strings.h"                // for ToXML()
+#include "event.h"                      // to create Event objects
+#include "regiongammainfo.h"
+#include "timemanager.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+ForceSummary::ForceSummary(ForceVec fvec, ForceParameters fparams, DataPack& dpack)
+    : m_forcevec(fvec),
+      m_startParameters(fparams),
+      m_llikemles(dpack.GetNRegions()), // likelihoods at the MLEs per region.
+      m_overallLlikemle()
+{
+    // first set the partition info
+    LongVec1d partitioncounts;
+    long ind = 0;
+    ForceVec::iterator fit;
+    for(fit = m_forcevec.begin(); fit != m_forcevec.end(); ++fit)
+    {
+        if ((*fit)->SetPartIndex(ind))
+        {
+            ++ind;
+            long partCount = (*fit)->GetNPartitions();
+            partitioncounts.push_back(partCount);
+        }
+        //Set up the identical groups;
+        ULongVec2d newgroups = (*fit)->GetIdenticalGroupedParams();
+        for (unsigned long gnum = 0; gnum<newgroups.size(); ++gnum)
+        {
+            m_identicalGroups.push_back(newgroups[gnum]);
+        }
+        //Set up the multiplicative groups;
+        newgroups = (*fit)->GetMultiplicativeGroupedParams();
+        for (unsigned long gnum = 0; gnum<newgroups.size(); ++gnum)
+        {
+            m_multiplicativeGroups.push_back(newgroups[gnum]);
+        }
+        //Set up the multiplicative multipliers;
+        DoubleVec1d newmults = (*fit)->GetParamGroupMultipliers();
+        m_multgroupmultiplier.insert(m_multgroupmultiplier.end(),newmults.begin(),newmults.end());
+    }
+    dpack.SetFinalPartitionCounts(partitioncounts);
+
+} // ForceSummary constructor
+
+//------------------------------------------------------------------------------------
+
+ForceSummary::~ForceSummary()
+{
+    ForceVec::iterator fit;
+    for (fit = m_forcevec.begin(); fit != m_forcevec.end(); ++fit)
+        delete(*fit);
+    m_forcevec.clear();
+} // ForceSummary destructor
+
+//------------------------------------------------------------------------------------
+
+vector<Parameter> ForceSummary::GetAllParameters()
+// a private function; public users should create a ParamVector
+{
+    ForceVec::iterator force;
+    vector<Parameter> result;
+
+    for (force = m_forcevec.begin(); force != m_forcevec.end(); ++force)
+    {
+        vector<Parameter>& tempvec = (*force)->GetParameters();
+        result.insert(result.end(), tempvec.begin(), tempvec.end());
+    }
+
+    return result;
+
+} // GetAllParameters
+
+//------------------------------------------------------------------------------------
+
+void ForceSummary::SetAllParameters(const vector<Parameter>& src)
+// a private function; public users should create a ParamVector
+{
+    ForceVec::iterator force;
+    vector<Parameter>::const_iterator startparam = src.begin();
+    vector<Parameter>::const_iterator endparam;
+    for (force = m_forcevec.begin(); force != m_forcevec.end(); ++force)
+    {
+        endparam = startparam + (*force)->GetNParams();
+        vector<Parameter> result(startparam, endparam);
+        (*force)->SetParameters(result);
+        startparam = endparam;
+    }
+    //LS NOTE:  We might not be at the end of the param vector if there's a gamma, but
+    // we throw that information away.  Or keep it somewhere else.  Or who knows what.
+
+} // SetAllParameters
+
+//------------------------------------------------------------------------------------
+
+Tree *ForceSummary::CreateProtoTree() const
+{
+    Tree *tree;
+
+    if (CheckForce(force_REC))
+    {
+        tree = new RecTree();
+    }
+
+    else
+    {
+        tree = new PlainTree();
+    }
+
+    tree->SetTreeTimeManager(CreateTimeManager());
+
+    return tree;
+}
+
+//------------------------------------------------------------------------------------
+
+TreeSummary* ForceSummary::CreateProtoTreeSummary() const
+{
+    bool shortform = true;
+
+    // if growth is in effect, short form summaries cannot be used
+    if (HasGrowth() || HasLogisticSelection())
+    {
+        shortform = false;
+    }
+
+    // if recombination is in effect, a recombinant summary must be
+    // created
+    TreeSummary* treesum;
+    if (CheckForce(force_REC))
+    {
+        treesum = new RecTreeSummary();
+    }
+    else
+    {
+        treesum = new TreeSummary();
+    }
+
+    IntervalData& interval = treesum->GetIntervalData();
+
+    ForceVec::const_iterator fit = m_forcevec.begin();
+    for ( ; fit != m_forcevec.end(); ++fit)
+    {
+        Summary* sum;
+        if (force_REC == (*fit)->GetTag() && !shortform)
+        {
+            if (CheckForce(force_DISEASE))
+                sum = (*fit)->CreateSummary(interval, false);
+            else
+                sum = (*fit)->CreateSummary(interval, true);
+        }
+        else
+            sum = (*fit)->CreateSummary(interval, shortform);
+        if (sum)
+        {
+            treesum->AddSummary((*fit)->GetTag(), sum);
+        }
+    }
+
+    return treesum;
+
+} // CreateProtoTreeSummary
+
+//------------------------------------------------------------------------------------
+
+vector<Event*> ForceSummary::CreateEventVec() const
+{
+    vector<Event*> events;
+    ForceVec::const_iterator fit(m_forcevec.begin());
+    for( ; fit != m_forcevec.end(); ++fit)
+    {
+        vector<Event*> ev((*fit)->MakeEvents(*this));
+        events.insert(events.end(),ev.begin(),ev.end());
+    }
+
+    for(fit = m_forcevec.begin(); fit != m_forcevec.end(); ++fit)
+        (*fit)->ModifyEvents(*this,events);
+
+    return events;
+}
+
+//------------------------------------------------------------------------------------
+
+long ForceSummary::GetNLocalPartitionForces() const
+{
+    long nforces(0L);
+    ForceVec::const_iterator fit(m_forcevec.begin());
+    for( ; fit != m_forcevec.end(); ++fit)
+    {
+        if ((*fit)->IsLocalPartitionForce()) ++nforces;
+    }
+    return nforces;
+}
+
+//------------------------------------------------------------------------------------
+
+ForceVec::const_iterator ForceSummary::GetForceByTag(force_type tag) const
+{
+    // returns an iterator to the element, or end() if none is found
+    ForceVec::const_iterator it;
+    for (it = m_forcevec.begin(); it != m_forcevec.end(); ++it)
+    {
+        if ((*it)->GetTag() == tag) return (it);
+    }
+    it = m_forcevec.end();
+    return (it);
+
+} // GetForceByTag
+
+//------------------------------------------------------------------------------------
+
+const StickForce& ForceSummary::GetStickForce() const
+{
+    ForceVec::const_iterator it;
+    for (it = m_forcevec.begin(); it != m_forcevec.end(); ++it)
+    {
+        if ((*it)->IsStickForce())
+        {
+            StickForce* fp(dynamic_cast<StickForce*>(*it));
+            return *fp;
+        }
+    }
+
+    assert(false);  // asked for a stickforce when there wasn't one to be had!!
+
+    return *(dynamic_cast<StickForce*>(*it)); // this is probably illegal,
+    // but we should never be here
+    // see assert above
+
+} // GetStickForce
+
+//------------------------------------------------------------------------------------
+
+LongVec1d ForceSummary::GetLocalPartitionIndexes() const
+{
+    LongVec1d lpindexes;
+
+    ForceVec::const_iterator it;
+    for (it = m_forcevec.begin(); it != m_forcevec.end(); ++it)
+    {
+        if ((*it)->IsLocalPartitionForce())
+        {
+            lpindexes.push_back(dynamic_cast<PartitionForce*>(*it)->GetPartIndex());
+        }
+    }
+
+    return lpindexes;
+
+} // GetLocalPartitionIndexes
+
+//------------------------------------------------------------------------------------
+
+LongVec1d ForceSummary::GetNonLocalPartitionIndexes() const
+{
+    LongVec1d lpindexes;
+
+    ForceVec::const_iterator it;
+    for (it = m_forcevec.begin(); it != m_forcevec.end(); ++it)
+    {
+        if ((*it)->IsPartitionForce() && (!(*it)->IsLocalPartitionForce()))
+        {
+            lpindexes.push_back(dynamic_cast<PartitionForce*>(*it)->GetPartIndex());
+        }
+    }
+
+    return lpindexes;
+
+} // GetNonLocalPartitionIndexes
+
+//------------------------------------------------------------------------------------
+
+force_type ForceSummary::GetNonLocalPartitionForceTag() const
+{
+    if (CheckForce(force_MIG)) return force_MIG;
+    if (CheckForce(force_DIVMIG)) return force_DIVMIG;
+    assert(false);
+    return force_NONE;  // should never happen
+} // GetNonLocalPartitionForceTag
+
+//------------------------------------------------------------------------------------
+
+bool ForceSummary::IsMissingForce(force_type tag) const
+{
+    ForceVec::const_iterator it = GetForceByTag(tag);
+    return (it == m_forcevec.end());
+} // IsMissingForce
+
+//------------------------------------------------------------------------------------
+
+bool ForceSummary::AnyForcesOtherThanGrowCoal() const
+{
+    ForceVec::const_iterator it;
+    for (it = m_forcevec.begin(); it != m_forcevec.end(); ++it)
+    {
+        if ((*it)->GetTag() != force_COAL && (*it)->GetTag() != force_GROW)
+            return true;
+    }
+
+    return false;
+
+} // AnyForcesOtherThanGrowCoal
+
+//------------------------------------------------------------------------------------
+
+bool ForceSummary::UsingStick() const
+{
+    ForceVec::const_iterator it;
+    for (it = m_forcevec.begin(); it != m_forcevec.end(); ++it)
+    {
+        if ((*it)->IsStickForce()) return true;
+    }
+
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+bool ForceSummary::CheckForce(force_type tag) const
+{
+    if (GetForceByTag(tag) == m_forcevec.end()) return false;
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+long ForceSummary::GetMaxEvents(force_type tag) const
+{
+    ForceVec::const_iterator it = GetForceByTag(tag);
+    assert(it != m_forcevec.end());
+    return((*it)->GetMaxEvents());
+} // GetMaxEvents
+
+//------------------------------------------------------------------------------------
+
+void ForceSummary::SetRegionMLEs(const ChainOut& chout, long region)
+{
+    // we do this by way of the ParamVector linearized form; it's easier
+
+    ParamVector paramvec(false);
+    vector<double> estimates = chout.GetEstimates().GetGlobalParameters();
+
+    assert(estimates.size() == paramvec.size());  // need one estimate per parameter
+
+    ParamVector::iterator param;
+    vector<double>::const_iterator est = estimates.begin();
+
+    for (param = paramvec.begin(); param != paramvec.end(); ++param, ++est)
+    {
+        if (param->IsValidParameter()) param->AddMLE(*est, region);
+    }
+
+    m_llikemles[region] = chout.GetLlikemle();
+
+} // SetRegionMLEs
+
+//------------------------------------------------------------------------------------
+
+void ForceSummary::SetOverallMLE(const ChainOut& chout)
+{
+    ParamVector paramvec(false);
+    vector<double> estimates = chout.GetEstimates().GetGlobalParameters();
+
+    assert(estimates.size() == paramvec.size());  // need one estimate per parameter
+
+    ParamVector::iterator param;
+    vector<double>::const_iterator est = estimates.begin();
+
+    for (param = paramvec.begin(); param != paramvec.end(); ++param, ++est)
+    {
+        if (param->IsValidParameter()) param->AddOverallMLE(*est);
+    }
+
+    m_overallLlikemle = chout.GetLlikemle();
+
+} // SetOverallMLE
+
+//------------------------------------------------------------------------------------
+
+MethodTypeVec1d ForceSummary::GetMethods(force_type tag) const
+{
+    ForceVec::const_iterator it = GetForceByTag(tag);
+    assert(it != m_forcevec.end());
+    return (*it)->GetMethods();
+
+} // GetMethods
+
+//------------------------------------------------------------------------------------
+
+const vector<Epoch>* ForceSummary::GetEpochs() const
+{
+    if (!CheckForce(force_DIVERGENCE)) return NULL;
+    ForceVec::const_iterator it = GetForceByTag(force_DIVERGENCE);
+    const DivForce* divforce = dynamic_cast<const DivForce*>(*it);
+    return divforce->GetEpochs();
+} // GetEpochs
+
+//------------------------------------------------------------------------------------
+
+StringVec1d ForceSummary::ToXML(unsigned long nspaces) const
+{
+    StringVec1d xmllines;
+
+    string line = MakeIndent(MakeTag(xmlstr::XML_TAG_FORCES),nspaces);
+    xmllines.push_back(line);
+    nspaces += INDENT_DEPTH;
+    ForceVec::const_iterator force;
+    for(force = m_forcevec.begin(); force != m_forcevec.end(); ++force)
+    {
+        StringVec1d forcexml((*force)->ToXML(nspaces));
+        xmllines.insert(xmllines.end(),forcexml.begin(),forcexml.end());
+    }
+    if (registry.GetRegionGammaInfo())  //checks a pointer value
+    {
+        StringVec1d gammaforcexml((*registry.GetRegionGammaInfo()).ToXML(nspaces));
+        xmllines.insert(xmllines.end(),gammaforcexml.begin(),gammaforcexml.end());
+    }
+    nspaces -= INDENT_DEPTH;
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_FORCES),nspaces);
+    xmllines.push_back(line);
+
+    return xmllines;
+
+} // ToXML
+
+//------------------------------------------------------------------------------------
+
+bool ForceSummary::ConstrainParameterValues(ForceParameters& fp) const
+{
+    bool parameters_constrained = false;
+    const ParamVector pvec(true);
+    ForceVec::const_iterator fit;
+    DoubleVec1d params = fp.GetGlobalParameters();
+    bool islog = false;
+    for (unsigned long pnum = 0; pnum< pvec.size(); ++pnum)
+    {
+        if (pvec[pnum].IsVariable())
+        {
+            force_type ftype = pvec[pnum].WhichForce();
+            fit = GetForceByTag(ftype);
+            double trimvalue = (*fit)->Truncate(params[pnum]);
+            if (trimvalue != params[pnum])
+            {
+                string msg = "The parameter \"" + pvec[pnum].GetName() +
+                    "\" maximized at " + ToString(params[pnum]) +
+                    " but is being constrained to " + ToString(trimvalue) + ".\n";
+                RunReport& runreport = registry.GetRunReport();
+                runreport.ReportNormal(msg);
+                SetParamWithConstraints(pnum, trimvalue, params, islog);
+                //params[param] = trimvalue;
+                parameters_constrained = true;
+            }
+        }
+    }
+    fp.SetGlobalParameters(params);
+
+    return parameters_constrained;
+} // ConstrainParameterValues
+
+//------------------------------------------------------------------------------------
+
+MethodTypeVec2d ForceSummary::Get2DMethods(force_type tag) const
+{
+    ForceVec::const_iterator it = GetForceByTag(tag);
+    assert(it != m_forcevec.end());
+    MethodTypeVec2d result = SquareOffVector((*it)->GetMethods());
+    return result;
+} // Get2DMethods
+
+//------------------------------------------------------------------------------------
+
+const ForceVec ForceSummary::GetPartitionForces() const
+{
+    ForceVec partitions;
+    ForceVec::const_iterator it;
+    for(it = m_forcevec.begin(); it != m_forcevec.end(); ++it)
+        if ((*it)->IsPartitionForce()) partitions.push_back(*it);
+
+    return partitions;
+} // GetPartitionForces
+
+//------------------------------------------------------------------------------------
+
+const ForceVec ForceSummary::GetLocalPartitionForces() const
+{
+    ForceVec partitions;
+    ForceVec::const_iterator it;
+    for(it = m_forcevec.begin(); it != m_forcevec.end(); ++it)
+        if ((*it)->IsLocalPartitionForce()) partitions.push_back(*it);
+
+    return partitions;
+} // GetLocalPartitionForces
+
+//------------------------------------------------------------------------------------
+
+ULongVec1d ForceSummary::GetParamsIdenticalGroupedWith(unsigned long pindex) const
+{
+    for (unsigned long gnum = 0; gnum < m_identicalGroups.size(); ++gnum)
+    {
+        for (unsigned long gpnum = 0; gpnum < m_identicalGroups[gnum].size(); ++gpnum)
+        {
+            if (m_identicalGroups[gnum][gpnum] == pindex)
+            {
+                return m_identicalGroups[gnum];
+            }
+        }
+    }
+    assert(false); //It's probably not a good idea to check this for *all*
+    // pindices.
+    ULongVec1d emptyvec;
+    emptyvec.push_back(pindex);
+    return emptyvec;
+}
+
+//------------------------------------------------------------------------------------
+
+ULongVec1d ForceSummary::GetParamsMultiplicativelyGroupedWith(unsigned long pindex) const
+{
+    for (unsigned long gnum = 0; gnum < m_multiplicativeGroups.size(); ++gnum)
+    {
+        for (unsigned long gpnum = 0; gpnum < m_multiplicativeGroups[gnum].size(); ++gpnum)
+        {
+            if (m_multiplicativeGroups[gnum][gpnum] == pindex)
+            {
+                return m_multiplicativeGroups[gnum];
+            }
+        }
+    }
+    assert(false); //It's probably not a good idea to check this for *all*
+    // pindices.
+    ULongVec1d emptyvec;
+    emptyvec.push_back(pindex);
+    return emptyvec;
+}
+
+//------------------------------------------------------------------------------------
+
+double ForceSummary::GetGroupMultiplier(unsigned long pindex) const
+{
+    for (unsigned long gnum = 0; gnum < m_multiplicativeGroups.size(); ++gnum)
+    {
+        for (unsigned long gpnum = 0; gpnum < m_multiplicativeGroups[gnum].size(); ++gpnum)
+        {
+            if (m_multiplicativeGroups[gnum][gpnum] == pindex)
+            {
+                return m_multgroupmultiplier[gnum];
+            }
+        }
+    }
+    assert(false); //It's probably not a good idea to check this for *all*
+    // pindices.
+    return FLAGDOUBLE;
+}
+
+//------------------------------------------------------------------------------------
+
+vector<force_type> ForceSummary::GetForceTags() const
+{
+    vector<force_type> retVec;
+    for (unsigned long fnum = 0; fnum < m_forcevec.size(); ++fnum)
+    {
+        retVec.push_back(m_forcevec[fnum]->GetTag());
+    }
+    return retVec;
+}
+
+//------------------------------------------------------------------------------------
+
+vector<long> ForceSummary::GetForceSizes() const
+{
+    vector<long> retVec;
+    for (unsigned long fnum = 0; fnum < m_forcevec.size(); ++fnum)
+    {
+        retVec.push_back(m_forcevec[fnum]->GetNParams());
+    }
+    return retVec;
+}
+
+//------------------------------------------------------------------------------------
+
+void ForceSummary::SetParamWithConstraints(long pindex, double newval,
+                                           DoubleVec1d& pvecnums, bool islog) const
+{
+    //Note that this function is sometimes called with 'pvecnums' equal to
+    // *log* parameters, not always 'normal' parameters.
+
+    const ParamVector pvec(true);
+    // MDEBUG a good refactor would be to unify identical-group and multiplicative-group logic here
+    ParamStatus mystatus = pvec[pindex].GetStatus();
+    ULongVec1d groupmembers;
+    double multiplier = 1.0;
+    if (mystatus.Status() == pstat_identical_head)
+    {
+        groupmembers = GetParamsIdenticalGroupedWith(pindex);
+    }
+    if (mystatus.Status() == pstat_multiplicative_head)
+    {
+        groupmembers = GetParamsMultiplicativelyGroupedWith(pindex);
+        multiplier = GetGroupMultiplier(pindex);
+        if (islog) multiplier = log(multiplier);
+    }
+    mystatus.SetWithConstraints(pindex, newval, pvecnums, groupmembers, multiplier);
+}
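+
+#if 0 // Illustrative only, not upstream code: a minimal sketch of how a caller
+      // might push a new value for a grouped parameter through the constraint
+      // machinery above.  'values' is a hypothetical parameter vector checked
+      // out elsewhere.
+static void ExampleConstrainedUpdate(const ForceSummary& fsum, DoubleVec1d& values)
+{
+    // Updating parameter 0 also updates everything grouped with it; pass
+    // 'true' for the last argument when 'values' holds log parameters.
+    fsum.SetParamWithConstraints(0, 0.01, values, false);
+}
+#endif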
+
+//------------------------------------------------------------------------------------
+
+long ForceSummary::GetNParameters(force_type tag) const
+{
+    ForceVec::const_iterator it = GetForceByTag(tag);
+    assert(it != m_forcevec.end());
+    return (*it)->GetNParams();
+} // GetNParameters
+
+//------------------------------------------------------------------------------------
+
+long ForceSummary::GetAllNParameters() const
+{
+    long total = 0;
+    ForceVec::const_iterator it;
+    for(it = m_forcevec.begin(); it != m_forcevec.end(); ++it)
+        total += (*it)->GetNParams();
+    return total;
+} // ForceSummary::GetAllNParameters
+
+//------------------------------------------------------------------------------------
+
+vector<double> ForceSummary::GetModifiers(long posinparamvec) const
+{
+    const ParamVector paramvec(true);  // read-only copy
+    vector<double> results;
+
+    // diagnose whether we have fixed or percentile profiles.
+
+    proftype profiletype = paramvec[posinparamvec].GetProfileType();
+
+    //Note:  The profiling type must be consistent within a force, but can
+    // change from force to force.  Also within a force, profiling can be turned
+    // on or off.
+
+    if (profiletype == profile_NONE) return results;  // no profiles, hence no modifiers
+
+    verbosity_type verbosity = registry.GetUserParameters().GetVerbosity();
+
+    if (profiletype == profile_PERCENTILE) // percentile
+    {
+        if (verbosity == CONCISE || verbosity == NONE)
+            results = vecconst::percentiles_short;
+        else
+            results = vecconst::percentiles;
+        return results;
+    }
+    else
+    {                                   // fixed
+        if (verbosity == CONCISE || verbosity == NONE)
+        {
+            results = vecconst::multipliers_short; //growth or not, just do this one.
+        }
+        else
+        {
+            if(paramvec[posinparamvec].IsForce(force_GROW))
+            {
+                results =  vecconst::growthmultipliers;
+                DoubleVec1d gfixed = vecconst::growthfixed;
+                for (unsigned long i = 0; i < gfixed.size(); i++)
+                    results.push_back(gfixed[i]);
+                //There are two components to this vector--the first the normal
+                // multipliers of growth, but the second actual values for growth
+                // which we want to test.  We'll have to catch this later in
+                // Analyzer::CalcProfileFixed (in profile.cpp)
+            }
+            else
+                results = vecconst::multipliers;
+        }
+        return results;
+    }
+} // GetModifiers
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ForceSummary::GetModifiersByForce(force_type tag) const
+{
+    const ParamVector paramvec(true);  // read-only copy
+    for (unsigned long i = 0; i < paramvec.size(); ++i)
+        if (paramvec[i].IsValidParameter() && paramvec[i].IsForce(tag))
+            if (paramvec[i].GetProfileType() != profile_NONE)
+                return GetModifiers(i);
+    return GetModifiers(0);
+    //This assumes that all parameters of a given force have either the
+    // same profiling type or none at all.
+} // GetModifiersByForce
+
+//------------------------------------------------------------------------------------
+
+proftype ForceSummary::GetOverallProfileType() const
+{
+    unsigned long i;
+    for (i = 0; i < m_forcevec.size(); ++i)
+    {
+        proftype mytype = m_forcevec[i]->SummarizeProfTypes();
+        if (mytype != profile_NONE) return mytype;
+    }
+
+    return profile_NONE;
+
+} // GetOverallProfileType
+
+//------------------------------------------------------------------------------------
+
+double ForceSummary::GetLowParameter(long posinparamvec) const
+{
+    const ParamVector paramvec(true);  // read-only copy
+    force_type tag = paramvec[posinparamvec].WhichForce();
+    if (force_REGION_GAMMA == tag)
+    {
+        const RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+        if (!pRegionGammaInfo)
+        {
+            string msg = "ForceSummary::GetLowParameter() attempted to get the low ";
+            msg += "parameter value for force \"" + ToString(tag);
+            msg += ",\" but the necessary RegionGammaInfo was not found in the registry.";
+            throw implementation_error(msg);
+        }
+        return pRegionGammaInfo->GetLowValue();
+    }
+    ForceVec::const_iterator fit = GetForceByTag(tag);
+    if (m_forcevec.end() == fit)
+    {
+        string msg = "ForceSummary::GetLowParameter() failed to get a force object ";
+        msg += "corresponding to tag \"" + ToString(tag) + ".\"";
+        throw implementation_error(msg);
+    }
+    return (*fit)->GetLowVal();
+}
+
+//------------------------------------------------------------------------------------
+
+double ForceSummary::GetHighParameter(long posinparamvec) const
+{
+    const ParamVector paramvec(true);  // read-only copy
+    force_type tag = paramvec[posinparamvec].WhichForce();
+    if (force_REGION_GAMMA == tag)
+    {
+        const RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+        if (!pRegionGammaInfo)
+        {
+            string msg = "ForceSummary::GetHighParameter() attempted to get the high ";
+            msg += "parameter value for force \"" + ToString(tag);
+            msg += ",\" but the necessary RegionGammaInfo was not found in the registry.";
+            throw implementation_error(msg);
+        }
+        return pRegionGammaInfo->GetHighValue();
+    }
+    ForceVec::const_iterator fit = GetForceByTag(tag);
+    if (m_forcevec.end() == fit)
+    {
+        string msg = "ForceSummary::GetHighParameter() failed to get a force object ";
+        msg += "corresponding to tag \"" + ToString(tag) + ".\"";
+        throw implementation_error(msg);
+    }
+    return (*fit)->GetHighVal();
+}
+
+//------------------------------------------------------------------------------------
+
+double ForceSummary::GetLowMult(long posinparamvec) const
+{
+    const ParamVector paramvec(true);  // read-only copy
+    force_type tag = paramvec[posinparamvec].WhichForce();
+    if (force_REGION_GAMMA == tag)
+    {
+        const RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+        if (!pRegionGammaInfo)
+        {
+            string msg = "ForceSummary::GetLowMult() attempted to get the low ";
+            msg += "multiplier value for force \"" + ToString(tag);
+            msg += ",\" but the necessary RegionGammaInfo was not found in the registry.";
+            throw implementation_error(msg);
+        }
+        return pRegionGammaInfo->GetLowMultiplier();
+    }
+    ForceVec::const_iterator fit = GetForceByTag(tag);
+    if (m_forcevec.end() == fit)
+    {
+        string msg = "ForceSummary::GetLowMult() failed to get a force object ";
+        msg += "corresponding to tag \"" + ToString(tag) + ".\"";
+        throw implementation_error(msg);
+    }
+    return (*fit)->GetLowMult();
+}
+
+//------------------------------------------------------------------------------------
+
+double ForceSummary::GetHighMult(long posinparamvec) const
+{
+    const ParamVector paramvec(true);  // read-only copy
+    force_type tag = paramvec[posinparamvec].WhichForce();
+    if (force_REGION_GAMMA == tag)
+    {
+        const RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+        if (!pRegionGammaInfo)
+        {
+            string msg = "ForceSummary::GetHighMult() attempted to get the high ";
+            msg += "multiplier value for force \"" + ToString(tag);
+            msg += ",\" but the necessary RegionGammaInfo was not found in the registry.";
+            throw implementation_error(msg);
+        }
+        return pRegionGammaInfo->GetHighMultiplier();
+    }
+    ForceVec::const_iterator fit = GetForceByTag(tag);
+    if (m_forcevec.end() == fit)
+    {
+        string msg = "ForceSummary::GetHighMult() failed to get a force object ";
+        msg += "corresponding to tag \"" + ToString(tag) + ".\"";
+        throw implementation_error(msg);
+    }
+    return (*fit)->GetHighMult();
+}
+
+//------------------------------------------------------------------------------------
+
+long ForceSummary::GetPartIndex(force_type forcename) const
+{
+    assert(dynamic_cast<PartitionForce*>(*GetForceByTag(forcename)));
+    return dynamic_cast<PartitionForce*>(*GetForceByTag(forcename))->
+        GetPartIndex();
+} // ForceSummary::GetPartIndex
+
+//------------------------------------------------------------------------------------
+
+TimeManager* ForceSummary::CreateTimeManager() const
+{
+    if (UsingStick())
+    {
+        if (HasGrowth())
+        {
+            if (HasSelection())
+            {
+                string msg = "ForceSummary::CreateTimeManager(), detected the ";
+                msg += "simultaneous presence of the growth and selection ";
+                msg += "forces.  If LAMARC has been updated to allow both of these ";
+                msg += "forces to act at the same time, then ";
+                msg += "ForceSummary::CreateTimeManager() needs to be updated ";
+                msg += "accordingly.";
+                throw implementation_error(msg);
+            }
+            return new ExpGrowStickTimeManager();
+        }
+
+        if (HasSelection())
+            return new SelectStickTimeManager();
+
+    }
+    else
+    {
+        if (HasGrowth())
+        {
+            if (HasSelection())
+            {
+                string msg = "ForceSummary::CreateTimeManager(), detected the ";
+                msg += "simultaneous presence of the growth and selection ";
+                msg += "forces.  If LAMARC has been updated to allow both of these ";
+                msg += "forces to act at the same time, then ";
+                msg += "ForceSummary::CreateTimeManager() needs to be updated ";
+                msg += "accordingly.";
+                throw implementation_error(msg);
+            }
+            return new ExpGrowTimeManager();
+        }
+
+        if (HasLogisticSelection())
+            return new LogSelectTimeManager();
+    }
+
+    return new ConstantTimeManager();
+
+} // ForceSummary::CreateTimeManager
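+
+#if 0 // Illustrative only, not upstream code: a minimal sketch of how a caller
+      // might consume the factory above.  The raw-pointer return suggests the
+      // caller takes ownership; that is an inference, not a documented
+      // guarantee.
+static void ExampleTimeManagerUse(const ForceSummary& fsum)
+{
+    // The factory picks the subclass matching the active stick, growth and
+    // selection forces.
+    TimeManager* timeman = fsum.CreateTimeManager();
+    // ... hand timeman to the tree-building machinery ...
+    delete timeman;
+}
+#endif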
+
+//------------------------------------------------------------------------------------
+// WARNING warning--butt ugly code!  maybe add new classes of
+// forces LocalPartitionForces and SexualForces (REC && CONVERSION)
+
+LongVec1d ForceSummary::GetSelectedSites() const
+{
+    bool foundsex(false);
+    ForceVec::const_iterator force;
+    for(force = m_forcevec.begin(); force != m_forcevec.end(); ++force)
+    {
+        if ((*force)->IsSexualForce())
+        {
+            foundsex = true;
+            break;
+        }
+    }
+
+    LongVec1d selsites;
+
+    if (foundsex)
+    {
+        for(force = m_forcevec.begin(); force != m_forcevec.end(); ++force)
+        {
+            if ((*force)->IsLocalPartitionForce())
+                selsites.push_back(dynamic_cast<LocalPartitionForce*>(*force)->GetLocalSite());
+        }
+    }
+
+    return selsites;
+
+} // ForceSummary::GetSelectedSites
+
+//------------------------------------------------------------------------------------
+
+bool ForceSummary::IsValidForceSummary() const
+{
+    // Some parameters must be present!
+    if (m_forcevec.size() == 0) return false;
+
+    // unsigned long npops = nparams;
+
+    unsigned long ncoal = 0, nmig = 0, ndisease = 0, nrec = 0, ngrow = 0, ngamma = 0;
+    unsigned long nlselect = 0, nepoch = 0;
+
+    if (!CheckForce(force_COAL)) return false;
+    ncoal = m_startParameters.GetGlobalThetas().size();
+
+    if (CheckForce(force_MIG))
+    {
+        nmig = m_startParameters.GetMigRates().size();
+        if (nmig < 2) return false;
+    }
+
+    if (CheckForce(force_DIVMIG))
+    {
+        nmig = m_startParameters.GetMigRates().size();
+        if (nmig < 2) return false;
+    }
+
+    if (CheckForce(force_DISEASE))
+    {
+        ndisease = m_startParameters.GetDiseaseRates().size();
+        if (ndisease < 2) return false;
+    }
+
+    // The number of migration and disease partitions is recovered below by
+    // taking the square root of nmig and ndisease (a full k-partition matrix
+    // stores k*k rates, e.g. 3 populations give nmig = 9, nmigparts = 3);
+    // when the corresponding force is absent the partition count defaults to 1.
+
+    if (CheckForce(force_DIVERGENCE))
+    {
+        nepoch = m_startParameters.GetEpochTimes().size();
+        unsigned long nmigparts = static_cast<long>(sqrt(static_cast<double>(nmig)));
+        if (nepoch != (nmigparts - 1)/2) return false;   // truncation okay
+    }
+
+    if (CheckForce(force_REC))
+    {
+        nrec = m_startParameters.GetRecRates().size();
+        if (nrec != 1) return false;
+        // Until we implement distinct recombination rates per population, at least.
+    }
+
+    if (CheckForce(force_GROW))
+    {
+        ngrow = m_startParameters.GetGrowthRates().size();
+        if (ngrow != ncoal) return false;
+    }
+
+    if (CheckForce(force_LOGISTICSELECTION))
+    {
+        nlselect = m_startParameters.GetLogisticSelectionCoefficient().size();
+        if (1 != nlselect) return false;
+    }
+
+    if (CheckForce(force_EXPGROWSTICK))
+    {
+        ngrow = m_startParameters.GetGrowthRates().size();
+        if (ngrow != ncoal) return false;
+    }
+
+    unsigned long nmigparts;
+    if (nmig)
+    {
+        nmigparts = static_cast<long>(sqrt(static_cast<double>(nmig)));
+        if (nmigparts*nmigparts != nmig) return false;
+    }
+    else nmigparts = 1;
+
+    unsigned long ndiseaseparts;
+    if (ndisease)
+    {
+        ndiseaseparts = static_cast<long>(sqrt(static_cast<double>(ndisease)));
+        if (ndiseaseparts*ndiseaseparts != ndisease) return false;
+    }
+    else ndiseaseparts = 1;
+
+    if (ncoal != nmigparts * ndiseaseparts) return false;
+
+    //Now check out a parameter vector and check to be sure the number of total
+    // parameters is accurate, and that each parameter's index value is right.
+    const ParamVector testvec(true);
+
+    if (testvec.size() != ncoal + nmig + ndisease + nrec + nepoch + ngrow + ngamma + nlselect)
+        return false;
+
+    for (unsigned long index = 0; index < testvec.size(); index++)
+    {
+        if (index != testvec[index].GetParamVecIndex()) return false;
+    }
+
+    // WARNING DEBUG should check validity of the parameterlist
+    // (but I don't know how) -- Mary
+
+    return true;
+} // IsValidForceSummary
+
+//------------------------------------------------------------------------------------
+
+void ForceSummary::ValidateForceParamOrBarf(const ForceParameters& fp)
+{
+    DoubleVec1d params;
+    vector<force_type> allforces;
+    allforces.push_back(force_COAL);
+    allforces.push_back(force_DISEASE);
+    allforces.push_back(force_REC);
+    allforces.push_back(force_GROW);
+    allforces.push_back(force_LOGISTICSELECTION);
+    allforces.push_back(force_DIVERGENCE);
+    if (CheckForce(force_DIVERGENCE))
+    {
+        allforces.push_back(force_DIVMIG);
+    }
+    else
+    {
+        allforces.push_back(force_MIG);
+    }
+    for (vector<force_type>::iterator force = allforces.begin();
+         force != allforces.end(); force++)
+    {
+        params = fp.GetGlobalParametersByTag((*force));
+        if (CheckForce((*force)))
+        {
+            //This force is active, and should have a non-zero-sized vector in the forceparameters object.
+            if (params.size() == 0)
+            {
+                string msg = "Error:  The force " + ToString((*force)) +
+                    " was turned on in this run of LAMARC, but\n" +
+                    " was not on in the LAMARC run that created this summary file.\n\n"
+                    + "Re-run LAMARC with " + ToString((*force)) +
+                    " turned off to accurately continue the old run.";
+                throw data_error(msg);
+            }
+        }
+        else
+        {
+            //This force is inactive, and should have a zero-sized vector in the forceparameters object.
+            if (params.size() > 0)
+            {
+                string msg = "Error:  The force " + ToString((*force)) +
+                    " was turned off in this run of LAMARC, but\n" +
+                    " was on in the LAMARC run that created this summary file.\n\n" +
+                    "Re-run LAMARC with " + ToString((*force)) +
+                    " turned on to accurately continue the old run.";
+                throw data_error(msg);
+            }
+        }
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/force/forcesummary.h b/src/force/forcesummary.h
new file mode 100644
index 0000000..e85dcd8
--- /dev/null
+++ b/src/force/forcesummary.h
@@ -0,0 +1,217 @@
+// $Id: forcesummary.h,v 1.34 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/********************************************************************
+ ForceSummary is the governing object for polymorphism along force lines.
+ It creates the basic tree and manages other force-specific objects.
+
+ It is not polymorphic along force lines, but it contains forcevec, a
+ vector of pointers to Force objects, which are polymorphic and
+ can be used as a run-down of which forces are in effect in the program.
+ This information is used in maximization and in output reporting.
+
+ ForceSummary owns the things that its forcevec points to, and will
+ delete them in its destructor.  No one else deletes these objects!
+ When a ForceSummary is copied, a new set of Force objects is
+ created for it to point to.
+
+ ForceSummary is also the factory for TimeManager objects (see force.h).
+
+ Written by Jim Sloan, heavily revised by Mary Kuhner
+
+Jon Yamato:
+ added SummarizeData(datapack)
+Peter Beerli:
+ added parameterlist class variable and
+ several Getters and Setters for it (8/30/01)
+Mary Kuhner:
+ took them out again, created the ParamVector class to remove
+ the need for them (October 2001)
+********************************************************************/
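+
+// Illustrative sketch (not upstream documentation): typical read-only use of
+// the singleton ForceSummary through the registry, using only calls declared
+// below.
+//
+//   const ForceSummary& fsum = registry.GetForceSummary();
+//   if (fsum.CheckForce(force_MIG))
+//   {
+//       long nmig = fsum.GetNParameters(force_MIG);
+//   }
+//   const ForceVec& forces = fsum.GetAllForces();  // owned by ForceSummary;
+//                                                  // never delete the entries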
+
+#ifndef FORCESUMMARY_H
+#define FORCESUMMARY_H
+
+#include <cassert>  // May be needed for inline definitions.
+#include <map>
+#include <stdlib.h>
+#include <string>
+#include <vector>
+
+#include "constants.h"
+#include "defaults.h"
+#include "forceparam.h"
+#include "types.h"
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+class Random;
+class Force;
+class Tree;
+class TreeSummary;
+class ChainPack;
+class DataPack;
+class Parameter;
+class ParamVector;
+class ChainOut;
+class Summary;
+class UIVars;
+class StickForce;
+class Event;    // for CreateEventVec(), we're the callthrough factory
+class TimeManager;
+
+//------------------------------------------------------------------------------------
+
+class ForceSummary
+{
+  private:
+
+    ForceVec m_forcevec;  // contains one instance of each active force
+
+    ForceParameters m_startParameters; // Starting force parameter values.
+
+    DoubleVec1d m_llikemles;          // likelihoods at the MLEs per region.
+    double      m_overallLlikemle;
+
+    // A vector of indices for the groups which are constrained to be identical
+    ULongVec2d m_identicalGroups;
+
+    // A vector of indices for the groups which are constrained to
+    // be multiplicative
+    ULongVec2d m_multiplicativeGroups;
+
+    // The multiplier of each group in the same order as
+    // m_multiplicativeGroups.
+    DoubleVec1d m_multgroupmultiplier;
+
+    // mechanism for accessing the Parameters as a linearized vector using
+    // a check-in/check-out scheme
+    friend class ParamVector;
+    std::vector<Parameter>   GetAllParameters();
+    void                     SetAllParameters(const std::vector<Parameter>& src);
+
+    // There should only be one ForceSummary object
+    ForceSummary(const ForceSummary& src);              // undefined
+    ForceSummary& operator=(const ForceSummary& src);   // undefined
+
+
+    ULongVec1d GetParamsIdenticalGroupedWith(unsigned long pindex) const;
+    ULongVec1d GetParamsMultiplicativelyGroupedWith(unsigned long pindex) const;
+    double     GetGroupMultiplier(unsigned long pindex) const;
+
+  public:
+
+    ForceSummary(ForceVec fvec, ForceParameters fparams, DataPack& dpac);
+    ~ForceSummary();
+
+    // is this force active?
+    bool          CheckForce(force_type tag)   const;
+
+    // Posterior output setup
+    void          SetupPosteriors(const ChainPack& chpack,
+                                  const DataPack& dpack) const;
+
+    // Factory Functions
+    Tree*          CreateProtoTree() const;
+    TreeSummary*   CreateProtoTreeSummary() const;
+    vector<Event*> CreateEventVec() const;  // used by ResimArranger::ctor
+
+    // Many Set Functions removed because now that we have separation between front and back end,
+    // we don't have to turn any forces on or off after the forcesummary is created.
+    // ewalkup 8/17/2004
+    void SetRegionMLEs(const ChainOut& chout, long region);
+    void SetOverallMLE(const ChainOut& chout);
+
+    bool  IsMissingForce(force_type tag) const;
+    bool  AnyForcesOtherThanGrowCoal() const;
+    bool  UsingStick() const;
+    bool  HasGrowth() const { return CheckForce(force_GROW) || CheckForce(force_EXPGROWSTICK); };
+    bool  HasSelection() const { return CheckForce(force_LOGISTICSELECTION) || CheckForce(force_LOGSELECTSTICK); };
+    bool  HasLogisticSelection() const { return CheckForce(force_LOGISTICSELECTION); };
+    // Get Functions
+    long        GetNForces()       const { return m_forcevec.size(); };
+    long        GetNLocalPartitionForces() const;
+    ForceVec::const_iterator GetForceByTag(force_type tag) const;
+    const StickForce& GetStickForce() const;
+    LongVec1d GetLocalPartitionIndexes() const;
+    LongVec1d GetNonLocalPartitionIndexes() const;
+    force_type GetNonLocalPartitionForceTag() const;
+
+    long        GetMaxEvents(force_type tag) const;
+    MethodTypeVec1d GetMethods(force_type tag) const;
+    const std::vector<Epoch>* GetEpochs() const;
+
+    StringVec1d ToXML(unsigned long nspaces) const;
+
+    const ForceParameters& GetStartParameters() const { return m_startParameters; };
+
+    bool ConstrainParameterValues(ForceParameters& fp) const;
+
+    double GetLlikeMle(long region) const
+    {
+        assert(static_cast<unsigned>(region) < m_llikemles.size());
+        return m_llikemles[region];
+    };
+
+    double GetOverallLlikeMle() const
+    {
+        return (m_llikemles.size() == 1) ? m_llikemles[0] : m_overallLlikemle;
+    };
+
+    // It is a ghastly error to call the next function unless the underlying model actually is 2D
+    // (and square).  It will throw an exception if you try.
+
+    MethodTypeVec2d  Get2DMethods(force_type tag) const;
+
+    const ForceVec&    GetAllForces()  const  { return m_forcevec; };
+    vector<long>       GetForceSizes() const;
+    vector<force_type> GetForceTags()  const;
+    ULongVec2d       GetIdenticalGroupedParams() const { return m_identicalGroups; };
+    ULongVec2d       GetMultiplicativeGroupedParams() const { return m_multiplicativeGroups; };
+    void   SetParamWithConstraints(long pindex, double newval, DoubleVec1d& pvec, bool islog) const;
+
+    const ForceVec   GetPartitionForces() const;
+    const ForceVec   GetLocalPartitionForces() const;
+
+    long             GetNParameters(force_type tag) const;
+
+    long             GetAllNParameters() const;
+    DoubleVec1d      GetModifiers(long posinparamvec) const;
+    DoubleVec1d      GetModifiersByForce(force_type tag) const;
+    proftype         GetOverallProfileType() const;
+
+    double GetLowParameter(long posinparamvec) const;
+    double GetHighParameter(long posinparamvec) const;
+    double GetHighMult(long posinparamvec) const;
+    double GetLowMult(long posinparamvec) const;
+
+    // If you need to check out a set of Parameters as a linearized vector, declare a variable of type
+    // ParamVector.  That constructor uses the singleton ForceSummary object from the registry and can
+    // give you a read-only or read-write version of the parameters.  See parameter.h
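+    //
+    // For example (sketch only; these accessors are the ones exercised in
+    // forcesummary.cpp, see parameter.h for the full interface):
+    //
+    //   const ParamVector pvec(true);              // read-only check-out
+    //   for (unsigned long i = 0; i < pvec.size(); ++i)
+    //       if (pvec[i].IsValidParameter() && pvec[i].IsForce(force_GROW))
+    //           { /* inspect pvec[i].GetStatus(), GetProfileType(), ... */ }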
+
+    // GetPartIndex() is a call through function to handle dynamic
+    // casting
+    long             GetPartIndex(force_type forcename) const;
+
+    // Used by the code that constructs proto-ranges
+    LongVec1d        GetSelectedSites() const;
+
+    TimeManager*     CreateTimeManager() const;
+
+    // Validation
+    bool IsValidForceSummary() const;
+    // This is used in summary file reading, to make sure we are
+    // reading a file created with the same forces as we now have.
+    void ValidateForceParamOrBarf(const ForceParameters& fp);
+};
+
+#endif // FORCESUMMARY_H
+
+//____________________________________________________________________________________
diff --git a/src/force/paramstat.cpp b/src/force/paramstat.cpp
new file mode 100644
index 0000000..b91ed10
--- /dev/null
+++ b/src/force/paramstat.cpp
@@ -0,0 +1,232 @@
+// $Id: paramstat.cpp,v 1.12 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "paramstat.h"
+#include "vectorx.h"
+
+//-------------------------------------------------------------------------------------
+
+ParamStatus::ParamStatus(pstatus mytype)
+    : m_impl(CreateParamStatus(mytype))
+{
+    // deliberately blank
+}
+
+//-------------------------------------------------------------------------------------
+
+string InvalidStatus::ConstraintDescription(long groupindex) const
+{
+    if (groupindex != FLAGLONG) return "be held at zero and ignored in output)";
+    return " (held at zero and ignored in output)";
+} // ConstraintDescription
+
+//-------------------------------------------------------------------------------------
+
+string UnconstrainedStatus::ConstraintDescription(long groupindex) const
+{
+    if (groupindex != FLAGLONG)
+    {
+        assert(false); //Groups should not be able to set 'unconstrained'
+        return "start at the same value, then vary freely)";
+    }
+    // return " (unconstrained)";
+    // That took too much space, but we could add it back if needed.
+    return "";
+} // ConstraintDescription
+
+//-------------------------------------------------------------------------------------
+
+string ConstantStatus::ConstraintDescription(long groupindex) const
+{
+    if (groupindex != FLAGLONG) return "stay at the same constant value)";
+    return " (held constant)";
+} // ConstraintDescription
+
+//-------------------------------------------------------------------------------------
+
+string ConstantStatus::ToggleIndividualStatus(force_type ftype) const
+{
+    if (ftype == force_COAL || ftype == force_REGION_GAMMA) return ToString(pstat_unconstrained);
+    else return ToString(pstat_invalid);
+} // ToggleIndividualStatus
+
+//-------------------------------------------------------------------------------------
+
+string ConstantStatus::ToggleGroupStatus(force_type ftype) const
+{
+    if (ftype == force_COAL) return ToString(pstat_identical);
+    else return ToString(pstat_invalid);
+} // ToggleGroupStatus
+
+//-------------------------------------------------------------------------------------
+
+string IdenticalStatus::ConstraintDescription(long) const
+{
+    return "be identical)";
+} // ConstraintDescription
+
+//-------------------------------------------------------------------------------------
+
+string IdenticalHeadStatus::ConstraintDescription(long) const
+{
+    return "be identical)";
+} // ConstraintDescription
+
+//-------------------------------------------------------------------------------------
+
+void IdenticalHeadStatus::SetWithConstraints(long pindex, double newval,
+                                             DoubleVec1d& pvecnums,
+                                             const ULongVec1d& groupmembers,
+                                             double) const
+{
+    // groupmembers holds the positions of the grouped parameters within
+    // pvecnums; every member of the group gets the new value.
+    unsigned long i;
+    for (i = 0; i < groupmembers.size(); ++i)
+    {
+        pvecnums[groupmembers[i]] = newval;
+    }
+} //SetWithConstraints
+
+//-------------------------------------------------------------------------------------
+
+string MultiplicativeStatus::ConstraintDescription(long) const
+{
+    return "have a constant ratio)";
+} // ConstraintDescription
+
+//-------------------------------------------------------------------------------------
+
+string MultiplicativeHeadStatus::ConstraintDescription(long) const
+{
+    return "have a constant ratio)";
+} // ConstraintDescription
+
+//-------------------------------------------------------------------------------------
+
+void MultiplicativeHeadStatus::SetWithConstraints(long pindex, double newval,
+                                                  DoubleVec1d& pvecnums,
+                                                  const ULongVec1d& groupmembers,
+                                                  double multiplier) const
+{
+    unsigned long i;
+
+    // NB This code, trickily, sets the first member of the group without
+    // using the multiplier, and then sets the rest (currently there must be
+    // no more than one of them) using the value * multiplier.
+    // Calling code must log the multiplier if newval is a log!
+
+    pvecnums[pindex] = newval;   // the head of the group
+    for (i = 1; i < groupmembers.size(); ++i)
+    {
+        // the remaining group members live at the positions listed in
+        // groupmembers and get the value scaled by the group multiplier
+        pvecnums[groupmembers[i]] = newval * multiplier;
+    }
+} //SetWithConstraints
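+
+// Example of the intended semantics (illustrative numbers, not upstream
+// documentation): with groupmembers = {3, 7}, newval = 0.01 and
+// multiplier = 3.0, parameter 3 (the head) becomes 0.01 and parameter 7
+// becomes 0.03.  When the caller works with log parameters it passes
+// log(multiplier) instead (see ForceSummary::SetParamWithConstraints).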
+
+//-------------------------------------------------------------------------------------
+
+#if 0 // MREMOVE
+string EpochTimeStatus::ConstraintDescription(long) const
+{
+    return ""; //yes, we return nothing, the case is similar to 'unconstrained'
+} // ConstraintDescription
+#endif
+
+//-------------------------------------------------------------------------------------
+
+ParamStatusImpl* ParamStatus::CreateParamStatus(pstatus mytype)
+{
+    switch (mytype)
+    {
+        case pstat_invalid:
+            return new InvalidStatus;
+        case pstat_unconstrained:
+            return new UnconstrainedStatus;
+        case pstat_constant:
+            return new ConstantStatus;
+        case pstat_identical:
+            return  new IdenticalStatus;
+        case pstat_identical_head:
+            return  new IdenticalHeadStatus;
+        case pstat_multiplicative:
+            return  new MultiplicativeStatus;
+        case pstat_multiplicative_head:
+            return  new MultiplicativeHeadStatus;
+#if 0 // MREMOVE
+        case pstat_epochtime:
+            return  new EpochTimeStatus;
+#endif
+        default:
+            assert(false);  // unknown parameter status
+            break;
+    }
+    assert(false);
+    return new InvalidStatus;  // this line can't be reached
+}
+
+//-------------------------------------------------------------------------------------
+
+ParamStatus::ParamStatus(const ParamStatus& src)
+    : m_impl(CreateParamStatus(src.Status()))
+{
+    // deliberately blank
+}
+
+//-------------------------------------------------------------------------------------
+
+ParamStatus& ParamStatus::operator=(const ParamStatus& src)
+{
+    delete m_impl;
+    m_impl = CreateParamStatus(src.Status());
+    return *this;
+}
+
+//-------------------------------------------------------------------------------------
+
+// These can't be in the header due to circularity
+pstatus ParamStatus::Status() const
+{ return m_impl->Status(); }
+
+bool ParamStatus::Valid() const
+{ return m_impl->Valid(); };
+
+bool ParamStatus::Inferred() const
+{ return m_impl->Inferred(); };
+
+string ParamStatus::ConstraintDescription(long groupindex) const
+{ return m_impl->ConstraintDescription(groupindex); };
+
+bool ParamStatus::Varies() const
+{ return m_impl->Varies(); };
+
+bool ParamStatus::Grouped() const
+{ return m_impl->Grouped(); };
+
+void ParamStatus::SetWithConstraints(long pindex, double newval,
+                                     vector<double>& pvecnums,
+                                     const vector<unsigned long>& groupmembers,
+                                     double multiplier) const
+{ return m_impl->SetWithConstraints(pindex, newval, pvecnums, groupmembers,
+                                    multiplier); };
+
+string ParamStatus::ToggleIndividualStatus(force_type ftype) const
+{ return ToString(m_impl->ToggleIndividualStatus(ftype)); };
+
+string ParamStatus::ToggleGroupStatus(force_type ftype) const
+{ return ToString(m_impl->ToggleGroupStatus(ftype)); };
+
+//-------------------------------------------------------------------------------------
+
+ParamStatus::~ParamStatus()
+{
+    delete m_impl;
+}
+
+//_____________________________________________________________________________________
diff --git a/src/force/paramstat.h b/src/force/paramstat.h
new file mode 100644
index 0000000..2cf817b
--- /dev/null
+++ b/src/force/paramstat.h
@@ -0,0 +1,256 @@
+// $Id: paramstat.h,v 1.9 2011/04/23 02:02:49 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef PARAMSTAT_H
+#define PARAMSTAT_H
+
+#include <cassert>
+#include <string>
+#include <vector>
+
+#include "constants.h"
+#include "stringx.h"
+
+//-------------------------------------------------------------------------------------
+
+// We want this class to be polymorphic, because it has a lot of subtypes with
+// different behavior.  But, we want it not to be polymorphic because it has to
+// go in vectors, and pointers/smart pointers are a pain.  So, it is a non-
+// poly class which holds a poly one inside; no one else need care about that.
+// ParamStatusImpl is the inside, hidden class.
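+//
+// For instance (illustrative only; 'statuses' is a hypothetical local):
+//
+//   std::vector<ParamStatus> statuses;
+//   statuses.push_back(ParamStatus(pstat_constant));
+//   statuses.push_back(ParamStatus(pstat_identical_head));
+//   bool varies = statuses[1].Varies();  // dispatches through the hidden
+//                                        // ParamStatusImpl subobject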
+
+class ParamStatus;
+class ParamStatusImpl;
+typedef std::pair<ParamStatus, std::vector<long> > ParamGroup;
+
+class ParamStatus
+{
+  private:
+    // owning pointer!
+    ParamStatusImpl* m_impl;
+
+    ParamStatusImpl* CreateParamStatus(pstatus mytype);
+
+  public:
+    // construction etc.
+    ParamStatus(pstatus mytype);
+    ParamStatus(const ParamStatus& src);
+    ParamStatus& operator=(const ParamStatus& src);
+    ~ParamStatus();
+
+    // queries
+    pstatus Status() const;
+    bool Valid() const;
+    bool Inferred() const;
+    std::string ConstraintDescription(long groupindex) const;
+    bool Varies() const;
+    bool Grouped() const;
+
+    // setters
+    void SetWithConstraints(long pindex, double newval,
+                            std::vector<double>& pvecnums, const std::vector<unsigned long>& groupmembers,
+                            double multiplier) const;
+    std::string ToggleIndividualStatus(force_type ftype) const;
+    std::string ToggleGroupStatus(force_type ftype) const;
+};
+
+//-------------------------------------------------------------------------------------
+
+class ParamStatusImpl
+{
+  public:
+    virtual ~ParamStatusImpl() {};
+    virtual pstatus Status() const = 0;
+    virtual bool Valid() const = 0;
+    virtual bool Inferred() const = 0;
+    virtual std::string ConstraintDescription(long groupindex) const = 0;
+    virtual bool Varies() const = 0;
+    virtual bool Grouped() const = 0;
+    virtual void SetWithConstraints(long pindex, double newval,
+                                    std::vector<double>& pvecnums, const std::vector<unsigned long>& groupmembers,
+                                    double multiplier) const = 0;
+    virtual std::string ToggleIndividualStatus(force_type ftype) const = 0;
+    virtual std::string ToggleGroupStatus(force_type ftype) const = 0;
+};
+
+//-------------------------------------------------------------------------------------
+
+class InvalidStatus: public ParamStatusImpl
+{
+  public:
+    virtual ~InvalidStatus() {};
+    virtual pstatus Status() const { return pstat_invalid; };
+    virtual bool Valid() const { return false; };
+    virtual bool Inferred() const { return false; };
+    virtual std::string ConstraintDescription(long groupindex) const;
+    virtual bool Varies() const { return false; };
+    virtual bool Grouped() const { return false; };
+
+    virtual void SetWithConstraints(long, double, std::vector<double>&, const std::vector<unsigned long>&, double) const
+    { assert(false); };
+
+    virtual std::string ToggleIndividualStatus(force_type ftype) const { return ToString(pstat_unconstrained); };
+    virtual std::string ToggleGroupStatus(force_type ftype) const { return ToString(pstat_identical); };
+};
+
+//-------------------------------------------------------------------------------------
+
+class UnconstrainedStatus: public ParamStatusImpl
+{
+  public:
+    virtual ~UnconstrainedStatus() {};
+    virtual pstatus Status() const { return pstat_unconstrained; };
+    virtual bool Valid() const { return true; };
+    virtual bool Inferred() const { return true; };
+    virtual std::string ConstraintDescription(long groupindex) const;
+    virtual bool Varies() const { return true; };
+    virtual bool Grouped() const { return false; };
+
+    virtual void SetWithConstraints(long pindex, double newval,
+                                    std::vector<double>& pvecnums,
+                                    const std::vector<unsigned long>&, double) const
+    { pvecnums[pindex] = newval; };
+
+    virtual std::string ToggleIndividualStatus(force_type ftype) const { return ToString(pstat_constant); };
+    virtual std::string ToggleGroupStatus(force_type ftype) const { return ToString(pstat_constant); };
+};
+
+//-------------------------------------------------------------------------------------
+
+class ConstantStatus: public ParamStatusImpl
+{
+  public:
+    virtual ~ConstantStatus() {};
+    virtual pstatus Status() const { return pstat_constant; };
+    virtual bool Valid() const { return true; };
+    virtual bool Inferred() const { return false; };
+    virtual std::string ConstraintDescription(long groupindex) const;
+    virtual bool Varies() const { return false; };
+    virtual bool Grouped() const { return false; };
+
+    virtual void SetWithConstraints(long, double, std::vector<double>&,
+                                    const std::vector<unsigned long>&, double) const
+    { assert(false); };
+
+    virtual std::string ToggleIndividualStatus(force_type ftype) const;
+    virtual std::string ToggleGroupStatus(force_type ftype) const;
+};
+
+//-------------------------------------------------------------------------------------
+
+class IdenticalStatus: public ParamStatusImpl
+{
+  public:
+    virtual ~IdenticalStatus() {};
+    virtual pstatus Status() const { return pstat_identical; };
+    virtual bool Valid() const { return true; };
+    virtual bool Inferred() const { return false; };
+    virtual std::string ConstraintDescription(long groupindex) const;
+    virtual bool Varies() const { return true; };
+    virtual bool Grouped() const { return true; };
+
+    virtual void SetWithConstraints(long, double, std::vector<double>&,
+                                    const std::vector<unsigned long>&, double) const
+    { assert(false); };
+
+    virtual std::string ToggleIndividualStatus(force_type ftype) const { return ToString(pstat_unconstrained); };
+    virtual std::string ToggleGroupStatus(force_type ftype) const { return ToString(pstat_constant); };
+};
+
+//-------------------------------------------------------------------------------------
+
+class IdenticalHeadStatus: public ParamStatusImpl
+{
+  public:
+    virtual ~IdenticalHeadStatus() {};
+    virtual pstatus Status() const { return pstat_identical_head; };
+    virtual bool Valid() const { return true; };
+    virtual bool Inferred() const { return true; };
+    virtual std::string ConstraintDescription(long groupindex) const;
+    virtual bool Varies() const { return true; };
+    virtual bool Grouped() const { return true; };
+    virtual void SetWithConstraints(long pindex, double newval, std::vector<double>& pvecnums,
+                                    const std::vector<unsigned long>& groupmembers, double) const;
+    virtual std::string ToggleIndividualStatus(force_type ftype) const { return ToString(pstat_unconstrained); };
+    virtual std::string ToggleGroupStatus(force_type ftype) const { return ToString(pstat_constant); };
+};
+
+//-------------------------------------------------------------------------------------
+
+class MultiplicativeStatus: public ParamStatusImpl
+{
+  public:
+    virtual ~MultiplicativeStatus() {};
+    virtual pstatus Status() const { return pstat_multiplicative; };
+    virtual bool Valid() const { return true; };
+    virtual bool Inferred() const { return false; };
+    virtual std::string ConstraintDescription(long groupindex) const;
+    virtual bool Varies() const { return true; };
+    virtual bool Grouped() const { return true; };
+
+    virtual void SetWithConstraints(long, double, std::vector<double>&, const std::vector<unsigned long>&, double) const
+    { assert(false); };
+
+    virtual std::string ToggleIndividualStatus(force_type ftype) const { return ToString(pstat_unconstrained); };
+    virtual std::string ToggleGroupStatus(force_type ftype) const { return ToString(pstat_constant); };
+};
+
+//-------------------------------------------------------------------------------------
+
+class MultiplicativeHeadStatus: public ParamStatusImpl
+{
+  public:
+    virtual ~MultiplicativeHeadStatus() {};
+    virtual pstatus Status() const { return pstat_multiplicative_head; };
+    virtual bool Valid() const { return true; };
+    virtual bool Inferred() const { return true; };
+    virtual std::string ConstraintDescription(long groupindex) const;
+    virtual bool Varies() const { return true; };
+    virtual bool Grouped() const { return true; };
+    virtual void SetWithConstraints(long pindex, double newval, std::vector<double>& pvecnums,
+                                    const std::vector<unsigned long>& groupmembers, double multiplier) const;
+    virtual std::string ToggleIndividualStatus(force_type ftype) const { return ToString(pstat_unconstrained); };
+    virtual std::string ToggleGroupStatus(force_type ftype) const { return ToString(pstat_constant); };
+};
+
+//-------------------------------------------------------------------------------------
+
+#if 0
+class EpochTimeStatus: public ParamStatusImpl
+{
+  public:
+    virtual ~EpochTimeStatus() {};
+    virtual pstatus Status() const { return pstat_epochtime; };
+    virtual bool Valid() const { return true; };
+    virtual bool Inferred() const { return true; };
+    virtual std::string ConstraintDescription(long groupindex) const;
+    virtual bool Varies() const { return true; };
+    virtual bool Grouped() const { return false; };
+
+    virtual void SetWithConstraints(long pindex, double newval, std::vector<double>& pvecnums,
+                                    const std::vector<unsigned long>&, double) const
+    { pvecnums[pindex] = newval; };
+
+    // currently no toggles possible for epoch times
+    virtual std::string ToggleIndividualStatus(force_type ftype) const
+    {   assert(false);
+        return ToString(pstat_invalid); };
+
+    virtual std::string ToggleGroupStatus(force_type ftype) const
+    {   assert(false);
+        return ToString(pstat_invalid); };
+};
+#endif
+
+//-------------------------------------------------------------------------------------
+
+#endif // PARAMSTAT_H
+
+//_____________________________________________________________________________________
diff --git a/src/force/priorreport.cpp b/src/force/priorreport.cpp
new file mode 100644
index 0000000..bdfea12
--- /dev/null
+++ b/src/force/priorreport.cpp
@@ -0,0 +1,69 @@
+// $Id: priorreport.cpp,v 1.7 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>
+#include <algorithm>                    // for std::find
+
+#include "priorreport.h"
+#include "force.h"
+#include "parameter.h"
+#include "prior.h"
+
+//------------------------------------------------------------------------------------
+
+PriorReport::PriorReport(const Force& theforce) :
+    m_forcename(theforce.GetFullName())
+{
+    const vector<Parameter>& params(theforce.GetParameters());
+
+    vector<Parameter>::const_iterator param;
+    for(param = params.begin(); param != params.end(); ++param)
+    {
+        if (!(param->IsValidParameter())) continue;
+        if (param->GetStatus().Status() == pstat_constant) continue;
+
+        vector<Prior>::iterator found(std::find(m_priors.begin(), m_priors.end(), param->GetPrior()));
+        if (found != m_priors.end())
+            m_whichparams[std::distance(m_priors.begin(),found)].
+                push_back(param->GetUserName());
+        else
+        {
+            StringVec1d newname(1UL,param->GetUserName());
+            m_whichparams.push_back(newname);
+            m_priors.push_back(param->GetPrior());
+        }
+    }
+} // PriorReport ctor
+
+//------------------------------------------------------------------------------------
+
+void PriorReport::WriteTo(std::ofstream& outf) const
+{
+    assert(m_whichparams.size() == m_priors.size());
+
+    outf << m_forcename << " Priors" << std::endl;
+    outf << "parameter(s)   type   bounds" << std::endl;
+    unsigned long ind, nind(m_whichparams.size());
+    for(ind = 0; ind < nind; ++ind)
+    {
+        string line(m_whichparams[ind][0]);
+        line += " | " + ToString(m_priors[ind].GetPriorType());
+        line += " | " + ToString(m_priors[ind].GetLowerBound());
+        line += " to " + ToString(m_priors[ind].GetUpperBound());
+        outf << line << std::endl;
+        StringVec1d::const_iterator addtlname(m_whichparams[ind].begin());
+        for(++addtlname; addtlname != m_whichparams[ind].end(); ++addtlname)
+            outf << *addtlname << " -- as above" << std::endl;
+        outf << std::endl;
+    }
+} // PriorReport::WriteTo
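+
+// Layout sketch (hypothetical names and bounds, shown only to illustrate the
+// formatting produced by WriteTo above):
+//
+//   Coalescence Priors
+//   parameter(s)   type   bounds
+//   Theta1 | linear | 1e-05 to 10
+//   Theta2 -- as above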
+
+//____________________________________________________________________________________
diff --git a/src/force/priorreport.h b/src/force/priorreport.h
new file mode 100644
index 0000000..dc96011
--- /dev/null
+++ b/src/force/priorreport.h
@@ -0,0 +1,51 @@
+// $Id: priorreport.h,v 1.3 2011/03/07 06:08:49 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/********************************************************************
+ PriorReport is a collection class that creates an output report on
+ the priors used during a Lamarc run.
+
+ Currently the report is written directly to the output file stream by
+ WriteTo(), one block of lines per distinct prior.
+
+ Written by Jon Yamato
+********************************************************************/
+
+#ifndef PRIORREPORT_H
+#define PRIORREPORT_H
+
+#include "vectorx.h"
+#include "constants.h"
+#include "defaults.h"
+
+class Force;
+class Prior;
+
+class PriorReport
+{
+  private:
+    PriorReport(); //undefined
+
+    std::string m_forcename;
+
+    // all dim: # unique priors for force
+    StringVec2d m_whichparams;
+    vector<Prior> m_priors;
+
+  public:
+    // we accept default copy-ctor and operator=
+    PriorReport(const Force& theforce);
+
+    void WriteTo(std::ofstream& out) const;
+};
+
+#endif // PRIORREPORT_H
+
+//____________________________________________________________________________________
diff --git a/src/force/stair.cpp b/src/force/stair.cpp
new file mode 100644
index 0000000..f654218
--- /dev/null
+++ b/src/force/stair.cpp
@@ -0,0 +1,71 @@
+// $Id: stair.cpp,v 1.3 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "stair.h"
+
+//------------------------------------------------------------------------------------
+
+StairRiser::StairRiser(const DoubleVec1d& thetas, double tiptime,
+                       double roottime) : m_thetas(thetas), m_tiptime(tiptime),
+                                          m_roottime(roottime)
+{
+    // deliberately blank
+} // StairRiser ctor
+
+//------------------------------------------------------------------------------------
+
+bool StairRiser::operator<(const StairRiser& other) const
+{
+    return (m_tiptime < other.m_tiptime);
+} // StairRiser::operator<
+
+//------------------------------------------------------------------------------------
+
+void StairRiser::SetThetas(const DoubleVec1d& thetas)
+{
+    m_thetas = thetas;
+} // StairRiser::SetThetas
+
+//------------------------------------------------------------------------------------
+
+void StairRiser::SetTipendTime(double newtime)
+{
+    m_tiptime = newtime;
+} // StairRiser::SetTipendTime
+
+//------------------------------------------------------------------------------------
+
+void StairRiser::SetRootendTime(double newtime)
+{
+    m_roottime = newtime;
+} // StairRiser::SetRootendTime
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d StairRiser::GetThetas() const
+{
+    return m_thetas;
+} // StairRiser::GetThetas
+
+//------------------------------------------------------------------------------------
+
+double StairRiser::GetTipendTime() const
+{
+    return m_tiptime;
+} // StairRiser::GetTipendTime
+
+//------------------------------------------------------------------------------------
+
+double StairRiser::GetRootendTime() const
+{
+    return m_roottime;
+} // StairRiser::GetRootendTime
+
+//____________________________________________________________________________________
diff --git a/src/force/stair.h b/src/force/stair.h
new file mode 100644
index 0000000..8252ca9
--- /dev/null
+++ b/src/force/stair.h
@@ -0,0 +1,49 @@
+// $Id: stair.h,v 1.4 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef STAIR_H
+#define STAIR_H
+
+#include <list>
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+class StairRiser
+{
+  public:
+    StairRiser(const DoubleVec1d& thetas, double tiptime, double roottime);
+    // we accept the default copy ctor, operator= and dtor.
+    bool operator<(const StairRiser& other) const;
+
+    void SetThetas(const DoubleVec1d& thetas);
+    void SetTipendTime(double newtime);
+    void SetRootendTime(double newtime);
+
+    DoubleVec1d GetThetas() const;
+    double GetTipendTime() const;
+    double GetRootendTime() const;
+
+  private:
+    DoubleVec1d m_thetas;
+    double m_tiptime, m_roottime; // times at the tipward and rootward edge of
+    // a riser.
+
+    StairRiser();                 // the default ctor is disabled.
+
+};
+
+//------------------------------------------------------------------------------------
+
+typedef std::list<StairRiser>     stair;
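+
+// Illustrative only (hypothetical values): operator< above lets callers keep a
+// stair ordered from tipward to rootward by riser start time, e.g.
+//
+//   stair mystick;
+//   mystick.push_back(StairRiser(thetas, 0.5, 1.2));
+//   mystick.push_back(StairRiser(thetas, 0.0, 0.5));
+//   mystick.sort();   // now ordered 0.0-0.5, then 0.5-1.2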
+
+#endif // STAIR_H
+
+//____________________________________________________________________________________
diff --git a/src/force/timemanager.cpp b/src/force/timemanager.cpp
new file mode 100644
index 0000000..6246476
--- /dev/null
+++ b/src/force/timemanager.cpp
@@ -0,0 +1,2194 @@
+// $Id: timemanager.cpp,v 1.15 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ */
+
+#include <cassert>
+#include <iostream>       // for debug PrintStick function
+#include <iterator>       // for ostream_iterator in debug Print fns
+#include <limits>         // for epsilon and numeric_limits
+
+#include "timemanager.h"
+#include "forceparam.h"   // parameter handling
+#include "random.h"       // random choice of times
+#include "constants.h"    // DBL_MAX
+#include "definitions.h"  // typedefs
+
+#include "registry.h"     // to get ForceSummary
+#include "force.h"        // Force-specific partition handling
+#include "mathx.h"        // for SafeProductWithExp
+#include "treesum.h"      // for ScoreStick!
+
+//------------------------------------------------------------------------------------
+// TimeManager
+
+TimeManager::TimeManager()
+    : randomSource(registry.GetRandom())
+{
+    // deliberately blank
+} // TimeManager default ctor
+
+//------------------------------------------------------------------------------------
+
+TimeManager::TimeManager(const TimeManager& src)
+    : randomSource(src.randomSource)
+{
+    // deliberately blank
+} // TimeManager copy ctor
+
+//------------------------------------------------------------------------------------
+
+TimeManager& TimeManager::operator=(const TimeManager& src)
+{
+    // it is not necessary to copy randomSource, as it should
+    // already be correct
+    return *this;
+} // TimeManager operator=
+
+//------------------------------------------------------------------------------------
+
+void TimeManager::MakeStickUsingBranches(const ForceParameters&,
+                                         const std::vector<std::pair<double,LongVec1d> >&)
+{
+    // we do nothing since there is no stick
+} // TimeManager::MakeStickUsingBranches
+
+//------------------------------------------------------------------------------------
+
+void TimeManager::MakeStickTilTime(const ForceParameters&, double)
+{
+    // we do nothing since there is no stick
+} // TimeManager::MakeStickTilTime
+
+//------------------------------------------------------------------------------------
+
+bool TimeManager::UsingStick() const
+{
+    return false;
+} // TimeManager::UsingStick
+
+//------------------------------------------------------------------------------------
+
+void TimeManager::CopyStick(const TimeManager&)
+{
+    // we do nothing since there is no stick
+} // TimeManager::CopyStick
+
+//------------------------------------------------------------------------------------
+
+void TimeManager::ClearStick()
+{
+    // we do nothing since there is no stick
+} // TimeManager::ClearStick
+
+//------------------------------------------------------------------------------------
+
+void TimeManager::SetStickParameters(const ForceParameters&)
+{
+    // we do nothing since there is no stick
+} // TimeManager::SetStickParameters
+
+//------------------------------------------------------------------------------------
+
+void TimeManager::ScoreStick(TreeSummary&) const
+{
+    // we do nothing since there is no stick
+} // TimeManager::ScoreStick
+
+//------------------------------------------------------------------------------------
+
+void TimeManager::ChopOffStickAt(double)
+{
+    // we do nothing since there is no stick
+} // TimeManager::ChopOffStickAt
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeManager::PrintStickThetasToFile(std::ofstream&) const
+{
+    // we do nothing since there is no stick
+} // TimeManager::PrintStickThetasToFile
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeManager::PrintStickFreqsToFile(std::ofstream&) const
+{
+    // we do nothing since there is no stick
+} // TimeManager::PrintStickFreqsToFile
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeManager::PrintStickFreqsToFileAtTime(std::ofstream&, double) const
+{
+    // we do nothing since there is no stick
+} // TimeManager::PrintStickFreqsToFileAtTime
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeManager::PrintStickThetasToFileForJoint300(std::ofstream&) const
+{
+    // we do nothing since there is no stick
+} // TimeManager::PrintStickThetasToFileForJoint300
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeManager::PrintStickToFile(std::ofstream&) const
+{
+    // we do nothing since there is no stick
+} // TimeManager::PrintStickToFile
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeManager::PrintStickFreqsToFileForJoint300(std::ofstream&) const
+{
+    // we do nothing since there is no stick
+} // TimeManager::PrintStickFreqsToFileForJoint300
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// ConstantTimeManager
+
+DoubleVec1d ConstantTimeManager::XpartThetasAtT(double t, const ForceParameters& fp)
+{
+    return fp.GetRegionalThetas();
+} // XpartThetasAtT
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ConstantTimeManager::PartitionThetasAtT(double t, force_type force, const ForceParameters& fp)
+{
+    DoubleVec1d thetas(fp.GetRegionalThetas());
+    return dynamic_cast<PartitionForce*>(*registry.GetForceSummary().GetForceByTag(force))->SumXPartsToParts(thetas);
+} // PartitionThetasAtT
+
+//------------------------------------------------------------------------------------
+
+double ConstantTimeManager::TimeOfActiveCoal(double tstart,
+                                             const LongVec1d& lineages, const ForceParameters& fp, long& targetxpart,
+                                             double)
+{
+    // Computes the timestamp of the end of the interval by means of
+    // computing "delta t" and adding it to tstart
+
+    // Note:  The expectation value of this "delta t" is theta/(k*(k-1)),
+    // where k (the number of active lineages) and theta correspond to the
+    // population which yields the smallest "delta t."
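+
+    // Sketch of the draw performed below: in cross-partition i the waiting
+    // time to the next coalescence is exponential with rate
+    //     lambda_i = k_i * (k_i - 1) / theta_i,
+    // so a deviate is obtained by inverse-CDF sampling,
+    //     double dt = -log(U) / lambda_i;   // U ~ Uniform(0,1)
+    // and the population with the smallest dt wins the race.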
+
+    double newtime;
+    long i;
+    DoubleVec1d thetas = fp.GetRegionalThetas();
+    long nxparts = thetas.size();
+    double besttime = DBL_MAX;
+    long target = 0;
+
+    for (i = 0; i < nxparts; ++i)
+    {
+        // If there are at least two active lineages in this population ...
+        if (lineages[i] > 1)
+        {
+            double randnum = randomSource.Float();
+
+            // Compute the time of the next coalescence.
+            newtime = - log(randnum) / (lineages[i] * (lineages[i] - 1.0) / thetas[i]);
+
+            // Keep it only if it's shorter than previous ones.
+            if (newtime < besttime)
+            {
+                besttime = newtime;
+                target = i;
+            }
+        }
+    }
+
+    if (besttime == DBL_MAX)
+    {
+        // no event is possible
+        targetxpart = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+    else
+    {
+        assert (besttime >= 0.0);
+        targetxpart = target;
+        return tstart + besttime;
+    }
+} // TimeOfActiveCoal
+
+//------------------------------------------------------------------------------------
+
+double ConstantTimeManager::TimeOfInactiveCoal(double tstart,
+                                               const LongVec1d& activelines, const LongVec1d& inactivelines,
+                                               const ForceParameters& fp, long& targetxpart, double)
+{
+    // Computes the timestamp of the end of the interval by means of
+    // computing "delta t" and adding it to tstart
+
+    // Note:  The expectation value of this "delta t" is theta/(2*j*k),
+    // where j and k (the numbers of inactive and active lineages) and theta
+    // correspond to the population which yields the smallest "delta t."
+
+    double newtime = 0.0;
+    long i;
+    DoubleVec1d thetas = fp.GetRegionalThetas();
+    long nxparts = thetas.size();
+    double besttime = DBL_MAX;
+    long target = 0;
+
+    for (i = 0; i < nxparts; ++i)
+    {
+        if (activelines[i] > 0 && inactivelines[i] > 0)
+        {
+            // the rate is 2*j*k/theta (inactives*actives, with a factor
+            // of 2) instead of k*(k-1)/theta
+            newtime = - log (randomSource.Float()) /
+                (2.0 * activelines[i] * inactivelines[i] / thetas[i]);
+            if (newtime < besttime)
+            {
+                besttime = newtime;
+                target = i;
+            }
+        }
+    }
+
+    if (besttime == DBL_MAX)
+    {
+        // no event is possible
+        targetxpart = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+    else
+    {
+        assert (besttime >= 0.0);
+        targetxpart = target;
+        return tstart + besttime;
+    }
+
+} // TimeOfInactiveCoal
+
+//------------------------------------------------------------------------------------
+
+double ConstantTimeManager::TimeOfTraitMutation(double tstart,
+                                                const LongVec1d& lineages, const ForceParameters& fp, long& tiptrait,
+                                                long& roottrait, double)
+{
+    // Computes the timestamp of the end of the interval by means of
+    // computing "delta t" and adding it to tstart
+
+    // MDEBUG NOT CORRECT in presence of migration due to use of
+    // crosspartition rather than partition Thetas
+    // needs a function PartitionForce::ComputePartitionTheta
+
+    double newtime = 0.0;
+    DoubleVec1d thetas = fp.GetRegionalThetas();
+    // mus[0] is a->A, mus[1] is A->a in forward time
+    DoubleVec1d mus = fp.GetOnlyDiseaseRates();
+    double besttime = DBL_MAX;
+    long tiptarget = FLAGLONG, roottarget = FLAGLONG; // only read when besttime is set below
+
+    // unrolling the loop since we only have 2 partitions and
+    // avoid partition/xpartition mapping by doing so....
+    if (lineages[0] > 0)
+    {
+        newtime = - log (randomSource.Float()) /
+            (lineages[0] * mus[0] * thetas[1]/thetas[0]);
+        if (newtime < besttime)
+        {
+            besttime = newtime;
+            tiptarget = 0;
+            roottarget = 1;
+        }
+    }
+
+    if (lineages[1] > 0)
+    {
+        newtime = - log (randomSource.Float()) /
+            (lineages[1] * mus[1] * thetas[0]/thetas[1]);
+        if (newtime < besttime)
+        {
+            besttime = newtime;
+            tiptarget = 1;
+            roottarget = 0;
+        }
+    }
+
+    if (besttime == DBL_MAX)
+    {
+        // no event is possible
+        tiptrait = FLAGLONG;
+        roottrait = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+    else
+    {
+        assert (besttime >= 0.0);
+        tiptrait = tiptarget;
+        roottrait = roottarget;
+        return tstart + besttime;
+    }
+
+
+} // TimeOfTraitMutation
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// ExpGrowTimeManager
+
+DoubleVec1d ExpGrowTimeManager::XpartThetasAtT(double t, const ForceParameters& fp)
+{
+    DoubleVec1d thetas(fp.GetRegionalThetas()),growths(fp.GetGrowthRates());
+    assert(thetas.size() == growths.size());
+    DoubleVec1d newthetas(thetas.size());
+    unsigned long param;
+    for(param = 0; param < thetas.size(); ++param)
+    {
+        newthetas[param] = SafeProductWithExp(thetas[param],-growths[param]*t);
+    }
+    return newthetas;
+} // XpartThetasAtT
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ExpGrowTimeManager::PartitionThetasAtT(double t, force_type force, const ForceParameters& fp)
+{
+    DoubleVec1d thetas(fp.GetRegionalThetas()),growths(fp.GetGrowthRates());
+    return dynamic_cast<PartitionForce*>(*registry.GetForceSummary().
+                                         GetForceByTag(force))->SumXPartsToParts(thetas,growths,t);
+} // PartitionThetasAtT
+
+//------------------------------------------------------------------------------------
+
+double ExpGrowTimeManager::TimeOfActiveCoal(double tstart,
+                                            const LongVec1d& lineages, const ForceParameters& fp, long& targetxpart,
+                                            double)
+{
+    // Computes the timestamp of the end of the interval by means of computing "delta t" and adding it to tstart.
+
+    // Note:  The expectation value of this "delta t" is:
+    //                           (1/g)*ExpE1(k*(k-1)*exp(g*tstart)/(g*theta)),
+    // where g, theta, and k are the growth rate, theta, and number of active lineages
+    // for the population which yields the smallest "delta t."
+    // For details on the function ExpE1(), see mathx.cpp.
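+
+    // The "magic time" change of variables used below (for g != 0):
+    //     tau(t) = (exp(g*t) - 1) / g      // real time  -> magic time
+    //     t(tau) = log(1 + g*tau) / g      // magic time -> real time
+    // The interval is drawn in magic time, where the coalescent is
+    // homogeneous, shifted by tau(tstart), mapped back to real time, and
+    // tstart is subtracted again to recover the displacement.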
+
+    double newtime = 0.0;
+    double magicstart;
+    long i;
+    DoubleVec1d thetas = fp.GetRegionalThetas();
+    DoubleVec1d growths = fp.GetGrowthRates();
+    long nxparts = thetas.size();
+    double besttime = DBL_MAX;
+    long target = 0;
+
+    // compute coalescence terms per cross partition
+    for (i = 0; i < nxparts; ++i)
+    {
+        // if there are at least two active lineages in this population...
+        if (lineages[i] > 1)
+        {
+            // compute the time of the next coalescence
+            // first do computation in "magic time"
+            newtime =  - log(randomSource.Float()) /
+                (lineages[i] * (lineages[i] - 1.0) / thetas[i]);
+
+            // now convert to "real time"
+            if (growths[i] != 0)
+            {
+                // convert tstart to magical time
+                magicstart = -(1.0 - exp(growths[i] * tstart)) / growths[i];
+                // add tstart to displacement
+                newtime += magicstart;
+                // convert result to real time
+                if (1.0 + growths[i] * newtime > 0.0)
+                {
+                    newtime = log(1.0 + growths[i] * newtime)/growths[i];
+                    // convert back to displacement
+                    newtime -= tstart;
+                    // guard against underflow
+                    if (newtime < DBL_EPSILON)
+                    {
+                        tinypopulation_error e("bad time proposed in Event::PickTime");
+                        throw e;
+                    }
+                }
+                else
+                {
+                    newtime = DBL_BIG; // make this interval length very unlikely to win the race
+                }
+            }
+            // (else newtime holds the same number that would have been
+            // computed by ActiveCoal, in the absence of growth)
+
+            // is this the best time so far?
+            if (newtime < besttime)
+            {
+                besttime = newtime;
+                target = i;
+            }
+        }
+    }
+
+    if (besttime == DBL_MAX)
+    {
+        // no event is possible
+        targetxpart = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+    else
+    {
+        assert (besttime >= 0.0);
+        targetxpart = target;
+        return tstart + besttime;
+    }
+
+} // TimeOfActiveCoal
+
+//------------------------------------------------------------------------------------
+
+double ExpGrowTimeManager::TimeOfInactiveCoal(double tstart,
+                                              const LongVec1d& activelines, const LongVec1d& inactivelines,
+                                              const ForceParameters& fp, long& targetxpart, double)
+{
+    // Computes the timestamp of the end of the interval by means of computing "delta t" and adding it to tstart.
+
+    // Note:  The expectation value of this "delta t" is:
+    //                           (1/g)*ExpE1(j*k*exp(g*tstart)/(2*g*theta)),
+    // where g, theta, j, and k are the growth rate, theta, and numbers of
+    // inactive and active lineages for the population which yields the
+    // smallest "delta t."  (Note also the factor of 2, absent from ActiveGrowCoal.)
+    // For details on the function ExpE1(), see mathx.cpp.
+
+    double newtime = 0.0;
+    double magicstart;
+    long i;
+    DoubleVec1d thetas = fp.GetRegionalThetas();
+    DoubleVec1d growths = fp.GetGrowthRates();
+    long nxparts = thetas.size();
+    double besttime = DBL_MAX;
+    long target = 0;
+
+    for (i = 0; i < nxparts; ++i)
+    {
+        if (activelines[i] > 0 && inactivelines[i] > 0)
+        {
+            // first do calculation in "magic time"
+            // The rate is 2*j*k/theta (inactives*actives, with a factor of 2,
+            // unlike the active-coalescence case) instead of k*(k-1)/theta.
+            newtime = - log(randomSource.Float()) /
+                ((2.0 / thetas[i]) * activelines[i] * inactivelines[i]);
+
+            // then convert to "real time"
+            if (growths[i] != 0)
+            {
+                // convert tstart to magical time
+                magicstart = -(1.0 - exp(growths[i] * tstart)) / growths[i];
+                // add tstart to displacement
+                newtime += magicstart;
+                if (1.0 + growths[i] * newtime > 0.0)
+                {
+                    // convert result to real time
+                    newtime = log(1.0 + growths[i] * newtime)/growths[i];
+                    // convert back to displacement
+                    newtime -= tstart;
+                    // guard against underflow
+                    if (newtime < DBL_EPSILON)
+                    {
+                        tinypopulation_error e("bad time proposed in Event::PickTime");
+                        throw e;
+                    }
+                }
+                else
+                {
+                    newtime = DBL_BIG; // make this interval length very unlikely to win the race
+                }
+            }
+
+            // is this the best time so far?
+            if (newtime < besttime)
+            {
+                besttime = newtime;
+                target = i;
+            }
+        }
+    }
+
+    if (besttime == DBL_MAX)
+    {
+        // no event is possible
+        targetxpart = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+    else
+    {
+        assert (besttime >= 0.0);
+        targetxpart = target;
+        return tstart + besttime;
+    }
+} // TimeOfInactiveCoal
+
+//------------------------------------------------------------------------------------
+
+double ExpGrowTimeManager::TimeOfTraitMutation(double tstart,
+                                               const LongVec1d& lineages, const ForceParameters& fp, long& tiptrait,
+                                               long& roottrait, double)
+{
+    // Computes a displacement, a.k.a. "delta t" -- the length of the time
+    // interval, not the timestamp for the end of the interval.
+    assert(false);  // Can't combine Disease and ExpGrowth right now!
+    return 0.0;     // Silence compiler warning.
+} // TimeOfTraitMutation
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// LogSelectTimeManager
+
+DoubleVec1d LogSelectTimeManager::XpartThetasAtT(double t, const ForceParameters& fp)
+{
+    DoubleVec1d thetas(fp.GetRegionalThetas()),
+        selcoeffs(fp.GetLogisticSelectionCoefficient());
+
+    if (2 != thetas.size())
+    {
+        string msg = "LogisticSelectionTimeSize::SizeAt(), needed to receive ";
+        msg += "two thetas, one for each allele, but instead received ";
+        msg += ToString(thetas.size());
+        msg + " thetas.";
+        throw implementation_error(msg);
+    }
+
+    if (1 != selcoeffs.size())
+    {
+        string msg = "LogisticSelectionTimeSize::SizeAt(), needed to receive ";
+        msg += "1 selection coefficient, but instead received ";
+        msg += ToString(selcoeffs.size());
+        msg + " coefficients.";
+        throw implementation_error(msg);
+    }
+
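+    // The three assignments below evaluate, with s the selection coefficient,
+    //     theta_A(t) = theta_A0 * (theta_A0 + theta_a0) / (theta_A0 + theta_a0*exp(s*t))
+    //     theta_a(t) = theta_a0*exp(s*t) * (theta_A0 + theta_a0) / (theta_A0 + theta_a0*exp(s*t))
+    // so the total theta_A(t) + theta_a(t) stays fixed at theta_A0 + theta_a0
+    // while the allelic proportions follow a logistic curve in t.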
+    DoubleVec1d newthetas(2);
+    double t_term = SafeProductWithExp(thetas[1], selcoeffs[0]*t);
+    newthetas[0] = (thetas[0]+thetas[1])/(thetas[0] + t_term);
+    newthetas[1] = newthetas[0] * t_term;
+    newthetas[0] = newthetas[0] * thetas[0];
+
+    return newthetas;
+} // XpartThetasAtT
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d LogSelectTimeManager::PartitionThetasAtT(double t, force_type force, const ForceParameters& fp)
+{
+    // MDEBUG not done!
+    assert(false); // MDEBUG we are not ready for this yet
+    DoubleVec1d answers;
+    return answers;
+
+} // PartitionThetasAtT
+
+//------------------------------------------------------------------------------------
+
+double LogSelectTimeManager::TimeOfActiveCoal(double tstart,
+                                              const LongVec1d& lineages, const ForceParameters& fp, long& targetxpart,
+                                              double)
+{
+    DoubleVec1d thetas = fp.GetRegionalThetas();
+    double selectionCoefficient = fp.GetLogisticSelectionCoefficient()[0];
+    long nxparts = thetas.size();
+
+    if (2 != nxparts)
+    {
+        string msg = "ActiveLogisticSelectionCoal::PickTime() called with nxparts = ";
+        msg += ToString(nxparts);
+        msg += ".  nxparts must equal 2, reflecting one population with the major ";
+        msg += "allele and one population with the minor allele.";
+        throw implementation_error(msg);
+    }
+
+    if (0.0 == thetas[0] || 0.0 == thetas[1])
+    {
+        string msg = "ActiveLogisticSelectionCoal::PickTime() received an invalid Theta ";
+        msg += "of 0.0.";
+        throw impossible_error(msg);
+    }
+
+    if (lineages[0] <= 1 && lineages[1] <= 1)
+    {
+        // no event is possible
+        targetxpart = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+
+    // First, compute the interval lengths in "magic time"--drawn from an
+    // exponential distribution assuming constant size for population "A"
+    // and for population "a".
+    double dtau_A(FLAGDOUBLE), dtau_a(FLAGDOUBLE); // important:  these are < 0
+
+    if (lineages[0] > 1)
+        dtau_A = -log(randomSource.Float()) /
+            (lineages[0]*(lineages[0] - 1.0)/thetas[0]);
+
+    if (lineages[1] > 1)
+        dtau_a = -log(randomSource.Float()) /
+            (lineages[1]*(lineages[1] - 1.0)/thetas[1]);
+
+    return tstart + LogisticSelectionPickTime(thetas[0],thetas[1],selectionCoefficient,
+                                              tstart,dtau_A,dtau_a,&targetxpart);
+
+} // TimeOfActiveCoal
+
+//------------------------------------------------------------------------------------
+
+// This helper function performs the transformations from "magic time"
+// into "real time."
+// These transformations are:
+//
+// dtau_A = (theta_a0*exp(s*t_s)*((exp(s*dt_A)-1)/s) +
+//           theta_A0*dt_A)/(theta_A0+theta_a0)
+//
+// dtau_a = (theta_A0*exp(-s*t_s)*((exp(-s*dt_a)-1)/(-s)) +
+//           theta_a0*dt_a)/(theta_A0+theta_a0)
+//
+// Given dtau_A and dtau_a, we solve for dt_A and dt_a, and return the
+// shorter of these, along with the identity of the population which "won
+// the horse race."
+// Note that each transformation includes dt in both an exponential term
+// and a linear term.
+// In some cases, one of these terms is negligible; these are explained in
+// the code below.
+// When neither the exponential nor the linear term is negligible, the
+// transformation can only be solved by iterative guessing.
+//
+// Note:  If this function is called, then a coalescence is possible
+// in at least one of the two populations.  This is a requirement.
+//
+double LogSelectTimeManager::LogisticSelectionPickTime(const double& theta_A0, const double& theta_a0,
+                                                       const double& s, const double& t_s,
+                                                       const double& dtau_A, const double& dtau_a,
+                                                       long* pChosenPopulation)
+{
+    double dt_A(FLAGDOUBLE), dt_a(FLAGDOUBLE); // important: these are < 0
+    double minimum_possible_dt(t_s * std::numeric_limits<double>::epsilon());
+    double ln_theta_A0(log(theta_A0)), ln_theta_a0(log(theta_a0)), a, b(0.0), c, x;
+
+    if (s > 0)
+    {
+        if (s*t_s > EXPMAX - ln_theta_a0)
+        {
+            // theta_a0 * exp(s*t_s) overflows.
+            // This means the expectation E[dt_A] underflows to zero,
+            // so if we return any nonzero dt_A, no matter how short,
+            // it will be many times longer than its expected value.
+            // Return the shortest interval possible (guaranteed to win the
+            // "horse race"), if a coalescence is allowed in population "A"
+            // (not allowed if k = 1).
+            if (dtau_A > .999*FLAGDOUBLE)
+            {
+                *pChosenPopulation = 0;
+                return minimum_possible_dt;
+            }
+            // Otherwise, population "a" coalesces.  Underflow simplifies the
+            // result.
+            *pChosenPopulation = 1;
+            return dtau_a*(theta_A0+theta_a0)/theta_a0;
+        }
+
+        if (s*t_s > 4.0*LOG10 + ln_theta_A0 - ln_theta_a0)
+        {
+            if (dtau_A > .999*FLAGDOUBLE)
+            {
+                // It's possible to coalesce in population "A",
+                // and the linear term in the right-hand side of the
+                // transformation for dtau_A may safely be ignored.
+                b = SafeProductWithExp(theta_a0,s*t_s); // guaranteed > 0
+                c = -dtau_A*(theta_A0+theta_a0); // c < 0
+                x = s*(-c)/b;
+                if (x > 1.0e-04)
+                    dt_A = (1.0/s)*log(1.0 + x);
+                else
+                    dt_A = x/s; // because log(1+x) = x + O(x^2)
+                if (dt_A < minimum_possible_dt)
+                    dt_A = minimum_possible_dt;
+            }
+            if (dtau_a > .999*FLAGDOUBLE)
+            {
+                // It's possible to coalesce in population "a",
+                // and the exponential term in the right-hand side of the
+                // transformation for dtau_a may safely be ignored.
+                // For s>0, (exp(-s*dt)-1)/(-s) is maximal at s=0.
+                dt_a = dtau_a*(theta_A0+theta_a0)/theta_a0;
+                if (dtau_A <= .999*FLAGDOUBLE || dt_a < dt_A)
+                {
+                    *pChosenPopulation = 1;
+                    return dt_a;
+                }
+            }
+            // Recall a coalescence must be possible in at least one of these
+            // populations, per this function's definition.
+            *pChosenPopulation = 0;
+            return dt_A;
+        }
+    }
+
+    if (s < 0.0)
+    {
+        // These overflow and almost-overflow cases are analogous to those above.
+        if (-s*t_s > EXPMAX - ln_theta_A0)
+        {
+            if (dtau_a > .999*FLAGDOUBLE)
+            {
+                *pChosenPopulation = 1;
+                return minimum_possible_dt;
+            }
+            *pChosenPopulation = 0;
+            return dtau_A*(theta_A0+theta_a0)/theta_A0;
+        }
+
+        if (-s*t_s > 4.0*LOG10 + ln_theta_a0 - ln_theta_A0)
+        {
+            if (dtau_a > .999*FLAGDOUBLE)
+            {
+                b = SafeProductWithExp(theta_A0,-s*t_s); // guaranteed > 0
+                c = -dtau_a*(theta_A0+theta_a0); // c < 0
+                x = s*c/b;
+                if (x > 1.0e-04)
+                    dt_a = (1.0/(-s))*log(1.0 + x);
+                else
+                    dt_a = x/(-s); // because log(1+x) = x + O(x^2)
+                if (dt_a < minimum_possible_dt)
+                    dt_a = minimum_possible_dt;
+            }
+            if (dtau_A > .999*FLAGDOUBLE)
+            {
+                dt_A = dtau_A*(theta_A0+theta_a0)/theta_A0;
+                if (dtau_a <= .999*FLAGDOUBLE || dt_A < dt_a)
+                {
+                    *pChosenPopulation = 0;
+                    return dt_A;
+                }
+            }
+            // Recall a coalescence must be possible in at least one of these
+            // populations, per this function's definition.
+            *pChosenPopulation = 1;
+            return dt_a;
+        }
+    }
+
+    // If we get here, then solve for dt_A and/or dt_a
+    // by iterating over a series of increasingly better guesses.
+    if (s != 0.0)
+    {
+        double initialGuess_dt, discriminant;
+        long nIter, maxIter(1000);
+
+        if (dtau_A > .999*FLAGDOUBLE)
+        {
+            // It's possible to coalesce in population "A".
+
+            if (0.5*s*t_s > 1.0e-04 || 0.5*(-s)*t_s > 1.0e-04)
+                b = SafeProductWithExp(theta_a0,s*t_s);
+            // s>0: 0 < b <= 10000*theta_A0.  s<0: b >= (1.0e-04)*theta_A0.
+            else
+                b = theta_a0*(1.0 + s*t_s);
+            a = 0.5 * b * s; // sgn(a)=sgn(s). Can't overflow. Can underflow
+            // if s almost underflows.
+            b += theta_A0; // b > 0
+            c = -dtau_A*(theta_A0+theta_a0); // c < 0
+            discriminant = b*b - 4.0*a*c; // > 0 if s > 0
+            if (a > 0.0 && 1.0/a < DBL_BIG)
+                initialGuess_dt = (-b + sqrt(discriminant))/(2.0*a);
+            else if (a < 0.0 && -1.0/a < DBL_BIG)
+            {
+                if (discriminant >= 0.0)
+                    initialGuess_dt = (-b + sqrt(discriminant))/(2.0*a);
+                else
+                    initialGuess_dt = -c/theta_A0; // heuristic guess
+            }
+            else // s is extremely tiny, equivalent to s=0
+            {
+                initialGuess_dt = dtau_A;
+                maxIter = 0; // prevent the iterative loop from being executed
+            }
+
+            // Try Newton-Raphson:  solve f(x) = 0 for x, where x is dt.
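+            // Concretely, the equation solved for x = dt_A is
+            //     f(x)  = B*(exp(s*x) - 1) + theta_A0*x + c  =  0,
+            // i.e. the dtau_A transformation rearranged, with
+            //     B = (b - theta_A0)/s   (roughly theta_a0*exp(s*t_s)/s)
+            //     c = -dtau_A*(theta_A0 + theta_a0),
+            // and 'df' in the loop holds the derivative
+            //     f'(x) = s*B*exp(s*x) + theta_A0.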
+            double xL = 0, xR, dx, B = (b - theta_A0)/s, f, df;
+            // long numTimesOutOfBounds = 0; // for debugging
+
+            if (s > 0)
+                xR = (EXPMAX + log(s) - ln_theta_a0)/s - t_s;
+            else
+                xR = 2.0; // this should be big enough
+            dx = xR - xL;
+            x = initialGuess_dt;
+            if (x <= xL || x >= xR)
+                x = 0.5*(xL+xR);
+            nIter = 0;
+
+            do {
+                df = SafeProductWithExp(s*B, s*x) + theta_A0;
+                f = (df-theta_A0)/s - B + theta_A0*x + c;
+                dx = f/df; // denominator is guaranteed nonzero
+                x -= dx;
+                if (x < xL)
+                {
+                    if (s < 0)
+                        x = xL;
+                    else
+                    {
+                        // erynes BUGBUG DEBUG: revisit this prior to releasing
+                        string msg = "variable out of bounds; t_s = ";
+                        msg += ToString(t_s);
+                        msg += ", dtau_A = " + ToString(dtau_A) + ", s = ";
+                        msg += ToString(s);
+                        msg += ", f = " + ToString(f) + ", df = " + ToString(df);
+                        msg += ", prev. x = " + ToString(x+dx) + ", new x = ";
+                        msg += ToString(x);
+                        msg += ", nIter = " + ToString(nIter);
+                        throw impossible_error(msg);
+                    }
+                }
+                else if (x > xR)
+                {
+                    if (s > 0)
+                        x = xR;
+                    else
+                    {
+                        // erynes BUGBUG DEBUG: revisit this prior to releasing
+                        string msg = "variable out of bounds; t_s = ";
+                        msg += ToString(t_s);
+                        msg += ", dtau_A = " + ToString(dtau_A) + ", s = ";
+                        msg += ToString(s);
+                        msg += ", f = " + ToString(f) + ", df = " + ToString(df);
+                        msg += ", prev. x = " + ToString(x+dx) + ", new x = ";
+                        msg += ToString(x);
+                        msg += ", nIter = " + ToString(nIter);
+                        throw impossible_error(msg);
+                    }
+                }
+                nIter++;
+            } while (fabs(dx/x) > 0.00001 && nIter < maxIter);
+
+            if (nIter < maxIter)
+                dt_A = x;
+            else if (0 == maxIter)
+                dt_A = dtau_A;
+            else
+                dt_A = MAX_LENGTH;  // BUG? -- danger probably too low, try DBL_BIG
+                                    // and make sure that any matching code trying
+                                    // to detect what kind of horserace conditions
+                                    // these actually are is also changed
+        }
+
+        if (dtau_a > .999*FLAGDOUBLE)
+        {
+            // It's possible to coalesce in population "a".
+
+            maxIter = 1000;
+
+            if (0.5*s*t_s > 1.0e-04 || 0.5*(-s)*t_s > 1.0e-04)
+            {
+                // If we have just completed computing dt_A, re-use one
+                // computation.
+                if (dtau_A > .999*FLAGDOUBLE)
+                    b = theta_A0*theta_a0/(b-theta_A0); // = SafeProductWithExp(theta_A0, -s*t_s)
+                else
+                    b = SafeProductWithExp(theta_A0, -s*t_s);
+                // s>0: b >= (1.0e-04)*theta_a0.  s<0: 0 < b <= 10000*theta_a0.
+            }
+            else
+                b = theta_A0*(1.0 - s*t_s);
+            a = -0.5 * b * s; // sgn(a) = -sgn(s). Can't overflow.
+            // Can underflow if s almost underflows.
+            b += theta_a0; // b > 0
+            c = -dtau_a*(theta_A0+theta_a0); // c < 0
+            discriminant = b*b - 4.0*a*c; // > 0 if s < 0
+            if (a > 0.0 && 1.0/a < DBL_BIG)
+                initialGuess_dt = (-b + sqrt(discriminant))/(2.0*a);
+            else if (a < 0.0 && -1.0/a < DBL_BIG)
+            {
+                if (discriminant >= 0.0)
+                    initialGuess_dt = (-b + sqrt(discriminant))/(2.0*a);
+                else
+                    initialGuess_dt = -c/theta_a0; // heuristic guess
+            }
+            else // s is extremely tiny, equivalent to s=0
+            {
+                initialGuess_dt = dtau_a;
+                maxIter = 0; // prevent either iterative loop from being executed
+            }
+
+            // Try Newton-Raphson:  solve f(x) = 0 for x, where x is dt.
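+            // Mirror image of the population-"A" case: the equation solved for
+            // x = dt_a is
+            //     f(x)  = B*(exp(-s*x) - 1) + theta_a0*x + c  =  0,
+            // with B = (b - theta_a0)/(-s) and c = -dtau_a*(theta_A0 + theta_a0),
+            // and 'df' holds f'(x) = -s*B*exp(-s*x) + theta_a0.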
+            double xL = 0, xR, dx, B = (b - theta_a0)/(-s), f, df;
+            //long numTimesOutOfBounds = 0; // for debugging
+
+            if (s < 0)
+                xR = (EXPMAX + log(-s) - ln_theta_A0)/(-s) - t_s;
+            else
+                xR = 2.0; // this should be big enough
+            dx = xR - xL;
+            x = initialGuess_dt;
+            if (x <= xL || x >= xR)
+                x = 0.5*(xL+xR);
+            nIter = 0;
+
+            do {
+                df = SafeProductWithExp(-s*B, -s*x) + theta_a0;
+                f = (df-theta_a0)/(-s) - B + theta_a0*x + c;
+                dx = f/df; // guaranteed nonzero
+                x -= dx;
+                if (x < xL)
+                {
+                    if (s > 0)
+                        x = xL;
+                    else
+                    {
+                        // erynes BUGBUG DEBUG: revisit this prior to releasing
+                        string msg = "variable out of bounds; t_s = ";
+                        msg += ToString(t_s);
+                        msg += ", dtau_a = " + ToString(dtau_a) + ", s = ";
+                        msg += ToString(s);
+                        msg += ", f = " + ToString(f) + ", df = " + ToString(df);
+                        msg += ", prev. x = " + ToString(x+dx) + ", new x = ";
+                        msg += ToString(x);
+                        msg += ", nIter = " + ToString(nIter);
+                        throw impossible_error(msg);
+                    }
+                }
+                else if (x > xR)
+                {
+                    if (s < 0)
+                        x = xR;
+                    else
+                    {
+                        // erynes BUGBUG DEBUG: revisit this prior to releasing
+                        string msg = "variable out of bounds; t_s = ";
+                        msg += ToString(t_s);
+                        msg += ", dtau_a = " + ToString(dtau_a) + ", s = ";
+                        msg += ToString(s);
+                        msg += ", f = " + ToString(f) + ", df = " + ToString(df);
+                        msg += ", prev. x = " + ToString(x+dx) + ", new x = ";
+                        msg += ToString(x);
+                        msg += ", nIter = " + ToString(nIter);
+                        throw impossible_error(msg);
+                    }
+                }
+                nIter++;
+            } while (fabs(dx/x) > 0.00001 && nIter < maxIter);
+
+            if (nIter < maxIter)
+                dt_a = x;
+            else if (0 == maxIter)
+                dt_a = dtau_a;
+            else
+                dt_a = MAX_LENGTH;  // BUG? -- danger probably too low, try DBL_BIG
+                                    // and make sure that any matching code trying
+                                    // to detect what kind of horserace conditions
+                                    // these actually are is also changed
+        }
+
+    } // end of the iterative solution code
+
+    else // s == 0, and "magic time" is actually "real time"
+    {
+        dt_A = dtau_A;
+        dt_a = dtau_a;
+    }
+
+    if ((dt_A >= 0.0 && dt_A < minimum_possible_dt) ||
+        (dt_a >= 0.0 && dt_a < minimum_possible_dt))
+    {
+        string msg = "Bad time length (" + ToString(std::min(dt_A,dt_a));
+        msg += ") proposed in LogisticSelectionPickTime().";
+        tinypopulation_error e(msg);
+        throw e;
+    }
+    if (dt_a < 0.0) // FLAGDOUBLE
+    {
+        *pChosenPopulation = (dt_A < 0.0) ? FLAGLONG : 0;
+        return dt_A;
+    }
+    if (dt_A < 0.0) // FLAGDOUBLE
+    {
+        *pChosenPopulation = 1;
+        return dt_a;
+    }
+    // If we get here, both interval lengths are valid.
+    if (dt_A < dt_a)
+    {
+        *pChosenPopulation = 0;
+        return dt_A;
+    }
+    *pChosenPopulation = 1;
+    return dt_a;
+
+} // LogisticSelectionPickTime
+
+//------------------------------------------------------------------------------------
+
+double LogSelectTimeManager::TimeOfInactiveCoal(double tstart,
+                                                const LongVec1d& activelines, const LongVec1d& inactivelines,
+                                                const ForceParameters& fp, long& targetxpart, double)
+{
+    DoubleVec1d thetas = fp.GetRegionalThetas();
+    double selectionCoefficient = fp.GetLogisticSelectionCoefficient()[0];
+    long nxparts = thetas.size();
+    if (2 != nxparts)
+    {
+        string msg = "InactiveLogisticSelectionCoal::PickTime() called ";
+        msg += "with nxparts = ";
+        msg += ToString(nxparts);
+        msg += ".  nxparts must equal 2, reflecting one population with ";
+        msg += "the major allele and one population with the minor allele.";
+        throw implementation_error(msg);
+    }
+
+    if (0.0 == thetas[0] || 0.0 == thetas[1])
+    {
+        string msg = "InactiveLogisticSelectionCoal::PickTime() received an ";
+        msg += "invalid Theta of 0.0.";
+        throw impossible_error(msg);
+    }
+
+    if (!((activelines[0] >= 1 && inactivelines[0] >= 1) ||
+          (activelines[1] >= 1 && inactivelines[1] >= 1)))
+    {
+        // no event is possible
+        targetxpart = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+
+    // First, compute the interval lengths in "magic time"--drawn from an
+    // exponential distribution assuming constant size for population "A"
+    // and for population "a".
+    double dtau_A(FLAGDOUBLE), dtau_a(FLAGDOUBLE); // important:  these are < 0
+
+    if (activelines[0] > 0 && inactivelines[0] > 0)
+        dtau_A = -log(randomSource.Float()) /
+            (2.0*activelines[0]*inactivelines[0]/thetas[0]);
+
+    if (activelines[1] > 0 && inactivelines[1] > 0)
+        dtau_a = -log(randomSource.Float()) /
+            (2.0*activelines[1]*inactivelines[1]/thetas[1]);
+
+    return tstart + LogisticSelectionPickTime(thetas[0],thetas[1],selectionCoefficient,
+                                              tstart,dtau_A,dtau_a,&targetxpart);
+
+} // TimeOfInactiveCoal
+
+//------------------------------------------------------------------------------------
+
+double LogSelectTimeManager::TimeOfTraitMutation(double tstart,
+                                                 const LongVec1d& lineages, const ForceParameters& fp, long& tiptrait,
+                                                 long& roottrait, double)
+{
+
+    // Computes the timestamp of the end of the interval by means of
+    // computing "delta t" and adding it to tstart
+    DoubleVec2d murates(fp.GetRegional2dRates(force_DISEASE));
+    if (2 != murates[0].size() || 2 != murates[1].size() ||
+        0 != murates[0][0] || 0 != murates[1][1] || 0 >= murates[0][1] ||
+        0 >= murates[1][0])
+    {
+        string msg("LogSelectTimeManager::TimeOfTraitMutation():  ");
+        msg += "bad murates matrix";
+        throw implementation_error(msg);
+    }
+
+    double mu_into_A_from_a(murates[0][1]);
+    double mu_into_a_from_A(murates[1][0]);
+    double s(fp.GetLogisticSelectionCoefficient()[0]); // one-element vector
+    double theta_A0(fp.GetGlobalThetas()[0]);
+    double theta_a0(fp.GetGlobalThetas()[1]);
+    double dtau_A(FLAGDOUBLE), dtau_a(FLAGDOUBLE), dt_A(FLAGDOUBLE),
+        dt_a(FLAGDOUBLE), argOfLog;
+
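+    // For each allelic class with at least one lineage, a waiting time dtau is
+    // drawn from the constant-rate exponential (rate = mu * k) and then mapped
+    // into real time through the logistic size trajectory.  If the argument of
+    // the log is not positive, the cumulative hazard never reaches the drawn
+    // quantile and no event is proposed for that class (dt keeps its flag value).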
+    if (lineages[0] > 0)
+    {
+        dtau_A = -log(randomSource.Float()) /
+            (mu_into_A_from_a * lineages[0]);
+        if (0.0 == s)
+            dt_A = dtau_A;
+        else
+        {
+            argOfLog = 1.0 +
+                SafeProductWithExp((dtau_A*s*theta_A0/theta_a0),-s*tstart);
+            if (argOfLog > 0.0)
+                dt_A = log(argOfLog) / s;
+        }
+    }
+    if (lineages[1] > 0)
+    {
+        dtau_a = -log(randomSource.Float()) /
+            (mu_into_a_from_A * lineages[1]);
+        if (0.0 == s)
+            dt_a = dtau_a;
+        else
+        {
+            argOfLog = 1.0 +
+                SafeProductWithExp((dtau_a*(-s)*theta_a0/theta_A0),s*tstart);
+            if (argOfLog > 0.0)
+                dt_a = log(argOfLog) / (-s);
+        }
+    }
+
+    if (dt_a <= 0.0)
+    {
+        if (dt_A <= 0.0)
+        {
+            // no event is possible
+            tiptrait = FLAGLONG;
+            roottrait = FLAGLONG;
+            return FLAGDOUBLE;
+        }
+        tiptrait = 0; // going backward in time, from tip to root
+        roottrait = 1;
+        return tstart + dt_A;  // timestamp of the event, consistent with the returns below
+    }
+
+    // If we get here, dt_a > 0.
+    if (dt_A <= 0.0 || dt_A > dt_a)
+    {
+        tiptrait = 1;
+        roottrait = 0;
+        return tstart + dt_a;
+    }
+
+    // If we get here, 0 < dt_A <= dt_a.
+    tiptrait = 0;
+    roottrait = 1;
+    return tstart + dt_A;
+
+} // TimeOfTraitMutation
+
+//------------------------------------------------------------------------------------
+
+TimeManager& StickTimeManager::operator=(const TimeManager& src)
+{
+    TimeManager::operator=(src);
+    m_stair = dynamic_cast<const StickTimeManager&>(src).m_stair;
+    return *this;
+} // StickTimeManager operator=
+
+//------------------------------------------------------------------------------------
+
+void StickTimeManager::CopyStick(const TimeManager& src)
+{
+    m_stair = dynamic_cast<const StickTimeManager&>(src).m_stair;
+} // StickTimeManager::CopyStick
+
+//------------------------------------------------------------------------------------
+
+bool StickTimeManager::UsingStick() const
+{
+    return true;
+} // StickTimeManager::UsingStick
+
+//------------------------------------------------------------------------------------
+
+void StickTimeManager::ClearStick()
+{
+    m_stair.clear();
+} // StickTimeManager::ClearStick
+
+//------------------------------------------------------------------------------------
+
+void StickTimeManager::SetStickParameters(const ForceParameters& fp)
+{
+    if (m_stair.empty())
+    {
+        // We don't need to do anything!?!
+    }
+    else
+    {
+        DoubleVec1d newthetas(fp.GetRegionalThetas());
+        DoubleVec1d oldthetas(m_stair.begin()->GetThetas());
+        if (oldthetas != newthetas)
+        {
+            double ratio(std::accumulate(newthetas.begin(),newthetas.end(), 0.0) /
+                         std::accumulate(oldthetas.begin(),oldthetas.end(), 0.0));
+            stair::iterator step;
+            for(step = m_stair.begin(); step != m_stair.end(); ++step)
+            {
+                DoubleVec1d ths(step->GetThetas());
+                std::transform(ths.begin(),ths.end(),ths.begin(),
+                               bind2nd(std::multiplies<double>(),ratio));
+                step->SetThetas(ths);
+            }
+        }
+    }
+} // StickTimeManager::SetStickParameters
+
+//------------------------------------------------------------------------------------
+
+void StickTimeManager::ScoreStick(TreeSummary& treesum) const
+{
+    // NB: I don't think it's ever right for the stick to be
+    // empty here; why were we trying to sample it, and how could
+    // it be empty?  Adding an assert to test.
+
+    assert(!m_stair.empty());
+
+    DoubleVec2d stickfreqs;
+    DoubleVec1d sticklengths;
+    DoubleVec1d tipfreqs;
+    DoubleVec2d sticklnfreqs;
+
+    stair::const_iterator br;
+    for(br = m_stair.begin(); br != m_stair.end(); ++br)
+    {
+        DoubleVec1d thetas(br->GetThetas());
+        DoubleVec1d freqs(thetas.size());
+        DoubleVec1d ratios(thetas.size());
+        DoubleVec1d lnfreqs(thetas.size());
+        double zero(0.0);
+        std::transform(thetas.begin(),thetas.end(),freqs.begin(),
+                       std::bind2nd(std::divides<double>(),
+                                    std::accumulate(thetas.begin(),thetas.end(),zero)));
+        stickfreqs.push_back(freqs);
+        double newlength(br->GetRootendTime() - br->GetTipendTime());
+        sticklengths.push_back(newlength);
+
+        if (br == m_stair.begin()) tipfreqs = freqs;
+
+        lnfreqs = freqs;
+        DoubleVec1d::iterator frit;
+        for(frit = lnfreqs.begin(); frit != lnfreqs.end(); ++frit)
+            (*frit) = log(*frit);
+        sticklnfreqs.push_back(lnfreqs);
+    }
+
+    treesum.SetStickSummary(stickfreqs,sticklengths,sticklnfreqs);
+
+} // StickTimeManager::ScoreStick
+
+//------------------------------------------------------------------------------------
+
+void StickTimeManager::ChopOffStickAt(double lasttime)
+{
+    if (m_stair.empty()) return;
+
+    stair::iterator cuthere(m_stair.end());
+    --cuthere;
+    while(cuthere->GetTipendTime() > lasttime) --cuthere;
+    ++cuthere;
+    if (cuthere == m_stair.end()) return;
+
+    m_stair.erase(cuthere,m_stair.end());
+
+} // StickTimeManager::ChopOffStickAt
+
+//------------------------------------------------------------------------------------
+
+void StickTimeManager::MakeStickUsingBranches(const ForceParameters& fp,
+                                              const std::vector<std::pair<double,LongVec1d> >& lpcounts)
+{
+    ClearStick();
+
+    // add the first joint back in
+    AppendRiser(fp);
+
+    double timeprev(0.0),timenow(m_stair.begin()->GetRootendTime());
+    double lasteventtime(lpcounts.back().first);
+    std::vector<std::pair<double,LongVec1d> >::const_iterator
+        lpnow(lpcounts.begin());
+    std::vector<std::pair<double,LongVec1d> >::const_iterator lpnext(lpnow);
+    ++lpnext;
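+
+    // Walk rootward one stick joint at a time: for every branch-count event
+    // overlapping the current joint, form the pairwise counts k_i*(k_i-1) per
+    // cross-partition, weight them by the fraction of the joint the event
+    // covers, accumulate the weighted counts, divide by the number of
+    // overlapping events, and append the next riser from the result.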
+    while(timenow < lasteventtime)
+    {
+        // average branch counts over previous joint
+        DoubleVec1d discounts;
+        discounts.assign(lpnow->second.begin(),lpnow->second.end());
+        DoubleVec1d oneless(discounts);
+        std::transform(oneless.begin(),oneless.end(),oneless.begin(),
+                       std::bind2nd(std::minus<double>(),1.0));
+        std::transform(discounts.begin(),discounts.end(),oneless.begin(),
+                       discounts.begin(),std::multiplies<double>());
+
+        // weight each total by the fraction of the time it represents
+        double tstart(std::max(lpnow->first,timeprev));
+        double tend(std::min(lpnext->first,timenow));
+        double weight((tend-tstart)/(timenow-timeprev));
+        std::transform(discounts.begin(),discounts.end(),discounts.begin(),
+                       std::bind2nd(std::multiplies<double>(),weight));
+
+        long nevents(1);
+        while(lpnext->first < timenow)
+        {
+            ++lpnow;
+            ++lpnext;
+            ++nevents;
+            DoubleVec1d total(lpnow->second.begin(),lpnow->second.end());
+            oneless = total;
+            std::transform(oneless.begin(),oneless.end(),oneless.begin(),
+                           std::bind2nd(std::minus<double>(),1.0));
+            std::transform(total.begin(),total.end(),oneless.begin(),
+                           total.begin(),std::multiplies<double>());
+
+            // weight each total by the fraction of the time it represents
+            tstart = std::max(lpnow->first,timeprev);
+            tend = std::min(lpnext->first,timenow);
+            weight = (tend-tstart)/(timenow-timeprev);
+            std::transform(total.begin(),total.end(),total.begin(),
+                           std::bind2nd(std::multiplies<double>(),weight));
+
+            std::transform(discounts.begin(),discounts.end(),total.begin(),
+                           discounts.begin(),std::plus<double>());
+        }
+
+        std::transform(discounts.begin(),discounts.end(),discounts.begin(),
+                       std::bind2nd(std::divides<double>(),nevents));
+
+        AppendRiser(fp,discounts);
+
+        timeprev = timenow;
+        timenow = m_stair.back().GetRootendTime();
+    }
+
+} // StickTimeManager::MakeStickUsingBranches
+
+//------------------------------------------------------------------------------------
+
+void StickTimeManager::MakeStickTilTime(const ForceParameters& fp, double endtime)
+{
+    ClearStick();
+
+    // add the first joint back in
+    AppendRiser(fp);
+
+    double timenow(m_stair.begin()->GetRootendTime());
+    while(timenow < endtime)
+    {
+        AppendRiser(fp);
+        timenow = m_stair.back().GetRootendTime();
+    }
+
+} // StickTimeManager::MakeStickTilTime
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void StickTimeManager::PrintStickThetasToFile(std::ofstream& of) const
+{
+    stair::const_iterator br;
+    for(br = m_stair.begin(); br != m_stair.end(); ++br)
+    {
+        of << "stick-thetas:";
+        DoubleVec1d thetas(br->GetThetas());
+        std::copy(thetas.begin(),thetas.end(),
+                  std::ostream_iterator<double>(of, " "));
+        of << std::endl;
+    }
+} // StickTimeManager::PrintStickThetasToFile
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void StickTimeManager::PrintStickFreqsToFile(std::ofstream& of) const
+{
+    of << m_stair.size() << std::endl;
+    stair::const_iterator br;
+    for(br = m_stair.begin(); br != m_stair.end(); ++br)
+    {
+        of << "stick-freqs:";
+        DoubleVec1d freqs(br->GetThetas());
+        double totaltheta(std::accumulate(freqs.begin(),freqs.end(), 0.0));
+        std::transform(freqs.begin(),freqs.end(),freqs.begin(),
+                       std::bind2nd(std::divides<double>(),totaltheta));
+        std::copy(freqs.begin(),freqs.end(),
+                  std::ostream_iterator<double>(of, " "));
+        of << std::endl;
+    }
+} // StickTimeManager::PrintStickFreqsToFile
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void StickTimeManager::PrintStickFreqsToFileAtTime(std::ofstream& of, double time) const
+{
+    stair::const_iterator br;
+    for(br = m_stair.begin(); br != m_stair.end(); ++br)
+    {
+        if (br->GetRootendTime() > time)
+        {
+            of << "stick-freqs at time " << time << ":";
+            DoubleVec1d freqs(br->GetThetas());
+            double totaltheta(std::accumulate(freqs.begin(),freqs.end(), 0.0));
+            std::transform(freqs.begin(),freqs.end(),freqs.begin(),
+                           std::bind2nd(std::divides<double>(),totaltheta));
+            std::copy(freqs.begin(),freqs.end(),
+                      std::ostream_iterator<double>(of, " "));
+            of << std::endl;
+            break;
+        }
+    }
+
+} // StickTimeManager::PrintStickFreqsToFileAtTime
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void StickTimeManager::PrintStickThetasToFileForJoint300(std::ofstream& of) const
+{
+    if (m_stair.size() <= 300) return; // need joint index 300, i.e. at least 301 joints
+
+    long count;
+    stair::const_iterator br;
+    of << "stick-thetas:";
+    for(br = m_stair.begin(), count=0; br != m_stair.end() && count < 300;
+        ++br, ++count) ;
+
+    assert(count == 300);
+
+    DoubleVec1d thetas(br->GetThetas());
+    std::copy(thetas.begin(),thetas.end(),
+              std::ostream_iterator<double>(of, " "));
+    of << std::endl;
+
+} // StickTimeManager::PrintStickThetasToFileForJoint300
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void StickTimeManager::PrintStickFreqsToFileForJoint300(std::ofstream& of) const
+{
+    if (m_stair.size() <= 300) return; // need joint index 300, i.e. at least 301 joints
+
+    long count;
+    stair::const_iterator br;
+    of << "stick-freqs:";
+    for(br = m_stair.begin(), count=0; br != m_stair.end() && count < 300;
+        ++br, ++count) ;
+
+    assert(count == 300);
+
+    DoubleVec1d freqs(br->GetThetas());
+    double totaltheta(std::accumulate(freqs.begin(),freqs.end(), 0.0));
+    std::transform(freqs.begin(),freqs.end(),freqs.begin(),
+                   std::bind2nd(std::divides<double>(),totaltheta));
+    std::copy(freqs.begin(),freqs.end(),
+              std::ostream_iterator<double>(of, " "));
+    of << std::endl;
+
+} // StickTimeManager::PrintStickFreqsToFileForJoint300
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void StickTimeManager::PrintStickToFile(std::ofstream& of) const
+{
+    long maxjoints(120);
+
+    long count(0);
+    stair::const_iterator br(m_stair.begin());
+    stair::const_iterator next(br);
+    ++next;
+
+    // cap output at maxjoints joints to avoid overly large files
+    for(; next != m_stair.end() && count < maxjoints; ++br, ++next, ++count)
+    {
+        DoubleVec1d freqs(br->GetThetas());
+        double totaltheta(std::accumulate(freqs.begin(),freqs.end(), 0.0));
+        std::transform(freqs.begin(),freqs.end(),freqs.begin(),
+                       std::bind2nd(std::divides<double>(),totaltheta));
+        of << "joint#" << count << " ";
+        of << "freqA=" << freqs[0] << " :lnfreqA=";
+        of << log(freqs[0]) << ";" << " with length=";
+        of << next->GetTipendTime() - br->GetTipendTime() << std::endl;
+    }
+} // StickTimeManager::PrintStickToFile
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d StickTimeManager::XpartThetasAtT(double t, const ForceParameters& fp)
+{
+    DoubleVec1d thetas(FindJointAtTime(t,fp)->GetThetas());
+    return thetas;
+} // StickTimeManager::XpartThetasAtT
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d StickTimeManager::PartitionThetasAtT(double t, force_type partforce, const ForceParameters& fp)
+{
+    DoubleVec1d thetas(FindJointAtTime(t,fp)->GetThetas());
+    return dynamic_cast<PartitionForce*>(*registry.GetForceSummary().
+                                         GetForceByTag(partforce))->SumXPartsToParts(thetas);
+
+} // StickTimeManager::PartitionThetasAtT
+
+//------------------------------------------------------------------------------------
+
+double StickTimeManager::TimeOfActiveCoal(double tstart,
+                                          const LongVec1d& lineages, const ForceParameters& fp, long& targetxpart,
+                                          double maxtime)
+{
+    // Computes the timestamp of the end of the interval by means of computing "delta t" and adding it to tstart.
+
+    // If there are no populations with multiple lineages, coalescence is impossible, so return immediately.
+    if (*max_element(lineages.begin(),lineages.end()) < 2)
+    {
+        targetxpart = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+
+    stair::iterator joint(FindJointAtTime(tstart,fp));
+    DoubleVec1d::size_type nxparts = fp.GetRegionalThetas().size();
+
+    DoubleVec1d newfrac(nxparts);
+    DoubleVec1d::iterator it;
+    for(it = newfrac.begin(); it != newfrac.end(); ++it)
+    {
+        // technically, we should be taking the log of (1-rnd[float]),
+        // but that's the same as log(rnd[float]), so omitted for
+        // simplicity/speed
+        (*it) = -1.0 * log(randomSource.Float());
+    }
+
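+    // Accumulation scheme: newfrac[i] is a unit-exponential deviate for
+    // cross-partition i.  Walking rootward one joint at a time, total[i]
+    // accumulates the integrated hazard k_i*(k_i-1)/theta_i * length; the
+    // first joint in which total[i] exceeds newfrac[i] contains the event,
+    // and the exact displacement is recovered by solving within that
+    // constant-theta joint.  The earliest such displacement wins.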
+    DoubleVec1d total(nxparts, 0.0), oldtotal(nxparts, 0.0);
+    double length(joint->GetRootendTime() - tstart), totallength(0.0);
+
+    double maxdisplace(maxtime - tstart);
+
+    while(true)
+    {
+        oldtotal = total;
+        DoubleVec1d thetas(joint->GetThetas());
+
+        assert(thetas.size() == newfrac.size());
+
+        double mindisplace(DBL_MAX);
+        DoubleVec1d::size_type i;
+        for (i = 0; i < nxparts; ++i)
+        {
+            if (lineages[i] < 2) continue;
+            total[i] += lineages[i] * (lineages[i] - 1.0) /
+                thetas[i] * length;
+            if (total[i] > newfrac[i])
+            {
+                double newdisplace(newfrac[i] - oldtotal[i]);
+                newdisplace /= lineages[i] * (lineages[i] - 1.0) / thetas[i];
+                newdisplace += totallength;
+                if (newdisplace < mindisplace)
+                {
+                    targetxpart = i;
+                    if (newdisplace > MAX_LENGTH)
+                        newdisplace = MAX_LENGTH;
+                    // BUG? -- danger probably too low, try DBL_BIG
+                    // and make sure that any matching code trying
+                    // to detect what kind of horserace conditions
+                    // these actually are is also changed
+                    mindisplace = newdisplace;
+                }
+            }
+        }
+        if (mindisplace < DBL_MAX) return tstart + mindisplace;
+        totallength += length;
+        if (totallength >= maxdisplace)
+        {
+            targetxpart = FLAGLONG;
+            return FLAGDOUBLE;
+        }
+
+        joint = NextJointAppendingIfLast(joint,fp);
+        length = joint->GetRootendTime() - joint->GetTipendTime();
+    }
+
+    // can't get here
+    assert(false);
+    targetxpart = FLAGLONG;
+    return FLAGDOUBLE;
+
+} // StickTimeManager::TimeOfActiveCoal
+
+//------------------------------------------------------------------------------------
+
+bool StickTimeManager::InactiveCoalImpossible(const LongVec1d& alines, const LongVec1d& ilines) const
+{
+    // JCHECK
+    assert(alines.size() == ilines.size());
+
+    LongVec1d::size_type xpart;
+    for(xpart = 0; xpart < alines.size(); ++xpart)
+        if (ilines[xpart] > 0 && alines[xpart] > 0) return false;
+
+    return true;
+
+} // StickTimeManager::InactiveCoalImpossible
+
+//------------------------------------------------------------------------------------
+
+double StickTimeManager::TimeOfInactiveCoal(double tstart,
+                                            const LongVec1d& activeline, const LongVec1d& inactivelines,
+                                            const ForceParameters& fp, long& targetxpart, double maxtime)
+{
+    // Computes the timestamp of the end of the interval by computing
+    // "delta t" and adding it to tstart.
+    // If no such coalescence is legal, return immediately.
+    if (InactiveCoalImpossible(activeline,inactivelines))
+    {
+        targetxpart = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+
+    stair::iterator joint(FindJointAtTime(tstart,fp));
+    DoubleVec1d::size_type nxparts = fp.GetRegionalThetas().size();
+    DoubleVec1d newfrac(nxparts);
+    DoubleVec1d::iterator it;
+    for(it = newfrac.begin(); it != newfrac.end(); ++it)
+        (*it) = -1.0 * log(randomSource.Float());
+
+    DoubleVec1d total(nxparts, 0.0), oldtotal(nxparts, 0.0);
+    double length(joint->GetRootendTime() - tstart), totallength(0.0);
+
+    double maxdisplace(maxtime - tstart);
+
+    while(true)
+    {
+        oldtotal = total;
+        DoubleVec1d thetas(joint->GetThetas());
+
+        assert(thetas.size() == newfrac.size());
+
+        double mindisplace(DBL_MAX);
+        DoubleVec1d::size_type i;
+        for (i = 0; i < nxparts; ++i)
+        {
+            if (inactivelines[i] < 1 || activeline[i] < 1) continue;
+            total[i] += 2.0 * activeline[i] * inactivelines[i] /
+                thetas[i] * length;
+            if (total[i] > newfrac[i])
+            {
+                double newdisplace(newfrac[i] - oldtotal[i]);
+                newdisplace /= 2.0 * activeline[i] * inactivelines[i] /
+                    thetas[i];
+                newdisplace += totallength;
+                if (newdisplace < mindisplace)
+                {
+                    targetxpart = i;
+                    if (newdisplace > MAX_LENGTH)
+                        newdisplace = MAX_LENGTH;
+                    // BUG? -- this cap is probably too low; consider
+                    // DBL_BIG instead, and make sure any matching code
+                    // that detects which of these competing ("horserace")
+                    // events occurred is changed as well.
+                    mindisplace = newdisplace;
+                }
+            }
+        }
+
+        if (mindisplace < DBL_MAX) return tstart + mindisplace;
+
+        totallength += length;
+
+        if (totallength >= maxdisplace)
+        {
+            targetxpart = FLAGLONG;
+            return FLAGDOUBLE;
+        }
+
+        joint = NextJointAppendingIfLast(joint,fp);
+        length = joint->GetRootendTime() - joint->GetTipendTime();
+    }
+
+    // can't get here
+    assert(false);
+    targetxpart = FLAGLONG;
+    return FLAGDOUBLE;
+
+} // StickTimeManager::TimeOfInactiveCoal
+
+//------------------------------------------------------------------------------------
+
+bool StickTimeManager::MutationImpossible(const LongVec1d& lineages,
+                                          const DoubleVec1d& mus) const
+{
+    assert(mus.size() == lineages.size());
+    LongVec1d::size_type part;
+    for(part = 0; part < lineages.size(); ++part)
+        if (lineages[part] * mus[part] > 0.0) return false;
+
+    return true;
+
+} // StickTimeManager::MutationImpossible
+
+//------------------------------------------------------------------------------------
+
+double StickTimeManager::TimeOfTraitMutation(double tstart,
+                                             const LongVec1d& lineages, const ForceParameters& fp, long& tiptrait,
+                                             long& roottrait, double maxtime)
+{
+    // Computes the timestamp of the end of the interval by computing
+    // "delta t" and adding it to tstart.
+    DoubleVec1d mus(fp.GetOnlyDiseaseRates());
+    if (MutationImpossible(lineages,mus))
+    {
+        tiptrait = FLAGLONG;
+        roottrait = FLAGLONG;
+        return FLAGDOUBLE;
+    }
+
+    DoubleVec1d::size_type nparts = mus.size();
+    DoubleVec1d newfrac(nparts);
+    DoubleVec1d::iterator it;
+    for(it = newfrac.begin(); it != newfrac.end(); ++it)
+    {
+        // Technically we should take the log of (1 - rnd[float]), but
+        // since rnd[float] is uniform on (0,1) that has the same
+        // distribution as log(rnd[float]), so we use the latter for
+        // simplicity/speed.
+        (*it) = -1.0 * log(randomSource.Float());
+    }
+
+    DoubleVec1d total(nparts, 0.0), oldtotal(nparts, 0.0);
+    stair::iterator joint(FindJointAtTime(tstart,fp));
+    double length(joint->GetRootendTime() - tstart), totallength(0.0);
+
+    double maxdisplace(maxtime - tstart);
+    assert(nparts == 2);
+
+    while(true)
+    {
+        oldtotal = total;
+        DoubleVec1d thetas(joint->GetThetas());
+
+        assert(thetas.size() == 2);
+
+        double mindisplace(DBL_MAX);
+
+        // We unroll the loop since there are only 2 partitions; doing so
+        // also avoids the partition/xpartition mapping....
+        if (lineages[0]*mus[0] > 0)
+        {
+            total[0] += lineages[0] * mus[0] * thetas[1] / thetas[0] * length;
+            if (total[0] > newfrac[0])
+            {
+                double newdisplace(newfrac[0] - oldtotal[0]);
+                newdisplace /= lineages[0] * mus[0] * thetas[1] / thetas[0];
+                newdisplace += totallength;
+                if (newdisplace < mindisplace)
+                {
+                    tiptrait = 0;
+                    roottrait = 1;
+                    mindisplace = newdisplace;
+                }
+            }
+        }
+
+        if (lineages[1]*mus[1] > 0)
+        {
+            total[1] += lineages[1] * mus[1] * thetas[0] / thetas[1] * length;
+            if (total[1] > newfrac[1])
+            {
+                double newdisplace(newfrac[1] - oldtotal[1]);
+                newdisplace /= lineages[1] * mus[1] * thetas[0] / thetas[1];
+                newdisplace += totallength;
+                if (newdisplace < mindisplace)
+                {
+                    tiptrait = 1;
+                    roottrait = 0;
+                    mindisplace = newdisplace;
+                }
+            }
+        }
+
+        if (mindisplace != DBL_MAX && mindisplace > MAX_LENGTH)
+            mindisplace = MAX_LENGTH;
+        // BUG? -- this cap is probably too low; consider DBL_BIG instead,
+        // and make sure any matching code that detects which of these
+        // competing ("horserace") events occurred is changed as well.
+
+#if 0
+        DoubleVec1d::size_type i;
+        for (i = 0; i < nparts; ++i)
+        {
+            if (lineages[i] < 1) continue;
+            total[i] += lineages[i] * (lineages[i] - 1.0) /
+                thetas[i] * length;
+            if (total[i] > newfrac[i])
+            {
+                double newdisplace(newfrac[i] - oldtotal[i]);
+                newdisplace /= lineages[i] * (lineages[i] - 1.0) / thetas[i];
+                newdisplace += totallength;
+                if (newdisplace < mindisplace)
+                {
+                    targetxpart = i;
+                    if (mindisplace > MAX_LENGTH)
+                        mindisplace = MAX_LENGTH;
+                    // BUG? -- danger probably too low, try DBL_BIG
+                    // and make sure that any matching code trying
+                    // to detect what kind of horserace conditions
+                    // these actually are is also changed
+                    mindisplace = newdisplace;
+                }
+            }
+        }
+#endif
+
+        if (mindisplace < DBL_MAX) return tstart + mindisplace;
+        totallength += length;
+        if (totallength >= maxdisplace)
+        {
+            tiptrait = FLAGLONG;
+            roottrait = FLAGLONG;
+            return FLAGDOUBLE;
+        }
+
+        joint = NextJointAppendingIfLast(joint,fp);
+        length = joint->GetRootendTime() - joint->GetTipendTime();
+    }
+
+    // can't get here
+    assert(false);
+    tiptrait = FLAGLONG;
+    roottrait = FLAGLONG;
+    return FLAGDOUBLE;
+
+} // StickTimeManager::TimeOfTraitMutation
+
+//------------------------------------------------------------------------------------
+
+stair::iterator StickTimeManager::FindJointAtTime(double time, const ForceParameters& fp)
+{
+    stair::iterator joint;
+    for (joint = m_stair.begin(); joint != m_stair.end(); ++joint)
+    {
+        if (time < joint->GetRootendTime()) return joint;
+    }
+
+    // if we got here, the stick ran out too soon
+    AppendRiser(fp);
+    --joint;
+    while(time >= joint->GetRootendTime())
+    {
+        AppendRiser(fp);
+        ++joint;
+    }
+    return joint;
+
+} // StickTimeManager::FindJointAtTime
+
+//------------------------------------------------------------------------------------
+
+stair::iterator StickTimeManager::NextJointAppendingIfLast(
+    const stair::iterator& start, const ForceParameters& fp)
+{
+    stair::iterator joint(start);
+    ++joint;
+    if (joint == m_stair.end())
+    {
+        AppendRiser(fp);
+        --joint;
+    }
+
+    return joint;
+
+} // StickTimeManager::NextJointAppendingIfLast
+
+//------------------------------------------------------------------------------------
+
+void StickTimeManager::AppendRiser(const ForceParameters& fp)
+{
+    double newtiptime((m_stair.empty() ? 0.0 : m_stair.back().GetRootendTime()));
+    double newroottime(NextRootTime(newtiptime,fp));
+    DoubleVec1d newthetas(NextThetas(newtiptime,fp));
+    m_stair.push_back(StairRiser(newthetas,newtiptime,newroottime));
+
+} // StickTimeManager::AppendRiser
+
+//------------------------------------------------------------------------------------
+
+void StickTimeManager::AppendRiser(const ForceParameters& fp, const DoubleVec1d& discounts)
+{
+    double newtiptime((m_stair.empty() ? 0.0 : m_stair.back().GetRootendTime()));
+    double newroottime(NextRootTime(newtiptime,fp));
+    DoubleVec1d newthetas(NextThetas(newtiptime,fp,discounts));
+    m_stair.push_back(StairRiser(newthetas,newtiptime,newroottime));
+} // StickTimeManager::AppendRiser(twoargs)
+
+//------------------------------------------------------------------------------------
+
+void StickTimeManager::PrintStick() const
+{
+    stair::const_iterator it;
+    for (it = m_stair.begin(); it != m_stair.end(); ++it)
+    {
+        DoubleVec1d ths(it->GetThetas());
+        std::cout << "Joint from " << it->GetTipendTime() <<
+            " to " << it->GetRootendTime() << " ";
+        std::cout << ths[0] << "/" << ths[1] << std::endl;
+    }
+} // PrintStick debug function
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+ExpGrowStickTimeManager::ExpGrowStickTimeManager()
+    : StickTimeManager(),
+      m_percentchange(defaults::perThetaChange),
+      m_negln(log(1.0+m_percentchange)),m_posln(log(1.0-m_percentchange))
+{
+} // ExpGrowStickTimeManager default ctor
+
+//------------------------------------------------------------------------------------
+
+double ExpGrowStickTimeManager::NextRootTime(double timenow, const
+                                             ForceParameters& fp) const
+{
+    double newtime(DBL_BIG);
+    DoubleVec1d params(fp.GetGrowthRates());
+    DoubleVec1d::const_iterator param;
+    assert(!params.empty());  // no growth?!
+    for(param = params.begin(); param != params.end(); ++param)
+    {
+        double ptime((*param < 0.0) ? timenow - m_negln/(*param) :
+                     timenow - m_posln/(*param));
+        if (ptime < newtime) newtime = ptime;
+    }
+
+    return newtime;
+
+} // ExpGrowStickTimeManager::NextRootTime
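+
+// For reference, a sketch of where the constants come from: with
+// theta(t) = theta0 * exp(-g*t), the time at which theta has changed by a
+// fraction p of its current value satisfies exp(-g*dt) = 1 -/+ p, so
+// dt = -log(1 -/+ p) / g.  m_posln = log(1-p) covers shrinking thetas
+// (g > 0) and m_negln = log(1+p) covers growing ones (g < 0), which is why
+// the branch above switches on the sign of the growth rate; the earliest
+// such dt over all populations sets the next joint boundary.  Illustrative
+// numbers: p = 0.01 and g = 100 give dt = -log(0.99)/100 ~= 1.0e-4.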
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ExpGrowStickTimeManager::NextThetas(double nexttime, const ForceParameters& fp) const
+{
+    DoubleVec1d thetas(fp.GetRegionalThetas()),growths(fp.GetGrowthRates());
+    assert(thetas.size() == growths.size());
+    if (nexttime == 0.0) return thetas;  // don't bother, we're at the tips
+    DoubleVec1d newthetas(thetas.size());
+    unsigned long param;
+    for(param = 0; param < thetas.size(); ++param)
+    {
+        newthetas[param] = SafeProductWithExp(thetas[param], -growths[param]*nexttime);
+    }
+
+    return newthetas;
+
+} // ExpGrowStickTimeManager::NextThetas
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ExpGrowStickTimeManager::NextThetas(double nexttime, const ForceParameters& fp, const DoubleVec1d&) const
+{
+    // We ignore the third argument as it shouldn't make any difference for us.
+    return NextThetas(nexttime,fp);
+} // ExpGrowStickTimeManager::NextThetas
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+double SelectStickTimeManager::NextRootTime(double timenow, const ForceParameters& fp) const
+{
+    // JDEBUG--constant length joints for now?
+    // double newlength = 0.002; // for lpl'ish data
+    double newlength = 0.0015; // for theta around 1.0
+    // double newlength = 0.015; // for comparison with sticktree2
+    // double newlength = 0.4;  // for artificial sims
+    return timenow + newlength;
+} // SelectStickTimeManager::NextRootTime
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d SelectStickTimeManager::NextThetas(double nexttime, const ForceParameters& fp) const
+{
+    DoubleVec1d newthetas;
+
+    if (m_stair.empty())                // first riser being built
+    {
+        newthetas = fp.GetRegionalThetas();
+        return newthetas;
+    }
+
+    stair::const_iterator prevjoint(m_stair.end());
+    if (nexttime >= m_stair.back().GetRootendTime()) // appending a new stair
+    {
+        --prevjoint;
+        newthetas = CalculateThetasWithPrevOnly(prevjoint,fp);
+    }
+    else if (nexttime > m_stair.back().GetTipendTime()) // modifying last thetas
+    {
+        // You should never pick the first joint, which is the same as the
+        // last joint when there is only one joint.
+        assert(false);
+        --prevjoint;
+        --prevjoint;
+        newthetas = CalculateThetasWithPrevOnly(prevjoint,fp);
+    }
+    else
+    {
+        // modifying thetas with both a previous and next riser present
+        // JDEBUG--can't use
+        //    stair::const_iterator chosen(FindJointAtTime(nexttime,fp));
+        // because it's non-const....
+        stair::const_iterator chosen(m_stair.begin());
+        while(chosen->GetRootendTime() < nexttime)
+        {
+            ++chosen;
+            assert(chosen != m_stair.end()); // or even --(m_stair.end())!!
+        }
+        prevjoint = chosen;
+        --prevjoint;
+        stair::const_iterator nextjoint = chosen;
+        ++nextjoint;
+        newthetas = CalculateThetasWithPrevAndNext(prevjoint,chosen,nextjoint,fp);
+    }
+
+    assert(!newthetas.empty());
+    return newthetas;
+
+} // SelectStickTimeManager::NextThetas
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d SelectStickTimeManager::NextThetas(double nexttime, const ForceParameters& fp,
+                                               const DoubleVec1d& discounts) const
+{
+    DoubleVec1d newthetas;
+
+    if (m_stair.empty())                // first riser being built
+    {
+        newthetas = fp.GetRegionalThetas();
+        return newthetas;
+    }
+
+    stair::const_iterator prevjoint(m_stair.end());
+    --prevjoint;
+    newthetas = CalculateThetasWithPrevOnly(prevjoint,fp,discounts);
+
+    return newthetas;
+
+} // SelectStickTimeManager::NextThetas
+
+//------------------------------------------------------------------------------------
+
+double SelectStickTimeManager::Mean_delta_x(double x, double s, double mu,
+                                            double nu, double delta_t) const
+// compute M(delta_x) = delta_t * (s*x*(1-x) + mu*(1-x) - nu*x)
+{
+    double mean_delta(delta_t*(s*x*(1.0 - x) + mu*(1.0 - x) - nu*x));
+    assert (-1.0 <= mean_delta && mean_delta <= 1.0);
+    return mean_delta;
+} // SelectStickTimeManager::Mean_delta_x
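+
+// Numeric sanity check of the formula above (values chosen purely for
+// illustration): with s = 0.01, x = 0.5, mu = nu = 1e-4 and
+// delta_t = 0.0015, the two mutation terms cancel and
+//     M(delta_x) = 0.0015 * (0.01 * 0.5 * 0.5) = 3.75e-6,
+// comfortably inside the asserted [-1, 1] range.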
+
+//------------------------------------------------------------------------------------
+
+double SelectStickTimeManager::SD_delta_x(double x, double theta,
+                                          double delta_t) const
+{
+    double var_delta(2.0*delta_t*x*(1.0 - x)/theta);
+    assert(var_delta >= 0.0);
+    return sqrt(var_delta);
+} // SelectStickTimeManager::SD_delta_x
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d SelectStickTimeManager::CalculateThetasWithPrevAndNext(
+    stair::const_iterator prevjoint, stair::const_iterator chosen,
+    stair::const_iterator nextjoint, const ForceParameters& fp) const
+{
+    //MDEBUG If not all stick-joints are the same length, these formulae
+    //may need to be weighted so that more distant points have less
+    //influence than closer ones.
+
+    double scoeff(fp.GetLogisticSelectionCoefficient()[0]);
+    DoubleVec1d murates(fp.GetOnlyDiseaseRates());
+    assert(murates.size() == 2);
+    double toBigA(murates[0]), toSmallA(murates[1]);
+
+    DoubleVec1d nextths(nextjoint->GetThetas());
+    DoubleVec1d prevths(prevjoint->GetThetas());
+    assert(nextths.size() == 2 && prevths.size() == 2);
+    double nextfreq = nextths[0] / (nextths[0] + nextths[1]);
+    double prevfreq = prevths[0] / (prevths[0] + prevths[1]);
+    double delta_t1 = chosen->GetRootendTime() - chosen->GetTipendTime();
+    double delta_t2 = prevjoint->GetRootendTime()- prevjoint->GetTipendTime();
+    double mean_x =
+        (nextfreq + Mean_delta_x(nextfreq,scoeff,toBigA,toSmallA,delta_t1) +
+         prevfreq + Mean_delta_x(prevfreq,scoeff,toBigA,toSmallA,delta_t2))
+        / 2.0;
+    double sd_delta = (SD_delta_x(nextfreq, nextths[0]+nextths[1],delta_t1) +
+                       SD_delta_x(prevfreq,prevths[0]+prevths[1],delta_t2)) / 2.0;
+
+    return PickNewThetasWithMeanAndSD(mean_x,sd_delta,fp);
+
+} // SelectStickTimeManager::CalculateThetasWithPrevAndNext
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d SelectStickTimeManager::CalculateThetasWithPrevOnly(
+    stair::const_iterator prevjoint, const ForceParameters& fp) const
+{
+    double scoeff(fp.GetLogisticSelectionCoefficient()[0]);
+    DoubleVec1d murates(fp.GetOnlyDiseaseRates());
+    assert(murates.size() == 2);
+    double toBigA(murates[0]), toSmallA(murates[1]);
+
+    DoubleVec1d prevths(prevjoint->GetThetas());
+    double prevfreq = prevths[0] / (prevths[0] + prevths[1]);
+    double delta_t1 = prevjoint->GetRootendTime() - prevjoint->GetTipendTime();
+    double mean_x = prevfreq + Mean_delta_x(prevfreq, scoeff, toBigA, toSmallA, delta_t1);
+    double sd_delta = SD_delta_x(prevfreq, prevths[0] + prevths[1], delta_t1);
+
+    return PickNewThetasWithMeanAndSD(mean_x,sd_delta,fp);
+
+} // SelectStickTimeManager::CalculateThetasWithPrevOnly
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d SelectStickTimeManager::CalculateThetasWithPrevOnly(
+    stair::const_iterator prevjoint, const ForceParameters& fp,
+    const DoubleVec1d& discounts) const
+{
+    double scoeff(fp.GetLogisticSelectionCoefficient()[0]);
+    DoubleVec1d murates(fp.GetOnlyDiseaseRates());
+    assert(murates.size() == 2);
+    double toBigA(murates[0]), toSmallA(murates[1]);
+
+    DoubleVec1d prevths(prevjoint->GetThetas());
+    double totaltheta = prevths[0] + prevths[1];
+    double thetasq = totaltheta * totaltheta;
+    double prevfreq = prevths[0] / totaltheta;
+    double delta_t1 = prevjoint->GetRootendTime() - prevjoint->GetTipendTime();
+    double mean_x = prevfreq + Mean_delta_x(prevfreq, scoeff, toBigA, toSmallA, delta_t1);
+    mean_x += (discounts[0]*prevths[1]/prevths[0] -
+               discounts[1]*prevths[0]/prevths[1]) * delta_t1 * delta_t1 * 2.0 / thetasq;
+    double sd_delta = SD_delta_x(prevfreq, prevths[0] + prevths[1], delta_t1);
+
+    return PickNewThetasWithMeanAndSD(mean_x,sd_delta,fp);
+
+} // SelectStickTimeManager::CalculateThetasWithPrevOnly
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d SelectStickTimeManager::PickNewThetasWithMeanAndSD(
+    double mean, double sd, const ForceParameters& fp) const
+{
+    DoubleVec1d newthetas(2, 0.0);
+    double new_x;
+    do {
+        double pick = randomSource.Normal();
+        new_x = pick * sd + mean;
+    } while (new_x >= 1.0 || new_x <= 0.0);
+
+    DoubleVec1d tipthetas(fp.GetRegionalThetas());
+    double totaltheta(std::accumulate(tipthetas.begin(), tipthetas.end(), 0.0));
+    // NB:  tipthetas are used here because they are handy, and we
+    // assume that totaltheta is conserved throughout the tree.  Fix
+    // if allowing totaltheta to change!
+    newthetas[0] = new_x * totaltheta;
+    newthetas[1] = (1.0 - new_x) * totaltheta;
+
+    return newthetas;
+
+} // SelectStickTimeManager::PickNewThetasWithMeanAndSD
+
+//____________________________________________________________________________________
diff --git a/src/force/timemanager.h b/src/force/timemanager.h
new file mode 100644
index 0000000..44fe6f8
--- /dev/null
+++ b/src/force/timemanager.h
@@ -0,0 +1,291 @@
+// $Id: timemanager.h,v 1.8 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef TIMEMANAGER_H
+#define TIMEMANAGER_H
+
+#include <iostream>                     // for debug function PrintStickThetasToFile() and related debugs
+#include <map>                          // for MakeStickUsingBranches()
+
+#include "vectorx.h"
+#include "constants.h"
+#include "stair.h"
+
+//------------------------------------------------------------------------------------
+
+class ForceParameters;
+class Random;
+class Force;
+class TreeSummary;
+
+//------------------------------------------------------------------------------------
+
+class TimeManager
+{
+  public:
+    // creation, destruction, copying semantics
+    TimeManager();
+    TimeManager(const TimeManager& src);
+    virtual TimeManager& operator=(const TimeManager& src);
+    virtual ~TimeManager() {};
+
+    // SizeAtTimeT functionality
+    virtual DoubleVec1d XpartThetasAtT(double t, const ForceParameters& fp) =0;
+    virtual DoubleVec1d PartitionThetasAtT(double t, force_type force, const ForceParameters& fp) =0;
+
+    virtual double TimeOfActiveCoal(double tstart, const LongVec1d& lineages,
+                                    const ForceParameters& fp, long& targetxpart, double maxtime) =0;
+
+    virtual double TimeOfInactiveCoal(double tstart, const LongVec1d& activeline,
+                                      const LongVec1d& inactivelines, const ForceParameters& fp,
+                                      long& targetxpart, double maxtime) = 0;
+
+    virtual double TimeOfTraitMutation(double tstart, const LongVec1d& lineages,
+                                       const ForceParameters& fp, long& tiptrait,
+                                       long& roottrait, double maxtime) = 0;
+
+    // To avoid dynamic_cast, these are defined on the base class even
+    // if not useful there.  Most of them are no-ops on the base class.
+    virtual bool UsingStick() const;
+    virtual void CopyStick(const TimeManager& src);
+    virtual void ClearStick();
+    virtual void SetStickParameters(const ForceParameters& fp);
+    virtual void ScoreStick(TreeSummary& treesum) const;
+    virtual void ChopOffStickAt(double lasttime);
+    virtual void MakeStickUsingBranches(const ForceParameters& fp,
+                                        const std::vector<std::pair<double,LongVec1d> >& lpcounts);
+    virtual void MakeStickTilTime(const ForceParameters& fp, double);
+
+    // Debugging functions.
+    virtual void PrintStickThetasToFile(std::ofstream& of) const;
+    virtual void PrintStickFreqsToFile(std::ofstream& of) const;
+    virtual void PrintStickFreqsToFileAtTime(std::ofstream& of, double time) const;
+    virtual void PrintStickThetasToFileForJoint300(std::ofstream& of) const;
+    virtual void PrintStickFreqsToFileForJoint300(std::ofstream& of) const;
+    virtual long GetStairSize() const { return 0L; };
+    virtual void PrintStickToFile(std::ofstream& of) const;
+
+  protected:
+    // convenience reference to random number generator
+    Random& randomSource;
+};
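+
+// A minimal usage sketch (illustrative only; `tm', `bestTime' and
+// `bestXpart' are hypothetical names, and FLAGDOUBLE marking "no event"
+// follows the accompanying .cpp): a caller proposes each event type and
+// keeps the earliest feasible one.
+//
+//   long xpart;
+//   double tcoal = tm->TimeOfActiveCoal(tstart, lineages, fp, xpart, maxtime);
+//   if (tcoal != FLAGDOUBLE && tcoal < bestTime)
+//   {
+//       bestTime  = tcoal;
+//       bestXpart = xpart;
+//   }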
+
+//-----------------------------------------------------------------------
+
+class ConstantTimeManager : public TimeManager
+{
+  public:
+    // SizeAtTimeT functionality
+    virtual DoubleVec1d XpartThetasAtT(double t, const ForceParameters& fp);
+    virtual DoubleVec1d PartitionThetasAtT(double t, force_type force,
+                                           const ForceParameters& fp);
+
+    virtual double TimeOfActiveCoal(double tstart, const LongVec1d& lineages,
+                                    const ForceParameters& fp, long& targetxpart, double);
+
+    virtual double TimeOfInactiveCoal(double tstart, const LongVec1d&
+                                      activelines, const LongVec1d& inactivelines, const ForceParameters& fp,
+                                      long& targetxpart, double);
+
+    virtual double TimeOfTraitMutation(double tstart, const LongVec1d& lineages,
+                                       const ForceParameters& fp, long& tiptrait, long& roottrait, double);
+};
+
+//-----------------------------------------------------------------------
+
+class ExpGrowTimeManager : public TimeManager
+{
+  public:
+    // SizeAtTimeT functionality
+    virtual DoubleVec1d XpartThetasAtT(double t, const ForceParameters& fp);
+    virtual DoubleVec1d PartitionThetasAtT(double t, force_type force,
+                                           const ForceParameters& fp);
+
+    virtual double TimeOfActiveCoal(double tstart, const LongVec1d& lineages,
+                                    const ForceParameters& fp, long& targetxpart, double);
+
+    virtual double TimeOfInactiveCoal(double tstart, const LongVec1d&
+                                      activelines, const LongVec1d& inactivelines, const ForceParameters& fp,
+                                      long& targetxpart, double);
+
+    virtual double TimeOfTraitMutation(double tstart, const LongVec1d& lineages,
+                                       const ForceParameters& fp, long& tiptrait, long& roottrait, double);
+};
+
+//-----------------------------------------------------------------------
+
+class LogSelectTimeManager : public TimeManager
+{
+
+  public:
+    // SizeAtTimeT functionality
+    virtual DoubleVec1d XpartThetasAtT(double t, const ForceParameters& fp);
+    virtual DoubleVec1d PartitionThetasAtT(double t, force_type force,
+                                           const ForceParameters& fp);
+
+    virtual double TimeOfActiveCoal(double tstart, const LongVec1d& lineages,
+                                    const ForceParameters& fp, long& targetxpart, double);
+
+    virtual double TimeOfInactiveCoal(double tstart, const LongVec1d&
+                                      activelines, const LongVec1d& inactivelines, const ForceParameters& fp,
+                                      long& targetxpart, double);
+
+    virtual double TimeOfTraitMutation(double tstart, const LongVec1d& lineages,
+                                       const ForceParameters& fp, long& tiptrait, long& roottrait, double);
+
+  private:
+    double LogisticSelectionPickTime(const double& theta_A0, const double& theta_a0,
+                                     const double& s, const double& t_s, const double& dtau_A,
+                                     const double& dtau_a, long* pChosenPopulation);
+};
+
+//-----------------------------------------------------------------------
+
+class StickTimeManager : public TimeManager
+{
+  public:
+    virtual TimeManager& operator=(const TimeManager& src);
+    virtual bool UsingStick() const;
+    virtual void CopyStick(const TimeManager& src);
+    virtual void ClearStick();
+    virtual void SetStickParameters(const ForceParameters& fp);
+    virtual void ScoreStick(TreeSummary& treesum) const;
+    virtual void ChopOffStickAt(double lasttime);
+    virtual void MakeStickUsingBranches(const ForceParameters& fp,
+                                        const std::vector<std::pair<double,LongVec1d> >& lpcounts);
+    virtual void MakeStickTilTime(const ForceParameters& fp, double endtime);
+
+    // Debugging functions.
+    virtual void PrintStickThetasToFile(std::ofstream& of) const;
+    virtual void PrintStickFreqsToFile(std::ofstream& of) const;
+    virtual void PrintStickFreqsToFileAtTime(std::ofstream& of, double time) const;
+    virtual void PrintStickThetasToFileForJoint300(std::ofstream& of) const;
+    virtual void PrintStickFreqsToFileForJoint300(std::ofstream& of) const;
+    virtual long GetStairSize() const { return m_stair.size(); };
+
+    // Debugging function.
+    // PrintStickToFile() is meant to match the output of TreeSummary::PrintStickSummaryToFile()
+    virtual void PrintStickToFile(std::ofstream& of) const;
+
+    virtual double TimeOfActiveCoal(double tstart, const LongVec1d& lineages,
+                                    const ForceParameters& fp, long& targetxpart, double maxtime);
+
+    virtual double TimeOfInactiveCoal(double tstart, const LongVec1d&
+                                      activelines, const LongVec1d& inactivelines, const ForceParameters& fp,
+                                      long& targetxpart, double maxtime);
+
+    virtual double TimeOfTraitMutation(double tstart, const LongVec1d& lineages,
+                                       const ForceParameters& fp, long& tiptrait, long& roottrait, double maxtime);
+    virtual DoubleVec1d XpartThetasAtT(double t, const ForceParameters& fp);
+
+    virtual DoubleVec1d PartitionThetasAtT(double t, force_type force,
+                                           const ForceParameters& fp);
+
+  protected:
+    // the stick
+    stair m_stair;
+
+    // FindJointAtTime() is non-const because it may have to add joints
+    // when the stick is too short
+    stair::iterator FindJointAtTime(double time, const ForceParameters& fp);
+    stair::iterator NextJointAppendingIfLast(const stair::iterator& start,
+                                             const ForceParameters& fp );
+
+    void AppendRiser(const ForceParameters& fp);
+    void AppendRiser(const ForceParameters& fp, const DoubleVec1d& discounts);
+
+    virtual double NextRootTime(double timenow, const ForceParameters& fp)
+        const = 0;
+
+    virtual DoubleVec1d NextThetas(double nexttime, const ForceParameters&
+                                   fp) const = 0;
+
+    virtual DoubleVec1d NextThetas(double nexttime, const ForceParameters&
+                                   fp, const DoubleVec1d& discounts) const = 0;
+
+    // debug function
+    void PrintStick() const;
+
+  private:
+    bool InactiveCoalImpossible(const LongVec1d& alines, const LongVec1d& ilines)
+        const;
+
+    bool MutationImpossible(const LongVec1d& lineages, const DoubleVec1d& mus)
+        const;
+
+};
+
+//-----------------------------------------------------------------------
+
+class ExpGrowStickTimeManager : public StickTimeManager
+{
+  public:
+    ExpGrowStickTimeManager();
+    // we accept StickTimeManager's defaults for the public interface
+    // --TimeOfActiveCoal,TimeOfInactiveCoal,XpartThetasAtT,
+    //   PartitionThetasAtT,TimeOfTraitMutation
+
+  protected:
+    virtual double NextRootTime(double timenow, const ForceParameters& fp)
+        const;
+
+    virtual DoubleVec1d NextThetas(double nexttime, const ForceParameters& fp)
+        const;
+
+    virtual DoubleVec1d NextThetas(double nexttime, const ForceParameters& fp,
+                                   const DoubleVec1d& discounts) const;
+
+  private:
+    double m_percentchange;   // how much change allowed in theta per joint
+    double m_negln, m_posln;  // used in calculating new times for stick joints
+};
+
+//-----------------------------------------------------------------------
+
+class SelectStickTimeManager : public StickTimeManager
+{
+  public:
+    // we accept StickTimeManager's defaults for the public interface
+    // --TimeOfActiveCoal,TimeOfInactiveCoal,XpartThetasAtT,
+    //   PartitionThetasAtT,TimeOfTraitMutation
+
+  protected:
+    virtual double NextRootTime(double timenow, const ForceParameters& fp)
+        const;
+
+    virtual DoubleVec1d NextThetas(double nexttime, const ForceParameters& fp)
+        const;
+
+    virtual DoubleVec1d NextThetas(double nexttime, const ForceParameters& fp,
+                                   const DoubleVec1d& discounts) const;
+
+  private:
+    double Mean_delta_x(double x, double s, double mu, double nu,
+                        double delta_t) const;
+
+    double SD_delta_x(double x, double theta, double delta_t) const;
+
+    DoubleVec1d CalculateThetasWithPrevAndNext(stair::const_iterator prevjoint,
+                                               stair::const_iterator chosen, stair::const_iterator nextjoint,
+                                               const ForceParameters& fp) const;
+
+    DoubleVec1d CalculateThetasWithPrevOnly(stair::const_iterator prevjoint,
+                                            const ForceParameters& fp) const;
+
+    DoubleVec1d CalculateThetasWithPrevOnly(stair::const_iterator prevjoint,
+                                            const ForceParameters& fp, const DoubleVec1d& discounts) const;
+
+    DoubleVec1d PickNewThetasWithMeanAndSD(double mean, double sd,
+                                           const ForceParameters& fp) const;
+};
+
+#endif // TIMEMANAGER_H
+
+//____________________________________________________________________________________
diff --git a/src/force/timesize.cpp b/src/force/timesize.cpp
new file mode 100644
index 0000000..941f58e
--- /dev/null
+++ b/src/force/timesize.cpp
@@ -0,0 +1,115 @@
+// $Id: timesize.cpp,v 1.10 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "force.h"
+#include "mathx.h"    // for SafeProductWithExp()
+#include "timesize.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d GrowTimeSize::PartitionSizeAt(force_type partforce,
+                                          const ForceParameters& fp, double etime) const
+{
+    DoubleVec1d thetas(fp.GetRegionalThetas()), growths(fp.GetGrowthRates());
+    return dynamic_cast<PartitionForce*>(*registry.GetForceSummary().
+                                         GetForceByTag(partforce))->SumXPartsToParts(thetas, growths, etime);
+} // GrowTimeSize::PartitionSizeAt
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d GrowTimeSize::SizeAt(const ForceParameters& fp, double etime) const
+{
+    DoubleVec1d thetas(fp.GetRegionalThetas()), growths(fp.GetGrowthRates());
+    assert(thetas.size() == growths.size());
+    DoubleVec1d newthetas(thetas.size());
+    unsigned long param;
+    for(param = 0; param < thetas.size(); ++param)
+    {
+        newthetas[param] = SafeProductWithExp(thetas[param],-growths[param]*etime);
+    }
+
+    return newthetas;
+} // GrowTimeSize::SizeAt
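+
+// Numeric illustration (arbitrary values): with theta = 1.0, growth
+// g = 100 and etime = 0.01, SizeAt() returns
+//     1.0 * exp(-100 * 0.01) = exp(-1) ~= 0.37,
+// i.e. under positive growth the population was roughly a third of its
+// present size that far back in the past.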
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DoubleVec1d StaticTimeSize::PartitionSizeAt(force_type partforce,
+                                            const ForceParameters& fp, double etime) const
+{
+    DoubleVec1d thetas(fp.GetRegionalThetas());
+    return dynamic_cast<PartitionForce*>(*registry.GetForceSummary().
+                                         GetForceByTag(partforce))->SumXPartsToParts(thetas);
+} // StaticTimeSize::PartitionSizeAt
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d StaticTimeSize::SizeAt(const ForceParameters& fp, double etime)
+    const
+{
+    DoubleVec1d thetas(fp.GetRegionalThetas());
+    return thetas;
+} // StaticTimeSize::SizeAt
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DoubleVec1d LogisticSelectionTimeSize::SizeAt(const ForceParameters& fp,
+                                              double t) const
+{
+    DoubleVec1d thetas(fp.GetRegionalThetas()),
+        selcoeffs(fp.GetLogisticSelectionCoefficient());
+
+    if (2 != thetas.size())
+    {
+        string msg = "LogisticSelectionTimeSize::SizeAt(), needed to receive ";
+        msg += "two thetas, one for each allele, but instead received ";
+        msg += ToString(thetas.size());
+        msg += " thetas.";
+        throw implementation_error(msg);
+    }
+
+    if (1 != selcoeffs.size())
+    {
+        string msg = "LogisticSelectionTimeSize::SizeAt(), needed to receive ";
+        msg += "1 selection coefficient, but instead received ";
+        msg += ToString(selcoeffs.size());
+        msg += " coefficients.";
+        throw implementation_error(msg);
+    }
+
+    DoubleVec1d newthetas(2);
+    double t_term = SafeProductWithExp(thetas[1], selcoeffs[0]*t);
+    newthetas[0] = (thetas[0]+thetas[1])/(thetas[0] + t_term);
+    newthetas[1] = newthetas[0] * t_term;
+    newthetas[0] = newthetas[0] * thetas[0];
+
+    return newthetas;
+} // LogisticSelectionTimeSize::SizeAt
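+
+// Numeric illustration (arbitrary values): with thetas = {0.6, 0.4},
+// selection coefficient s = 1.0 and t = 0.5, t_term = 0.4 * exp(0.5)
+// ~= 0.66, giving newthetas ~= {0.48, 0.52}.  The two new thetas still
+// sum to thetas[0] + thetas[1] = 1.0; only the split between the allelic
+// classes changes with t.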
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d LogisticSelectionTimeSize::PartitionSizeAt(force_type partforce,
+                                                       const ForceParameters& fp, double etime) const
+{
+    DoubleVec1d emptyvec;
+    throw implementation_error("LogisticSelectionTimeSize::PartitionSizeAt() was called; it shouldn't be.");
+    return emptyvec;
+} // LogisticSelectionTimeSize::PartitionSizeAt
+
+//____________________________________________________________________________________
diff --git a/src/force/timesize.h b/src/force/timesize.h
new file mode 100644
index 0000000..6820c7c
--- /dev/null
+++ b/src/force/timesize.h
@@ -0,0 +1,81 @@
+// $Id: timesize.h,v 1.6 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef TIMESIZE_H
+#define TIMESIZE_H
+
+#include "constants.h"
+#include "vectorx.h"
+#include "forceparam.h"
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class TimeSize
+{
+  public:
+    // we accept the compiler-generated ctor, copy-ctor, and operator=
+    virtual ~TimeSize() {};
+    virtual TimeSize* Clone() const = 0;
+
+    virtual DoubleVec1d PartitionSizeAt(force_type, const ForceParameters& fp,
+                                        double etime) const = 0;
+    virtual DoubleVec1d SizeAt(const ForceParameters& fp, double etime) const = 0;
+
+}; // TimeSize
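+
+// Illustrative only -- the usual way to hold one of these polymorphically
+// (`m_timesize' is a hypothetical member name):
+//
+//   TimeSize* m_timesize = new GrowTimeSize();
+//   ...
+//   TimeSize* copy = m_timesize->Clone();   // virtual copy, preserves type
+//   DoubleVec1d thetas = copy->SizeAt(fp, eventtime);
+//   delete copy;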
+
+//------------------------------------------------------------------------------------
+
+class GrowTimeSize : public TimeSize
+{
+  public:
+    // we accept the compiler-generated ctor, copy-ctor, and operator=
+    virtual ~GrowTimeSize() {};
+    virtual TimeSize* Clone() const { return new GrowTimeSize(*this); };
+
+    virtual DoubleVec1d PartitionSizeAt(force_type partforce,
+                                        const ForceParameters& fp, double etime) const;
+    virtual DoubleVec1d SizeAt(const ForceParameters& fp, double etime) const;
+
+}; // GrowTimeSize
+
+//------------------------------------------------------------------------------------
+
+class StaticTimeSize : public TimeSize
+{
+  public:
+    // we accept the compiler-generated ctor, copy-ctor, and operator=
+    virtual ~StaticTimeSize() {};
+    virtual TimeSize* Clone() const { return new StaticTimeSize(*this); };
+
+    virtual DoubleVec1d PartitionSizeAt(force_type partforce,
+                                        const ForceParameters& fp, double etime) const;
+    virtual DoubleVec1d SizeAt(const ForceParameters& fp, double etime) const;
+
+}; // StaticTimeSize
+
+//------------------------------------------------------------------------------------
+
+class LogisticSelectionTimeSize : public TimeSize
+{
+  public:
+    LogisticSelectionTimeSize() {};
+    virtual ~LogisticSelectionTimeSize() {};
+    virtual TimeSize* Clone() const { return new LogisticSelectionTimeSize(*this); };
+
+    virtual DoubleVec1d PartitionSizeAt(force_type partforce,
+                                        const ForceParameters& fp, double etime) const;
+    virtual DoubleVec1d SizeAt(const ForceParameters& fp, double etime) const;
+
+}; // LogisticSelectionTimeSize
+
+#endif // TIMESIZE_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/batchconverter.cpp b/src/guiconv/batchconverter.cpp
new file mode 100644
index 0000000..270a905
--- /dev/null
+++ b/src/guiconv/batchconverter.cpp
@@ -0,0 +1,101 @@
+// $Id: batchconverter.cpp,v 1.22 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "batchconverter.h"
+#include "gc_cmdline.h"
+#include "gc_data.h"
+#include "gc_datastore.h"
+#include "gc_errhandling.h"
+#include "gc_strings.h"
+#include "tinyxml.h"
+#include "wx/cmdline.h"
+
+BatchConverterApp::BatchConverterApp()
+    : GCCmdLineManager()
+{
+}
+
+BatchConverterApp::~BatchConverterApp()
+{
+}
+
+IMPLEMENT_APP(BatchConverterApp)
+
+void
+BatchConverterApp::OnInitCmdLine(wxCmdLineParser& parser)
+{
+    wxAppConsole::OnInitCmdLine(parser);
+    GCCmdLineManager::AddOptions(parser);
+}
+
+bool
+BatchConverterApp::OnCmdLineParsed(wxCmdLineParser& parser)
+{
+    bool parentReturned = wxAppConsole::OnCmdLineParsed(parser);
+    GCCmdLineManager::ExtractValues(parser);
+    return parentReturned;
+}
+
+bool
+BatchConverterApp::OnInit()
+{
+    return wxAppConsole::OnInit();
+}
+
+int
+BatchConverterApp::OnRun()
+{
+    try
+    {
+        // EWFIX.P4 LATER -- break up command line and command file processing
+        int exitCode = GCCmdLineManager::ProcessCommandLineAndCommandFile(m_dataStore);
+        if(exitCode == 0)
+        {
+            exitCode = GCCmdLineManager::DoExport(m_dataStore);
+        }
+        if(exitCode == 0)
+        {
+            wxLogMessage(gcstr::batchSafeFinish);
+        }
+        return exitCode;
+    }
+    catch(const gc_fatal_error& e)
+    {
+        wxLogError(wxString::Format(gcerr::fatalError));
+        return 2;
+    }
+    catch(const std::exception& f)
+    {
+        wxLogError(wxString::Format(gcerr::uncaughtException,f.what()));
+        return 2;
+    }
+
+    return 3;       // EWFIX.P3 -- what should this be?
+}
+
+int
+BatchConverterApp::OnExit()
+{
+    if(m_doDebugDump)
+    {
+        m_dataStore.DebugDump();
+    }
+    if(! m_batchOutName.IsEmpty())
+    {
+        TiXmlDocument * doc = m_dataStore.ExportBatch();
+        m_dataStore.WriteBatchFile(doc,m_batchOutName);
+        delete doc;
+    }
+
+    m_dataStore.NukeContents();
+    return wxAppConsole::OnExit();
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/batchconverter.h b/src/guiconv/batchconverter.h
new file mode 100644
index 0000000..dc1092a
--- /dev/null
+++ b/src/guiconv/batchconverter.h
@@ -0,0 +1,41 @@
+// $Id: batchconverter.h,v 1.5 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef BATCHCONVERTER_H
+#define BATCHCONVERTER_H
+
+#include "wx/app.h"
+#include "wx/wx.h"
+#include "gc_cmdline.h"
+#include "gc_datastore.h"
+
+class wxCmdLineParser;
+
+class BatchConverterApp: public wxAppConsole, public GCCmdLineManager
+                         // batch (command-line) converter application
+{
+  private:
+    GCDataStore     m_dataStore;
+
+  public:
+    BatchConverterApp();
+    virtual ~BatchConverterApp();
+
+    virtual bool    OnCmdLineParsed(wxCmdLineParser&);
+    virtual int     OnExit();
+    virtual bool    OnInit();
+    virtual void    OnInitCmdLine(wxCmdLineParser&);
+    virtual int     OnRun();
+
+};
+
+#endif  // BATCHCONVERTER_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_assigntab.cpp b/src/guiconv/gc_assigntab.cpp
new file mode 100644
index 0000000..b5d4038
--- /dev/null
+++ b/src/guiconv/gc_assigntab.cpp
@@ -0,0 +1,753 @@
+// $Id: gc_assigntab.cpp,v 1.28 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <stdio.h>
+
+#include "gc_assigntab.h"
+#include "gc_block_dialogs.h"
+#include "gc_clickpanel.h"
+#include "gc_data_display.h"
+#include "gc_default.h"
+#include "gc_event_publisher.h"
+#include "gc_file_list.h"               // EWFIX.P3 for GCExclaimBitmap
+#include "gc_locus_dialogs.h"
+#include "gc_logic.h"
+//#include "gc_matrix_dialogs.h"
+#include "gc_parse_block.h"
+#include "gc_population_dialogs.h"
+#include "gc_region_dialogs.h"
+#include "gc_panel.h"
+#include "gc_panel_dialogs.h"
+#include "gc_parent.h"
+#include "gc_parent_dialogs.h"
+#include "gc_strings.h"
+#include "gc_strings_region.h"
+#include "gc_structures.h"
+#include "gc_structures_err.h"          // JMDBG
+#include "gc_frame.h"
+#include "wx/log.h"                     // JMDBG
+#include "wx/notebook.h"
+#include "wx/colour.h"
+//#include "wx/gdicmn.h"
+#include "wx/statline.h"
+
+//------------------------------------------------------------------------------------
+
+gcBlockCell::gcBlockCell(wxWindow * parent, const GCParseBlock & blockRef)
+    :   gcClickCell(parent,gcstr::dataBlocks),
+        m_blockId(blockRef.GetId())
+{
+    AddText(wxString::Format(gcstr::blockInfo1,
+                             (int)(blockRef.GetSamples().size()),
+                             blockRef.GetParse().GetDataTypeString().c_str()));
+    AddText(wxString::Format(gcstr::blockInfo2,
+                             blockRef.GetParse().GetFileRef().GetShortName().c_str()));
+    FinishSizing();
+}
+
+gcBlockCell::~gcBlockCell()
+{
+}
+
+void
+gcBlockCell::NotifyLeftDClick()
+{
+    gcEventActor * blockEditActor = new gcActor_BlockEdit(m_blockId);
+    PublishScreenEvent(GetEventHandler(),blockEditActor);
+}
+
+//------------------------------------------------------------------------------------
+
+gcPopCell::gcPopCell(wxWindow * parent, const gcPopulation & popRef)
+    :   gcClickCell(parent,gcstr::population),
+        m_popId(popRef.GetId())
+{
+    AddText(wxString::Format(gcstr::popLabelName,popRef.GetName().c_str()));
+    FinishSizing();
+}
+
+gcPopCell::~gcPopCell()
+{
+}
+
+void
+gcPopCell::NotifyLeftDClick()
+{
+    gcEventActor * popEditActor = new gcActor_Pop_Edit(m_popId);
+    PublishScreenEvent(GetEventHandler(),popEditActor);
+}
+
+//------------------------------------------------------------------------------------
+
+gcRegionCell::gcRegionCell(wxWindow * parent, const gcRegion & regRef)
+    :   gcClickCell(parent,gcstr::region),
+        m_regionId(regRef.GetId())
+{
+    AddText(wxString::Format(gcstr::regionLabelName,regRef.GetName().c_str()));
+    if(regRef.HasEffectivePopulationSize())
+    {
+        AddText(wxString::Format(gcstr_region::effPopSize,regRef.GetEffectivePopulationSize()));
+    }
+    FinishSizing();
+}
+
+gcRegionCell::~gcRegionCell()
+{
+}
+
+void
+gcRegionCell::NotifyLeftDClick()
+{
+    gcEventActor * regionEditActor = new gcActor_RegionEdit(m_regionId);
+    PublishScreenEvent(GetEventHandler(),regionEditActor);
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusCell::gcLocusCell(wxWindow * parent, const gcLocus & locusRef)
+    :   gcClickCell(parent,locusRef.GetLinked() ? gcstr::locus : gcstr::locusUnlinked),
+        m_locusId(locusRef.GetId())
+{
+    AddText(wxString::Format(gcstr::locusLabelName,locusRef.GetName().c_str()));
+    AddText(wxString::Format(gcstr::locusLabelDataType,locusRef.GetDataTypeString().c_str()));
+    AddText(wxString::Format(gcstr::locusLabelSites,locusRef.GetNumMarkersString().c_str()));
+
+    FinishSizing();
+}
+
+gcLocusCell::~gcLocusCell()
+{
+}
+
+void
+gcLocusCell::NotifyLeftDClick()
+{
+    gcEventActor * locusEditActor = new gcActor_LocusEdit(m_locusId);
+    PublishScreenEvent(GetEventHandler(),locusEditActor);
+}
+
+//------------------------------------------------------------------------------------
+
+gcPanelCell::gcPanelCell(wxWindow * parent, const gcPanel & panRef)
+    :   gcClickCell(parent,gcstr::panel),
+        m_panelId(panRef.GetId())
+{
+    //wxLogVerbose("gcPanelCell panRefID:%i",(int)panRef.GetId());  // JMDBG
+    AddText(wxString::Format(gcstr::panelLabelName, panRef.GetName().c_str()));
+    AddText(wxString::Format(gcstr::members, panRef.GetNumPanelsString().c_str()));
+    //wxLogVerbose("gcPanelCell m_panelid: %i",(int)m_panelId);  // JMDBG
+
+    FinishSizing();
+}
+
+gcPanelCell::~gcPanelCell()
+{
+}
+
+void
+gcPanelCell::NotifyLeftDClick()
+{
+    //wxLogVerbose("notifyLeftDClick m_panelid: %s",ToWxString(m_panelId).c_str());  // JMDBG
+    //wxLogVerbose("notifyLeftDClick m_panelid: %i",(int)m_panelId);  // JMDBG
+    gcEventActor * panelEditActor = new gcActor_PanelEdit(m_panelId);
+    PublishScreenEvent(GetEventHandler(),panelEditActor);
+}
+
+//------------------------------------------------------------------------------------
+
+gcEmptyCell::gcEmptyCell(wxWindow * parent)
+    :   wxPanel(parent,-1,wxDefaultPosition,wxDefaultSize),
+        m_sizer(NULL)
+{
+    // m_sizer = new wxStaticBoxSizer(wxVERTICAL,this); // don't do this:
+    // it generates an odd little spot on the screen that looks like a bug
+    // but is actually a zero-by-zero dimensioned box frame.
+}
+
+gcEmptyCell::~gcEmptyCell()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gcDivergenceToggleCell::gcDivergenceToggleCell(wxWindow * parent, bool divergenceOn, int unused)
+    :   gcClickCell(parent,gcstr::divergence),
+        m_divergenceOn(divergenceOn)
+{
+    if(!m_divergenceOn)
+    {
+        AddText(wxString::Format("%s", gcstr::off.c_str()));
+    }
+    else
+    {
+        if (unused < 2)
+        {
+            AddText(wxString::Format("%s", "Done"));
+        }
+        else
+        {
+            AddText(wxString::Format("%s", "Continue"));
+        }
+    }
+    FinishSizing();
+}
+
+gcDivergenceToggleCell::~gcDivergenceToggleCell()
+{
+}
+
+void
+gcDivergenceToggleCell::NotifyLeftDClick()
+{
+    ClearBackground ();
+    //wxLogVerbose(" DivergenceToggle button pushed");  // JMDBG
+    gcEventActor * parentActor = new gcActor_ParentNew();
+    PublishScreenEvent(GetEventHandler(),parentActor);
+}
+
+//------------------------------------------------------------------------------------
+
+gcPanelsToggleCell::gcPanelsToggleCell(wxWindow * parent, bool panelsOn)
+    :   gcClickCell(parent,gcstr::panelToggle),
+        m_panelsOn(panelsOn)
+
+{
+    if (!m_panelsOn)
+    {
+        AddText(gcstr::off);
+    }
+    else
+    {
+        AddText(gcstr::on);
+    }
+    FinishSizing();
+}
+
+gcPanelsToggleCell::~gcPanelsToggleCell()
+{
+}
+
+void
+gcPanelsToggleCell::NotifyLeftDClick()
+{
+    ClearBackground ();
+    wxLogVerbose("PanelToggle button pushed");  // JMDBG
+    gcEventActor * PanelsToggleActor = new gcActor_PanelsToggle();
+    PublishScreenEvent(GetEventHandler(),PanelsToggleActor);
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_PanelsToggle::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    dataStore.GetStructures().SetPanelsState(!dataStore.GetStructures().GetPanelsState());
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+gcParentCell::gcParentCell(wxWindow * parent, const gcParent & parRef)
+    :   gcClickCell(parent,gcstr::parent),
+        m_parentId(parRef.GetId())
+{
+    // wxLogVerbose("gcParentCell parRefID:%i",(int)parRef.GetId());  // JMDBG
+    AddText(wxString::Format(gcstr::parentLabelName,parRef.GetName().c_str()));
+    //wxLogVerbose("gcParentCell m_panelid: %i",(int)m_parentId);  // JMDBG
+
+    FinishSizing();
+}
+
+gcParentCell::~gcParentCell()
+{
+}
+
+void
+gcParentCell::NotifyLeftDClick()
+{
+    //wxLogVerbose("notifyLeftDClick m_panelid: %s",ToWxString(m_panelId).c_str());  // JMDBG
+    //wxLogVerbose("notifyLeftDClick m_panelid: %i",(int)m_parentId);  // JMDBG
+    gcEventActor * parentActor = new gcActor_ParentEdit(m_parentId);
+    PublishScreenEvent(GetEventHandler(),parentActor);
+}
+
+//------------------------------------------------------------------------------------
+
+wxPanel *
+GCAssignmentTab::blockControl(wxWindow * window, constBlockVector & blocks)
+{
+    wxPanel * unitPanel = new wxPanel(window);
+    if(blocks.empty())
+    {
+        wxBoxSizer * statBox = new wxBoxSizer(wxHORIZONTAL);
+        statBox->AddStretchSpacer(1);
+        statBox->Add(new wxStaticBitmap(unitPanel,-1,GCExclaimBitmap::exclBitmap()),0,wxALIGN_CENTRE_VERTICAL);
+        statBox->Add(new wxStaticText(unitPanel,-1,gcdefault::emptyBlock),0,wxALIGN_CENTRE_VERTICAL);
+        statBox->AddStretchSpacer(1);
+        unitPanel->SetSizerAndFit(statBox);
+        return unitPanel;
+    }
+    else
+    {
+        wxBoxSizer * statBox = new wxBoxSizer(wxVERTICAL);
+        for(constBlockVector::const_iterator iter = blocks.begin();
+            iter != blocks.end(); iter++)
+        {
+            const GCParseBlock * blockP = *iter;
+            assert(blockP != NULL);
+            statBox->Add(new gcBlockCell(unitPanel,*blockP),1,wxEXPAND);
+        }
+        unitPanel->SetSizerAndFit(statBox);
+        return unitPanel;
+    }
+    assert(false);
+    return NULL;
+}
+
+GCAssignmentTab::GCAssignmentTab( wxWindow * parent, GCLogic & logic)
+    :
+    gcInfoPane(parent, logic, gcstr::assignTabTitle)
+{
+}
+
+GCAssignmentTab::~GCAssignmentTab()
+{
+}
+
+wxPanel *
+GCAssignmentTab::MakeContent()
+{
+    wxPanel * newPanel = new wxPanel(   m_scrolled,
+                                        -1,
+                                        wxDefaultPosition,
+                                        wxDefaultSize,
+                                        wxTAB_TRAVERSAL);
+
+    GCDataDisplaySizer * dds = new GCDataDisplaySizer();
+    const GCStructures & st = m_logic.GetStructures();
+    GCStructures & st_var = m_logic.GetStructures();
+
+    size_t popIndex = 0;
+    size_t parIndex = 0;
+    constObjVector popsToDisplay    =  st.GetConstDisplayablePops();
+    constObjVector regionsToDisplay =  st.GetConstDisplayableRegions();
+    constObjVector parents          =  st.GetConstParents();
+
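+    // When panel correction is displayed, each population occupies two grid rows
+    // (its data row plus a panel row), so row indices below are scaled by panelMult.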
+    int panelMult = 1;
+    if (st.GetPanelsState())
+    {
+        panelMult = 2;
+    }
+
+    // panel toggle button
+    if (popsToDisplay.size() > 0) {
+        dds->AddPanelsToggleCell(new gcPanelsToggleCell(newPanel,st.GetPanelsState()));
+        wxLogVerbose(" Make Panels button");  // JMDBG
+    }
+
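+    // Shift the grid by one slot per parent so the divergence (parent) cells,
+    // added at the end of this method, can be drawn along the left side.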
+    dds->SetOffset(parents.size());
+
+    if (popsToDisplay.size() > 1) {
+        int unused = st.GetUnusedPopCount() + st.GetUnusedParentCount();
+        dds->AddDivergenceToggleCell(new gcDivergenceToggleCell(newPanel,st.GetDivergenceState(),unused));
+        wxLogVerbose(" Make Divergence button");  // JMDBG
+    }
+
+    int nPopsDisplayOrder = 0;
+    for(    constObjVector::const_iterator piter=popsToDisplay.begin();
+            piter != popsToDisplay.end();
+            piter++)
+    {
+        size_t popId = (*piter)->GetId();
+        if (st.GetPop(popId).HasDispOrder())
+        {
+            nPopsDisplayOrder++;
+        }
+    }
+    wxLogVerbose(" nPopsDisplayOrder: %i", nPopsDisplayOrder);  // JMDBG
+
+    int dispIndex = 0;
+    if(nPopsDisplayOrder == 0)
+    {
+        // no display order defined, output in the order found
+        for(    constObjVector::const_iterator piter=popsToDisplay.begin();
+                piter != popsToDisplay.end();
+                piter++)
+        {
+            popIndex++;
+            size_t popId = (*piter)->GetId();
+            const gcPopulation & pop = st.GetPop(popId);
+            dispIndex = (panelMult*(popIndex-1)) + 2;
+            st_var.GetPop(popId).SetDispIndex(dispIndex);
+            dds->AddPop(new gcPopCell(newPanel,pop),dispIndex, panelMult);
+            wxLogVerbose(" unordered MakeContent Pop: %s dispIndex: %i Id: %i",(*piter)->GetName().c_str(), dispIndex, popId);  // JMDBG
+        }
+    }
+    else
+    {
+        // has a display order, so use it
+        int nPopDisplay = 1;
+
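+        // Assumes display orders run contiguously from 1 to nPopsDisplayOrder;
+        // nPopDisplay only advances when a population with that order is found.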
+        while (nPopDisplay <= nPopsDisplayOrder)
+        {
+            for(    constObjVector::const_iterator piter=popsToDisplay.begin();
+                    piter != popsToDisplay.end();
+                    piter++)
+            {
+                size_t popId = (*piter)->GetId();
+                wxLogVerbose(" check order MakeContent Pop: %s display order: %i",(*piter)->GetName().c_str(), st.GetPop(popId).GetDispOrder());  // JMDBG
+                if (st.GetPop(popId).GetDispOrder() == nPopDisplay)
+                {
+                    popIndex++;
+                    const gcPopulation & pop = st.GetPop(popId);
+                    dispIndex = (panelMult*(popIndex-1)) + 2;
+                    st_var.GetPop(popId).SetDispIndex(dispIndex);
+                    dds->AddPop(new gcPopCell(newPanel,pop),dispIndex, panelMult);
+                    nPopDisplay++;
+                    wxLogVerbose(" ordered MakeContent Pop: %s",(*piter)->GetName().c_str());  // JMDBG
+                }
+            }
+        }
+        // now display the ones that haven't been used yet
+        for(    constObjVector::const_iterator piter=popsToDisplay.begin();
+                piter != popsToDisplay.end();
+                piter++)
+        {
+            size_t popId = (*piter)->GetId();
+            if (st.GetPop(popId).GetDispOrder() == 0)
+            {
+                popIndex++;
+                const gcPopulation & pop = st.GetPop(popId);
+                dispIndex = (panelMult*(popIndex-1)) + 2;
+                st_var.GetPop(popId).SetDispIndex(dispIndex);
+                dds->AddPop(new gcPopCell(newPanel,pop),dispIndex, panelMult);
+                wxLogVerbose(" not ordered MakeContent Pop: %s",(*piter)->GetName().c_str());  // JMDBG
+            }
+        }
+    }
+
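+    // Walk the regions and their loci; locusIndex advances one slot per displayed
+    // locus, and a region with linked loci gets a region cell spanning those slots.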
+    size_t locusIndex = 0;
+    for(    constObjVector::const_iterator giter=regionsToDisplay.begin();
+            giter != regionsToDisplay.end();
+            giter++)
+    {
+        size_t regionId = (*giter)->GetId();
+        const gcRegion & group = st.GetRegion(regionId);
+        constObjVector loci = st.GetConstDisplayableLociInMapOrderFor(regionId);
+        wxLogVerbose(" MakeContent Region: %s Id: %i",(*giter)->GetName().c_str(), regionId);  // JMDBG
+
+        if(loci.size() == 0)
+        {
+            dds->AddRegion(new gcRegionCell(newPanel,group), locusIndex,locusIndex);
+            locusIndex++;
+        }
+        else
+        {
+            if(st.RegionHasAnyLinkedLoci(regionId))
+            {
+                dds->AddRegion(new gcRegionCell(newPanel,group),locusIndex,locusIndex+loci.size()-1);
+            }
+
+            int subRegionLocusCount = 0;
+            for(constObjVector::const_iterator liter=loci.begin(); liter != loci.end(); liter++)
+            {
+
+                size_t locusId = (*liter)->GetId();
+
+                // display locus
+                const gcLocus & locus = st.GetLocus(locusId);
+                dds->AddLocus(new gcLocusCell(newPanel,locus),locusIndex);
+                wxLogVerbose(" MakeContent Locus: %s id: %i",locus.GetName().c_str(), locusId);  // JMDBG
+
+                // display blocks for locus
+                popIndex = 0;
+                int npanel = 0;
+
+                if(nPopsDisplayOrder == 0)
+                {
+                    // no display order defined, output in the order found
+                    for(constObjVector::const_iterator piter=popsToDisplay.begin();
+                        piter != popsToDisplay.end();
+                        piter++)
+                    {
+                        popIndex++;
+                        size_t popId = (*piter)->GetId();
+                        wxLogVerbose(" No Order MakeContent block pop: %i locus: %i",(int)popId, (int) locusId);  // JMDBG
+                        constBlockVector blockVec = m_logic.GetBlocks(popId,locusId);
+                        dispIndex = (panelMult*(popIndex-1)) + 2;
+                        dds->AddData(blockControl(newPanel,blockVec), dispIndex, locusIndex+1);
+
+                        if (st.GetPanelsState())
+                        {
+                            dispIndex = (panelMult*(popIndex-1)) + 3;
+                            if (subRegionLocusCount == 0)
+                            {
+                                int offset = dds->GetOffset();
+
+                                if(blockVec.empty() && (loci.size() == 1))
+                                {
+                                    wxLogVerbose("AddPanel Empty cell row: %i column: %i length: %i", dispIndex,locusIndex+1+offset, loci.size());
+
+                                    dds->AddPanel(new gcEmptyCell(newPanel), dispIndex, locusIndex+1, loci.size());
+                                    if(!st.HasPanel(regionId, popId))
+                                    {
+                                        wxLogVerbose("blockVec Making panel for Region: %i pop: %i", regionId, popId);
+                                        st_var.CreatePanel(regionId, popId);
+                                    }
+                                }
+                                else
+                                {
+                                    if(!st.HasPanel(regionId, popId))
+                                    {
+                                        // have to make a new one because the region didn't exist when this population was read in
+                                        wxLogVerbose("!HasPanel Making panel for Region: %i pop: %i", regionId, popId);
+                                        st_var.CreatePanel(regionId, popId);
+                                    }
+                                    else
+                                    {
+                                        wxLogVerbose("Panel exists for Region: %i pop: %i", regionId, popId);
+                                        const gcPanel& panel= st.GetPanel(regionId, popId);
+                                        dds->AddPanel(new gcPanelCell(newPanel, panel), dispIndex, locusIndex+1, loci.size());
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+                else
+                {
+                    // has a display order, so use it
+                    int nPopDisplay = 1;
+                    while (nPopDisplay <= nPopsDisplayOrder)
+                    {
+                        for(    constObjVector::const_iterator piter=popsToDisplay.begin();
+                                piter != popsToDisplay.end();
+                                piter++)
+                        {
+                            size_t popId = (*piter)->GetId();
+                            if (st.GetPop(popId).GetDispOrder() == nPopDisplay)
+                            {
+                                popIndex++;
+                                wxLogVerbose(" Display Order MakeContent block pop: %i locus: %i",(int)popId, (int) locusId);  // JMDBG
+                                constBlockVector blockVec = m_logic.GetBlocks(popId,locusId);
+                                dispIndex = (panelMult*(popIndex-1)) + 2;
+                                dds->AddData(blockControl(newPanel,blockVec), dispIndex, locusIndex+1);
+                                nPopDisplay++;
+                                if (st.GetPanelsState())
+                                {
+                                    dispIndex = (panelMult*(popIndex-1)) + 3;
+                                    if (subRegionLocusCount == 0)
+                                    {
+                                        int offset = dds->GetOffset();
+
+                                        if(blockVec.empty() && (loci.size() == 1))
+                                        {
+                                            wxLogVerbose("AddPanel Empty cell row: %i column: %i length: %i", dispIndex,locusIndex+1+offset, loci.size());
+
+                                            dds->AddPanel(new gcEmptyCell(newPanel), dispIndex, locusIndex+1, loci.size());
+                                        }
+                                        else
+                                        {
+                                            if(!st.HasPanel(regionId, popId))
+                                            {
+                                                wxLogVerbose("No panel for Region: %i pop: %i", regionId, popId);
+                                            }
+                                            else
+                                            {
+                                                wxLogVerbose("Panel exists for Region: %i pop: %i", regionId, popId);
+                                                const gcPanel& panel= st.GetPanel(regionId, popId);
+                                                dds->AddPanel(new gcPanelCell(newPanel, panel), dispIndex, locusIndex+1, loci.size());
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+
+                    // now display the ones that haven't been used yet
+                    for(    constObjVector::const_iterator piter=popsToDisplay.begin();
+                            piter != popsToDisplay.end();
+                            piter++)
+                    {
+                        size_t popId = (*piter)->GetId();
+                        if (st.GetPop(popId).GetDispOrder() == 0)
+                        {
+                            popIndex++;
+                            wxLogVerbose(" After Display Order MakeContent block pop: %i locus: %i",(int)popId, (int) locusId);  // JMDBG
+                            constBlockVector blockVec = m_logic.GetBlocks(popId,locusId);
+                            dispIndex = (panelMult*(popIndex-1)) + 2;
+                            dds->AddData(blockControl(newPanel,blockVec), dispIndex, locusIndex+1);
+                            nPopDisplay++;
+                            if (st.GetPanelsState())
+                            {
+                                dispIndex = (panelMult*(popIndex-1)) + 3;
+                                if (subRegionLocusCount == 0)
+                                {
+                                    int offset = dds->GetOffset();
+
+                                    if(blockVec.empty() && (loci.size() == 1))
+                                    {
+                                        wxLogVerbose("AddPanel Empty cell row: %i column: %i length: %i", dispIndex,locusIndex+1+offset, loci.size());
+
+                                        dds->AddPanel(new gcEmptyCell(newPanel), dispIndex, locusIndex+1, loci.size());
+                                    }
+                                    else
+                                    {
+                                        if(!st.HasPanel(regionId, popId))
+                                        {
+                                            wxLogVerbose("No panel for Region: %i pop: %i", regionId, popId);
+                                        }
+                                        else
+                                        {
+                                            wxLogVerbose("Panel exists for Region: %i pop: %i", regionId, popId);
+                                            const gcPanel& panel= st.GetPanel(regionId, popId);
+                                            dds->AddPanel(new gcPanelCell(newPanel, panel), dispIndex, locusIndex+1, loci.size());
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+                subRegionLocusCount++;
+                locusIndex++;
+            }
+        }
+    }
+
+    // display parents on left side of display
+    int npar = 0;
+    int lastDispLevel = 0;
+    int lastDispIndex = 0;
+    if (parents.size() > 0)
+    {
+        wxLogVerbose(" parents.size(): %i",(int)parents.size());  // JMDBG
+        for(constObjVector::const_iterator piter=parents.begin();
+            piter != parents.end();
+            piter++)
+        {
+            int curDispLevel = parents.size() - npar - 1;
+            npar++;
+            size_t parId = (*piter)->GetId();
+            gcParent & par = st_var.GetParent(parId);
+            //int curDispIndex = par.GetDispIndex();
+            //int curDispLength = par.GetDispLength();
+            int child1DispIndex;
+            int child2DispIndex;
+            int child1DispLength;
+            int child2DispLength;
+
+            // get the child dimensions
+            if (st.IsPop(par.GetChild1Id()))
+            {
+                child1DispIndex  = st.GetPop(par.GetChild1Id()).GetDispIndex();
+                child1DispLength = panelMult;
+                wxLogVerbose("Child1: pop child1DispIndex: %i child1DispLength: %i", child1DispIndex, child1DispLength);  // JMDBG
+            }
+            else
+            {
+                child1DispIndex  = st.GetParent(par.GetChild1Id()).GetDispIndex();
+                child1DispLength = st.GetParent(par.GetChild1Id()).GetDispLength();
+                wxLogVerbose("Child1: parent child1DispIndex: %i child1DispLength: %i", child1DispIndex, child1DispLength);  // JMDBG
+            }
+
+            if (st.IsPop(par.GetChild2Id()))
+            {
+                child2DispIndex  = st.GetPop(par.GetChild2Id()).GetDispIndex();
+                child2DispLength = panelMult;
+                wxLogVerbose("Child2: pop child2DispIndex: %i child2DispLength: %i", child2DispIndex, child2DispLength);  // JMDBG
+            }
+            else
+            {
+                child2DispIndex  = st.GetParent(par.GetChild2Id()).GetDispIndex();
+                child2DispLength = st.GetParent(par.GetChild2Id()).GetDispLength();
+                wxLogVerbose("Child2: parent child2DispIndex: %i child2DispLength: %i", child2DispIndex, child2DispLength);  // JMDBG
+            }
+
+            // *** the following calculation is messy ***
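+            // The parent box always spans 2*panelMult rows: it lines up with the top
+            // child when that child is a population, and is shifted down panelMult rows
+            // when the top child is itself a parent (so it sits between its children).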
+            int topChildIndex;
+            int botChildIndex;
+            int topChildLength;
+            int botChildLength;
+
+            // find the top child
+            if (child1DispIndex < child2DispIndex)
+            {
+                topChildIndex  = child1DispIndex;
+                botChildIndex  = child2DispIndex;
+                topChildLength = child1DispLength;
+                botChildLength = child2DispLength;
+            }
+            else
+            {
+                topChildIndex  = child2DispIndex;
+                botChildIndex  = child1DispIndex;
+                topChildLength = child2DispLength;
+                botChildLength = child1DispLength;
+            }
+            wxLogVerbose("topChildIndex: %i topChildLength: %i", topChildIndex, topChildLength);  // JMDBG
+            wxLogVerbose("botChildIndex: %i botChildLength: %i", botChildIndex, botChildLength);  // JMDBG
+
+            int curDispLength = 2 * panelMult;
+
+            // find where the top of the parent box is
+            int curDispIndex;
+            if ((topChildLength + botChildLength) == curDispLength)
+            {
+                // 2 pop child case
+                curDispIndex = topChildIndex;
+                wxLogVerbose("2 pop child case");
+            }
+            else if ((topChildLength + botChildLength) == 3*panelMult)
+            {
+                // 1 pop, 1 parent child case
+                if (topChildLength > botChildLength)
+                {
+                    // parent on top
+                    curDispIndex = topChildIndex + panelMult;
+                    wxLogVerbose("1 pop, 1 parent, parent on top case");
+                }
+                else
+                {
+                    // pop on top
+                    curDispIndex = topChildIndex;
+                    wxLogVerbose("1 pop, 1 parent, pop on top case");
+                }
+            }
+            else
+            {
+                // 2 parent child case
+                curDispIndex = topChildIndex + panelMult;
+                wxLogVerbose("2 parent child case");
+            }
+
+            dds->AddParent(new gcParentCell(newPanel,par),curDispIndex,curDispLevel, curDispLength);
+            parIndex++;
+            par.SetDispIndex(curDispIndex);
+            par.SetDispLevel(curDispLevel);
+            par.SetDispLength(curDispLength);
+            wxLogVerbose(" MakeContent Parent: %s level: %i",(*piter)->GetName().c_str(), par.GetDispLevel());  // JMDBG
+        }
+    }
+    newPanel->SetSizerAndFit(dds);
+
+    wxLogVerbose("****newPanel done****");
+    return newPanel;
+}
+
+wxString
+GCAssignmentTab::MakeLabel()
+{
+    return m_panelLabelFmt;
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_assigntab.h b/src/guiconv/gc_assigntab.h
new file mode 100644
index 0000000..d6e5fd8
--- /dev/null
+++ b/src/guiconv/gc_assigntab.h
@@ -0,0 +1,160 @@
+// $Id: gc_assigntab.h,v 1.16 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_ASSIGNTAB_H
+#define GC_ASSIGNTAB_H
+
+#include "gc_clickpanel.h"
+#include "gc_gridpanel.h"
+#include "gc_quantum.h"
+
+class wxPanel;
+class wxWindow;
+class constBlockVector;
+class gcLocus;
+class gcPanel;
+class GCParseBlock;
+class gcPopulation;
+class gcRegion;
+class gcParent;
+class GCStructures;
+
+class gcBlockCell : public gcClickCell
+{
+  private:
+  protected:
+    size_t      m_blockId;
+  public:
+    gcBlockCell(wxWindow * parent, const GCParseBlock &);
+    virtual ~gcBlockCell();
+    void NotifyLeftDClick();
+};
+
+class gcPopCell : public gcClickCell
+{
+  private:
+  protected:
+    size_t      m_popId;
+  public:
+    gcPopCell(wxWindow * parent, const gcPopulation &);
+    virtual ~gcPopCell();
+
+    void    NotifyLeftDClick();
+};
+
+class gcRegionCell : public gcClickCell
+{
+  private:
+  protected:
+    size_t      m_regionId;
+  public:
+    gcRegionCell(wxWindow * parent, const gcRegion &);
+    virtual ~gcRegionCell();
+
+    void    NotifyLeftDClick();
+};
+
+class gcLocusCell : public gcClickCell
+{
+  private:
+  protected:
+    size_t      m_locusId;
+  public:
+    gcLocusCell(wxWindow * parent, const gcLocus &);
+    virtual ~gcLocusCell();
+
+    void    NotifyLeftDClick();
+};
+
+class gcPanelCell : public gcClickCell
+{
+  private:
+  protected:
+    size_t      m_panelId;
+  public:
+    gcPanelCell(wxWindow * parent, const gcPanel &);
+    virtual ~gcPanelCell();
+
+    void    NotifyLeftDClick();
+};
+
+class gcEmptyCell : public wxPanel
+{
+  private:
+  protected:
+    wxStaticBoxSizer * m_sizer;
+
+  public:
+    gcEmptyCell(wxWindow * parent);
+    virtual ~gcEmptyCell();
+};
+
+class gcDivergenceToggleCell : public gcClickCell
+{
+  private:
+    bool m_divergenceOn;
+  public:
+    gcDivergenceToggleCell(wxWindow * parent, bool divergenceOn, int unused);
+    virtual ~gcDivergenceToggleCell();
+
+    void    NotifyLeftDClick();
+};
+
+class gcPanelsToggleCell : public gcClickCell
+{
+  private:
+    bool m_panelsOn;
+  public:
+    gcPanelsToggleCell(wxWindow * parent, bool panelsOn);
+    virtual ~gcPanelsToggleCell();
+
+    void    NotifyLeftDClick();
+};
+
+class gcActor_PanelsToggle : public gcEventActor
+{
+  public:
+    gcActor_PanelsToggle() {};
+    virtual ~gcActor_PanelsToggle() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+
+class gcParentCell : public gcClickCell
+{
+  private:
+  protected:
+    size_t      m_parentId;
+  public:
+    gcParentCell(wxWindow * parent, const gcParent &);
+    virtual ~gcParentCell();
+
+    void    NotifyLeftDClick();
+};
+
+class GCAssignmentTab : public gcInfoPane
+{
+  private:
+    GCAssignmentTab();        // undefined
+
+  protected:
+    wxPanel *   MakeContent();
+    wxString    MakeLabel();
+    wxPanel *   blockControl(wxWindow *, constBlockVector &);
+
+  public:
+    GCAssignmentTab(wxWindow * parent, GCLogic & logic);
+    //GCAssignmentTab(wxBookCtrlBase * parent, GCLogic & logic);
+    virtual ~GCAssignmentTab();
+};
+
+#endif  // GC_ASSIGNTAB_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_block_dialogs.cpp b/src/guiconv/gc_block_dialogs.cpp
new file mode 100644
index 0000000..afd8302
--- /dev/null
+++ b/src/guiconv/gc_block_dialogs.cpp
@@ -0,0 +1,442 @@
+// $Id: gc_block_dialogs.cpp,v 1.8 2011/06/22 18:22:22 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_block_dialogs.h"
+#include "gc_data.h"
+#include "gc_datastore.h"
+#include "gc_layout.h"
+#include "gc_parse_block.h"
+#include "gc_parse_locus.h"
+#include "gc_parse_pop.h"
+#include "gc_parse_sample.h"
+#include "gc_strings.h"
+
+#include "wx/checkbox.h"
+#include "wx/log.h"
+#include "wx/sizer.h"
+#include "wx/statline.h"
+
+//------------------------------------------------------------------------------------
+
+gcBlockPopChoice::gcBlockPopChoice(size_t blockId, size_t popId)
+    :
+    m_blockId(blockId),
+    m_popId(popId),
+    m_box(NULL)
+{
+}
+
+gcBlockPopChoice::~gcBlockPopChoice()
+{
+}
+
+void
+gcBlockPopChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    gcPopulation & popRef = dataStore.GetStructures().GetPop(m_popId);
+    m_box->SetLabel(popRef.GetName());
+
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcBlockPopChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    size_t popIdFromStructures = dataStore.GetStructures().GetPopForBlock(m_blockId);
+    if(popIdFromStructures ==  m_popId)
+    {
+        m_box->SetValue(1);
+    }
+    else
+    {
+        m_box->SetValue(0);
+    }
+
+    // if there is only one population, we'd better be assigned to it
+    assert( (dataStore.GetStructures().GetDisplayablePopIds().size() > 1) || (popIdFromStructures == m_popId));
+
+    m_box->Enable(true);
+}
+
+void
+gcBlockPopChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+    if(m_box->GetValue() > 0 )
+    {
+        dataStore.GetStructures().AssignBlockToPop(m_blockId,m_popId);
+    }
+}
+
+void
+gcBlockPopChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+wxWindow *
+gcBlockPopChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,wxEmptyString);
+    return m_box;
+}
+
+wxWindow *
+gcBlockPopChoice::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcBlockPopChoice::GetRelevantId()
+// this is the one you want if you're recording which item is checked
+{
+    return m_popId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcBlockLocusChoice::gcBlockLocusChoice(size_t blockId, size_t locusId)
+    :
+    m_blockId(blockId),
+    m_locusId(locusId),
+    m_box(NULL)
+{
+}
+
+gcBlockLocusChoice::~gcBlockLocusChoice()
+{
+}
+
+void
+gcBlockLocusChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    m_box->SetLabel(locusRef.GetName());
+
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcBlockLocusChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    size_t locusIdFromStructures = dataStore.GetStructures().GetLocusForBlock(m_blockId);
+    if(locusIdFromStructures ==  m_locusId)
+    {
+        m_box->SetValue(1);
+    }
+    else
+    {
+        m_box->SetValue(0);
+    }
+
+    // if there is only one locus, we'd better be assigned to it
+    assert( (dataStore.GetStructures().GetDisplayableLocusIds().size() > 1) || (locusIdFromStructures == m_locusId));
+
+    const GCParseBlock * blockP = dataStore.GetParseBlock(m_blockId);
+    assert(blockP != NULL);
+    const GCParseLocus & pLocus = blockP->GetLocusRef();
+
+    const gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    bool canAssign = dataStore.CanAssignParseLocus(pLocus,locusRef);
+    m_box->Enable(canAssign);
+}
+
+void
+gcBlockLocusChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+    if(m_box->GetValue() > 0 )
+    {
+        dataStore.GetStructures().AssignBlockToLocus(m_blockId,m_locusId);
+    }
+}
+
+void
+gcBlockLocusChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+wxWindow *
+gcBlockLocusChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,wxEmptyString);
+    return m_box;
+}
+
+wxWindow *
+gcBlockLocusChoice::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcBlockLocusChoice::GetRelevantId()
+// this is the one you want if you're recording which item is checked
+{
+    return m_locusId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcBlockFileInfo::gcBlockFileInfo(size_t blockId)
+    :
+    m_blockId(blockId)
+{
+}
+
+gcBlockFileInfo::~gcBlockFileInfo()
+{
+}
+
+wxString
+gcBlockFileInfo::FromDataStore(GCDataStore & dataStore)
+{
+    const GCParseBlock * pb = dataStore.GetParseBlock(m_blockId);
+    assert(pb != NULL);
+    const GCFile & fileRef = pb->GetParse().GetFileRef();
+    return fileRef.GetShortName();
+}
+
+//------------------------------------------------------------------------------------
+
+gcBlockPopIndex::gcBlockPopIndex(size_t blockId)
+    :
+    m_blockId(blockId)
+{
+}
+
+gcBlockPopIndex::~gcBlockPopIndex()
+{
+}
+
+wxString
+gcBlockPopIndex::FromDataStore(GCDataStore & dataStore)
+{
+    const GCParseBlock * pb = dataStore.GetParseBlock(m_blockId);
+    assert(pb != NULL);
+    size_t index =  pb->GetPopRef().GetIndexInParse();
+    return wxString::Format("%ld",(long)index);
+}
+
+//------------------------------------------------------------------------------------
+
+gcBlockLocusIndex::gcBlockLocusIndex(size_t blockId)
+    :
+    m_blockId(blockId)
+{
+}
+
+gcBlockLocusIndex::~gcBlockLocusIndex()
+{
+}
+
+wxString
+gcBlockLocusIndex::FromDataStore(GCDataStore & dataStore)
+{
+    const GCParseBlock * pb = dataStore.GetParseBlock(m_blockId);
+    assert(pb != NULL);
+    size_t index =  pb->GetLocusRef().GetIndexInParse();
+    return wxString::Format("%ld",(long)index);
+}
+
+//------------------------------------------------------------------------------------
+
+gcBlockPloidyInfo::gcBlockPloidyInfo(size_t blockId)
+    :
+    m_blockId(blockId)
+{
+}
+
+gcBlockPloidyInfo::~gcBlockPloidyInfo()
+{
+}
+
+wxString
+gcBlockPloidyInfo::FromDataStore(GCDataStore & dataStore)
+{
+    const GCParseBlock * pb = dataStore.GetParseBlock(m_blockId);
+    assert(pb != NULL);
+    size_t sampleCount = pb->GetSamples().size();
+    size_t hapCount = pb->FindSample(0).GetSequencesPerLabel();
+    wxString hapString = gcdata::getPloidyString(hapCount);
+    return wxString::Format(gcstr::blockPloidyInfo,(long)sampleCount,hapString.c_str());
+}
+
+//------------------------------------------------------------------------------------
+
+gcBlockEditDialog::gcBlockEditDialog(   wxWindow *      parent,
+                                        GCDataStore &   dataStore,
+                                        size_t          blockId)
+    :
+    gcUpdatingDialog(   parent,
+                        dataStore,
+                        gcstr::editParseBlock,
+                        false),
+    m_blockId(blockId)
+{
+}
+
+gcBlockEditDialog::~gcBlockEditDialog()
+{
+}
+
+void
+gcBlockEditDialog::DoDelete()
+{
+    m_dataStore.GetStructures().RemoveBlockAssignment(m_blockId);
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+DoDialogEditBlock(wxWindow *    parentWindow,
+                  GCDataStore &   dataStore,
+                  size_t          blockId)
+{
+    gcBlockEditDialog dialog(parentWindow,dataStore,blockId);
+
+    // build the dialog
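+    // Three columns separated by vertical lines: block info on the left, population
+    // re-assignment checkboxes in the middle, locus re-assignment on the right.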
+    gcDialogCreator creator;
+    wxBoxSizer * contentSizer = new wxBoxSizer(wxHORIZONTAL);
+
+    wxBoxSizer * leftSizer = new wxBoxSizer(wxVERTICAL);
+
+    /////////////////////////
+    // file data came from
+    gcPlainTextHelper * blockFileInfo = new gcBlockFileInfo(blockId);
+    gcUpdatingComponent * fileInfo = new gcUpdatingPlainText(&dialog,
+                                                             gcstr::blockFileInfo,
+                                                             blockFileInfo);
+
+    /////////////////////////
+    // pop index within file
+    gcPlainTextHelper * blockPopInfo = new gcBlockPopIndex(blockId);
+    gcUpdatingComponent * popInfo = new gcUpdatingPlainText(&dialog,
+                                                            gcstr::blockPopIndexInFile,
+                                                            blockPopInfo);
+
+    /////////////////////////
+    // locus index within file
+    gcPlainTextHelper * blockLocusInfo = new gcBlockLocusIndex(blockId);
+    gcUpdatingComponent * locusInfo = new gcUpdatingPlainText(&dialog,
+                                                              gcstr::blockLocusIndexInFile,
+                                                              blockLocusInfo);
+
+    /////////////////////////
+    // sample ploidy
+    gcPlainTextHelper * blockPloidyInfo = new gcBlockPloidyInfo(blockId);
+    gcUpdatingComponent * ploidyInfo = new gcUpdatingPlainText(&dialog,
+                                                               gcstr::blockPloidyTitle,
+                                                               blockPloidyInfo);
+
+    leftSizer->Add(fileInfo,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+    leftSizer->Add(popInfo,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+    leftSizer->Add(locusInfo,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+    leftSizer->Add(ploidyInfo,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+    contentSizer->Add(leftSizer,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+
+    creator.AddComponent(dialog,fileInfo);
+    creator.AddComponent(dialog,popInfo);
+    creator.AddComponent(dialog,locusInfo);
+    creator.AddComponent(dialog,ploidyInfo);
+
+    //////////////////////////////////////////////////////
+    // re-assigning to a different population
+    wxBoxSizer * middleSizer = new wxBoxSizer(wxVERTICAL);
+
+    std::vector<gcChoiceObject*> popChoices;
+    gcDisplayOrder ids = dataStore.GetStructures().GetDisplayablePopIds();
+    for(gcDisplayOrder::iterator iter=ids.begin(); iter != ids.end(); iter++)
+    {
+        size_t id = *iter;
+        gcBlockPopChoice * choice = new gcBlockPopChoice(blockId,id);
+        popChoices.push_back(choice);
+    }
+    gcUpdatingComponent * pops = new gcUpdatingChoose(&dialog,
+                                                      gcstr::blockPopChoice,
+                                                      popChoices);
+
+    middleSizer->Add(pops,
+                     1,
+                     wxALL | wxALIGN_CENTER | wxEXPAND,
+                     gclayout::borderSizeSmall);
+    contentSizer->Add(new wxStaticLine(&dialog,-1,wxDefaultPosition,wxDefaultSize,wxLI_VERTICAL),
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    contentSizer->Add(middleSizer,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    creator.AddComponent(dialog,pops);
+
+    //////////////////////////////////////////////////////
+    // re-assigning to a different locus
+    wxBoxSizer * rightSizer = new wxBoxSizer(wxVERTICAL);
+    std::vector<gcChoiceObject*> locusChoices;
+    gcDisplayOrder locusIds = dataStore.GetStructures().GetDisplayableLocusIds();
+    for(gcDisplayOrder::iterator iter=locusIds.begin(); iter != locusIds.end(); iter++)
+    {
+        size_t id = *iter;
+        gcBlockLocusChoice * choice = new gcBlockLocusChoice(blockId,id);
+        locusChoices.push_back(choice);
+    }
+    gcUpdatingComponent * loci = new gcUpdatingChoose(&dialog,
+                                                      gcstr::blockLocusChoice,
+                                                      locusChoices);
+    rightSizer->Add(loci,
+                    1,
+                    wxALL | wxALIGN_CENTER | wxEXPAND,
+                    gclayout::borderSizeSmall);
+    contentSizer->Add(new wxStaticLine(&dialog,-1,wxDefaultPosition,wxDefaultSize,wxLI_VERTICAL),
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    contentSizer->Add(rightSizer,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    creator.AddComponent(dialog,loci);
+
+    creator.PlaceContent(dialog,contentSizer);
+
+    // invoke the dialog
+    return dialog.Go();
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_BlockEdit::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    return DoDialogEditBlock(parent,dataStore,m_blockId);
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_block_dialogs.h b/src/guiconv/gc_block_dialogs.h
new file mode 100644
index 0000000..5373442
--- /dev/null
+++ b/src/guiconv/gc_block_dialogs.h
@@ -0,0 +1,139 @@
+// $Id: gc_block_dialogs.h,v 1.6 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_BLOCK_DIALOGS_H
+#define GC_BLOCK_DIALOGS_H
+
+#include "gc_quantum.h"
+#include "gc_dialog.h"
+
+class gcBlockPopChoice : public gcChoiceObject
+{
+  private:
+    gcBlockPopChoice();               // undefined
+    size_t                          m_blockId;
+    size_t                          m_popId;
+    wxCheckBox *                    m_box;
+  protected:
+  public:
+    gcBlockPopChoice(size_t blockId, size_t popId);
+    ~gcBlockPopChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+    size_t      GetRelevantId();
+};
+
+class gcBlockLocusChoice : public gcChoiceObject
+{
+  private:
+    gcBlockLocusChoice();           // undefined
+    size_t                          m_blockId;
+    size_t                          m_locusId;
+    wxCheckBox *                    m_box;
+  protected:
+  public:
+    gcBlockLocusChoice(size_t blockId, size_t locusId);
+    ~gcBlockLocusChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+    size_t      GetRelevantId();
+};
+
+class gcBlockFileInfo : public gcPlainTextHelper
+{
+  private:
+    gcBlockFileInfo();              // undefined
+    size_t                          m_blockId;
+  protected:
+  public:
+    gcBlockFileInfo(size_t blockId);
+    virtual ~gcBlockFileInfo();
+    wxString FromDataStore(GCDataStore&);
+};
+
+class gcBlockPopIndex : public gcPlainTextHelper
+{
+  private:
+    gcBlockPopIndex();              // undefined
+    size_t                          m_blockId;
+  protected:
+  public:
+    gcBlockPopIndex(size_t blockId);
+    virtual ~gcBlockPopIndex();
+    wxString FromDataStore(GCDataStore&);
+};
+
+class gcBlockLocusIndex : public gcPlainTextHelper
+{
+  private:
+    gcBlockLocusIndex();              // undefined
+    size_t                          m_blockId;
+  protected:
+  public:
+    gcBlockLocusIndex(size_t blockId);
+    virtual ~gcBlockLocusIndex();
+    wxString FromDataStore(GCDataStore&);
+};
+
+class gcBlockPloidyInfo : public gcPlainTextHelper
+{
+  private:
+    gcBlockPloidyInfo();              // undefined
+    size_t                          m_blockId;
+  protected:
+  public:
+    gcBlockPloidyInfo(size_t blockId);
+    virtual ~gcBlockPloidyInfo();
+    wxString FromDataStore(GCDataStore&);
+};
+
+class gcBlockEditDialog : public gcUpdatingDialog
+{
+  private:
+  protected:
+    size_t          m_blockId;
+    void DoDelete();
+  public:
+    gcBlockEditDialog(wxWindow *    parentWindow,
+                      GCDataStore &   dataStore,
+                      size_t          blockId);
+    virtual ~gcBlockEditDialog();
+};
+
+bool DoDialogEditBlock( wxWindow *      parentWindow,
+                        GCDataStore &   dataStore,
+                        size_t          blockId);
+
+class gcActor_BlockEdit : public gcEventActor
+{
+  private:
+    gcActor_BlockEdit();     // undefined
+    size_t                  m_blockId;
+  public:
+    gcActor_BlockEdit(size_t blockId) : m_blockId(blockId) {};
+    virtual ~gcActor_BlockEdit() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+#endif  // GC_BLOCK_DIALOGS_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_cmdline.cpp b/src/guiconv/gc_cmdline.cpp
new file mode 100644
index 0000000..fab9209
--- /dev/null
+++ b/src/guiconv/gc_cmdline.cpp
@@ -0,0 +1,132 @@
+// $Id: gc_cmdline.cpp,v 1.23 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "wx/cmdline.h"
+#include "wx/log.h"
+#include "conf.h"
+#include "gc_errhandling.h"
+#include "gc_cmdline.h"
+#include "gc_data.h"
+#include "gc_datastore.h"
+#include "gc_strings.h"
+#include "tinyxml.h"
+
+class GCFile;
+
+GCCmdLineManager::GCCmdLineManager()
+    :
+    m_commandFileName   (wxEmptyString),
+    m_outputFileName    (wxEmptyString),
+    m_batchOnly         (false),
+    m_doDebugDump       (false),
+    m_batchOutName      (wxEmptyString)
+{
+    m_inputFileNames.Empty();
+}
+
+GCCmdLineManager::~GCCmdLineManager()
+{
+}
+
+void
+GCCmdLineManager::AddOptions(wxCmdLineParser& parser)
+{
+    wxString logoString = wxString::Format(gcstr::usageHeader,VERSION);
+    parser.SetLogo(logoString);
+    parser.AddSwitch(   gcstr::cmdBatchChar,
+                        gcstr::cmdBatch,
+                        gcstr::cmdBatchDescription);
+    parser.AddSwitch(   gcstr::cmdDumpChar,
+                        gcstr::cmdDump,
+                        gcstr::cmdDumpDescription);
+    parser.AddOption(   gcstr::cmdCommandChar,
+                        gcstr::cmdCommand,
+                        gcstr::cmdCommandDescription,
+                        wxCMD_LINE_VAL_STRING);
+    wxString cmdOutputDesc = wxString::Format(gcstr::cmdOutputDescription,
+                                              gcstr::exportFileDefault.c_str());
+    parser.AddOption(   gcstr::cmdOutputChar,
+                        gcstr::cmdOutput,
+                        cmdOutputDesc,
+                        wxCMD_LINE_VAL_STRING);
+    parser.AddOption(   gcstr::cmdWriteBatchChar,
+                        gcstr::cmdWriteBatch,
+                        gcstr::cmdWriteBatchDescription,
+                        wxCMD_LINE_VAL_STRING);
+    parser.AddParam (   gcstr::cmdInputDescription,
+                        wxCMD_LINE_VAL_STRING,
+                        wxCMD_LINE_PARAM_OPTIONAL |
+                        wxCMD_LINE_PARAM_MULTIPLE);
+}
+
+void
+GCCmdLineManager::ExtractValues(wxCmdLineParser& parser)
+{
+    m_batchOnly = parser.Found(gcstr::cmdBatch);
+    m_doDebugDump = parser.Found(gcstr::cmdDump);
+    parser.Found(gcstr::cmdCommand, &m_commandFileName);
+    parser.Found(gcstr::cmdOutput,  &m_outputFileName);
+    parser.Found(gcstr::cmdWriteBatch,  &m_batchOutName);
+
+    size_t numFiles = parser.GetParamCount();
+    for(size_t p = 0; p < numFiles; p++)
+    {
+        m_inputFileNames.Add(parser.GetParam(p));
+    }
+}
+
+int
+GCCmdLineManager::ProcessCommandLineAndCommandFile( GCDataStore &   dataStore)
+{
+    int exitCode = 0;
+    for(size_t i=0; i < m_inputFileNames.Count(); i++)
+    {
+        try
+        {
+            dataStore.AddDataFile(m_inputFileNames[i]);
+        }
+        catch(const gc_ex& e)  // EWFIX.P4 -- make more specific type ??
+        {
+            dataStore.GCFatalBatchWarnGUI(e.what());
+            exitCode = 1;
+        }
+    }
+    if(!m_commandFileName.IsEmpty())
+    {
+        exitCode = dataStore.ProcessCmdFile(m_commandFileName);
+    }
+
+    if(!m_outputFileName.IsEmpty())
+    {
+        dataStore.SetOutfileName(m_outputFileName);
+    }
+    return exitCode;
+
+}
+
+int
+GCCmdLineManager::DoExport(GCDataStore& dataStore)
+{
+    try
+    {
+        TiXmlDocument * doc = dataStore.ExportFile();
+        dataStore.WriteExportedData(doc);
+        wxLogVerbose(gcverbose::exportSuccess,dataStore.GetOutfileName().c_str());
+        delete doc;
+        return 0;
+    }
+    catch(const gc_ex& e)
+    {
+        dataStore.GCError(wxString::Format(gcerr::unableToExport,e.what()));
+        return 1;
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_cmdline.h b/src/guiconv/gc_cmdline.h
new file mode 100644
index 0000000..1acd2e5
--- /dev/null
+++ b/src/guiconv/gc_cmdline.h
@@ -0,0 +1,41 @@
+// $Id: gc_cmdline.h,v 1.13 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_CMDLINE_H
+#define GC_CMDLINE_H
+
+#include "wx/string.h"
+
+class wxCmdLineParser;
+class GCDataStore;
+
+class GCCmdLineManager
+{
+  protected:
+    wxString        m_commandFileName;
+    wxString        m_outputFileName;
+    wxArrayString   m_inputFileNames;
+    bool            m_batchOnly;
+    bool            m_doDebugDump;
+    wxString        m_batchOutName;
+
+    void AddOptions(wxCmdLineParser& parser);
+    void ExtractValues(wxCmdLineParser& parser);
+    int  ProcessCommandLineAndCommandFile(  GCDataStore& dataStore);
+    int  DoExport(GCDataStore& dataStore);
+
+  public:
+    GCCmdLineManager();
+    virtual ~GCCmdLineManager();
+};
+
+#endif  // GC_CMDLINE_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_color.cpp b/src/guiconv/gc_color.cpp
new file mode 100644
index 0000000..8a6d5bd
--- /dev/null
+++ b/src/guiconv/gc_color.cpp
@@ -0,0 +1,35 @@
+// $Id: gc_color.cpp,v 1.4 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_color.h"
+
+#include "wx/colour.h"
+#include "wx/gdicmn.h"
+#include "wx/settings.h"
+
+const wxColour & gccolor::selectedFile()
+{
+    static wxColour selectedFileColor = wxTheColourDatabase->Find("LIGHT BLUE");
+    return selectedFileColor;
+}
+
+const wxColour & gccolor::enteredObject()
+{
+    static wxColour enteredObjectColor = wxSystemSettings::GetColour(wxSYS_COLOUR_HIGHLIGHT);
+    return enteredObjectColor;
+}
+
+const wxColour & gccolor::activeObject()
+{
+    static wxColour activeObjectColor = wxTheColourDatabase->Find("YELLOW");
+    return activeObjectColor;
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_color.h b/src/guiconv/gc_color.h
new file mode 100644
index 0000000..ef8f35f
--- /dev/null
+++ b/src/guiconv/gc_color.h
@@ -0,0 +1,26 @@
+// $Id: gc_color.h,v 1.5 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_COLOR_H
+#define GC_COLOR_H
+
+class wxColour;
+
+class gccolor
+{
+  public:
+    static const wxColour & selectedFile();
+    static const wxColour & enteredObject();
+    static const wxColour & activeObject();
+};
+
+#endif  // GC_COLOR_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_data.cpp b/src/guiconv/gc_data.cpp
new file mode 100644
index 0000000..90a035c
--- /dev/null
+++ b/src/guiconv/gc_data.cpp
@@ -0,0 +1,629 @@
+// $Id: gc_data.cpp,v 1.38 2011/12/30 22:50:10 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "cnv_strings.h"
+#include "gc_cmdfile_err.h"
+#include "gc_data.h"
+#include "gc_strings.h"
+#include "gc_strings_mig.h"
+#include "gc_types.h"
+#include "constants.h"
+#include "wx/dynarray.h"
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+const size_t gcdata::defaultHapCount    = 2;
+
+const long gcdata::noLengthSet          = -1;
+const long gcdata::noMarkerCountSet     = -1;
+const long gcdata::noMapPositionSet     = -1;
+const long gcdata::noOffsetSet          = -1;
+const long gcdata::noStyle              =  0;
+const long gcdata::defaultMapPosition   =  0;
+const long gcdata::defaultOffset        =  0;
+
+const GCFileFormat  gcdata::defaultFileFormat   =   format_NONE_SET;
+
+//------------------------------------------------------------------------------------
+
+const wxArrayString gcdata::specificDataTypeChoices()
+{
+    static wxArrayString dataTypes;
+    if(dataTypes.GetCount() < 1)
+    {
+        dataTypes.Add(gcstr::unknown);
+        dataTypes.Add(gcstr::dna);
+        dataTypes.Add(gcstr::snp);
+        dataTypes.Add(gcstr::microsat);
+        dataTypes.Add(gcstr::kallele);
+    }
+    return dataTypes;
+}
+
+const wxArrayString gcdata::fileFormatChoices()
+{
+    static wxArrayString fileTypes;
+    if(fileTypes.GetCount() < 1)
+    {
+        fileTypes.Add(gcstr::unknown);
+        fileTypes.Add(gcstr::migrate);
+        fileTypes.Add(gcstr::phylip);
+    }
+    return fileTypes;
+}
+
+const wxArrayString gcdata::interleavingChoices()
+{
+    static wxArrayString interleavingChoices;
+    if(interleavingChoices.GetCount() < 1)
+    {
+        interleavingChoices.Add(gcstr::unknown);
+        interleavingChoices.Add(gcstr::sequential);
+        interleavingChoices.Add(gcstr::interleaved);
+    }
+    return interleavingChoices;
+}
+
+const wxArrayString gcdata::genericLocusChoices()
+{
+    static wxArrayString locusChoices;
+    if(locusChoices.GetCount() < 1)
+    {
+        locusChoices.Add(gcstr::locusSelect);
+        locusChoices.Add(gcstr::locusCreate);
+    }
+    return locusChoices;
+}
+
+const wxArrayString gcdata::genericRegionChoices()
+{
+    static wxArrayString regionChoices;
+    if(regionChoices.GetCount() < 1)
+    {
+        regionChoices.Add(gcstr::regionSelect);
+        regionChoices.Add(gcstr::regionCreate);
+    }
+    return regionChoices;
+}
+
+const wxArrayString gcdata::genericPopulationChoices()
+{
+    static wxArrayString populationChoices;
+    if(populationChoices.GetCount() < 1)
+    {
+        populationChoices.Add(gcstr::populationSelect);
+        populationChoices.Add(gcstr::populationCreate);
+    }
+    return populationChoices;
+}
+
+const wxArrayString gcdata::migrationConstraints()
+{
+    static wxArrayString migrationChoices;
+    if(migrationChoices.GetCount() < 1)
+    {
+        migrationChoices.Add(gcstr_mig::migconstraintInvalid);
+        migrationChoices.Add(gcstr_mig::migconstraintConstant);
+        migrationChoices.Add(gcstr_mig::migconstraintSymmetric);
+        migrationChoices.Add(gcstr_mig::migconstraintUnconstained);
+    }
+    return migrationChoices;
+}
+
+const wxArrayString gcdata::migrationMethods()
+{
+    static wxArrayString migrationMethods;
+    if(migrationMethods.GetCount() < 1)
+    {
+        migrationMethods.Add(gcstr_mig::migmethodUser);
+        migrationMethods.Add(gcstr_mig::migmethodFST);
+    }
+    return migrationMethods;
+}
+
+const wxArrayString gcdata::migrationProfiles()
+{
+    static wxArrayString migrationProfiles;
+    if(migrationProfiles.GetCount() < 1)
+    {
+        migrationProfiles.Add(gcstr_mig::migprofileNone);
+        migrationProfiles.Add(gcstr_mig::migprofileFixed);
+        migrationProfiles.Add(gcstr_mig::migprofilePercentile);
+    }
+    return migrationProfiles;
+}
+
+const wxArrayString
+gcdata::integerList()
+{
+    static wxArrayString digits;
+    if(digits.GetCount() < 1)
+    {
+        digits.Add("-");
+        digits.Add("0");
+        digits.Add("1");
+        digits.Add("2");
+        digits.Add("3");
+        digits.Add("4");
+        digits.Add("5");
+        digits.Add("6");
+        digits.Add("7");
+        digits.Add("8");
+        digits.Add("9");
+    }
+    return digits;
+}
+const wxArrayString
+gcdata::integerListWithSpaces()
+{
+    static wxArrayString digits;
+    if(digits.GetCount() < 1)
+    {
+        digits.Add(" ");
+        digits.Add("-");
+        digits.Add("0");
+        digits.Add("1");
+        digits.Add("2");
+        digits.Add("3");
+        digits.Add("4");
+        digits.Add("5");
+        digits.Add("6");
+        digits.Add("7");
+        digits.Add("8");
+        digits.Add("9");
+    }
+    return digits;
+}
+
+const wxArrayString
+gcdata::nonNegativeIntegerList()
+{
+    static wxArrayString digits;
+    if(digits.GetCount() < 1)
+    {
+        digits.Add("0");
+        digits.Add("1");
+        digits.Add("2");
+        digits.Add("3");
+        digits.Add("4");
+        digits.Add("5");
+        digits.Add("6");
+        digits.Add("7");
+        digits.Add("8");
+        digits.Add("9");
+    }
+    return digits;
+}
+
+const wxArrayString
+gcdata::positiveFloatChars()
+{
+    static wxArrayString digits;
+    if(digits.GetCount() < 1)
+    {
+        digits.Add("0");
+        digits.Add("1");
+        digits.Add("2");
+        digits.Add("3");
+        digits.Add("4");
+        digits.Add("5");
+        digits.Add("6");
+        digits.Add("7");
+        digits.Add("8");
+        digits.Add("9");
+        digits.Add(".");
+    }
+    return digits;
+}
+
+const gcGeneralDataType
+gcdata::allDataTypes()
+{
+    static gcGeneralDataType dtype;
+    if(dtype.empty())
+    {
+        dtype.insert(sdatatype_DNA);
+        dtype.insert(sdatatype_SNP);
+        dtype.insert(sdatatype_MICROSAT);
+        dtype.insert(sdatatype_KALLELE);
+    }
+    return dtype;
+}
+
+const gcGeneralDataType
+gcdata::allelicDataTypes()
+{
+    static gcGeneralDataType dtype;
+    if(dtype.empty())
+    {
+        dtype.insert(sdatatype_MICROSAT);
+        dtype.insert(sdatatype_KALLELE);
+    }
+    return dtype;
+}
+
+const gcGeneralDataType
+gcdata::nucDataTypes()
+{
+    static gcGeneralDataType dtype;
+    if(dtype.empty())
+    {
+        dtype.insert(sdatatype_DNA);
+        dtype.insert(sdatatype_SNP);
+    }
+    return dtype;
+}
+
+wxString
+gcdata::getPloidyString(size_t ploidy)
+{
+    if(ploidy == 1) return gcstr::ploidy_1;
+    if(ploidy == 2) return gcstr::ploidy_2;
+    if(ploidy == 3) return gcstr::ploidy_3;
+    if(ploidy == 4) return gcstr::ploidy_4;
+    return wxString::Format(gcstr::ploidy,(int)ploidy);
+}
+
+bool
+ProduceBoolFromProximityOrBarf(wxString string)
+{
+    if (string.CmpNoCase(gcstr::linkageYes) == 0) return true;
+    if (string.CmpNoCase(gcstr::linkageNo) == 0) return false;
+
+    throw gc_bad_proximity(string);
+}
+
+bool
+ProduceBoolFromYesNoOrBarf(wxString string)
+{
+    if (string.CmpNoCase(gcstr::yes) == 0) return true;
+    if (string.CmpNoCase(gcstr::no)  == 0) return false;
+
+    throw gc_bad_yes_no(string);
+}
+
+gcGeneralDataType
+ProduceGeneralDataTypeOrBarf(wxString string, bool allowUnknown)
+{
+    gcGeneralDataType dataTypes;
+
+    if(allowUnknown)
+    {
+        if (string.CmpNoCase(gcstr::unknown) == 0)
+        {
+            return dataTypes;
+        }
+    }
+
+    if (string.CmpNoCase(gcstr::nuc) == 0)
+    {
+        dataTypes.insert(sdatatype_DNA);
+        dataTypes.insert(sdatatype_SNP);
+        return dataTypes;
+    }
+
+    if (string.CmpNoCase(gcstr::allelic) == 0)
+    {
+        dataTypes.insert(sdatatype_MICROSAT);
+        dataTypes.insert(sdatatype_KALLELE);
+        return dataTypes;
+    }
+
+    throw gc_bad_general_data_type(string);
+    return dataTypes;
+}
+
+gcSpecificDataType
+ProduceSpecificDataTypeOrBarf(wxString string, bool allowUnknown)
+{
+    if(allowUnknown)
+    {
+        if (string.CmpNoCase(gcstr::unknown) == 0)  return sdatatype_NONE_SET;
+    }
+    if (string.CmpNoCase(gcstr::dna) == 0)      return sdatatype_DNA;
+    if (string.CmpNoCase(gcstr::snp) == 0)      return sdatatype_SNP;
+    if (string.CmpNoCase(gcstr::kallele) == 0)  return sdatatype_KALLELE;
+    if (string.CmpNoCase(gcstr::microsat) == 0) return sdatatype_MICROSAT;
+
+    throw gc_bad_specific_data_type(string);
+    return sdatatype_NONE_SET;
+}
+
+GCFileFormat
+ProduceGCFileFormatOrBarf(wxString string, bool allowUnknown)
+{
+    if(allowUnknown)
+    {
+        if (string.CmpNoCase(gcstr::unknown) == 0)  return format_NONE_SET;
+    }
+    if (string.CmpNoCase(gcstr::migrate) == 0)  return format_MIGRATE;
+    if (string.CmpNoCase(gcstr::phylip) == 0)   return format_PHYLIP;
+
+    throw gc_bad_file_format(string);
+    return format_NONE_SET;
+
+}
+
+GCInterleaving
+ProduceGCInterleavingOrBarf(wxString string, bool allowUnknown)
+{
+    if(allowUnknown)
+    {
+        if (string.CmpNoCase(gcstr::unknown) == 0)      return interleaving_NONE_SET;
+    }
+    if (string.CmpNoCase(gcstr::interleaved) == 0)  return interleaving_INTERLEAVED;
+    if (string.CmpNoCase(gcstr::sequential) == 0)   return interleaving_SEQUENTIAL;
+    //EWFIX.FROMLUCIAN:  do we need moot any more?
+    if (string.CmpNoCase(gcstr::moot) == 0)         return interleaving_MOOT;
+
+    throw gc_bad_interleaving(string);
+    return interleaving_NONE_SET;
+}
+
+migration_method
+ProduceMigMethodOrBarf(wxString string)
+{
+    if (string.CmpNoCase(gcstr_mig::migmethodUser) == 0)  return migmethod_USER;
+    if (string.CmpNoCase(gcstr_mig::migmethodFST) == 0)   return migmethod_FST;
+    return migmethod_USER;
+}
+
+migration_profile
+ProduceMigProfileOrBarf(wxString string)
+{
+    if (string.CmpNoCase(gcstr_mig::migprofileNone) == 0)  return migprofile_NONE;
+    if (string.CmpNoCase(gcstr_mig::migprofileFixed) == 0)  return migprofile_FIXED;
+    if (string.CmpNoCase(gcstr_mig::migprofilePercentile) == 0)  return migprofile_PERCENTILE;
+    return     migprofile_NONE;
+}
+
+migration_constraint
+ProduceMigConstraintOrBarf(wxString string)
+{
+    if (string.CmpNoCase(gcstr_mig::migconstraintInvalid) == 0)  return migconstraint_INVALID;
+    if (string.CmpNoCase(gcstr_mig::migconstraintConstant) == 0)   return migconstraint_CONSTANT;
+    if (string.CmpNoCase(gcstr_mig::migconstraintSymmetric) == 0)   return migconstraint_SYMMETRIC;
+    if (string.CmpNoCase(gcstr_mig::migconstraintUnconstained) == 0)   return migconstraint_UNCONSTRAINED;
+    return migconstraint_UNCONSTRAINED;
+}
+
+wxString
+ToWxString(bool booleanValue)
+{
+    if(booleanValue) return gcstr::trueVal;
+    return gcstr::falseVal;
+}
+
+wxString
+ToWxStringLinked(bool booleanValue)
+{
+    if(booleanValue) return gcstr::linkageYes;
+    return gcstr::linkageNo;
+}
+
+wxString
+ToWxString(gcGeneralDataType dtype)
+{
+    wxString retStr = "";
+    for(gcGeneralDataType::const_iterator i = dtype.begin(); i != dtype.end(); i++)
+    {
+        if(i != dtype.begin() && dtype.size() > 1)
+        {
+            retStr += "/";  // EWFIX.STRINGS
+        }
+        retStr += ToWxString((gcSpecificDataType)(*i));
+    }
+
+    if (retStr.IsEmpty())
+    {
+        retStr = ToWxString(sdatatype_NONE_SET);
+    }
+
+    return retStr;
+}
+
+wxString
+ToWxString(gcSpecificDataType type)
+{
+    switch(type)
+    {
+        case sdatatype_NONE_SET:
+            return gcstr::unknown;
+            break;
+        case sdatatype_DNA:
+            return gcstr::dna;
+            break;
+        case sdatatype_SNP:
+            return gcstr::snp;
+            break;
+        case sdatatype_MICROSAT:
+            return gcstr::microsat;
+            break;
+        case sdatatype_KALLELE:
+            return gcstr::kallele;
+            break;
+    }
+    assert(false);
+    return wxT("");
+}
+
+wxString
+ToWxString(GCFileFormat format)
+{
+    switch(format)
+    {
+        case format_NONE_SET:
+            return gcstr::unknown;
+            break;
+        case format_MIGRATE:
+            return gcstr::migrate;
+            break;
+        case format_PHYLIP:
+            return gcstr::phylip;
+            break;
+    }
+    assert(false);
+    return wxT("");
+}
+
+wxString
+ToWxString(GCInterleaving interleaving)
+{
+    switch(interleaving)
+    {
+        case interleaving_NONE_SET:
+            return gcstr::unknown;
+            break;
+        case interleaving_SEQUENTIAL:
+            return gcstr::sequential;
+            break;
+        case interleaving_INTERLEAVED:
+            return gcstr::interleaved;
+            break;
+        case interleaving_MOOT:
+            return gcstr::moot;
+            break;
+    }
+    assert(false);
+    return wxT("");
+}
+
+wxString
+ToWxString(gcPhaseSource ps)
+{
+    switch(ps)
+    {
+        case phaseSource_NONE_SET:
+            return gcstr::unknown;
+            break;
+        case phaseSource_PHASE_FILE:
+            return gcstr::phaseFile;
+            break;
+        case phaseSource_MULTI_PHASE_SAMPLE:
+            return gcstr::multiPhaseSample;
+            break;
+        case phaseSource_FILE_ADJACENCY:
+            return gcstr::fileSetting;
+            break;
+        case phaseSource_COUNT:
+            assert(false);
+            return gcstr::unknown;
+            break;
+    }
+    assert(false);
+    return wxT("");
+}
+
+wxString
+ToWxString(loc_match lm)
+{
+    switch(lm)
+    {
+        case locmatch_DEFAULT:
+            return cnvstr::ATTR_VAL_DEFAULT;
+            break;
+        case locmatch_SINGLE:
+            return cnvstr::ATTR_VAL_SINGLE;
+            break;
+        case locmatch_LINKED:
+            return cnvstr::ATTR_VAL_LINKED;
+            break;
+        case locmatch_VECTOR:
+            return cnvstr::ATTR_VAL_BYLIST;
+            break;
+    }
+    assert(false);
+    return gcstr::unknown;
+}
+
+wxString
+ToWxString(pop_match lm)
+{
+    switch(lm)
+    {
+        case popmatch_DEFAULT:
+            return cnvstr::ATTR_VAL_DEFAULT;
+            break;
+        case popmatch_NAME:
+            return cnvstr::ATTR_VAL_BYNAME;
+            break;
+        case popmatch_SINGLE:
+            return cnvstr::ATTR_VAL_SINGLE;
+            break;
+        case popmatch_VECTOR:
+            return cnvstr::ATTR_VAL_BYLIST;
+            break;
+    }
+    assert(false);
+    return gcstr::unknown;
+}
+
+wxString
+ToWxString(migration_method mm)
+{
+    switch(mm)
+    {
+        case migmethod_USER:
+            return gcstr_mig::migmethodUser;
+            break;
+        case migmethod_FST:
+            return gcstr_mig::migmethodFST;
+            break;
+    }
+    assert(false);
+    return gcstr::unknown;
+}
+
+wxString
+ToWxString(migration_profile mp)
+{
+    switch(mp)
+    {
+        case migprofile_NONE:
+            return gcstr_mig::migprofileNone;
+            break;
+        case migprofile_FIXED:
+            return gcstr_mig::migprofileFixed;
+            break;
+        case migprofile_PERCENTILE:
+            return gcstr_mig::migprofilePercentile;
+            break;
+    }
+    assert(false);
+    return gcstr::unknown;
+}
+
+wxString
+ToWxString(migration_constraint mc)
+{
+    switch(mc)
+    {
+        case migconstraint_INVALID:
+            return gcstr_mig::migconstraintInvalid;
+            break;
+        case migconstraint_CONSTANT:
+            return gcstr_mig::migconstraintConstant;
+            break;
+        case migconstraint_SYMMETRIC:
+            return gcstr_mig::migconstraintSymmetric;
+            break;
+        case migconstraint_UNCONSTRAINED:
+            return gcstr_mig::migconstraintUnconstained;
+            break;
+    }
+    assert(false);
+    return gcstr::unknown;
+}
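+
+// Round-trip sketch (illustrative only): the ToWxString overloads above and
+// the Produce*OrBarf parsers are intended as inverses of each other, e.g.
+//
+//   wxString label = ToWxString(sdatatype_SNP);      // yields gcstr::snp
+//   gcSpecificDataType back = ProduceSpecificDataTypeOrBarf(label);
+//   assert(back == sdatatype_SNP);                   // the comparison is case-insensitive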
+
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_data.h b/src/guiconv/gc_data.h
new file mode 100644
index 0000000..bd30dab
--- /dev/null
+++ b/src/guiconv/gc_data.h
@@ -0,0 +1,81 @@
+// $Id: gc_data.h,v 1.32 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_DATA_H
+#define GC_DATA_H
+
+#include "gc_types.h"
+#include "gc_strings_mig.h"
+#include "wx/arrstr.h"
+#include "wx/list.h"
+
+class gcdata
+{
+  public:
+    static const wxArrayString fileFormatChoices();
+    static const wxArrayString specificDataTypeChoices();
+    static const wxArrayString interleavingChoices();
+    static const wxArrayString genericLocusChoices();
+    static const wxArrayString genericPopulationChoices();
+    static const wxArrayString genericRegionChoices();
+    static const wxArrayString integerList();
+    static const wxArrayString integerListWithSpaces();
+    static const wxArrayString nonNegativeIntegerList();
+    static const wxArrayString positiveFloatChars();
+
+    static const wxArrayString migrationConstraints();
+    static const wxArrayString migrationMethods();
+    static const wxArrayString migrationProfiles();
+
+    static const size_t defaultHapCount;
+
+    static const long defaultMapPosition;
+    static const long defaultOffset;
+    static const long noLengthSet;
+    static const long noMapPositionSet;
+    static const long noMarkerCountSet;
+    static const long noOffsetSet;
+    static const long noStyle;
+
+    static const GCFileFormat   defaultFileFormat;
+
+    static const gcGeneralDataType  allDataTypes();
+    static const gcGeneralDataType  allelicDataTypes();
+    static const gcGeneralDataType  nucDataTypes();
+
+    static wxString getPloidyString(size_t ploidy);
+};
+
+bool                 ProduceBoolFromProximityOrBarf(wxString string);
+bool                 ProduceBoolFromYesNoOrBarf(wxString string);
+GCFileFormat         ProduceGCFileFormatOrBarf(wxString string, bool allowUnknown=true);
+gcGeneralDataType    ProduceGeneralDataTypeOrBarf(wxString string, bool allowUnknown=true);
+gcSpecificDataType   ProduceSpecificDataTypeOrBarf(wxString string, bool allowUnknown=true);
+GCInterleaving       ProduceGCInterleavingOrBarf(wxString string, bool allowUnknown=true);
+migration_method     ProduceMigMethodOrBarf(wxString string);
+migration_profile    ProduceMigProfileOrBarf(wxString string);
+migration_constraint ProduceMigConstraintOrBarf(wxString string);
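+
+// The Produce*OrBarf converters parse user-visible text and throw the
+// matching gc_bad_* exception when the text is unrecognized (unless
+// allowUnknown applies).  A minimal caller sketch, not upstream code
+// (userText is hypothetical):
+//
+//   try
+//   {
+//       GCFileFormat format = ProduceGCFileFormatOrBarf(userText, false);
+//   }
+//   catch (const gc_bad_file_format & e)
+//   {
+//       // report the unparseable string back to the user
+//   }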
+
+wxString        ToWxString(bool);
+wxString        ToWxStringLinked(bool);
+wxString        ToWxString(gcGeneralDataType);
+wxString        ToWxString(gcSpecificDataType);
+wxString        ToWxString(GCFileFormat);
+wxString        ToWxString(GCInterleaving);
+wxString        ToWxString(gcPhaseSource);
+wxString        ToWxString(loc_match);
+wxString        ToWxString(pop_match);
+wxString        ToWxString(migration_method);
+wxString        ToWxString(migration_profile);
+wxString        ToWxString(migration_constraint);
+
+#endif  // GC_DATA_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_data_display.cpp b/src/guiconv/gc_data_display.cpp
new file mode 100644
index 0000000..1889cc6
--- /dev/null
+++ b/src/guiconv/gc_data_display.cpp
@@ -0,0 +1,103 @@
+// $Id: gc_data_display.cpp,v 1.10 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+//#include <iostream>
+#include <string>
+
+#include "gc_data_display.h"
+#include "gc_layout.h"
+#include "wx/log.h"
+
+//using namespace std;
+
+GCDataDisplaySizer::GCDataDisplaySizer()
+    : wxGridBagSizer(gclayout::borderSize,gclayout::borderSize)
+{
+    m_offset = 0;
+}
+
+GCDataDisplaySizer::~GCDataDisplaySizer()
+{
+}
+
+void
+GCDataDisplaySizer::SetOffset(int offset)
+{
+    m_offset = offset;
+}
+
+int
+GCDataDisplaySizer::GetOffset()
+{
+    return m_offset;
+}
+
+void
+GCDataDisplaySizer::AddPop(wxWindow * header, int rowIndex, int length)
+{
+    int colIndex = m_offset;
+    //wxLogVerbose("AddPop row: %i column: %i", rowIndex, colIndex);  // JMDBG
+    Add(header,wxGBPosition(rowIndex,colIndex),wxGBSpan(length,1),wxALL | wxEXPAND);
+}
+
+void
+GCDataDisplaySizer::AddRegion(wxWindow * header, size_t firstLocus, size_t lastLocus)
+{
+    // always in row 0, first column always skipped
+    //wxLogVerbose(" Region row 0 col: %i len: %i",firstLocus+1+m_offset, lastLocus-firstLocus+1);  // JMDBG
+    Add(header,wxGBPosition(0,firstLocus+1+m_offset),wxGBSpan(1,lastLocus-firstLocus+1),wxALL | wxEXPAND);
+}
+
+void
+GCDataDisplaySizer::AddLocus(wxWindow * header, size_t locusIndex)
+{
+    // always in row 1, first column always skipped
+    //wxLogVerbose(" Locus row 1 col: %i" ,locusIndex+1+m_offset);  // JMDBG
+    Add(header,wxGBPosition(1,locusIndex+1+m_offset),wxDefaultSpan,wxALL | wxEXPAND);
+}
+
+void
+GCDataDisplaySizer::AddData(wxWindow * header, int rowIndex, int colIndex)
+{
+    colIndex += m_offset;
+    //wxLogVerbose("AddData row: %i column: %i", rowIndex, colIndex);
+    Add(header,wxGBPosition(rowIndex,colIndex),wxDefaultSpan,wxALL | wxEXPAND);
+}
+
+void
+GCDataDisplaySizer::AddPanel(wxWindow * header, int rowIndex, int colIndex, int width)
+{
+    //wxLogVerbose("AddPanel row: %i column: %i length: %i", rowIndex,colIndex+m_offset, width);
+    Add(header,wxGBPosition(rowIndex,colIndex+m_offset),wxGBSpan(1, width),wxALL | wxEXPAND);
+}
+
+void
+GCDataDisplaySizer::AddDivergenceToggleCell(wxWindow * header)
+{
+    //wxLogVerbose("AddDivergenceToggleCell row: 0 column: 0");  // JMDBG
+    Add(header,wxGBPosition(0,0),wxGBSpan(1,1),wxALL | wxEXPAND);
+}
+
+
+void
+GCDataDisplaySizer::AddPanelsToggleCell(wxWindow * header)
+{
+    //wxLogVerbose("AddPanelToggleCell row: 1 column: 0");  // JMDBG
+    Add(header,wxGBPosition(1,0),wxGBSpan(1,1),wxALL | wxEXPAND);
+}
+
+void
+GCDataDisplaySizer::AddParent(wxWindow * header, size_t parRow, int parCol, int parSpan)
+{
+    //wxLogVerbose("AddParent row: %i column: %i height: %i", parRow, parCol, parSpan);  // JMDBG
+    Add(header,wxGBPosition(parRow,parCol),wxGBSpan(parSpan,1),wxALL | wxEXPAND);
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_data_display.h b/src/guiconv/gc_data_display.h
new file mode 100644
index 0000000..dc82112
--- /dev/null
+++ b/src/guiconv/gc_data_display.h
@@ -0,0 +1,38 @@
+// $Id: gc_data_display.h,v 1.9 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_DATA_DISPLAY_H
+#define GC_DATA_DISPLAY_H
+
+#include "wx/gbsizer.h"
+
+class GCDataDisplaySizer : public wxGridBagSizer
+{
+  private:
+    int m_offset;
+  public:
+    GCDataDisplaySizer();
+    virtual ~GCDataDisplaySizer();
+    void SetOffset(int offset);
+    int  GetOffset();
+
+    void AddPop(wxWindow * header,int rowIndex, int length);
+    void AddRegion(wxWindow * header,size_t firstLocus, size_t lastLocus);
+    void AddLocus(wxWindow * header, size_t locusIndex);
+    void AddData(wxWindow * header, int rowIndex, int colIndex);
+    void AddPanel(wxWindow * header, int rowIndex, int colIndex, int width);
+    void AddDivergenceToggleCell(wxWindow * header);
+    void AddPanelsToggleCell(wxWindow * header);
+    void AddParent(wxWindow * header, size_t parRow, int parCol, int parSpan);
+};
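+
+// Usage sketch (illustrative only; based on the implementation in
+// gc_data_display.cpp, with hypothetical variables): row 0 holds region
+// headers, row 1 holds locus headers, column 0 holds the toggle cells, and
+// SetOffset() shifts the remaining columns right to make room for parents.
+//
+//   GCDataDisplaySizer * sizer = new GCDataDisplaySizer();
+//   sizer->SetOffset(1);                        // shift columns right by one
+//   sizer->AddDivergenceToggleCell(divToggle);  // fixed at row 0, column 0
+//   sizer->AddRegion(regionHeader, 0, 2);       // row 0, spanning loci 0..2
+//   sizer->AddLocus(locusHeader, 0);            // row 1, column of locus 0
+//   sizer->AddPop(popHeader, 2, 3);             // rows 2..4 in the offset column
+//   sizer->AddData(dataCell, 2, 1);             // single cell, shifted by the offset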
+
+#endif  // GC_DATA_DISPLAY_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_dialog.cpp b/src/guiconv/gc_dialog.cpp
new file mode 100644
index 0000000..05c0762
--- /dev/null
+++ b/src/guiconv/gc_dialog.cpp
@@ -0,0 +1,681 @@
+// $Id: gc_dialog.cpp,v 1.30 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_data.h"
+#include "gc_datastore.h"
+#include "gc_dialog.h"
+#include "gc_errhandling.h"
+#include "gc_event_ids.h"
+#include "gc_event_publisher.h"
+#include "gc_layout.h"
+#include "gc_locus_err.h"
+#include "gc_strings.h"
+
+#include "wx/button.h"
+#include "wx/checkbox.h"
+#include "wx/choice.h"
+#include "wx/log.h"
+#include "wx/radiobox.h"
+#include "wx/sizer.h"
+#include "wx/statbox.h"
+#include "wx/statline.h"
+#include "wx/stattext.h"
+#include "wx/textctrl.h"
+
+//------------------------------------------------------------------------------------
+
+gcUpdatingComponent::gcUpdatingComponent(   wxWindow *  parent,
+                                            wxString    instructions)
+    :
+    wxPanel(parent,-1)
+{
+    m_statBoxSizer = new wxStaticBoxSizer(wxVERTICAL,this,instructions);
+    SetSizer(m_statBoxSizer);
+}
+
+gcUpdatingComponent::~gcUpdatingComponent()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gcPlainTextHelper::gcPlainTextHelper()
+{
+}
+
+gcPlainTextHelper::~gcPlainTextHelper()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gcUpdatingPlainText::gcUpdatingPlainText(   wxWindow *          parent,
+                                            wxString            instructions,
+                                            gcPlainTextHelper * helper)
+    :
+    gcUpdatingComponent(parent,instructions),
+    m_textHelper(helper)
+{
+    m_statText = new wxStaticText(this,-1,"");
+    m_statBoxSizer->Add(m_statText,
+                        0,
+                        wxALL | wxALIGN_LEFT | wxALIGN_TOP | wxEXPAND ,
+                        gclayout::borderSizeSmall);
+}
+
+gcUpdatingPlainText::~gcUpdatingPlainText()
+{
+    delete m_textHelper;
+}
+
+void
+gcUpdatingPlainText::BuildDisplay(GCDataStore & dataStore)
+{
+}
+
+void
+gcUpdatingPlainText::UpdateDisplay(GCDataStore & dataStore)
+{
+    m_statText->SetLabel(m_textHelper->FromDataStore(dataStore));
+}
+
+void
+gcUpdatingPlainText::UpdateDataInterim(GCDataStore & dataStore)
+// no updates to dataStore -- this is for display only
+{
+}
+
+void
+gcUpdatingPlainText::UpdateDataFinal(GCDataStore & dataStore)
+// no updates to dataStore -- this is for display only
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gcTextHelper::gcTextHelper()
+{
+}
+
+gcTextHelper::~gcTextHelper()
+{
+}
+
+const wxValidator &
+gcTextHelper::GetValidator()
+{
+    return wxDefaultValidator;
+}
+
+wxString
+gcTextHelper::InitialString()
+{
+    return wxEmptyString;
+}
+
+//------------------------------------------------------------------------------------
+
+BEGIN_EVENT_TABLE(gcTextCtrlWithInstructions,wxTextCtrl)
+EVT_MOUSE_EVENTS(gcTextCtrlWithInstructions::OnMouse)
+END_EVENT_TABLE()
+
+gcTextCtrlWithInstructions::gcTextCtrlWithInstructions( wxWindow *          parent,
+                                                        wxString            instructions,
+                                                        const wxValidator & validator)
+:
+wxTextCtrl(parent,-1,instructions,wxDefaultPosition,wxDefaultSize,
+           gcdata::noStyle,validator),
+    m_instructions(instructions)
+{
+}
+
+gcTextCtrlWithInstructions::~gcTextCtrlWithInstructions()
+{
+}
+
+void
+gcTextCtrlWithInstructions::OnMouse(wxMouseEvent & event)
+{
+    // according to the wxWidgets documentation, calling event.Skip()
+    // here may be necessary because the default handler provides more
+    // basic functionality (such as bringing the window to the front).
+    event.Skip();
+
+    if( !event.Moving())
+    {
+        if( event.LeftDown() )
+        {
+            if( GetValue() == m_instructions)
+            {
+                ChangeValue(wxEmptyString);
+            }
+        }
+    }
+
+}
+
+//------------------------------------------------------------------------------------
+
+gcUpdatingTextCtrl::gcUpdatingTextCtrl( wxWindow *          parent,
+                                        wxString            instructions,
+                                        gcTextHelper *      helper)
+    :
+    gcUpdatingComponent(parent,instructions),
+    m_textCtrl(NULL),
+    m_textHelper(helper)
+{
+    m_textCtrl = new gcTextCtrlWithInstructions(this,
+                                                m_textHelper->InitialString(),
+                                                m_textHelper->GetValidator());
+
+    m_statBoxSizer->Add(m_textCtrl,
+                        0,
+                        wxALL | wxALIGN_LEFT | wxALIGN_TOP | wxEXPAND ,
+                        gclayout::borderSizeSmall);
+}
+
+gcUpdatingTextCtrl::~gcUpdatingTextCtrl()
+{
+    delete m_textHelper;
+}
+
+void
+gcUpdatingTextCtrl::BuildDisplay(GCDataStore & dataStore)
+{
+}
+
+void
+gcUpdatingTextCtrl::UpdateDisplay(GCDataStore & dataStore)
+{
+    m_textCtrl->SetValue(m_textHelper->FromDataStore(dataStore));
+}
+
+void
+gcUpdatingTextCtrl::UpdateDataInterim(GCDataStore & dataStore)
+{
+    m_textHelper->ToDataStore(dataStore,m_textCtrl->GetValue());
+}
+
+void
+gcUpdatingTextCtrl::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+//------------------------------------------------------------------------------------
+
+gcChoiceObject::gcChoiceObject()
+{
+}
+
+gcChoiceObject::~gcChoiceObject()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+BEGIN_EVENT_TABLE(gcUpdatingChoose, gcUpdatingComponent)
+EVT_CHECKBOX( wxID_ANY,        gcUpdatingChoose::OnCheck )
+END_EVENT_TABLE()
+
+gcUpdatingChoose::gcUpdatingChoose( wxWindow *                      parent,
+                                    wxString                        instructions,
+                                    std::vector<gcChoiceObject*>    choices)
+:
+gcUpdatingComponent(parent,instructions),
+    m_choices(choices)
+{
+}
+
+gcUpdatingChoose::~gcUpdatingChoose()
+{
+    for(std::vector<gcChoiceObject*>::iterator i = m_choices.begin(); i != m_choices.end(); i++)
+    {
+        delete *i;
+    }
+}
+
+void
+gcUpdatingChoose::BuildDisplay(GCDataStore & dataStore)
+{
+    // add new check boxes
+    for(size_t index = 0; index < m_choices.size(); index++)
+    {
+        gcChoiceObject * choice = m_choices[index];
+        //wxLogVerbose("****in gcUpdatingDialog::BuildDisplay event: %i", eventId);  // JMDBG
+
+        m_statBoxSizer->Add(choice->MakeWindow(this),
+                            0,
+                            wxALL | wxALIGN_LEFT | wxALIGN_TOP | wxEXPAND ,
+                            gclayout::borderSizeSmall);
+
+        choice->UpdateDisplayInitial(dataStore);
+    }
+}
+
+void
+gcUpdatingChoose::UpdateDisplay(GCDataStore & dataStore)
+{
+    for(size_t index=0; index < m_choices.size(); index++)
+    {
+        gcChoiceObject * choice = m_choices[index];
+        choice->UpdateDisplayInterim(dataStore);
+    }
+}
+
+void
+gcUpdatingChoose::UpdateDataInterim(GCDataStore & dataStore)
+{
+    for(size_t index=0; index < m_choices.size(); index++)
+    {
+        gcChoiceObject * choice = m_choices[index];
+        choice->UpdateDataInterim(dataStore);
+    }
+}
+
+void
+gcUpdatingChoose::UpdateDataFinal(GCDataStore & dataStore)
+{
+    for(size_t index=0; index < m_choices.size(); index++)
+    {
+        gcChoiceObject * choice = m_choices[index];
+        choice->UpdateDataFinal(dataStore);
+    }
+}
+
+void
+gcUpdatingChoose::OnCheck(wxCommandEvent & event)
+{
+    if(event.IsChecked())
+    {
+        wxObject * obj = event.GetEventObject();
+        ProcessPositiveCheck(obj);
+    }
+}
+
+void
+gcUpdatingChoose::ProcessPositiveCheck(wxObject * obj)
+// enforces one check per set of choices
+{
+    for(size_t index=0; index < m_choices.size(); index++)
+    {
+        wxWindow * choice = m_choices[index]->FetchWindow();
+        if(choice != obj)
+        {
+            wxCheckBox * cb = dynamic_cast<wxCheckBox*>(choice);
+            assert(cb != NULL);
+            cb->SetValue(0);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+BEGIN_EVENT_TABLE(gcUpdatingChooseMulti, gcUpdatingChoose)
+EVT_BUTTON( GC_SelectAll,    gcUpdatingChooseMulti::OnSelectAll )
+EVT_BUTTON( GC_UnselectAll,  gcUpdatingChooseMulti::OnUnselectAll )
+END_EVENT_TABLE()
+
+gcUpdatingChooseMulti::gcUpdatingChooseMulti(
+    wxWindow *                      parent,
+    wxString                        instructions,
+    std::vector<gcChoiceObject*>    choices)
+:
+gcUpdatingChoose(parent,instructions,choices)
+{
+}
+
+gcUpdatingChooseMulti::~gcUpdatingChooseMulti()
+{
+}
+
+wxString
+gcUpdatingChooseMulti::NoChoicesText() const
+{
+    return gcstr::noChoice;
+}
+
+void
+gcUpdatingChooseMulti::BuildDisplay(GCDataStore & dataStore)
+{
+    gcUpdatingChoose::BuildDisplay(dataStore);
+
+    if (m_choices.empty())
+    {
+        m_statBoxSizer->AddStretchSpacer(1);
+        m_statBoxSizer->Add(new wxStaticText(this,-1,NoChoicesText()),
+                            1,
+                            wxALL | wxALIGN_CENTER | wxEXPAND ,
+                            gclayout::borderSizeSmall);
+        m_statBoxSizer->AddStretchSpacer(1);
+    }
+    else
+    {
+        wxBoxSizer * buttonSizer = new wxBoxSizer(wxHORIZONTAL);
+        buttonSizer->AddStretchSpacer(1);
+        buttonSizer->Add(new wxButton(this,GC_SelectAll,gcstr::buttonSelectAll),
+                         0,
+                         wxALL | wxALIGN_CENTER ,
+                         gclayout::borderSizeSmall);
+        buttonSizer->Add(new wxButton(this,GC_UnselectAll,gcstr::buttonUnselectAll),
+                         0,
+                         wxALL | wxALIGN_CENTER ,
+                         gclayout::borderSizeSmall);
+
+        m_statBoxSizer->AddStretchSpacer(1);
+        m_statBoxSizer->Add(buttonSizer,
+                            0,
+                            wxALL | wxALIGN_LEFT | wxALIGN_TOP | wxEXPAND ,
+                            gclayout::borderSizeSmall);
+    }
+}
+
+void
+gcUpdatingChooseMulti::ProcessPositiveCheck(wxObject * obj)
+// don't do anything
+{
+}
+
+void
+gcUpdatingChooseMulti::UpdateDataFinal(GCDataStore & dataStore)
+{
+    gcUpdatingChoose::UpdateDataFinal(dataStore);
+
+    gcIdVec checkedIds;
+    for(size_t index=0; index < m_choices.size(); index++)
+    {
+        gcChoiceObject * choice = m_choices[index];
+        wxWindow * window = choice->FetchWindow();
+        wxCheckBox * cb = dynamic_cast<wxCheckBox*>(window);
+        assert(cb != NULL);
+        if(cb->GetValue() > 0)
+        {
+            checkedIds.push_back(choice->GetRelevantId());
+        }
+    }
+    DoFinalForMulti(dataStore,checkedIds);
+}
+
+void
+gcUpdatingChooseMulti::OnSelectAll(wxCommandEvent & event)
+{
+    for(size_t index=0; index < m_choices.size(); index++)
+        // EWFIX.P3 -- should refactor to eliminate dynamic cast
+    {
+        gcChoiceObject * choice = m_choices[index];
+        wxWindow * window = choice->FetchWindow();
+        wxCheckBox * cb = dynamic_cast<wxCheckBox*>(window);
+        if(cb->IsEnabled())
+        {
+            cb->SetValue(1);
+        }
+    }
+}
+
+void
+gcUpdatingChooseMulti::OnUnselectAll(wxCommandEvent & event)
+{
+    for(size_t index=0; index < m_choices.size(); index++)
+        // EWFIX.P3 -- should refactor to eliminate dynamic cast
+    {
+        gcChoiceObject * choice = m_choices[index];
+        wxWindow * window = choice->FetchWindow();
+        wxCheckBox * cb = dynamic_cast<wxCheckBox*>(window);
+        cb->SetValue(0);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+#if 0
+
+gcSelectObject::gcSelectObject()
+{
+}
+
+gcSelectObject::~gcSelectObject()
+{
+}
+
+gcUpdatingSelect::gcUpdatingSelect(
+    wxWindow *                      parent,
+    wxString                        instructions,
+    std::vector<gcSelectObject*>    choices)
+    :
+    gcUpdatingComponent(parent,instructions)
+{
+}
+
+gcUpdatingSelect::~gcUpdatingSelect()
+{
+}
+
+void
+gcUpdatingSelect::BuildDisplay(GCDataStore& ds)
+{
+}
+
+void
+gcUpdatingSelect::UpdateDisplay(GCDataStore& ds)
+{
+}
+
+void
+gcUpdatingSelect::UpdateDataInterim(GCDataStore& ds)
+{
+}
+
+void
+gcUpdatingSelect::UpdateDataFinal(GCDataStore& ds)
+{
+}
+
+#endif
+
+//------------------------------------------------------------------------------------
+
+gcDialogCreator::gcDialogCreator()
+{
+}
+
+gcDialogCreator::~gcDialogCreator()
+{
+}
+
+void
+gcDialogCreator::AddComponent(gcUpdatingDialog & dialog, gcUpdatingComponent * component)
+{
+    assert(component != NULL);
+    dialog.m_components.push_back(component);
+}
+
+void
+gcDialogCreator::PlaceContent(gcUpdatingDialog & dialog, wxSizer * sizerWithContent)
+{
+    assert(sizerWithContent != NULL);
+    dialog.m_sizer->Insert(0,
+                           sizerWithContent,
+                           0,
+                           wxALL | wxALIGN_CENTER | wxEXPAND,
+                           gclayout::borderSizeSmall);
+}
+
+//------------------------------------------------------------------------------------
+
+BEGIN_EVENT_TABLE(gcUpdatingDialog, wxDialog)
+EVT_BUTTON( wxID_ANY,        gcUpdatingDialog::OnButton )
+EVT_S2D   ( wxID_ANY,        gcUpdatingDialog::ScreenEvent)
+END_EVENT_TABLE()
+
+gcUpdatingDialog::gcUpdatingDialog( wxWindow *      parent,
+                                    GCDataStore &   dataStore,
+                                    wxString        title,
+                                    bool            forJustCreatedObj)
+:
+wxDialog(parent,
+         -1,
+         title,
+         wxDefaultPosition,wxDefaultSize,
+         wxCAPTION | wxCLOSE_BOX | wxRESIZE_BORDER),
+    m_dataStore(dataStore),
+    m_sizer(NULL)
+{
+
+    m_sizer = new wxBoxSizer(wxVERTICAL);
+    m_sizer->AddStretchSpacer(1);
+    m_sizer->Add(new wxStaticLine(this,-1,wxDefaultPosition,wxDefaultSize,wxLI_HORIZONTAL),
+                 0,
+                 wxALL | wxALIGN_CENTER | wxEXPAND,
+                 gclayout::borderSizeSmall);
+    m_sizer->AddStretchSpacer(1);
+    m_sizer->Add(new gcEditButtons(this,forJustCreatedObj),0,wxEXPAND);
+    SetSizer(m_sizer);
+}
+
+gcUpdatingDialog::~gcUpdatingDialog()
+{
+    for(size_t i=0; i < m_components.size(); i++)
+    {
+        delete (m_components[i]);
+    }
+}
+
+void
+gcUpdatingDialog::DoUpdateData()
+{
+    for(size_t i=0; i < m_components.size(); i++)
+    {
+        (m_components[i])->UpdateDataFinal(m_dataStore);
+    }
+}
+
+void
+gcUpdatingDialog::DoBuildDisplay()
+{
+    for(size_t i=0; i < m_components.size(); i++)
+    {
+        (m_components[i])->BuildDisplay(m_dataStore);
+        (m_components[i])->UpdateDisplay(m_dataStore);
+    }
+}
+
+void
+gcUpdatingDialog::OnButton(wxCommandEvent & event)
+{
+    int eventId = event.GetId();
+    wxLogVerbose("****in gcUpdatingDialog::OnButton event: %i", eventId);  // JMDBG
+    switch(eventId)
+    {
+        case wxID_APPLY:
+            try
+            {
+                DoUpdateData();
+                EndModal(eventId);
+            }
+            catch(const gc_data_error& e)
+            {
+                m_dataStore.GCError(e.what());
+                EndModal(wxID_CANCEL);
+            }
+            return;
+            break;
+        case wxID_CANCEL:
+            EndModal(eventId);
+            return;
+            break;
+        case wxID_DELETE:
+            DoDelete();
+            EndModal(eventId);
+            return;
+            break;
+        default:
+            assert(false);
+    }
+}
+
+void
+gcUpdatingDialog::ScreenEvent(wxCommandEvent & event)
+{
+    wxLogDebug("ScreenEvent");
+}
+
+void
+gcUpdatingDialog::DoDelete()
+{
+    gc_implementation_error e(gcerr::provideDoDelete.c_str());
+    throw e;
+}
+
+bool
+gcUpdatingDialog::Go()
+{
+    DoBuildDisplay();
+    Layout();
+    Fit();
+    CentreOnParent();
+    int retval = ShowModal();
+    switch(retval)
+    {
+        case wxID_APPLY:
+            // we should take all changes
+            return true;
+            break;
+        case wxID_CANCEL:
+            // we should not take any changes
+            return false;
+            break;
+        case wxID_DELETE:
+            // we should take all changes
+            return true;
+            break;
+        default:
+            m_dataStore.GCWarning(wxString::Format("saw unexpected dialog return code %d. Not taking your changes.",retval));   // EWFIX.P2
+            return false;
+            break;
+    }
+
+    assert(false);
+    return false;
+
+}
+
+//------------------------------------------------------------------------------------
+
+gcEditButtons::gcEditButtons(wxWindow * parent,bool forJustCreatedObj)
+    :
+    wxPanel(parent)
+{
+    wxBoxSizer * sizer = new wxBoxSizer(wxHORIZONTAL);
+    if(!forJustCreatedObj)
+    {
+        sizer->Add(new wxButton(this,wxID_DELETE,gcstr::editDelete),
+                   0,
+                   wxALL | wxALIGN_CENTER ,
+                   gclayout::borderSizeSmall);
+    }
+    sizer->AddStretchSpacer(1);
+    sizer->Add(new wxButton(this,wxID_CANCEL,gcstr::editCancel),
+               0,
+               wxALL | wxALIGN_CENTER ,
+               gclayout::borderSizeSmall);
+    sizer->Add(new wxButton(this,wxID_APPLY,gcstr::editApply),
+               0,
+               wxALL | wxALIGN_CENTER ,
+               gclayout::borderSizeSmall);
+    SetSizer(sizer);
+}
+
+gcEditButtons::~gcEditButtons()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_dialog.h b/src/guiconv/gc_dialog.h
new file mode 100644
index 0000000..99e426d
--- /dev/null
+++ b/src/guiconv/gc_dialog.h
@@ -0,0 +1,293 @@
+// $Id: gc_dialog.h,v 1.20 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_DIALOG_H
+#define GC_DIALOG_H
+
+#include <vector>
+
+#include "gc_structure_maps.h"
+
+#include "wx/button.h"
+#include "wx/dialog.h"
+#include "wx/panel.h"
+#include "wx/stattext.h"
+#include "wx/string.h"
+#include "wx/textctrl.h"
+
+class GCDataStore;
+class wxBoxSizer;
+class wxCheckBox;
+class wxChoice;
+class wxRadioBox;
+class wxStaticBoxSizer;
+
+class gcUpdatingComponent : public wxPanel
+{
+  private:
+    gcUpdatingComponent();      // undefined
+  protected:
+    // protected so inheritors can add objects
+    wxStaticBoxSizer *          m_statBoxSizer;
+  public:
+    gcUpdatingComponent(wxWindow * parent, wxString instructions);
+    virtual ~gcUpdatingComponent();
+
+    // build initial display
+    virtual void BuildDisplay(GCDataStore &)         = 0;
+
+    // bring display of component in line with current data
+    virtual void UpdateDisplay(GCDataStore &)        = 0;
+
+    // update current data for most operations
+    virtual void UpdateDataInterim(GCDataStore &)    = 0;
+
+    // update current data as display of component ends
+    virtual void UpdateDataFinal(GCDataStore &)      = 0;
+};
+
+class gcPlainTextHelper
+// can get current text value from data store
+{
+  private:
+  protected:
+  public:
+    gcPlainTextHelper();
+    virtual ~gcPlainTextHelper();
+
+    virtual wxString                FromDataStore(GCDataStore &) = 0;
+};
+
+class gcUpdatingPlainText : public gcUpdatingComponent
+// uses gcPlainTextHelper to keep an uneditable text value up to date
+{
+  private:
+    gcUpdatingPlainText();      // undefined
+    wxStaticText *      m_statText;
+    gcPlainTextHelper * m_textHelper;
+  protected:
+  public:
+    gcUpdatingPlainText(wxWindow *          parent,
+                        wxString            instructions,
+                        gcPlainTextHelper * helper);
+    virtual ~gcUpdatingPlainText();
+
+    void BuildDisplay(GCDataStore &);
+    void UpdateDisplay(GCDataStore &);
+    void UpdateDataInterim(GCDataStore &) ;
+    void UpdateDataFinal(GCDataStore &) ;
+};
+
+class gcTextHelper
+// can get and set current text value from/to data store
+{
+  private:
+  protected:
+  public:
+    gcTextHelper();
+    virtual ~gcTextHelper();
+
+    virtual wxString                FromDataStore(GCDataStore &) = 0;
+    virtual void                    ToDataStore(GCDataStore &, wxString text) = 0;
+    virtual const wxValidator &     GetValidator();
+    virtual wxString                InitialString();
+};
+
+class gcTextCtrlWithInstructions : public wxTextCtrl
+{
+  private:
+  protected:
+    wxString        m_instructions;
+  public:
+    gcTextCtrlWithInstructions( wxWindow *          parent,
+                                wxString            instructions,
+                                const wxValidator & validator);
+    virtual ~gcTextCtrlWithInstructions();
+
+    void    OnMouse(wxMouseEvent & event);
+
+    DECLARE_EVENT_TABLE()
+};
+
+class gcUpdatingTextCtrl : public gcUpdatingComponent
+// uses gcTextHelper to keep an editable text value up to date
+{
+  private:
+    gcUpdatingTextCtrl();           // undefined
+    gcTextCtrlWithInstructions *    m_textCtrl;
+    gcTextHelper *                  m_textHelper;
+  protected:
+  public:
+    gcUpdatingTextCtrl( wxWindow *          parent,
+                        wxString            instructions,
+                        gcTextHelper *      helper);
+    virtual ~gcUpdatingTextCtrl();
+
+    void BuildDisplay(GCDataStore &);
+    void UpdateDisplay(GCDataStore &);
+    void UpdateDataInterim(GCDataStore &) ;
+    void UpdateDataFinal(GCDataStore &) ;
+};
+
+class gcChoiceObject
+{
+  private:
+  protected:
+  public:
+    gcChoiceObject();
+    virtual ~gcChoiceObject();
+
+    virtual void        UpdateDisplayInitial    (GCDataStore &) = 0;
+    virtual void        UpdateDisplayInterim    (GCDataStore &) = 0;
+    virtual void        UpdateDataInterim       (GCDataStore &) = 0;
+    virtual void        UpdateDataFinal         (GCDataStore &) = 0;
+
+    virtual wxWindow *  MakeWindow(wxWindow * parent)           = 0;
+    virtual wxWindow *  FetchWindow()                           = 0;
+
+    virtual size_t      GetRelevantId()                         = 0;
+};
+
+class gcUpdatingChoose : public gcUpdatingComponent
+{
+  private:
+    gcUpdatingChoose();             // undefined
+  protected:
+    std::vector<gcChoiceObject*>    m_choices;
+    std::vector<wxCheckBox*>        m_boxes;
+  public:
+    gcUpdatingChoose(   wxWindow *                      parent,
+                        wxString                        instructions,
+                        std::vector<gcChoiceObject*>    choices);
+    virtual ~gcUpdatingChoose();
+
+    virtual void BuildDisplay(GCDataStore &)         ;
+    virtual void UpdateDisplay(GCDataStore &)        ;
+    virtual void UpdateDataInterim(GCDataStore &)    ;
+    virtual void UpdateDataFinal(GCDataStore &)      ;
+
+    virtual void OnCheck(wxCommandEvent &);
+    virtual void ProcessPositiveCheck(wxObject * checkBox);
+
+    DECLARE_EVENT_TABLE()
+
+};
+
+class gcUpdatingChooseMulti : public gcUpdatingChoose
+{
+  public:
+    gcUpdatingChooseMulti(  wxWindow *                      parent,
+                            wxString                        instructions,
+                            std::vector<gcChoiceObject*>    choices);
+    virtual ~gcUpdatingChooseMulti();
+
+    virtual void BuildDisplay(GCDataStore &)         ;
+
+    virtual void    ProcessPositiveCheck(wxObject * checkBox);
+
+    virtual void    UpdateDataFinal(GCDataStore &)      ;
+    virtual void    DoFinalForMulti(GCDataStore &, gcIdVec checkedIds) = 0;
+    virtual wxString    NoChoicesText() const;
+
+    virtual void    OnSelectAll(wxCommandEvent &);
+    virtual void    OnUnselectAll(wxCommandEvent &);
+
+    DECLARE_EVENT_TABLE()
+};
+
+#if 0
+
+class gcSelectObject
+{
+  public:
+    gcSelectObject();
+    virtual ~gcSelectObject();
+
+    virtual wxString    GetCurrentLabel         (GCDataStore&) = 0;
+    virtual void        UpdateDataInterim       (GCDataStore&) = 0;
+    virtual void        UpdateDataFinal         (GCDataStore&) = 0;
+};
+
+class gcUpdatingSelect : public gcUpdatingComponent
+{
+  public:
+    gcUpdatingSelect(   wxWindow *                      parent,
+                        wxString                        instructions,
+                        std::vector<gcSelectObject*>    selections);
+    virtual ~gcUpdatingSelect();
+
+    virtual void BuildDisplay(GCDataStore &);
+    virtual void UpdateDisplay(GCDataStore &);
+    virtual void UpdateDataInterim(GCDataStore &);
+    virtual void UpdateDataFinal(GCDataStore &);
+};
+
+#endif
+
+class gcUpdatingDialog : public wxDialog
+// this needs to be built by the gcDialogCreator factory class
+// that is its friend. When complete, it will contain any number
+// of gcUpdatingComponent panels, plus a set of gcEditButtons
+// at the bottom of the dialog
+{
+    friend class gcDialogCreator;
+  private:
+    gcUpdatingDialog();
+  protected:
+    virtual void DoDelete();
+    virtual void DoUpdateData();
+    virtual void DoBuildDisplay();
+    //
+    GCDataStore &                       m_dataStore;
+    std::vector<gcUpdatingComponent*>   m_components;
+    wxSizer *                           m_sizer;
+  public:
+    gcUpdatingDialog(   wxWindow *      parent,
+                        GCDataStore &   dataStore,
+                        wxString        title,
+                        bool            forJustCreatedObj);
+    virtual ~gcUpdatingDialog();
+
+    virtual void OnButton       (wxCommandEvent &);
+    virtual void ScreenEvent    (wxCommandEvent &);
+
+    virtual bool Go();
+
+    DECLARE_EVENT_TABLE()
+};
+
+class gcDialogCreator
+{
+  private:
+  protected:
+  public:
+    gcDialogCreator();
+    virtual ~gcDialogCreator();
+
+    void AddComponent(gcUpdatingDialog&,gcUpdatingComponent*);
+    void PlaceContent(gcUpdatingDialog&,wxSizer * sizerWithContent);
+};
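+
+// Assembly sketch (illustrative only; gcMyComponent and the variables are
+// hypothetical): a creator wires components into the dialog, places the
+// content sizer above the edit buttons, and then runs the modal loop.
+//
+//   gcUpdatingDialog dialog(parent, dataStore, title, false);
+//   gcUpdatingComponent * component = new gcMyComponent(&dialog, instructions);
+//   wxBoxSizer * content = new wxBoxSizer(wxVERTICAL);
+//   content->Add(component, 0, wxEXPAND);
+//   gcDialogCreator creator;
+//   creator.AddComponent(dialog, component);    // registered for Build/Update calls
+//   creator.PlaceContent(dialog, content);      // inserted above the gcEditButtons
+//   if (dialog.Go())                            // true when the user applies or deletes
+//   {
+//       // dataStore has been updated via UpdateDataFinal()
+//   }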
+
+//------------------------------------------------------------------------------------
+
+class gcEditButtons : public wxPanel
+{
+  private:
+    gcEditButtons();        // undefined
+  public:
+    gcEditButtons(wxWindow * parent, bool forJustCreatedObj);
+    virtual ~gcEditButtons();
+};
+
+//------------------------------------------------------------------------------------
+
+#endif  // GC_DIALOG_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_event_ids.h b/src/guiconv/gc_event_ids.h
new file mode 100644
index 0000000..4e5f822
--- /dev/null
+++ b/src/guiconv/gc_event_ids.h
@@ -0,0 +1,89 @@
+// $Id: gc_event_ids.h,v 1.61 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_EVENT_IDS_H
+#define GC_EVENT_IDS_H
+
+// It's safer to put all these in a single header file,
+// thus ensuring all event ids get a different value.
+// However, it does mean that a single change in the
+// GUI events means nearly everything needs to be recompiled.
+// If this gets to be a problem, we should consider breaking
+// this enum up. One likely possibility would be to put the
+// GC_*, S2D_* and D2S_* events in different enums. At this
+// writing (10/14/2004) S2D_* event id's are only generated
+// by events of type SCREEN_2_DATA (and similar for D2S_* and
+// DATA_2_SCREEN) so a number collision with other events would
+// likely not cause confusion.
+
+enum GCEventId
+{
+    gcEvent_Generic    = 1,
+
+    gcEvent_Debug_Dump,
+
+    gcEvent_CmdFile_Read,
+
+    gcEvent_File_Add,
+    gcEvent_File_Edit,
+    gcEvent_File_Export,
+    gcEvent_Batch_Export,
+
+    gcEvent_LinkG_Add,
+
+    gcEvent_Locus_Add,
+
+    gcEvent_Pop_Add,
+    gcEvent_Pop_Edit,
+
+    gcEvent_ToggleVerbose,
+
+    gcEvent_ViewToggle_InputFiles,
+    gcEvent_ViewToggle_Partitions,
+
+    //////////////////
+
+    GC_EditCancel,
+    GC_EditDelete,
+    GC_EditOK,
+    //
+    //
+    GC_LinkGMerge,
+    GC_LinkGRemove,
+    GC_LinkGRename,
+    //
+    //
+    GC_TraitAdd,
+    GC_TraitRemove,
+    GC_TraitRename,
+    //
+
+    // injected into event space by GCFrame after dispatching any
+    // S2D event. This event marks the end of a possible series
+    // of other D2S events that cause screen updates to be cached
+    D2S_UserInteractionPhaseEnd,
+
+    // dialog buttons
+    GC_MigParseTakeFile,
+    GC_MigParseTakeUser,
+    GC_ExportCancel,
+    GC_ExportContinue,
+    GC_ExportEdit,
+
+    GC_SelectAll,
+    GC_UnselectAll,
+
+    // divergence button
+    GC_Divergence
+};
+
+#endif  // GC_EVENT_IDS_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_event_publisher.cpp b/src/guiconv/gc_event_publisher.cpp
new file mode 100644
index 0000000..414339a
--- /dev/null
+++ b/src/guiconv/gc_event_publisher.cpp
@@ -0,0 +1,90 @@
+// $Id: gc_event_publisher.cpp,v 1.20 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <set>
+
+#include "gc_event_ids.h"
+#include "gc_event_publisher.h"
+#include "gc_quantum.h"
+#include "wx/clntdata.h"
+#include "wx/event.h"
+
+const wxEventType DATA_2_SCREEN = wxNewEventType();
+const wxEventType SCREEN_2_DATA = wxNewEventType();
+
+void PublishDataEvent(wxEvtHandler* handler,int eventId)
+{
+    wxCommandEvent myEvent(DATA_2_SCREEN,eventId);
+    wxPostEvent(handler,myEvent);
+}
+
+#if 0
+
+void PublishDataEvent(wxEvtHandler* handler,int eventId, const GCQuantum* obj)
+{
+    wxCommandEvent myEvent(DATA_2_SCREEN,eventId);
+    wxClientData * clientP = new GCClientData(obj);
+    myEvent.SetClientObject(clientP);
+    wxPostEvent(handler,myEvent);
+}
+
+void PublishScreenEvent(wxEvtHandler* handler,int eventId)
+{
+    wxCommandEvent myEvent(SCREEN_2_DATA,eventId);
+    wxPostEvent(handler,myEvent);
+}
+
+void PublishScreenEvent(wxEvtHandler* handler,int eventId,int intData)
+{
+    wxCommandEvent myEvent(SCREEN_2_DATA,eventId);
+    myEvent.SetInt(intData);
+    wxPostEvent(handler,myEvent);
+}
+
+void PublishScreenEvent(wxEvtHandler* handler,int eventId,wxString stringData)
+{
+    wxCommandEvent myEvent(SCREEN_2_DATA,eventId);
+    myEvent.SetString(stringData);
+    wxPostEvent(handler,myEvent);
+}
+
+#endif
+
+void PublishScreenEvent(wxEvtHandler* handler, gcEventActor * obj)
+{
+    wxCommandEvent myEvent(SCREEN_2_DATA,gcEvent_Generic);
+    wxClientData * clientP = new GCClientData(obj);
+    myEvent.SetClientObject(clientP);
+    wxPostEvent(handler,myEvent);
+}
+
+#if 0
+
+void PublishScreenEvent(wxEvtHandler* handler,int eventId, const GCQuantum* obj,int intData)
+{
+    wxCommandEvent myEvent(SCREEN_2_DATA,eventId);
+    wxClientData * clientP = new GCClientData(obj);
+    myEvent.SetClientObject(clientP);
+    myEvent.SetInt(intData);
+    wxPostEvent(handler,myEvent);
+}
+
+void PublishScreenEvent(wxEvtHandler* handler,int eventId, const GCQuantum* obj,wxString stringData)
+{
+    wxCommandEvent myEvent(SCREEN_2_DATA,eventId);
+    wxClientData * clientP = new GCClientData(obj);
+    myEvent.SetClientObject(clientP);
+    myEvent.SetString(stringData);
+    wxPostEvent(handler,myEvent);
+}
+
+#endif
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_event_publisher.h b/src/guiconv/gc_event_publisher.h
new file mode 100644
index 0000000..42396c0
--- /dev/null
+++ b/src/guiconv/gc_event_publisher.h
@@ -0,0 +1,74 @@
+// $Id: gc_event_publisher.h,v 1.17 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_EVENT_PUBLISHER_H
+#define GC_EVENT_PUBLISHER_H
+
+#include <map>
+
+#include "wx/event.h"       // for wxEventType
+
+class GCQuantum;
+class gcEventActor;
+
+// defines events signalling a change in values in the
+// datastore. These events are intended to be subscribed
+// to by GUI elements which need to update their appearance
+// when data changes.
+extern const wxEventType DATA_2_SCREEN;
+
+// event table entry capable of dispatching a DATA_2_SCREEN
+// event to the method which handles it
+#define EVT_D2S(ev,fn)                                  \
+    DECLARE_EVENT_TABLE_ENTRY(                          \
+        DATA_2_SCREEN, ev, wxID_ANY,                    \
+        (wxObjectEventFunction)(wxEventFunction)&fn,    \
+        (wxObject *) NULL                               \
+        ),
+
+// defines events signalling a change created from the GUI
+extern const wxEventType SCREEN_2_DATA;
+// event table entry capable of dispatching a SCREEN_2_DATA
+// event to the method which handles it
+#define EVT_S2D(ev,fn)                                  \
+    DECLARE_EVENT_TABLE_ENTRY(                          \
+        SCREEN_2_DATA, ev, wxID_ANY,                    \
+        (wxObjectEventFunction)(wxEventFunction)&fn,    \
+        (wxObject *) NULL                               \
+        ),
+
+void PublishDataEvent(wxEvtHandler*,int eventId);
+
+void PublishScreenEvent(wxEvtHandler*, gcEventActor *);
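+
+// Wiring sketch (illustrative, not upstream code; MyPanel is hypothetical):
+// a GUI class subscribes via its event table and the data layer publishes.
+//
+//   BEGIN_EVENT_TABLE(MyPanel, wxPanel)
+//   EVT_D2S(wxID_ANY, MyPanel::OnDataChanged)
+//   END_EVENT_TABLE()
+//
+//   PublishDataEvent(panelPointer, gcEvent_Pop_Edit);   // id from gc_event_ids.h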
+
+#if 0
+void PublishScreenEvent(wxEvtHandler*,int eventId);
+void PublishScreenEvent(wxEvtHandler*,int eventId,int);
+void PublishScreenEvent(wxEvtHandler*,int eventId,wxString);
+void PublishScreenEvent(wxEvtHandler*,int eventId,const GCQuantum*);
+void PublishScreenEvent(wxEvtHandler*,int eventId,const GCQuantum*,int);
+void PublishScreenEvent(wxEvtHandler*,int eventId,const GCQuantum*,wxString);
+#endif
+
+// we need these so we can build a multimap from event descriptors
+// to the wxEvtHandlers that want to be notified about events of
+// a certain type and id. A complication is that the event
+// id (an integer -- second member of the GCEventDescriptor
+// pair) can legally be wxID_ANY -- meaning I want to know
+// about *all* events of a given type
+typedef std::pair<wxEventType,int> GCEventDescriptor;
+typedef std::pair<GCEventDescriptor, wxEvtHandler *> GCSubscriberPair;
+typedef std::multimap<GCEventDescriptor, wxEvtHandler *> GCSubscriberMap;
+typedef GCSubscriberMap::iterator GCSubscriberMapIter;
+typedef std::pair<GCSubscriberMapIter,GCSubscriberMapIter> GCSubscriberMapIterPair;
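+
+// Lookup sketch (illustrative only; 'subscribers' is hypothetical): a
+// dispatcher might collect the handlers registered for a given descriptor
+// with std::multimap::equal_range and forward the event to each one.
+//
+//   GCSubscriberMapIterPair range =
+//       subscribers.equal_range(GCEventDescriptor(DATA_2_SCREEN, eventId));
+//   for (GCSubscriberMapIter it = range.first; it != range.second; ++it)
+//   {
+//       PublishDataEvent(it->second, eventId);
+//   }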
+
+#endif  // GC_EVENT_PUBLISHER_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_export_dialogs.cpp b/src/guiconv/gc_export_dialogs.cpp
new file mode 100644
index 0000000..3e58c67
--- /dev/null
+++ b/src/guiconv/gc_export_dialogs.cpp
@@ -0,0 +1,91 @@
+// $Id: gc_export_dialogs.cpp,v 1.7 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_event_ids.h"
+#include "gc_export_dialogs.h"
+#include "gc_layout.h"
+#include "gc_strings.h"
+#include "wx/sizer.h"
+#include "wx/stattext.h"
+
+BEGIN_EVENT_TABLE(GCBadLocusLengthDialog, wxDialog)
+EVT_BUTTON( GC_ExportCancel,   GCBadLocusLengthDialog::OnTakeCancel)
+EVT_BUTTON( GC_ExportContinue, GCBadLocusLengthDialog::OnTakeContinue)
+EVT_BUTTON( GC_ExportEdit,     GCBadLocusLengthDialog::OnTakeEdit)
+END_EVENT_TABLE()
+
+GCBadLocusLengthDialog::GCBadLocusLengthDialog(   wxWindow    * parent,
+                                                  wxArrayString nameList)
+:
+wxDialog(parent,-1,gcstr::exportWarning),
+    m_regionNameList(nameList),
+    m_nextStep(badlocus_CANCEL)
+{
+    Init();
+}
+
+void
+GCBadLocusLengthDialog::Init()
+{
+    wxBoxSizer * topSizer      = new wxBoxSizer(wxVERTICAL);
+
+    wxBoxSizer * buttonSizer   = new wxBoxSizer(wxHORIZONTAL);
+
+    wxString regionListString = gcstr::badLocusLength1;
+    for(size_t index = 0; index < m_regionNameList.GetCount(); index++)
+    {
+        regionListString += "\n\t";
+        regionListString += m_regionNameList[index];
+    }
+    regionListString += "\n\n";
+    regionListString += gcstr::badLocusLength2;
+
+    wxStaticText * message = new wxStaticText(this,-1,regionListString);
+
+    wxButton    * continueButton = new wxButton(this,GC_ExportContinue,
+                                                gcstr::continueString.c_str());
+    wxButton    * editButton     = new wxButton(this,GC_ExportEdit,
+                                                gcstr::regionEditString.c_str());
+    wxButton    * cancelButton   = new wxButton(this,GC_ExportCancel,
+                                                gcstr::cancelString.c_str());
+
+    buttonSizer->Add(continueButton,0,wxALL,gclayout::borderSize);
+    buttonSizer->Add(editButton,0,wxALL,gclayout::borderSize);
+    buttonSizer->Add(cancelButton,0,wxALL,gclayout::borderSize);
+
+    topSizer->Add(message,1,wxEXPAND | wxALL,gclayout::borderSize);
+    topSizer->Add(buttonSizer,0,wxALIGN_CENTER);
+
+    SetSizer(topSizer);
+    topSizer->SetSizeHints( this);
+}
+
+void
+GCBadLocusLengthDialog::OnTakeCancel(wxCommandEvent & event)
+{
+    m_nextStep = badlocus_CANCEL;
+    OnOK(event);
+}
+
+void
+GCBadLocusLengthDialog::OnTakeContinue(wxCommandEvent & event)
+{
+    m_nextStep = badlocus_CONTINUE;
+    OnOK(event);
+}
+
+void
+GCBadLocusLengthDialog::OnTakeEdit(wxCommandEvent & event)
+{
+    m_nextStep = badlocus_EDIT;
+    OnOK(event);
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_export_dialogs.h b/src/guiconv/gc_export_dialogs.h
new file mode 100644
index 0000000..52c7bda
--- /dev/null
+++ b/src/guiconv/gc_export_dialogs.h
@@ -0,0 +1,52 @@
+// $Id: gc_export_dialogs.h,v 1.6 2011/03/08 19:22:00 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_EXPORT_DIALOGS_H
+#define GC_EXPORT_DIALOGS_H
+
+#include "wx/dialog.h"
+#include "wx/string.h"
+
+enum GCSuspiciousLocusChoice
+{
+    badlocus_CANCEL,
+    badlocus_CONTINUE,
+    badlocus_EDIT
+};
+
+enum GCSuspiciousRegionChoice
+{
+    badregion_CANCEL,
+    badregion_CONTINUE,
+    badregion_EDIT
+};
+
+class GCBadLocusLengthDialog : public wxDialog
+{
+  private:
+    wxArrayString               m_regionNameList;
+    GCSuspiciousLocusChoice     m_nextStep;
+  public:
+    GCBadLocusLengthDialog(  wxWindow   * parent, wxArrayString nameList);
+    virtual ~GCBadLocusLengthDialog() {};
+    void Init();
+
+    GCSuspiciousLocusChoice GetChoice()  { return m_nextStep;};
+
+    DECLARE_EVENT_TABLE()
+
+    virtual void OnTakeContinue(wxCommandEvent& event);
+    virtual void OnTakeCancel(wxCommandEvent& event);
+    virtual void OnTakeEdit(wxCommandEvent& event);
+};
+
+#endif  // GC_EXPORT_DIALOGS_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_file_dialogs.cpp b/src/guiconv/gc_file_dialogs.cpp
new file mode 100644
index 0000000..b938759
--- /dev/null
+++ b/src/guiconv/gc_file_dialogs.cpp
@@ -0,0 +1,783 @@
+// $Id: gc_file_dialogs.cpp,v 1.49 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "errhandling.h"
+#include "gc_data.h"
+#include "gc_datastore.h"
+#include "gc_errhandling.h"
+#include "gc_event_ids.h"
+#include "gc_file.h"
+#include "gc_file_dialogs.h"
+#include "gc_layout.h"
+#include "gc_loci_match.h"
+#include "gc_phase_err.h"
+#include "gc_pop_match.h"
+#include "gc_strings.h"
+#include "gc_strings_cmdfile.h"
+#include "gc_strings_phase.h"
+#include "gc_text_ctrl.h"
+
+#include "giraffe32.xpm"
+#include "tinyxml.h"
+
+#include "wx/checkbox.h"
+#include "wx/filedlg.h"
+#include "wx/icon.h"
+#include "wx/log.h"
+#include "wx/sizer.h"
+#include "wx/statline.h"
+
+//------------------------------------------------------------------------------------
+
+gcFullPath::gcFullPath(size_t fileId)
+    :
+    m_fileId(fileId)
+{
+}
+
+gcFullPath::~gcFullPath()
+{
+}
+
+wxString
+gcFullPath::FromDataStore(GCDataStore & dataStore)
+{
+    return dataStore.GetDataFile(m_fileId).GetName();
+}
+
+//------------------------------------------------------------------------------------
+
+gcParseInfoNone::gcParseInfoNone()
+{
+}
+
+gcParseInfoNone::~gcParseInfoNone()
+{
+}
+
+wxString
+gcParseInfoNone::FromDataStore(GCDataStore &)
+{
+    return gcstr::parseInfoNone;
+}
+
+//------------------------------------------------------------------------------------
+
+gcParseInfoOne::gcParseInfoOne(size_t fileId)
+    :
+    m_fileId(fileId)
+{
+}
+
+gcParseInfoOne::~gcParseInfoOne()
+{
+}
+
+wxString
+gcParseInfoOne::FromDataStore(GCDataStore & dataStore)
+{
+    const GCFile & fileRef = dataStore.GetDataFile(m_fileId);
+    const GCParse & parse = dataStore.GetParse(fileRef);
+    wxString parseInfo = parse.GetSettings();
+    return parseInfo;
+}
+
+//------------------------------------------------------------------------------------
+
+gcParseChoice::gcParseChoice(size_t parseId)
+    :
+    m_parseId(parseId),
+    m_box(NULL)
+{
+}
+
+gcParseChoice::~gcParseChoice()
+{
+}
+
+#if 0
+
+wxString
+gcParseChoice::GetLabel(GCDataStore & dataStore)
+{
+    const GCParse & parse = dataStore.GetParse(m_parseId);
+    wxString parseInfo = parse.GetSettings();
+    return parseInfo;
+}
+
+bool
+gcParseChoice::GetEnabled(GCDataStore & dataStore)
+{
+    return true;
+}
+
+bool
+gcParseChoice::GetSelected(GCDataStore & dataStore)
+{
+    const GCParse & thisParse = dataStore.GetParse(m_parseId);
+    const GCFile & fileRef = thisParse.GetFileRef();
+
+    if(dataStore.GetStructures().HasParse(fileRef))
+    {
+        const GCParse & assignedParse = dataStore.GetStructures().GetParse(fileRef);
+        return (assignedParse.GetId() == m_parseId);
+    }
+
+    // no parse currently assigned to this file, so this choice is not selected
+    return false;
+}
+
+void
+gcParseChoice::SetSelected(GCDataStore & dataStore, bool selected)
+{
+}
+
+#endif
+
+void
+gcParseChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    assert(m_box != NULL);
+
+    // if it parsed, it's a legal choice
+    m_box->Enable(true);
+
+    // display settings next to check box
+    const GCParse & parse = dataStore.GetParse(m_parseId);
+    wxString parseInfo = parse.GetSettings();
+    m_box->SetLabel(parseInfo);
+
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcParseChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    assert(m_box != NULL);
+
+    const GCParse & thisParse = dataStore.GetParse(m_parseId);
+    const GCFile & fileRef = thisParse.GetFileRef();
+
+    if(dataStore.GetStructures().HasParse(fileRef))
+    {
+        const GCParse & assignedParse = dataStore.GetStructures().GetParse(fileRef);
+        if (assignedParse.GetId() == m_parseId)
+        {
+            m_box->SetValue(1);
+            return;
+        }
+    }
+
+    m_box->SetValue(0);
+}
+
+void
+gcParseChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+    int value = m_box->GetValue();
+    if(value > 0)
+    {
+        const GCParse & thisParse = dataStore.GetParse(m_parseId);
+        dataStore.GetStructures().SetParse(thisParse);
+    }
+}
+
+void
+gcParseChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+wxWindow *
+gcParseChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,"");
+    return m_box;
+}
+
+wxWindow *
+gcParseChoice::FetchWindow()
+{
+    return m_box;
+}
+
+size_t
+gcParseChoice::GetRelevantId()
+{
+    return m_parseId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcFileEditDialog::gcFileEditDialog( wxWindow *      parent,
+                                    GCDataStore &   dataStore,
+                                    size_t          fileId)
+    :
+    gcUpdatingDialog(   parent,
+                        dataStore,
+                        wxString::Format(gcstr::editFileSettings,
+                                         dataStore.GetDataFile(fileId).GetShortName().c_str()),
+                        false),
+    // 'false' is passed because there is no such
+    // thing as a newborn, empty file
+    m_fileId(fileId)
+{
+}
+
+gcFileEditDialog::~gcFileEditDialog()
+{
+}
+
+void
+gcFileEditDialog::DoDelete()
+{
+    m_dataStore.RemoveDataFile(m_dataStore.GetDataFile(m_fileId));
+}
+
+bool
+DoDialogAddFiles(wxWindow * parentWindow, GCDataStore & dataStore)
+{
+    wxFileDialog dataFileDialog(parentWindow,
+                                gcstr::dataFilesSelect,
+                                wxEmptyString,      // default directory == current
+                                wxEmptyString,      // default file = none
+                                gcstr::dataFiles,   // show .phy and .mig files
+                                wxFD_OPEN | wxFD_CHANGE_DIR | wxFD_FILE_MUST_EXIST | wxFD_MULTIPLE);
+
+    dataFileDialog.SetIcon(wxICON(giraffe32));
+    bool producedResultWeShouldKeep = false;
+
+    std::vector<GCFile*> addedFiles;
+
+    if(dataFileDialog.ShowModal() == wxID_OK)
+    {
+        wxArrayString fullPathFileNames;
+        dataFileDialog.GetPaths(fullPathFileNames);
+        fullPathFileNames.Sort();
+        for(size_t fileIndex=0; fileIndex < fullPathFileNames.Count(); fileIndex++)
+        {
+            try
+            {
+                GCFile & fileRef = dataStore.AddDataFile(fullPathFileNames[fileIndex]);
+                producedResultWeShouldKeep = true;
+                addedFiles.push_back(&fileRef);
+            }
+            catch (const gc_ex& e)
+            {
+                dataStore.GCError(e.what());
+            }
+        }
+    }
+
+#if 0
+    std::vector<GCFile*> suspiciousFiles;
+    for(size_t i = 0; i < addedFiles.size(); i++)
+    {
+        GCFile & fileRef = *(addedFiles[i]);
+        bool hasShortName = false;
+        for(size_t j = 0; j < fileRef.GetParseCount(); j++)
+        {
+            const GCParse & parseRef = fileRef.GetParse(j);
+            if(parseRef.GetHasSpacesInNames())
+            {
+                hasShortName = true;
+            }
+        }
+        if(hasShortName)
+        {
+            suspiciousFiles.push_back(&fileRef);
+        }
+    }
+
+    if(!suspiciousFiles.empty())
+    {
+        for(size_t i=0; i < suspiciousFiles.size(); i++)
+        {
+            wxLogDebug("short name file %s",suspiciousFiles[i]->GetName().c_str());
+        }
+    }
+#endif
+
+    return producedResultWeShouldKeep;
+}
+
+//------------------------------------------------------------------------------------
+
+gcHapDefaultChoice::gcHapDefaultChoice(size_t fileId)
+    :
+    m_fileId(fileId)
+{
+}
+
+gcHapDefaultChoice::~gcHapDefaultChoice()
+{
+}
+
+void
+gcHapDefaultChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcHapDefaultChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    m_box->SetLabel(gcstr::hapFileDefault);
+    m_box->SetValue( dataStore.GetStructures().HasHapFileAdjacent(m_fileId) ? 0 : 1 );
+}
+
+void
+gcHapDefaultChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+    if(m_box->GetValue() > 0)
+    {
+        dataStore.GetStructures().UnsetHapFileAdjacent(m_fileId);
+    }
+}
+
+void
+gcHapDefaultChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+wxWindow *
+gcHapDefaultChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,"");
+    return m_box;
+}
+
+wxWindow *
+gcHapDefaultChoice::FetchWindow()
+{
+    return m_box;
+}
+
+size_t
+gcHapDefaultChoice::GetRelevantId()
+{
+    assert(false);
+    return 0;
+}
+
+//------------------------------------------------------------------------------------
+
+gcHapAdjacentChoice::gcHapAdjacentChoice(size_t fileId)
+    :
+    m_fileId(fileId)
+{
+}
+
+gcHapAdjacentChoice::~gcHapAdjacentChoice()
+{
+}
+
+void
+gcHapAdjacentChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcHapAdjacentChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    const GCStructures & structures = dataStore.GetStructures();
+
+    if(dataStore.FileInducesHaps(m_fileId))
+    {
+        m_box->SetValue(0);
+        m_panel->Enable(false);
+    }
+
+    if(structures.HasHapFileAdjacent(m_fileId))
+    {
+        m_box->SetValue(1);
+        m_text->SetValue(wxString::Format("%ld",(long)structures.GetHapFileAdjacent(m_fileId)));
+    }
+    else
+    {
+        m_box->SetValue(0);
+    }
+}
+
+void
+gcHapAdjacentChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+    if(m_box->GetValue() > 0)
+    {
+        wxString numHapsString = m_text->GetValue();
+        long numHaps;
+        if(numHapsString.ToLong(&numHaps) && (numHaps > 1))
+        {
+            dataStore.GetStructures().SetHapFileAdjacent(m_fileId,(size_t)numHaps);
+        }
+        else
+        {
+            throw gc_bad_ind_match_adjacency_value(numHapsString);
+        }
+    }
+}
+
+void
+gcHapAdjacentChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+wxWindow *
+gcHapAdjacentChoice::MakeWindow(wxWindow * parent)
+{
+    m_panel = new wxPanel(parent,-1);
+    wxBoxSizer * sizer = new wxBoxSizer(wxHORIZONTAL);
+    m_box = new wxCheckBox(m_panel,-1,gcstr_phase::adjacentHaps1);
+    m_text = new GCNonNegativeIntegerInput(m_panel);
+    m_text->SetValue(wxString::Format("%ld",(long)(gcdata::defaultHapCount)));
+    wxStaticText * statText = new wxStaticText(m_panel,-1,gcstr_phase::adjacentHaps2);
+    sizer->Add(m_box);
+    sizer->Add(m_text);
+    sizer->Add(statText);
+    m_panel->SetSizerAndFit(sizer);
+    return m_panel;
+}
+
+wxWindow *
+gcHapAdjacentChoice::FetchWindow()
+{
+    return m_box;
+}
+
+size_t
+gcHapAdjacentChoice::GetRelevantId()
+{
+    assert(false);
+    return 0;
+}
+
+//------------------------------------------------------------------------------------
+
+#if 0
+
+gcHapResolverChoice::gcHapResolverChoice(size_t fileId, size_t hapResolverId)
+    :
+    m_fileId(fileId),
+    m_hapResolverId(hapResolverId)
+{
+}
+
+gcHapResolverChoice::~gcHapResolverChoice()
+{
+}
+
+void
+gcHapResolverChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+
+    const gcHapResolver & hapResolver = dataStore.GetHapResolver(m_hapResolverId);
+    m_box->SetLabel(hapResolver.DisplayString());
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcHapResolverChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    const GCStructures & structures = dataStore.GetStructures();
+
+    int checkValue = 0;
+    if(structures.HasHapFile(m_fileId))
+    {
+        if(structures.GetHapFileId(m_fileId) == m_hapResolverId)
+        {
+            checkValue = 1;
+        }
+    }
+
+    m_box->SetValue(checkValue);
+}
+
+void
+gcHapResolverChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+    if(m_box->GetValue() > 0)
+    {
+        dataStore.GetStructures().SetHapFile(m_fileId,m_hapResolverId);
+    }
+}
+
+void
+gcHapResolverChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+wxWindow *
+gcHapResolverChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,"");
+    return m_box;
+
+}
+
+wxWindow *
+gcHapResolverChoice::FetchWindow()
+{
+    return m_box;
+}
+
+size_t
+gcHapResolverChoice::GetRelevantId()
+{
+    assert(false);
+    return 0;
+}
+
+#endif
+
+//------------------------------------------------------------------------------------
+
+gcHapChoices::gcHapChoices( wxWindow *                      parent,
+                            GCDataStore &                   dataStore,
+                            size_t                          fileId,
+                            std::vector<gcChoiceObject*>    choices)
+    :
+    gcUpdatingChoose(parent,gcstr::chooseHapResolution,choices),
+    m_fileId(fileId)
+{
+}
+
+gcHapChoices::~gcHapChoices()
+{
+}
+
+void
+gcHapChoices::BuildDisplay(GCDataStore & dataStore)
+{
+    gcUpdatingChoose::BuildDisplay(dataStore);
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+DoDialogEditFile(wxWindow * parentWindow, GCDataStore & dataStore, size_t fileId)
+{
+    gcFileEditDialog dialog(parentWindow,dataStore,fileId);
+
+    // build the dialog
+    gcDialogCreator creator;
+    wxBoxSizer * contentSizer = new wxBoxSizer(wxVERTICAL);
+
+    // always give the full path name
+    gcPlainTextHelper * fullPathHelper = new gcFullPath(fileId);
+    gcUpdatingComponent * fullPath = new gcUpdatingPlainText(
+        &dialog,
+        gcstr::fullPath,
+        fullPathHelper);
+    contentSizer->Add(fullPath,
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND ,
+                      gclayout::borderSizeSmall);
+    creator.AddComponent(dialog,fullPath);
+
+    // always give the parse info, but what kind depends on whether there
+    // were any choices
+    gcUpdatingComponent * parseInfo = NULL;
+    const GCFile & fileRef = dataStore.GetDataFile(fileId);
+    size_t numParses = fileRef.GetParseCount();
+
+    if(numParses == 0)
+    {
+        gcPlainTextHelper * parseNone = new gcParseInfoNone();
+        parseInfo = new gcUpdatingPlainText(&dialog,
+                                            gcstr::parseInfo,
+                                            parseNone);
+    }
+
+    if(numParses == 1)
+    {
+        gcPlainTextHelper * parseOne = new gcParseInfoOne(fileId);
+        parseInfo = new gcUpdatingPlainText(&dialog,
+                                            gcstr::parseInfo,
+                                            parseOne);
+    }
+
+    if(numParses > 1)
+    {
+        std::vector<gcChoiceObject*> parseChoices;
+        for(size_t index=0; index < numParses; index++)
+        {
+            const GCParse & parseRef = fileRef.GetParse(index);
+            gcParseChoice * choice = new gcParseChoice(parseRef.GetId());
+            parseChoices.push_back(choice);
+        }
+
+        parseInfo = new gcUpdatingChoose(&dialog,
+                                         gcstr::chooseOneParse,
+                                         parseChoices);
+    }
+
+    assert(parseInfo != NULL);
+    contentSizer->Add(parseInfo,
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND ,
+                      gclayout::borderSizeSmall);
+    creator.AddComponent(dialog,parseInfo);
+
+    ////////////////////////////////////////////
+    std::vector<gcChoiceObject*> hapChoiceVec;
+    hapChoiceVec.push_back(new gcHapDefaultChoice(fileId));
+    hapChoiceVec.push_back(new gcHapAdjacentChoice(fileId));
+
+    gcHapChoices * hapFileInfo = new gcHapChoices(&dialog,dataStore,fileId,hapChoiceVec);
+    contentSizer->Add(hapFileInfo,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND ,
+                      gclayout::borderSizeSmall);
+    creator.AddComponent(dialog,hapFileInfo);
+
+    ////////////////////////////////////////////
+
+    creator.PlaceContent(dialog,contentSizer);
+
+    return dialog.Go();
+}
+
+bool
+DoDialogExportFile(wxWindow * parentWindow, GCDataStore & dataStore)
+{
+    try
+    {
+        TiXmlDocument * docPointer = dataStore.ExportFile();
+        wxString titleString = gcstr::dataFileExport;
+#ifdef LAMARC_COMPILE_MACOSX
+        titleString += gcstr::saveFileInstructionsForMac;
+#endif
+        wxFileDialog dataFileDialog(    parentWindow,
+                                        titleString,
+                                        wxEmptyString,      // current dir
+                                        dataStore.GetOutfileName(),
+                                        gcstr::exportFileGlob,
+                                        wxFD_SAVE | wxFD_OVERWRITE_PROMPT);
+        // don't wxFD_CHANGE_DIR -- it should be contextual
+        // based on operation, but since it's not, let's
+        // not change on export
+        if(dataFileDialog.ShowModal() == wxID_OK)
+        {
+            wxString fullPathFileName = dataFileDialog.GetPath();
+            dataStore.SetOutfileName(fullPathFileName);
+            dataStore.WriteExportedData(docPointer);
+            wxLogVerbose(gcverbose::exportSuccess,fullPathFileName.c_str());
+            return true;
+        }
+    }
+    catch(const gc_abandon_export& x)
+    {
+        // do nothing; the exception merely interrupts the export
+    }
+    catch(const gc_ex& g)
+    {
+        dataStore.GCError(g.what());
+    }
+    return false;
+}
+
+bool
+DoDialogExportBatchFile(wxWindow * parentWindow, GCDataStore & dataStore)
+{
+    try
+    {
+        TiXmlDocument * docPointer = dataStore.ExportBatch();
+        wxString titleString = gcstr::dataFileBatchExport;
+#ifdef LAMARC_COMPILE_MACOSX
+        titleString += gcstr::saveFileInstructionsForMac;
+#endif
+        wxFileDialog batchFileDialog(   parentWindow,
+                                        titleString,
+                                        wxEmptyString,      // current dir
+                                        gcstr::batchFileDefault,
+                                        gcstr::exportFileGlob,
+                                        wxFD_SAVE | wxFD_OVERWRITE_PROMPT);
+        // don't wxFD_CHANGE_DIR -- it should be contextual
+        // based on operation, but since it's not, let's
+        // not change on export
+        if(batchFileDialog.ShowModal() == wxID_OK)
+        {
+            wxString fullPathFileName = batchFileDialog.GetPath();
+            dataStore.WriteBatchFile(docPointer,fullPathFileName);
+            delete docPointer;
+            return true;
+        }
+    }
+    catch(const gc_ex& g)
+    {
+        dataStore.GCError(g.what());
+    }
+    return false;
+}
+
+bool
+DoDialogReadCmdFile(wxWindow * parentWindow, GCDataStore & dataStore)
+{
+    wxFileDialog cmdFileDialog(parentWindow,
+                               gcstr_cmdfile::cmdFilesSelect,
+                               wxEmptyString,      // default directory == current
+                               wxEmptyString,      // default file = none
+                               gcstr::xmlFiles,    // files to display
+                               wxFD_OPEN | wxFD_CHANGE_DIR | wxFD_FILE_MUST_EXIST );
+    // EWFIX.P3.BUG.524 -- add wxFD_MULTIPLE to add multiple files
+    // and use GetPaths instead of GetPath
+    bool producedResultWeShouldKeep = false;
+
+    if(cmdFileDialog.ShowModal() == wxID_OK)
+    {
+#if 0   // EWFIX.P3.BUG.524
+        wxArrayString fullPathFileNames;
+        cmdFileDialog.GetPaths(fullPathFileNames);
+        fullPathFileNames.Sort();
+        for(size_t fileIndex=0; fileIndex < fullPathFileNames.Count(); fileIndex++)
+        {
+            wxString cmdFileName = fullPathFileNames[fileIndex];
+#endif
+            wxString cmdFileName = cmdFileDialog.GetPath();
+            try
+            {
+                dataStore.ProcessCmdFile(cmdFileName);
+                producedResultWeShouldKeep = true;
+            }
+            catch(const gc_ex& e)
+            {
+                // EWFIX.P3 -- use file name
+                wxString msg = wxString::Format(gcerr_cmdfile::badCmdFile,
+                                                cmdFileName.c_str(),
+                                                e.what());
+                dataStore.GCError(msg);
+            }
+            catch(const data_error& e)
+            {
+                // EWFIX.P3 -- use file name
+                wxString msg = wxString::Format(gcerr_cmdfile::badCmdFile,
+                                                cmdFileName.c_str(),
+                                                e.what());
+                dataStore.GCError(msg);
+            }
+#if 0   // EWFIX.P3.BUG.524
+        }
+#endif
+    }
+    return producedResultWeShouldKeep;
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_File_Edit::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    return DoDialogEditFile(parent,dataStore,m_fileId);
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_file_dialogs.h b/src/guiconv/gc_file_dialogs.h
new file mode 100644
index 0000000..bd18b27
--- /dev/null
+++ b/src/guiconv/gc_file_dialogs.h
@@ -0,0 +1,181 @@
+// $Id: gc_file_dialogs.h,v 1.25 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_FILE_DIALOGS_H
+#define GC_FILE_DIALOGS_H
+
+#include "gc_quantum.h"
+#include "gc_dialog.h"
+
+class wxWindow;
+class GCFile;
+class GCDataStore;
+
+class gcFullPath : public gcPlainTextHelper
+{
+  private:
+    gcFullPath();   // undefined
+
+    size_t          m_fileId;
+  protected:
+  public:
+    gcFullPath(size_t fileId);
+    virtual ~gcFullPath();
+    wxString    FromDataStore(GCDataStore &);
+};
+
+class gcParseInfoNone : public gcPlainTextHelper
+{
+  private:
+  protected:
+  public:
+    gcParseInfoNone();
+    virtual ~gcParseInfoNone();
+
+    wxString    FromDataStore(GCDataStore &);
+};
+
+class gcParseInfoOne : public gcPlainTextHelper
+{
+  private:
+    gcParseInfoOne();   // undefined
+    size_t              m_fileId;
+  protected:
+  public:
+    gcParseInfoOne(size_t fileId);
+    virtual ~gcParseInfoOne();
+
+    wxString    FromDataStore(GCDataStore &);
+};
+
+class gcParseChoice : public gcChoiceObject
+{
+  private:
+    gcParseChoice();        // undefined
+    size_t                  m_parseId;
+    wxCheckBox *            m_box;
+  protected:
+  public:
+    gcParseChoice(size_t parseId);
+    virtual ~gcParseChoice();
+
+#if 0
+    wxString    GetLabel    (GCDataStore &);
+    bool        GetEnabled  (GCDataStore &);
+    bool        GetSelected (GCDataStore &);
+    void        SetSelected (GCDataStore &, bool selected);
+#endif
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent);
+    wxWindow *  FetchWindow();
+
+    size_t      GetRelevantId();
+
+};
+
+class gcHapDefaultChoice : public gcChoiceObject
+{
+  private:
+    size_t                      m_fileId;
+    gcHapDefaultChoice();       // undefined
+  protected:
+    wxCheckBox *        m_box;
+  public:
+    gcHapDefaultChoice(size_t fileId);
+    virtual ~gcHapDefaultChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+
+    size_t      GetRelevantId()                         ;
+};
+
+class gcHapAdjacentChoice : public gcChoiceObject
+{
+  private:
+    size_t                      m_fileId;
+    gcHapAdjacentChoice();      // undefined
+  protected:
+    wxPanel *           m_panel;
+    wxCheckBox *        m_box;
+    wxTextCtrl *        m_text;
+  public:
+    gcHapAdjacentChoice(size_t fileId);
+    virtual ~gcHapAdjacentChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+
+    size_t      GetRelevantId()                         ;
+};
+
+class gcHapChoices : public gcUpdatingChoose
+{
+  private:
+    size_t              m_fileId;
+  protected:
+  public:
+    gcHapChoices(   wxWindow *                      parent,
+                    GCDataStore &                   dataStore,
+                    size_t                          fileId,
+                    std::vector<gcChoiceObject*>    choices);
+    virtual ~gcHapChoices();
+
+    void    BuildDisplay(GCDataStore &);
+};
+
+class gcFileEditDialog : public gcUpdatingDialog
+{
+  private:
+  protected:
+    size_t      m_fileId;
+    void DoDelete();
+  public:
+    gcFileEditDialog(   wxWindow *      parentWindow,
+                        GCDataStore &   dataStore,
+                        size_t          fileId);
+    virtual ~gcFileEditDialog();
+};
+
+bool DoDialogAddFiles(wxWindow * parent, GCDataStore & dataStore);
+bool DoDialogEditFile(wxWindow * parent, GCDataStore & dataStore, size_t fileId);
+bool DoDialogExportFile(wxWindow * parent, GCDataStore & dataStore);
+bool DoDialogExportBatchFile(wxWindow * parent, GCDataStore & dataStore);
+bool DoDialogReadCmdFile(wxWindow * parent, GCDataStore & dataStore);
+
+class gcActor_File_Edit : public gcEventActor
+{
+  private:
+    gcActor_File_Edit();    // undefined
+    size_t                  m_fileId;
+  public:
+    gcActor_File_Edit(size_t fileId) : m_fileId(fileId) {};
+    virtual ~gcActor_File_Edit() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+#endif  // GC_FILE_DIALOGS_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_file_list.cpp b/src/guiconv/gc_file_list.cpp
new file mode 100644
index 0000000..7bd797e
--- /dev/null
+++ b/src/guiconv/gc_file_list.cpp
@@ -0,0 +1,135 @@
+// $Id: gc_file_list.cpp,v 1.23 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_event_publisher.h"
+#include "gc_file_dialogs.h"
+#include "gc_file_list.h"
+#include "gc_logic.h"
+#include "gc_strings.h"
+
+#include "wx/bitmap.h"
+#include "wx/gdicmn.h"
+#include "wx/image.h"
+#include "wx/log.h"
+#include "wx/statbmp.h"
+
+#include "empty16.xpm"
+#include "excl16.xpm"
+
+//------------------------------------------------------------------------------------
+
+const wxBitmap &
+GCExclaimBitmap::emptyBitmap()
+{
+    //static wxBitmap emptyBitmap(wxBITMAP(empty16));
+    static wxBitmap emptyBitmap = wxBITMAP(empty16);
+    return emptyBitmap;
+}
+
+const wxBitmap &
+GCExclaimBitmap::exclBitmap()
+{
+    static wxBitmap exclBitmap = wxBITMAP(excl16);
+    return exclBitmap;
+}
+
+GCExclaimBitmap::GCExclaimBitmap(wxPanel * panel, bool hasExclaim)
+    :   wxStaticBitmap(panel,-1,emptyBitmap(),wxDefaultPosition)
+{
+    if(hasExclaim)
+    {
+        SetBitmap(exclBitmap());
+    }
+}
+
+GCExclaimBitmap::~GCExclaimBitmap()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gcFilePane::gcFilePane(wxWindow * parent)
+    :
+    gcGridPane(parent,4,0)
+{
+}
+
+gcFilePane::~gcFilePane()
+{
+}
+
+void
+gcFilePane::NotifyLeftDClick(size_t row, size_t col)
+{
+    assert(row < m_objVec.size());
+    size_t fileId = m_objVec[row];
+    gcEventActor * fileEditActor = new gcActor_File_Edit(fileId);
+    PublishScreenEvent(GetEventHandler(),fileEditActor);
+
+}
+
+//------------------------------------------------------------------------------------
+
+GCFileList::GCFileList( wxWindow * parent, GCLogic & logic)
+    :
+    gcInfoPane(parent, logic, gcstr::dataFilesTitle)
+{
+}
+
+GCFileList::~GCFileList()
+{
+}
+
+wxPanel *
+GCFileList::MakeContent()
+{
+    gcGridPane * pane = new gcFilePane(m_scrolled);
+    pane->SetBackgroundColour(wxTheColourDatabase->Find("WHITE"));
+
+    const dataFileSet & files = m_logic.GetDataFiles();
+    int line = 0;
+    int xval = 10;
+    for(dataFileSet::iterator iter= files.begin(); iter != files.end(); iter++)
+    {
+        const GCFile & fileRef = *(*iter);
+        line++;
+        xval += 10;
+
+        wxArrayString labels;
+        labels.Add(fileRef.GetShortName());
+
+        if(m_logic.HasParse(fileRef))
+        {
+            const GCParse & parseRef = m_logic.GetParse(fileRef);
+            labels.Add(parseRef.GetFormatString());
+            labels.Add(parseRef.GetDataTypeString());
+            labels.Add(parseRef.GetInterleavingString());
+        }
+        else
+        {
+            labels.Add(fileRef.GetFormatString());
+            labels.Add(fileRef.GetDataTypeString());
+            labels.Add(fileRef.GetInterleavingString());
+        }
+        pane->AddRow(fileRef.GetId(),labels);
+    }
+    pane->Finish();
+    return pane;
+}
+
+wxString
+GCFileList::MakeLabel()
+{
+    return wxString::Format(m_panelLabelFmt,m_logic.GetDataFileCount());
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_file_list.h b/src/guiconv/gc_file_list.h
new file mode 100644
index 0000000..a95e0ef
--- /dev/null
+++ b/src/guiconv/gc_file_list.h
@@ -0,0 +1,66 @@
+// $Id: gc_file_list.h,v 1.13 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_FILELIST_H
+#define GC_FILELIST_H
+
+#include "gc_gridpanel.h"
+#include "wx/button.h"
+#include "wx/checkbox.h"
+#include "wx/choice.h"
+#include "wx/radiobox.h"
+
+class GCFile;
+class GCParse;
+class wxBitmap;
+class wxWindow;
+
+class GCExclaimBitmap : public wxStaticBitmap
+{
+  private:
+    GCExclaimBitmap();    // undefined
+  protected:
+  public:
+    static const wxBitmap & emptyBitmap();
+    static const wxBitmap & exclBitmap();
+
+    GCExclaimBitmap(wxPanel * panel, bool hasExclaim);
+    virtual ~GCExclaimBitmap();
+};
+
+class gcFilePane : public gcGridPane
+{
+  private:
+    gcFilePane();           // undefined
+  protected:
+  public:
+    gcFilePane(wxWindow * parent);
+    virtual ~gcFilePane();
+    virtual void NotifyLeftDClick(size_t row, size_t col);
+
+};
+
+class GCFileList : public gcInfoPane
+{
+  private:
+    GCFileList();        // undefined
+
+  protected:
+    wxPanel * MakeContent();
+    wxString  MakeLabel();
+
+  public:
+    GCFileList(wxWindow * parent, GCLogic & logic);
+    virtual ~GCFileList();
+};
+
+#endif  // GC_FILELIST_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_frame.cpp b/src/guiconv/gc_frame.cpp
new file mode 100644
index 0000000..0621032
--- /dev/null
+++ b/src/guiconv/gc_frame.cpp
@@ -0,0 +1,354 @@
+// $Id: gc_frame.cpp,v 1.109 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+//#include <iostream>
+#include <stdio.h>
+
+#include "errhandling.h"
+#include "gc_assigntab.h"
+#include "gc_data.h"
+//#include "gc_divtab.h"
+#include "gc_errhandling.h"
+#include "gc_event_ids.h"
+#include "gc_event_publisher.h"
+#include "gc_file_list.h"
+#include "gc_frame.h"
+#include "gc_layout.h"
+#include "gc_file_dialogs.h"
+#include "gc_loci_match.h"
+#include "gc_locus_dialogs.h"
+#include "gc_logic.h"
+#include "gc_menu_actors.h"
+#include "gc_migtab.h"
+#include "gc_pop_match.h"
+#include "gc_population_dialogs.h"
+#include "gc_region_dialogs.h"
+#include "gc_strings.h"
+
+#include "wx/artprov.h"
+#include "wx/imaglist.h"
+#include "wx/log.h"
+#include "wx/sizer.h"
+#include "wx/splitter.h"
+#include "wx/textctrl.h"
+
+//using namespace std;
+
+//------------------------------------------------------------------------------------
+
+// events the bottom level construct should respond to
+BEGIN_EVENT_TABLE(GCFrame,wxFrame)
+EVT_MENU        (wxID_ANY,              GCFrame::DispatchMenuEvent)
+EVT_D2S         (wxID_ANY,              GCFrame::DispatchDataEvent)
+EVT_S2D         (wxID_ANY,              GCFrame::DispatchScreenEvent)
+EVT_NOTEBOOK_PAGE_CHANGED(wxID_ANY,     GCFrame::OnNotebookCtrl)
+EVT_NOTEBOOK_PAGE_CHANGING(wxID_ANY,    GCFrame::OnNotebookCtrl)
+END_EVENT_TABLE()
+
+void
+GCFrame::EnableMenus()
+{
+    ///////////////////
+    m_fileMenu->Enable( gcEvent_File_Export, true);     // EWFIX.P3 -- disable when not exportable
+    m_fileMenu->Enable( gcEvent_Batch_Export, true);    // EWFIX.P3 -- disable when not exportable
+
+    m_verbose->Check(wxLog::GetVerbose());
+}
+
+void
+GCFrame::SetUpMenus()
+{
+    // Setting up menu items within File Menu
+    m_fileMenu = new wxMenu;
+    m_fileMenu->Append( gcEvent_File_Add,       "Read &Data File\tCTRL-O");
+    m_fileMenu->Append( gcEvent_CmdFile_Read,   "Read &Command File\tCTRL+SHIFT-O");
+    m_fileMenu->AppendSeparator();              //////////////////////////
+    m_fileMenu->Append( gcEvent_File_Export,    "Write &Lamarc File\tCTRL-L");
+    m_fileMenu->Append( gcEvent_Batch_Export,   "Write &Batch Command File (alpha test -- use with caution)\tCTRL-B");
+    m_fileMenu->AppendSeparator();              //////////////////////////
+    m_fileMenu->Append( wxID_EXIT,              "&Quit" );
+
+    m_insertMenu    = new wxMenu;
+    m_insertMenu->Append( gcEvent_LinkG_Add,    "New &Region");
+    m_insertMenu->Append( gcEvent_Locus_Add,    "New &Segment");
+    m_insertMenu->Append( gcEvent_Pop_Add,      "New &Population");
+
+    m_viewMenu      = new wxMenu;
+    m_verbose = m_viewMenu->AppendCheckItem(    gcEvent_ToggleVerbose, "&Log Verbosely");
+    m_verbose->Check(wxLog::GetVerbose());
+
+    // Setting up menu items within Help menu
+    wxMenu * GCHelpMenu = new wxMenu;
+    GCHelpMenu->Append( wxID_ABOUT,         "&About..." );
+
+    // Setting up menu items within Debug menu
+    wxMenu * GCDebugMenu = new wxMenu;
+    GCDebugMenu->Append(gcEvent_Debug_Dump, "&Dump" );
+
+    // Placing top-level items on Menu Bar
+    wxMenuBar *GCMenuBar = new wxMenuBar;
+    GCMenuBar->Append( m_fileMenu,      "&File" );
+    GCMenuBar->Append( m_insertMenu,    "&Insert" );
+    GCMenuBar->Append( m_viewMenu,      "&View" );
+    GCMenuBar->Append( GCHelpMenu,      "&Help" );
+#ifndef NDEBUG
+    GCMenuBar->Append( GCDebugMenu,     "&Debug" );
+#endif
+
+    EnableMenus();
+
+    // Installing Menu Bar
+    SetMenuBar( GCMenuBar );
+}
+
+GCFrame::GCFrame(   const wxString& title, GCLogic & logic)
+    :   wxFrame(    NULL,
+                    -1,
+                    title,
+                    wxDefaultPosition,
+                    wxDefaultSize,
+                    wxTAB_TRAVERSAL | wxDEFAULT_FRAME_STYLE
+        ),
+        m_logic(logic),
+        m_logPanel(NULL),
+        m_logText(NULL),
+        m_filePanel(NULL),
+        m_gridPanel(NULL),
+        m_migPanel(NULL),
+        m_fileMenu(NULL),
+        m_insertMenu(NULL),
+        m_viewMenu(NULL)
+{
+    m_sizerFrame = new wxBoxSizer(wxVERTICAL);
+
+    m_basePanel = new wxPanel(this);
+
+    m_bookCtrl = new wxNotebook(m_basePanel, wxID_ANY, wxDefaultPosition, wxDefaultSize, wxBK_TOP);
+
+    // UGH! The log text must have the wxNotebook as its parent, BUT you must
+    // create this wxTextCtrl object and set up the logging BEFORE you add anything
+    // to the wxNotebook !!!
+    m_logText   = new wxTextCtrl(m_bookCtrl,wxID_ANY,wxEmptyString,
+                                 wxDefaultPosition,wxDefaultSize,
+                                 wxTE_MULTILINE | wxTE_READONLY);
+    m_logText->SetEditable(false);
+    m_oldLog = wxLog::SetActiveTarget(new wxLogTextCtrl(m_logText));
+
+    // the order of these AddPage calls is the order of the tabs
+    m_gridPanel = new GCAssignmentTab(m_bookCtrl,m_logic);
+
+    m_bookCtrl->AddPage( m_gridPanel, DATA_PARTITIONS, true );
+    //wxLogMessage("create panel tab");  // JMDBG
+
+
+    m_migPanel = new gcMigTab(m_bookCtrl,m_logic);
+    m_bookCtrl->AddPage( m_migPanel, MIGRATION_MATRIX, false );
+    //cout << "create migration tab" << endl;
+
+    m_filePanel = new GCFileList(m_bookCtrl,m_logic);
+    m_bookCtrl->AddPage( m_filePanel, DATA_FILES, false );
+    //cout << "create file list tab" << endl;
+
+    // log at bottom of every tab panel - better for debugging
+    //m_sizerFrame->Add(m_logText, 0, wxEXPAND );
+
+    // log in its own tab - safer for users
+    m_bookCtrl->AddPage( m_logText, LOG_TEXT, false );
+    //cout << "create log tab" << endl;
+
+    m_sizerFrame->Insert(0, m_bookCtrl, wxSizerFlags(5).Expand().Border());
+    m_sizerFrame->Show(m_bookCtrl);
+    m_sizerFrame->Layout();
+    m_sizerFrame->SetSizeHints(this);
+
+    m_basePanel->SetSizer(m_sizerFrame);
+    m_sizerFrame->Fit(this);
+    Centre(wxBOTH);
+
+    SetUpMenus();
+    UpdateUserCues();
+}
+
+GCFrame::~GCFrame()
+{
+    delete wxLog::SetActiveTarget(m_oldLog);
+}
+
+void
+GCFrame::UpdateUserCues()
+{
+    m_filePanel->UpdateUserCues();
+    m_gridPanel->UpdateUserCues();
+    m_migPanel->UpdateUserCues();
+
+    EnableMenus();
+    Layout();
+}
+
+void
+GCFrame::DispatchMenuEvent( wxCommandEvent& event)
+// wxWidgets can be a little temperamental about performing
+// time-consuming actions while in the middle of dispatching
+// a menu or control event. To overcome this, issuing a menu
+// command dispatches a home-grown event. We will get these
+// events after the menu display portion is done.
+{
+    gcEventActor * actor = NULL;
+    switch(event.GetId())
+        // cases handled outside the default branch change the frame
+        // (this object) but not the datastore
+    {
+        case wxID_EXIT:
+            Close(TRUE);
+            break;
+        default:
+            actor = MakeMenuActor(event.GetId());
+            PublishScreenEvent(GetEventHandler(),actor);
+    }
+    UpdateUserCues();
+}
+
+void
+GCFrame::DispatchDataEvent( wxCommandEvent& event)
+{
+    switch(event.GetId())
+    {
+        case D2S_UserInteractionPhaseEnd:
+            // EWFIX.P3 UNDO -- insert undo/redo phase here, not inside UpdateUserCues
+            UpdateUserCues();
+            break;
+        default:
+            m_logic.GCWarning("unimplemented data event");
+            assert(false);
+            break;
+    }
+}
+
+void
+GCFrame::DispatchScreenEvent(wxCommandEvent& event)
+{
+    try
+    {
+        wxClientData * cd = event.GetClientObject();
+        GCClientData * gc = dynamic_cast<GCClientData*>(cd);
+        assert(gc != NULL);
+
+        GCLogic logicCopy(m_logic);
+        gcEventActor * actor = gc->GetActor();
+
+        bool takeResult = false;
+        try
+        {
+            takeResult = actor->OperateOn(this,m_logic);
+        }
+        catch(const gc_data_error& e)
+        {
+            // EWFIX.P4 -- add wrapper
+            m_logic.GCWarning(e.what());
+        }
+        catch(const incorrect_xml& g)
+        {
+            // EWFIX.P4 -- add wrapper
+            m_logic.GCWarning(g.what());
+        }
+        catch(const gc_implementation_error& f)
+        {
+            // EWFIX.P4 -- add wrapper
+            m_logic.GCWarning(f.what());
+        }
+
+        if(!takeResult)
+        {
+            m_logic = logicCopy;
+        }
+
+        delete gc;  // EWFIX.P4 -- hate that we have to do this
+    }
+    catch (const gc_data_error& e)
+    {
+        m_logic.GCFatal(wxString("uncaught data error: ")+e.what());
+    }
+    catch (const gc_implementation_error& e)
+    {
+        m_logic.GCFatalUnlessDebug(wxString("implementation error: ")+e.what());
+    }
+    catch (const gc_ex& e)
+    {
+        m_logic.GCFatal(wxString("gc exception: ")+e.what());
+    }
+    catch (const std::exception& e)
+    {
+        m_logic.GCFatal(wxString("unexpected exception: ")+e.what());
+    }
+    // each of the above method calls might result in the publishing
+    // of one or more DATA_2_SCREEN events. We now publish this event
+    // to indicate that we've finished publishing all the events
+    // for a single user action
+    PublishDataEvent(GetEventHandler(),D2S_UserInteractionPhaseEnd);
+}
+
+void
+GCFrame::OnNotebookCtrl(wxNotebookEvent& event)
+{
+    static const struct EventInfo
+    {
+        wxEventType typeChanged, typeChanging;
+        const wxChar *name;
+    } events[] =
+          {
+              {
+                  wxEVT_COMMAND_NOTEBOOK_PAGE_CHANGED,
+                  wxEVT_COMMAND_NOTEBOOK_PAGE_CHANGING,
+                  _T("wxNotebook")
+              },
+          };
+
+    wxString nameEvent, nameControl, veto;
+    const wxEventType eventType = event.GetEventType();
+    for ( size_t n = 0; n < WXSIZEOF(events); n++ )
+    {
+        const EventInfo& ei = events[n];
+        if ( eventType == ei.typeChanged )
+        {
+            nameEvent = wxT("Changed");
+        }
+        else if ( eventType == ei.typeChanging )
+        {
+            const int idx = event.GetOldSelection();
+
+            nameEvent = wxT("Changing");
+        }
+        else // not a matching event type; skip the rest of the loop
+        {
+            continue;
+        }
+
+        nameControl = ei.name;
+        break;
+    }
+
+    static int s_num = 0;
+
+#if 0
+    wxLogVerbose(wxT("Event #%d: %s: %s (%d) new sel %d, old %d%s"),
+                 ++s_num,
+                 nameControl.c_str(),
+                 nameEvent.c_str(),
+                 eventType,
+                 event.GetSelection(),
+                 event.GetOldSelection(),
+                 veto.c_str());
+#endif
+
+    m_logText->SetInsertionPointEnd();
+
+}
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_frame.h b/src/guiconv/gc_frame.h
new file mode 100644
index 0000000..12b2c2a
--- /dev/null
+++ b/src/guiconv/gc_frame.h
@@ -0,0 +1,77 @@
+// $Id: gc_frame.h,v 1.56 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_FRAME_H
+#define GC_FRAME_H
+
+#include "wx/wx.h"
+#include "wx/notebook.h"
+
+class wxSplitterWindow;
+class GCLogic;
+class gcInfoPane ;
+class GCAssignmentTab ;
+class gcMigTab;
+
+class GCFrame : public wxFrame
+// outer-most frame in the converter application
+{
+  private:
+    GCLogic         &   m_logic;    // interface with back end storage
+
+    wxPanel             * m_logPanel;
+    wxTextCtrl          * m_logText;
+    wxLog               * m_oldLog;
+    wxPanel             * m_basePanel;
+    wxBookCtrlBase      * m_bookCtrl;
+    wxBoxSizer          * m_sizerFrame;
+
+    gcInfoPane          * m_filePanel;
+    GCAssignmentTab     * m_gridPanel;
+    gcMigTab            * m_migPanel;
+
+    wxWindow *m_topLogWin;
+    wxWindow *m_botLogWin;
+
+    wxMenu          *   m_fileMenu;
+    wxMenu          *   m_insertMenu;
+    wxMenu          *   m_viewMenu;
+
+    wxMenuItem      *   m_verbose;
+
+    void DispatchDataEvent              (wxCommandEvent& event);
+    void DispatchMenuEvent              (wxCommandEvent& event);
+    void DispatchScreenEvent            (wxCommandEvent& event);
+
+    void SetUpMenus                     ();
+    void EnableMenus                    ();
+
+  protected:
+
+  public:
+    GCFrame();
+    GCFrame(const wxString& title, GCLogic & logic);
+    virtual ~GCFrame();
+
+    void UpdateUserCues                 ();
+    void OnNotebookCtrl(wxNotebookEvent& event);
+
+    DECLARE_EVENT_TABLE()
+};
+
+// name for each notebook page
+#define DATA_FILES        wxT("Data Files")
+#define DATA_PARTITIONS   wxT("Data Partitions")
+#define MIGRATION_MATRIX  wxT("Migration Matrix")
+#define LOG_TEXT          wxT("Debug Log")
+
+#endif  // GC_FRAME_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_layout.cpp b/src/guiconv/gc_layout.cpp
new file mode 100644
index 0000000..62033f1
--- /dev/null
+++ b/src/guiconv/gc_layout.cpp
@@ -0,0 +1,27 @@
+// $Id: gc_layout.cpp,v 1.29 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_layout.h"
+
+const long      gclayout::appHeight                  = 800;
+const long      gclayout::appHeightPercent           = 90;
+const long      gclayout::appWidth                   = 800;
+const long      gclayout::appWidthPercent            = 90;
+const int       gclayout::borderSize                 = 10;
+const int       gclayout::borderSizeNone             = 0;
+const int       gclayout::borderSizeSmall            = 4;
+const int       gclayout::boxBorderSize              = 2;
+const long      gclayout::maxDataFiles               = 3;
+const int       gclayout::tabIconHeight              = 32;
+const int       gclayout::tabIconWidth               = 32;
+const int       gclayout::warningImageNeedsInput     = 0;
+const int       gclayout::warningImageOK             = -1;
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_layout.h b/src/guiconv/gc_layout.h
new file mode 100644
index 0000000..9923b65
--- /dev/null
+++ b/src/guiconv/gc_layout.h
@@ -0,0 +1,36 @@
+// $Id: gc_layout.h,v 1.23 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_LAYOUT_H
+#define GC_LAYOUT_H
+
+#include "wx/gdicmn.h"
+
+class gclayout
+{
+  public:
+    static const long   appHeight;
+    static const long   appHeightPercent;
+    static const long   appWidth;
+    static const long   appWidthPercent;
+    static const int    borderSize;
+    static const int    borderSizeNone;
+    static const int    borderSizeSmall;
+    static const int    boxBorderSize;
+    static const long   maxDataFiles;
+    static const int    tabIconHeight;
+    static const int    tabIconWidth;
+    static const int    warningImageNeedsInput;
+    static const int    warningImageOK;
+};
+
+#endif  // GC_LAYOUT_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_locitab.cpp b/src/guiconv/gc_locitab.cpp
new file mode 100644
index 0000000..60edd1d
--- /dev/null
+++ b/src/guiconv/gc_locitab.cpp
@@ -0,0 +1,100 @@
+// $Id: gc_locitab.cpp,v 1.10 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_event_publisher.h"
+#include "gc_locitab.h"
+#include "gc_locus_dialogs.h"
+#include "gc_logic.h"
+#include "gc_strings.h"
+#include "gc_structures.h"
+
+//------------------------------------------------------------------------------------
+
+gcLociPane::gcLociPane(wxWindow * parent)
+    :
+    gcGridPane(parent,7,0)
+{
+}
+
+gcLociPane::~gcLociPane()
+{
+}
+
+void
+gcLociPane::NotifyLeftDClick(size_t row, size_t col)
+{
+    assert(row < m_objVec.size());
+    size_t locId = m_objVec[row];
+    gcEventActor * locEditActor = new gcActor_LocusEdit(locId);
+    PublishScreenEvent(GetEventHandler(),locEditActor);
+}
+
+//------------------------------------------------------------------------------------
+
+gcLociTab::gcLociTab( wxWindow * parent, GCLogic & logic)
+    :
+    gcInfoPane(parent, logic, gcstr::lociTabTitle)
+{
+}
+
+gcLociTab::~gcLociTab()
+{
+}
+
+wxPanel *
+gcLociTab::MakeContent()
+{
+    gcGridPane * pane = new gcLociPane(m_scrolled);
+
+    objVector loci = m_logic.GetStructures().GetDisplayableLoci();
+    for(objVector::iterator iter=loci.begin(); iter != loci.end(); iter++)
+    {
+        GCQuantum * quantumP = *iter;
+        gcLocus * locusP = dynamic_cast<gcLocus*>(quantumP);
+        assert(locusP != NULL);
+
+        wxArrayString labels;
+
+        labels.Add(locusP->GetName());
+        labels.Add(wxString::Format(gcstr::locusLabelDataType,locusP->GetDataTypeString().c_str()));
+        labels.Add(wxString::Format(gcstr::locusLabelSites,locusP->GetNumSitesString().c_str()));
+        if(locusP->GetLinked())
+        {
+            gcRegion & regionRef = m_logic.GetStructures().GetRegion(locusP->GetRegionId());
+            labels.Add(wxString::Format(gcstr::locusLabelLinked,gcstr::yes.c_str()));
+            labels.Add(wxString::Format(gcstr::locusLabelLength,locusP->GetLengthString().c_str()));
+            labels.Add(wxString::Format(gcstr::locusLabelRegionName,regionRef.GetName().c_str()));
+            labels.Add(wxString::Format(gcstr::locusLabelMapPosition,locusP->GetMapPositionString().c_str()));
+        }
+        else
+        {
+            labels.Add(wxString::Format(gcstr::locusLabelLinked,gcstr::no.c_str()));
+            labels.Add(wxEmptyString);
+            labels.Add(wxEmptyString);
+            labels.Add(wxEmptyString);
+        }
+
+        pane->AddRow(quantumP->GetId(),labels);
+    }
+
+    pane->Finish();
+    return pane;
+
+}
+
+wxString
+gcLociTab::MakeLabel()
+{
+    return wxString::Format(m_panelLabelFmt,(int)m_logic.GetStructures().GetDisplayableLocusIds().size());
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_locitab.h b/src/guiconv/gc_locitab.h
new file mode 100644
index 0000000..1cece38
--- /dev/null
+++ b/src/guiconv/gc_locitab.h
@@ -0,0 +1,46 @@
+// $Id: gc_locitab.h,v 1.8 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_LOCITAB_H
+#define GC_LOCITAB_H
+
+#include "gc_gridpanel.h"
+
+class wxWindow;
+
+class gcLociPane : public gcGridPane
+{
+  private:
+    gcLociPane();            // undefined
+  protected:
+  public:
+    gcLociPane(wxWindow * parent);
+    virtual ~gcLociPane();
+
+    virtual void NotifyLeftDClick(size_t row, size_t col);
+};
+
+class gcLociTab : public gcInfoPane
+{
+  private:
+    gcLociTab();        // undefined
+
+  protected:
+    wxPanel *   MakeContent();
+    wxString    MakeLabel();
+
+  public:
+    gcLociTab(wxWindow * parent, GCLogic & logic);
+    virtual ~gcLociTab();
+};
+
+#endif  // GC_LOCITAB_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_locus_dialogs.cpp b/src/guiconv/gc_locus_dialogs.cpp
new file mode 100644
index 0000000..ee00cac
--- /dev/null
+++ b/src/guiconv/gc_locus_dialogs.cpp
@@ -0,0 +1,960 @@
+// $Id: gc_locus_dialogs.cpp,v 1.29 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_creation_info.h"
+#include "gc_datastore.h"
+#include "gc_errhandling.h"
+#include "gc_event_ids.h"
+#include "gc_data.h"
+#include "gc_dialog.h"
+#include "gc_layout.h"
+#include "gc_locus.h"
+#include "gc_locus_dialogs.h"
+#include "gc_strings.h"
+#include "gc_types.h"
+
+#include "wx/checkbox.h"
+#include "wx/log.h"
+#include "wx/sizer.h"
+#include "wx/statbox.h"
+#include "wx/statline.h"
+#include "wx/textctrl.h"
+
+//------------------------------------------------------------------------------------
+
+gcLocusDataTypeChoice::gcLocusDataTypeChoice(   size_t              locusId,
+                                                gcSpecificDataType  type)
+    :
+    m_locusId(locusId),
+    m_type(type),
+    m_box(NULL)
+{
+}
+
+gcLocusDataTypeChoice::~gcLocusDataTypeChoice()
+{
+}
+
+void
+gcLocusDataTypeChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    m_box->SetLabel(ToWxString(m_type));
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcLocusDataTypeChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    // grab the locus
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+
+    // set selection
+    bool isThisOne = (locusRef.GetDataType() == m_type );
+    m_box->SetValue( isThisOne ? 1 : 0 );
+
+    // enable all types possible for this locus
+
+    // EWFIX.P3.BUG.539 -- later -- it would be great if we
+    // could update the possible loci to merge with as
+    // we click individual loci -- at the moment it just
+    // sets the enable value at creation time
+    gcGeneralDataType types = dataStore.GetLegalLocusTypes(m_locusId);
+    assert(m_type != sdatatype_NONE_SET);
+    m_box->Enable(types.find(m_type) != types.end());
+
+}
+
+void
+gcLocusDataTypeChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+    if(m_box->GetValue() > 0 )
+    {
+        gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+        locusRef.SetDataType(m_type);
+    }
+}
+
+void
+gcLocusDataTypeChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+wxWindow *
+gcLocusDataTypeChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,wxEmptyString);
+    return m_box;
+}
+
+wxWindow *
+gcLocusDataTypeChoice::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcLocusDataTypeChoice::GetRelevantId()
+{
+    return m_locusId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusLength::gcLocusLength(size_t locusId)
+    :
+    m_locusId(locusId)
+{
+}
+
+gcLocusLength::~gcLocusLength()
+{
+}
+
+wxString
+gcLocusLength::FromDataStore(GCDataStore & dataStore)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    if(locusRef.HasLength())
+    {
+        return wxString::Format("%d",(int)(locusRef.GetLength()));
+    }
+    return InitialString();
+}
+
+void
+gcLocusLength::ToDataStore(GCDataStore & dataStore, wxString text)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+
+    long length;
+    if(text.ToLong(&length))
+    {
+        locusRef.SetTotalLength(length);
+    }
+    else
+    {
+        wxLogVerbose(gcverbose::locusLengthNotLong,
+                     text.c_str(),
+                     locusRef.GetName().c_str());
+    }
+}
+
+const wxValidator &
+gcLocusLength::GetValidator()
+{
+    return m_validator;
+}
+
+wxString
+gcLocusLength::InitialString()
+{
+    return gcstr::unsetValueLocusLength;
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusFirstPosition::gcLocusFirstPosition(size_t locusId)
+    :
+    m_locusId(locusId)
+{
+}
+
+gcLocusFirstPosition::~gcLocusFirstPosition()
+{
+}
+
+wxString
+gcLocusFirstPosition::FromDataStore(GCDataStore & dataStore)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    if(locusRef.HasOffset())
+    {
+        return wxString::Format("%d",(int)(locusRef.GetOffset()));
+    }
+    return InitialString();
+}
+
+void
+gcLocusFirstPosition::ToDataStore(GCDataStore & dataStore, wxString text)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+
+    long firstPosition;
+    if(text.ToLong(&firstPosition))
+    {
+        locusRef.SetOffset(firstPosition);
+    }
+    else
+    {
+        wxLogVerbose(gcverbose::firstPositionNotLong,
+                     text.c_str(),
+                     locusRef.GetName().c_str());
+    }
+}
+
+const wxValidator &
+gcLocusFirstPosition::GetValidator()
+{
+    return m_validator;
+}
+
+wxString
+gcLocusFirstPosition::InitialString()
+{
+    return gcstr::unsetValueOffset;
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusLocations::gcLocusLocations(size_t locusId)
+    :
+    m_locusId(locusId)
+{
+}
+
+gcLocusLocations::~gcLocusLocations()
+{
+}
+
+wxString
+gcLocusLocations::FromDataStore(GCDataStore & dataStore)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    if(locusRef.HasLocations())
+    {
+        return locusRef.GetLocationsAsString();
+    }
+    return InitialString();
+}
+
+void
+gcLocusLocations::ToDataStore(GCDataStore & dataStore, wxString text)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    if(text != InitialString() && !text.IsEmpty())
+    {
+        locusRef.SetLocations(text);
+    }
+    else
+    {
+        wxLogVerbose(gcverbose::locationsNotIntegers,
+                     text.c_str(),
+                     locusRef.GetName().c_str());
+    }
+
+}
+
+const wxValidator &
+gcLocusLocations::GetValidator()
+{
+    return m_validator;
+}
+
+wxString
+gcLocusLocations::InitialString()
+{
+    return gcstr::unsetValueLocations;
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusLinkageChoice::gcLocusLinkageChoice(size_t locusId, bool linked)
+    :
+    m_locusId(locusId),
+    m_linked(linked)
+{
+}
+
+gcLocusLinkageChoice::~gcLocusLinkageChoice()
+{
+}
+
+void
+gcLocusLinkageChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    // set labels
+    if(m_linked)
+    {
+        m_box->SetLabel(gcstr::linkageYes);
+    }
+    else
+    {
+        m_box->SetLabel(gcstr::linkageNo);
+    }
+
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcLocusLinkageChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+
+    if(m_linked == locusRef.GetLinked())
+    {
+        m_box->SetValue(1);
+    }
+    else
+    {
+        m_box->SetValue(0);
+    }
+
+    gcGeneralDataType gtype = dataStore.GetLegalLocusTypes(m_locusId);
+    m_box->Enable( m_linked ||  (gtype.HasAllelic() && !gtype.HasNucleic())) ;
+
+}
+
+void
+gcLocusLinkageChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+    if(m_box->GetValue() > 0)
+    {
+        gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+        locusRef.SetLinkedUserValue(m_linked); // EWFIX.P3.BUG.551 -- not always a user value
+    }
+}
+
+void
+gcLocusLinkageChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+wxWindow *
+gcLocusLinkageChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,wxEmptyString);
+    return m_box;
+}
+
+wxWindow *
+gcLocusLinkageChoice::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcLocusLinkageChoice::GetRelevantId()
+{
+    return m_locusId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusMergeChoice::gcLocusMergeChoice( size_t choiceLocusId,
+                                        size_t dialogLocusId)
+    :
+    m_choiceLocusId(choiceLocusId),
+    m_dialogLocusId(dialogLocusId),
+    m_box(NULL)
+{
+}
+
+gcLocusMergeChoice::~gcLocusMergeChoice()
+{
+}
+
+void
+gcLocusMergeChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_choiceLocusId);
+
+    m_box->SetLabel(locusRef.GetName());
+    m_box->SetValue(0);
+
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcLocusMergeChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    gcLocus & choiceLocus = dataStore.GetStructures().GetLocus(m_choiceLocusId);
+    gcLocus & dialogLocus = dataStore.GetStructures().GetLocus(m_dialogLocusId);
+    m_box->Enable(choiceLocus.CanMergeWith(dialogLocus));
+}
+
+void
+gcLocusMergeChoice::UpdateDataInterim(GCDataStore & dataStore)
+// nothing to do until the end at the top level
+{
+}
+
+void
+gcLocusMergeChoice::UpdateDataFinal(GCDataStore & dataStore)
+// nothing to do until the end at the top level
+{
+}
+
+wxWindow *
+gcLocusMergeChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,wxEmptyString);
+    return m_box;
+}
+
+wxWindow *
+gcLocusMergeChoice::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcLocusMergeChoice::GetRelevantId()
+{
+    return m_choiceLocusId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusMerge::gcLocusMerge( wxWindow *                      parent,
+                            size_t                          locusId,
+                            std::vector<gcChoiceObject*>    choices)
+    :
+    gcUpdatingChooseMulti(parent,gcstr::mergeLociInstructions,choices),
+    m_locusId(locusId)
+{
+}
+
+gcLocusMerge::~gcLocusMerge()
+{
+}
+
+void
+gcLocusMerge::DoFinalForMulti(GCDataStore & dataStore, gcIdVec chosens)
+{
+    chosens.insert(chosens.begin(),m_locusId);
+    dataStore.GetStructures().MergeLoci(chosens);
+}
+
+wxString
+gcLocusMerge::NoChoicesText() const
+{
+    return gcstr::noChoiceLocus;
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusPosition::gcLocusPosition(size_t locusId)
+    :
+    m_locusId(locusId)
+{
+}
+
+gcLocusPosition::~gcLocusPosition()
+{
+}
+
+wxString
+gcLocusPosition::FromDataStore(GCDataStore & dataStore)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    if(locusRef.HasMapPosition())
+    {
+        long mapPosition = locusRef.GetMapPosition();
+        return wxString::Format("%ld",mapPosition);
+    }
+    return InitialString();
+}
+
+void
+gcLocusPosition::ToDataStore(GCDataStore & dataStore, wxString newPosition)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+
+    long position;
+    if(newPosition.ToLong(&position))
+    {
+        locusRef.SetMapPosition(position);
+    }
+    else
+    {
+        wxLogVerbose(gcverbose::locusPositionNotLong,
+                     newPosition.c_str(),
+                     locusRef.GetName().c_str());
+    }
+}
+
+const wxValidator &
+gcLocusPosition::GetValidator()
+{
+    return m_validator;
+}
+
+wxString
+gcLocusPosition::InitialString()
+{
+    return gcstr::unsetValueLocusPosition;
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusRegionChoice::gcLocusRegionChoice(size_t locusId, size_t regionId, bool isNewlyCreated)
+    :
+    m_locusId(locusId),
+    m_regionId(regionId),
+    m_newlyCreated(isNewlyCreated),
+    m_box(NULL)
+{
+}
+
+gcLocusRegionChoice::~gcLocusRegionChoice()
+{
+}
+
+void
+gcLocusRegionChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    gcRegion & regionRef = dataStore.GetStructures().GetRegion(m_regionId);
+    if(m_newlyCreated)
+    {
+        m_box->SetLabel(gcstr::createNewRegion);
+    }
+    else
+    {
+        m_box->SetLabel(regionRef.GetName());
+    }
+
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcLocusRegionChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+
+    if(locusRef.GetRegionId() == m_regionId)
+    {
+        m_box->SetValue(1);
+    }
+    else
+    {
+        m_box->SetValue(0);
+    }
+
+    size_t numRegions = dataStore.GetStructures().GetDisplayableRegionIds().size();
+
+    // if there is only one region, we'd better be assigned to it
+    assert( (numRegions > 1) || (locusRef.GetRegionId() == m_regionId) );
+
+    bool doEnable = ( (numRegions > 1) || (locusRef.GetRegionId() != m_regionId) );
+    m_box->Enable(doEnable);
+}
+
+void
+gcLocusRegionChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+}
+
+void
+gcLocusRegionChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    // EWFIX.BUG674
+}
+
+wxWindow *
+gcLocusRegionChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,wxEmptyString);
+    return m_box;
+}
+
+wxWindow *
+gcLocusRegionChoice::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcLocusRegionChoice::GetRelevantId()
+// this is the one you want if you're recording which item is checked
+{
+    return m_regionId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusOwnRegion::gcLocusOwnRegion(size_t locusId, bool isNewlyCreated)
+    :
+    m_locusId(locusId),
+    m_newlyCreated(isNewlyCreated),
+    m_box(NULL)
+{
+}
+
+gcLocusOwnRegion::~gcLocusOwnRegion()
+{
+}
+
+void
+gcLocusOwnRegion::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcLocusOwnRegion::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    m_box->SetValue(0);
+    const gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    size_t regionId = locusRef.GetRegionId();
+    gcIdVec locusIds = dataStore.GetStructures().GetLocusIdsForRegionByMapPosition(regionId);
+    m_box->Enable(locusIds.size() > 1);
+}
+
+void
+gcLocusOwnRegion::UpdateDataInterim(GCDataStore & dataStore)
+{
+}
+
+void
+gcLocusOwnRegion::UpdateDataFinal(GCDataStore & dataStore)
+{
+    if(m_box->GetValue() > 0)
+    {
+        dataStore.GetStructures().LocusToOwnRegion(m_locusId);
+    }
+}
+
+wxWindow *
+gcLocusOwnRegion::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,gcstr::locusOwnRegion);
+    return m_box;
+}
+
+wxWindow *
+gcLocusOwnRegion::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcLocusOwnRegion::GetRelevantId()
+// this is the one you want if you're recording which item is checked
+{
+    return m_locusId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusRename::gcLocusRename(size_t locusId)
+    :
+    m_locusId(locusId)
+{
+}
+
+gcLocusRename::~gcLocusRename()
+{
+}
+
+wxString
+gcLocusRename::FromDataStore(GCDataStore & dataStore)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    return locusRef.GetName();
+}
+
+void
+gcLocusRename::ToDataStore(GCDataStore & dataStore, wxString newName)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    dataStore.GetStructures().Rename(locusRef,newName);
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusMarkerCount::gcLocusMarkerCount(size_t locusId)
+    :
+    m_locusId(locusId)
+{
+}
+
+gcLocusMarkerCount::~gcLocusMarkerCount()
+{
+}
+
+wxString
+gcLocusMarkerCount::FromDataStore(GCDataStore & dataStore)
+{
+    gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    if(locusRef.HasNumMarkers())
+    {
+        return locusRef.GetNumMarkersString();
+    }
+    return "";
+}
+
+//------------------------------------------------------------------------------------
+
+gcLocusEditDialog::gcLocusEditDialog(   wxWindow *      parent,
+                                        GCDataStore &   dataStore,
+                                        size_t          locusId,
+                                        bool            forJustCreatedObj)
+    :
+    gcUpdatingDialog(   parent,
+                        dataStore,
+                        forJustCreatedObj
+                        ?
+                        gcstr::addLocus
+                        :
+                        wxString::Format(gcstr::editLocus,
+                                         dataStore.GetStructures().GetLocus(locusId).GetName().c_str()),
+                        forJustCreatedObj),
+    m_locusId(locusId)
+{
+}
+
+gcLocusEditDialog::~gcLocusEditDialog()
+{
+}
+
+void
+gcLocusEditDialog::DoDelete()
+{
+    gcLocus & locusRef = m_dataStore.GetStructures().GetLocus(m_locusId);
+    m_dataStore.GetStructures().RemoveLocus(locusRef);
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+DoDialogEditLocus(wxWindow *      parentWindow,
+                  GCDataStore &   dataStore,
+                  size_t          locusId,
+                  bool            forJustCreatedObj)
+{
+    gcLocusEditDialog dialog(parentWindow,dataStore,locusId,forJustCreatedObj);
+
+    // build the dialog
+    gcDialogCreator creator;
+    wxBoxSizer * contentSizer = new wxBoxSizer(wxHORIZONTAL);
+    wxBoxSizer * leftSizer = new wxBoxSizer(wxVERTICAL);
+
+    /////////////////////////
+    // rename this locus
+    gcTextHelper * locusRenameHelp = new gcLocusRename(locusId);
+    gcUpdatingComponent * rename = new gcUpdatingTextCtrl(&dialog,
+                                                          forJustCreatedObj ? gcstr::locusNewName : gcstr::locusRename,
+                                                          locusRenameHelp);
+
+    /////////////////////////
+    // data type choices
+    std::vector<gcChoiceObject*> locusChoicesDT;
+    // EWFIX.P3.BUG.525  -- should be automated elsewhere
+    wxArrayString dataTypes = gcdata::specificDataTypeChoices();
+    for(size_t i=0; i < dataTypes.Count(); i++)
+    {
+        wxString dataTypeString = dataTypes[i];
+        gcSpecificDataType type = ProduceSpecificDataTypeOrBarf(dataTypeString);
+        if(type != sdatatype_NONE_SET)
+        {
+            locusChoicesDT.push_back(new gcLocusDataTypeChoice(locusId,type));
+        }
+    }
+    gcUpdatingComponent * dataType = new gcUpdatingChoose(&dialog,
+                                                          gcstr::dataType,
+                                                          locusChoicesDT);
+
+    /////////////////////////
+    // linked markers ?
+    std::vector<gcChoiceObject*> linkageChoices;
+    linkageChoices.push_back(new gcLocusLinkageChoice(locusId,true));
+    linkageChoices.push_back(new gcLocusLinkageChoice(locusId,false));
+    gcUpdatingComponent * linkage = new gcUpdatingChoose(&dialog,
+                                                         gcstr::linkageCaption,
+                                                         linkageChoices);
+
+    leftSizer->Add(rename,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+    leftSizer->Add(dataType,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+    leftSizer->Add(linkage,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+
+    contentSizer->Add(leftSizer,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+
+    creator.AddComponent(dialog,rename);
+    creator.AddComponent(dialog,dataType);
+    creator.AddComponent(dialog,linkage);
+
+    //////////////////////////////////////////////////////
+    wxBoxSizer * lMiddleSizer = new wxBoxSizer(wxVERTICAL);
+
+    /////////////////////////
+    // display number of markers
+    gcPlainTextHelper * locusMarkerCountHelp = new gcLocusMarkerCount(locusId);
+    gcUpdatingComponent * markerCount = new gcUpdatingPlainText(&dialog,
+                                                                gcstr::locusMarkerCount,
+                                                                locusMarkerCountHelp);
+
+    /////////////////////////
+    // display total length
+    gcTextHelper * locusLengthHelp = new gcLocusLength(locusId);
+    gcUpdatingComponent * totalLength = new gcUpdatingTextCtrl(&dialog,
+                                                               gcstr::locusLength,
+                                                               locusLengthHelp);
+
+    /////////////////////////
+    // display first position
+    gcTextHelper * firstPositionHelp = new gcLocusFirstPosition(locusId);
+    gcUpdatingComponent * firstPosition = new gcUpdatingTextCtrl(&dialog,
+                                                                 gcstr::firstPositionScanned,
+                                                                 firstPositionHelp);
+
+    /////////////////////////
+    // display locations
+    gcTextHelper * locationsHelp = new gcLocusLocations(locusId);
+    gcUpdatingComponent * locations = new gcUpdatingTextCtrl(&dialog,
+                                                             gcstr::locations,
+                                                             locationsHelp);
+
+    // order in sizer
+    lMiddleSizer->Add(markerCount,
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    lMiddleSizer->Add(totalLength,
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    lMiddleSizer->Add(firstPosition,
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    lMiddleSizer->Add(locations,
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+
+    contentSizer->Add(new wxStaticLine(&dialog,-1,wxDefaultPosition,wxDefaultSize,wxLI_VERTICAL),
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    contentSizer->Add(lMiddleSizer,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    creator.AddComponent(dialog,markerCount);
+    creator.AddComponent(dialog,totalLength);
+    creator.AddComponent(dialog,firstPosition);
+    creator.AddComponent(dialog,locations);
+
+    //////////////////////////////////////////////////////
+    wxBoxSizer * rMiddleSizer = new wxBoxSizer(wxVERTICAL);
+
+    std::vector<gcChoiceObject*> regionChoices;
+    gcDisplayOrder ids = dataStore.GetStructures().GetDisplayableRegionIds();
+    for(gcDisplayOrder::iterator iter=ids.begin(); iter != ids.end(); iter++)
+    {
+        size_t id = *iter;
+        bool isNewlyCreated = forJustCreatedObj
+            && (id == dataStore.GetStructures().GetLocus(locusId).GetRegionId());
+        gcLocusRegionChoice * choice = new gcLocusRegionChoice(locusId,id,isNewlyCreated);
+        regionChoices.push_back(choice);
+    }
+    regionChoices.push_back(new gcLocusOwnRegion(locusId,forJustCreatedObj));
+    gcUpdatingComponent * regions = new gcUpdatingChoose(   &dialog,
+                                                            gcstr::regionChoice,
+                                                            regionChoices);
+
+    gcTextHelper * locusPositionHelp = new gcLocusPosition(locusId);
+    gcUpdatingComponent * position = new gcUpdatingTextCtrl(&dialog,
+                                                            gcstr::locusDialogMapPosition,
+                                                            locusPositionHelp);
+    rMiddleSizer->Add(regions,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    rMiddleSizer->Add(position,
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+
+    contentSizer->Add(new wxStaticLine(&dialog,-1,wxDefaultPosition,wxDefaultSize,wxLI_VERTICAL),
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    contentSizer->Add(rMiddleSizer,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    creator.AddComponent(dialog,regions);
+    creator.AddComponent(dialog,position);
+
+    //////////////////////////////////////////////////////
+    // merging loci only for existing loci
+    if(!forJustCreatedObj)
+    {
+        std::vector<gcChoiceObject*> locusChoices;
+        gcDisplayOrder ids = dataStore.GetStructures().GetDisplayableLocusIds();
+        for(gcDisplayOrder::iterator iter=ids.begin(); iter != ids.end(); iter++)
+        {
+            size_t id = *iter;
+            if(id != locusId)
+            {
+                gcLocusMergeChoice * choice = new gcLocusMergeChoice(id,locusId);
+                locusChoices.push_back(choice);
+            }
+        }
+        gcUpdatingComponent * right = new gcLocusMerge(&dialog,locusId,locusChoices);
+        contentSizer->Add(new wxStaticLine(&dialog,-1,wxDefaultPosition,wxDefaultSize,wxLI_VERTICAL),
+                          0,
+                          wxALL | wxALIGN_CENTER | wxEXPAND,
+                          gclayout::borderSizeSmall);
+        contentSizer->Add(right,
+                          1,
+                          wxALL | wxALIGN_CENTER | wxEXPAND,
+                          gclayout::borderSizeSmall);
+        creator.AddComponent(dialog,right);
+    }
+
+    creator.PlaceContent(dialog,contentSizer);
+
+    // invoke the dialog
+    return dialog.Go();
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_LocusAdd::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    gcRegion & regionRef = dataStore.GetStructures().MakeRegion();
+    gcCreationInfo creationInfo = gcCreationInfo::MakeGuiCreationInfo();
+    gcLocus & locusRef = dataStore.GetStructures().MakeLocus(regionRef,"",true,creationInfo);
+    return DoDialogEditLocus(parent,dataStore,locusRef.GetId(),true);
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_LocusEdit::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    return DoDialogEditLocus(parent,dataStore,m_locusId,false);
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_locus_dialogs.h b/src/guiconv/gc_locus_dialogs.h
new file mode 100644
index 0000000..015f54f
--- /dev/null
+++ b/src/guiconv/gc_locus_dialogs.h
@@ -0,0 +1,288 @@
+// $Id: gc_locus_dialogs.h,v 1.18 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_LOCUS_DIALOGS_H
+#define GC_LOCUS_DIALOGS_H
+
+#include "gc_quantum.h"
+#include "gc_dialog.h"
+#include "gc_types.h"
+#include "gc_validators.h"
+
+class GCDataStore;
+class gcLocus;
+class wxWindow;
+
+class gcLocusDataTypeChoice : public gcChoiceObject
+{
+  private:
+    gcLocusDataTypeChoice();        // undefined
+    size_t                          m_locusId;
+    gcSpecificDataType              m_type;
+    wxCheckBox *                    m_box;
+  protected:
+  public:
+    gcLocusDataTypeChoice(size_t forThisId, gcSpecificDataType type);
+    ~gcLocusDataTypeChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+
+    size_t      GetRelevantId();
+};
+
+class gcLocusLength : public gcTextHelper
+{
+  private:
+    gcLocusLength();                // undefined
+    size_t                          m_locusId;
+    GCNonNegativeIntegerValidator   m_validator;
+  protected:
+  public:
+    gcLocusLength  (size_t locusId);
+    virtual ~gcLocusLength();
+
+    wxString                FromDataStore(GCDataStore &);
+    void                    ToDataStore(GCDataStore &, wxString text);
+    const wxValidator &     GetValidator();
+    wxString                InitialString();
+};
+
+class gcLocusFirstPosition : public gcTextHelper
+{
+  private:
+    gcLocusFirstPosition();              // undefined
+    size_t                          m_locusId;
+    GCIntegerValidator              m_validator;
+  protected:
+  public:
+    gcLocusFirstPosition  (size_t locusId);
+    virtual ~gcLocusFirstPosition();
+
+    wxString                FromDataStore(GCDataStore &);
+    void                    ToDataStore(GCDataStore &, wxString text);
+    const wxValidator &     GetValidator();
+    wxString                InitialString();
+};
+
+class gcLocusLocations : public gcTextHelper
+{
+  private:
+    gcLocusLocations();             // undefined
+    size_t                          m_locusId;
+    GCIntegerListValidator          m_validator;
+  protected:
+  public:
+    gcLocusLocations  (size_t locusId);
+    virtual ~gcLocusLocations();
+
+    wxString                FromDataStore(GCDataStore &);
+    void                    ToDataStore(GCDataStore &, wxString text);
+    const wxValidator &     GetValidator();
+    wxString                InitialString();
+};
+
+class gcLocusLinkageChoice : public gcChoiceObject
+{
+  private:
+    gcLocusLinkageChoice();         // undefined
+    size_t                          m_locusId;
+    bool                            m_linked;
+    wxCheckBox *                    m_box;
+  protected:
+  public:
+    gcLocusLinkageChoice(size_t locusId, bool linked);
+    ~gcLocusLinkageChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+
+    size_t      GetRelevantId();
+};
+
+class gcLocusMergeChoice : public gcChoiceObject
+{
+  private:
+    gcLocusMergeChoice();        // undefined
+    size_t                      m_choiceLocusId;
+    size_t                      m_dialogLocusId;
+    wxCheckBox *                    m_box;
+  protected:
+  public:
+    gcLocusMergeChoice(size_t choiceLocusId, size_t dialogLocusId);
+    ~gcLocusMergeChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+
+    size_t      GetRelevantId();
+};
+
+class gcLocusMerge : public gcUpdatingChooseMulti
+{
+  private:
+  protected:
+    size_t          m_locusId;
+  public:
+    gcLocusMerge(   wxWindow *                      parent,
+                    size_t                          locusId,
+                    std::vector<gcChoiceObject*>    choices);
+    virtual ~gcLocusMerge();
+
+    void    DoFinalForMulti(GCDataStore & dataStore, gcIdVec selectedChoices);
+    wxString    NoChoicesText() const;
+};
+
+class gcLocusPosition : public gcTextHelper
+{
+  private:
+    gcLocusPosition();              // undefined
+    size_t                          m_locusId;
+    GCIntegerValidator              m_validator;
+  protected:
+  public:
+    gcLocusPosition(size_t locusId);
+    virtual ~gcLocusPosition();
+
+    wxString                FromDataStore(GCDataStore &);
+    void                    ToDataStore(GCDataStore &, wxString text);
+    const wxValidator &     GetValidator();
+    wxString                InitialString();
+};
+
+class gcLocusRegionChoice : public gcChoiceObject
+{
+  private:
+    gcLocusRegionChoice();               // undefined
+    size_t                          m_locusId;
+    size_t                          m_regionId;
+    bool                            m_newlyCreated;
+    wxCheckBox *                    m_box;
+  protected:
+  public:
+    gcLocusRegionChoice(size_t locusId, size_t regionId, bool isNewlyCreatedParent=false);
+    ~gcLocusRegionChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+
+    size_t      GetRelevantId();
+};
+
+class gcLocusOwnRegion : public gcChoiceObject
+{
+  private:
+    gcLocusOwnRegion();               // undefined
+    size_t                          m_locusId;
+    bool                            m_newlyCreated;
+    wxCheckBox *                    m_box;
+  protected:
+  public:
+    gcLocusOwnRegion(size_t locusId, bool isNewlyCreated=false);
+    ~gcLocusOwnRegion();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+
+    size_t      GetRelevantId();
+};
+
+class gcLocusRename : public gcTextHelper
+{
+  private:
+  protected:
+    size_t          m_locusId;
+  public:
+    gcLocusRename(size_t locusId);
+    ~gcLocusRename();
+
+    wxString    FromDataStore(GCDataStore &);
+    void        ToDataStore(GCDataStore &, wxString newText);
+};
+
+class gcLocusMarkerCount : public gcPlainTextHelper
+{
+  private:
+    gcLocusMarkerCount();             // undefined
+    size_t                          m_locusId;
+  protected:
+  public:
+    gcLocusMarkerCount(size_t locusId);
+    virtual ~gcLocusMarkerCount();
+
+    wxString FromDataStore(GCDataStore&);
+};
+
+class gcLocusEditDialog : public gcUpdatingDialog
+{
+  private:
+  protected:
+    size_t          m_locusId;
+    void DoDelete();
+  public:
+    gcLocusEditDialog(wxWindow *    parentWindow,
+                      GCDataStore &   dataStore,
+                      size_t          locusId,
+                      bool            forJustCreatedObj);
+    virtual ~gcLocusEditDialog();
+};
+
+bool DoDialogEditLocus( wxWindow *      parentWindow,
+                        GCDataStore &   dataStore,
+                        size_t          locusId,
+                        bool            forJustCreatedObj);
+
+class gcActor_LocusAdd : public gcEventActor
+{
+  public:
+    gcActor_LocusAdd() {};
+    virtual ~gcActor_LocusAdd() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+class gcActor_LocusEdit : public gcEventActor
+{
+  private:
+    gcActor_LocusEdit();     // undefined
+    size_t                  m_locusId;
+  public:
+    gcActor_LocusEdit(size_t locusId) : m_locusId(locusId) {};
+    virtual ~gcActor_LocusEdit() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+#endif  // GC_LOCUS_DIALOGS_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_logic.cpp b/src/guiconv/gc_logic.cpp
new file mode 100644
index 0000000..7198b3e
--- /dev/null
+++ b/src/guiconv/gc_logic.cpp
@@ -0,0 +1,131 @@
+// $Id: gc_logic.cpp,v 1.58 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <vector>
+
+#include "gc_dialog.h"
+#include "gc_logic.h"
+#include "gc_strings.h"
+
+#include "wx/string.h"
+#include "wx/utils.h"
+
+GCLogic::GCLogic()
+    : m_displayParent(NULL)
+{
+}
+
+GCLogic::~GCLogic()
+{
+}
+
+void
+GCLogic::SetDisplayParent(wxWindow * win)
+{
+    m_displayParent = win;
+}
+
+void
+GCLogic::GCFatalBatchWarnGUI(wxString msg) const
+{
+    if(m_displayParent == NULL)
+    {
+        GCFatal(msg);
+    }
+    else
+    {
+        GCWarning(msg);
+    }
+}
+
+void
+GCLogic::GCError(wxString msg) const
+{
+    if(m_displayParent != NULL)
+    {
+        wxMessageDialog dialog(m_displayParent,msg,gcstr::error,wxOK|wxICON_ERROR);
+        dialog.ShowModal();
+    }
+    else
+    {
+        GCDataStore::GCError(msg);
+    }
+}
+
+void
+GCLogic::GCInfo(wxString msg) const
+{
+    if(m_displayParent != NULL)
+    {
+        wxMessageDialog dialog(m_displayParent,msg,gcstr::information,wxOK|wxICON_INFORMATION);
+        dialog.ShowModal();
+    }
+    else
+    {
+        GCDataStore::GCInfo(msg);
+    }
+}
+
+void
+GCLogic::GCWarning(wxString msg) const
+{
+    if(m_displayParent != NULL)
+    {
+        wxMessageDialog dialog(m_displayParent,msg,gcstr::warning,wxOK|wxICON_EXCLAMATION);
+        dialog.ShowModal();
+    }
+    else
+    {
+        GCDataStore::GCWarning(msg);
+    }
+}
+
+void
+GCLogic::batchFileRejectGuiLog(wxString msg, size_t lineNo) const
+{
+    warnLog(msg,lineNo);
+}
+
+bool
+GCLogic::guiQuestionBatchLog(wxString msg, wxString stopButton, wxString continueButton) const
+{
+    if(m_displayParent != NULL)
+    {
+        wxMessageDialog md(m_displayParent,
+                           msg,
+                           gcstr::questionHeader,
+                           wxYES_NO | wxNO_DEFAULT | wxICON_EXCLAMATION
+            );
+        int returnVal = md.ShowModal();
+        return(returnVal == wxID_YES);
+
+    }
+    else
+    {
+        return GCDataStore::guiQuestionBatchLog(msg,stopButton,continueButton);
+    }
+    return true;    // not reached: both branches above return
+}
+
+void
+GCLogic::GettingBusy(const wxString& msg) const
+{
+    GCDataStore::GettingBusy(msg);
+    wxBeginBusyCursor();
+}
+
+void
+GCLogic::LessBusy(const wxString& msg) const
+{
+    wxEndBusyCursor();
+    GCDataStore::LessBusy(msg);
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_logic.h b/src/guiconv/gc_logic.h
new file mode 100644
index 0000000..8d354b7
--- /dev/null
+++ b/src/guiconv/gc_logic.h
@@ -0,0 +1,51 @@
+// $Id: gc_logic.h,v 1.43 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_LOGIC_H
+#define GC_LOGIC_H
+
+#include <vector>
+#include "wx/wx.h"
+#include "gc_datastore.h"
+#include "gc_types.h"
+
+class GuiConverterApp;
+class wxWindow;
+
+class GCLogic   : public GCDataStore
+{
+    friend class GuiConverterApp;               // for SetDisplayParent()
+
+  private:
+    wxWindow        *   m_displayParent;    // for centering windows
+
+  protected:
+
+    void SetDisplayParent(wxWindow * win);
+
+  public:
+    GCLogic();
+    ~GCLogic();
+
+    void        GCFatalBatchWarnGUI(wxString msg) const;
+    void        GCError  (wxString msg) const;
+    void        GCInfo   (wxString msg) const;
+    void        GCWarning(wxString msg) const;
+
+    void    batchFileRejectGuiLog(wxString msg,size_t lineNo) const;
+    bool    guiQuestionBatchLog(wxString msg,wxString stopButton, wxString continueButton) const;
+
+    void    GettingBusy(const wxString& msg) const;
+    void    LessBusy(const wxString& msg) const;
+};
+
+#endif  // GC_LOGIC_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_matrix_display.cpp b/src/guiconv/gc_matrix_display.cpp
new file mode 100644
index 0000000..26d09e0
--- /dev/null
+++ b/src/guiconv/gc_matrix_display.cpp
@@ -0,0 +1,32 @@
+// $Id: gc_matrix_display.cpp,v 1.1 2011/12/01 22:32:42 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_matrix_display.h"
+#include "gc_layout.h"
+#include "wx/log.h"
+
+GCMatrixDisplaySizer::GCMatrixDisplaySizer()
+    : wxGridBagSizer(gclayout::borderSize,gclayout::borderSize)
+{
+}
+
+GCMatrixDisplaySizer::~GCMatrixDisplaySizer()
+{
+}
+
+void
+GCMatrixDisplaySizer::AddCell(wxWindow * matrix, size_t xpos, size_t ypos)
+{
+    Add(matrix,wxGBPosition(xpos, ypos),wxGBSpan(1,1),wxALL | wxEXPAND);
+    //wxLogMessage(wxT("xpos %i ypos %i"), xpos, ypos);
+}
+
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_matrix_display.h b/src/guiconv/gc_matrix_display.h
new file mode 100644
index 0000000..c852292
--- /dev/null
+++ b/src/guiconv/gc_matrix_display.h
@@ -0,0 +1,27 @@
+// $Id: gc_matrix_display.h,v 1.2 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_MATRIX_DISPLAY_H
+#define GC_MATRIX_DISPLAY_H
+
+#include "wx/gbsizer.h"
+
+class GCMatrixDisplaySizer : public wxGridBagSizer
+{
+  public:
+    GCMatrixDisplaySizer();
+    virtual ~GCMatrixDisplaySizer();
+
+    void AddCell(wxWindow * matrix,size_t xpos, size_t ypos);
+};
+
+#endif  // GC_MATRIX_DISPLAY_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_menu_actors.cpp b/src/guiconv/gc_menu_actors.cpp
new file mode 100644
index 0000000..23d42d3
--- /dev/null
+++ b/src/guiconv/gc_menu_actors.cpp
@@ -0,0 +1,150 @@
+// $Id: gc_menu_actors.cpp,v 1.7 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_datastore.h"
+#include "gc_event_ids.h"
+#include "gc_file_dialogs.h"
+#include "gc_locus_dialogs.h"
+#include "gc_menu_actors.h"
+#include "gc_population_dialogs.h"
+#include "gc_region_dialogs.h"
+#include "gc_strings.h"
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+basicMenuActor::basicMenuActor(int eventId)
+    :
+    m_eventId(eventId)
+{
+}
+
+basicMenuActor::~basicMenuActor()
+{
+}
+
+bool
+basicMenuActor::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    wxLogDebug("saw menu for %d",m_eventId);
+    assert(false);  // no specific actor was mapped for this menu event in MakeMenuActor
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_About::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    dataStore.GCInfo(gcstr::converterInfo);
+    return false;       // does not change database
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_DebugDump::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    dataStore.DebugDump();
+    return false;       // does not change database
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_ExportBatchFile::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    DoDialogExportBatchFile(parent,dataStore);
+    return true;        // doesn't change database now, but will if we store
+                        // batch file name
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_ExportFile::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    DoDialogExportFile(parent,dataStore);
+    return true;       // changes database -- outfile name
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_FileAdd::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    return DoDialogAddFiles(parent,dataStore);
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_CmdFileRead::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    return DoDialogReadCmdFile(parent,dataStore);
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_ToggleVerbose::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    bool cur = wxLog::GetVerbose();
+    wxLog::SetVerbose(!cur);
+    return false;       // does not change database
+}
+
+//------------------------------------------------------------------------------------
+
+gcEventActor * MakeMenuActor(int eventId)
+{
+    gcEventActor * actor = NULL;
+    switch(eventId)
+    {
+        case wxID_ABOUT:
+            actor = new gcActor_About();
+            break;
+        case gcEvent_Batch_Export:
+            actor = new gcActor_ExportBatchFile();
+            break;
+        case gcEvent_CmdFile_Read:
+            actor = new gcActor_CmdFileRead();
+            break;
+        case gcEvent_Debug_Dump:
+            actor = new gcActor_DebugDump();
+            break;
+        case gcEvent_File_Add:
+            actor = new gcActor_FileAdd();
+            break;
+        case gcEvent_File_Export:
+            actor = new gcActor_ExportFile();
+            break;
+        case gcEvent_LinkG_Add:
+            actor = new gcActor_RegionAdd();
+            break;
+        case gcEvent_Locus_Add:
+            actor = new gcActor_LocusAdd();
+            break;
+        case gcEvent_Pop_Add:
+            actor = new gcActor_PopAdd();
+            break;
+        case gcEvent_ToggleVerbose:
+            actor = new gcActor_ToggleVerbose();
+            break;
+        default:
+            actor = new basicMenuActor(eventId);
+    };
+    assert(actor != NULL);
+    return actor;
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_menu_actors.h b/src/guiconv/gc_menu_actors.h
new file mode 100644
index 0000000..914fe6f
--- /dev/null
+++ b/src/guiconv/gc_menu_actors.h
@@ -0,0 +1,96 @@
+// $Id: gc_menu_actors.h,v 1.7 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_MENU_ACTORS_H
+#define GC_MENU_ACTORS_H
+
+#include "gc_quantum.h"
+
+gcEventActor * MakeMenuActor(int eventId);
+
+class basicMenuActor : public gcEventActor
+{
+  private:
+    basicMenuActor();   // undefined
+  protected:
+    int m_eventId;
+  public:
+    basicMenuActor(int eventId);
+    virtual ~basicMenuActor();
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+class gcActor_About : public gcEventActor
+{
+  public:
+    gcActor_About() {};
+    virtual ~gcActor_About() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+class gcActor_DebugDump : public gcEventActor
+{
+  public:
+    gcActor_DebugDump() {};
+    virtual ~gcActor_DebugDump() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+class gcActor_ExportBatchFile : public gcEventActor
+{
+  public:
+    gcActor_ExportBatchFile() {};
+    virtual ~gcActor_ExportBatchFile() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+class gcActor_ExportFile : public gcEventActor
+{
+  public:
+    gcActor_ExportFile() {};
+    virtual ~gcActor_ExportFile() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+class gcActor_FileAdd : public gcEventActor
+{
+  public:
+    gcActor_FileAdd() {};
+    virtual ~gcActor_FileAdd() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+class gcActor_LinkGAdd : public gcEventActor
+{
+  public:
+    gcActor_LinkGAdd() {};
+    virtual ~gcActor_LinkGAdd() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+class gcActor_CmdFileRead : public gcEventActor
+{
+  public:
+    gcActor_CmdFileRead() {};
+    virtual ~gcActor_CmdFileRead() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+class gcActor_ToggleVerbose : public gcEventActor
+{
+  public:
+    gcActor_ToggleVerbose() {};
+    virtual ~gcActor_ToggleVerbose() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+#endif  // GC_MENU_ACTORS_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_migration_dialogs.cpp b/src/guiconv/gc_migration_dialogs.cpp
new file mode 100644
index 0000000..9b75d71
--- /dev/null
+++ b/src/guiconv/gc_migration_dialogs.cpp
@@ -0,0 +1,478 @@
+// $Id: gc_migration_dialogs.cpp,v 1.3 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_datastore.h"
+#include "gc_data.h"
+#include "gc_dialog.h"
+#include "gc_errhandling.h"
+#include "gc_layout.h"
+#include "gc_migration_dialogs.h"
+#include "gc_strings.h"
+#include "gc_structures_err.h"
+
+#include "wx/checkbox.h"
+#include "wx/log.h"
+#include "wx/sizer.h"
+#include "wx/statbox.h"
+#include "wx/statline.h"
+#include "wx/textctrl.h"
+
+//------------------------------------------------------------------------------------
+
+gcMigrationRate::gcMigrationRate(size_t migrationId)
+    :
+    m_migrationId(migrationId)
+{
+}
+
+gcMigrationRate::~gcMigrationRate()
+{
+}
+
+wxString
+gcMigrationRate::FromDataStore(GCDataStore & dataStore)
+{
+    gcMigration & migrationRef = dataStore.GetStructures().GetMigration(m_migrationId);
+    return migrationRef.GetStartValueString();
+}
+
+void
+gcMigrationRate::ToDataStore(GCDataStore & dataStore, wxString newText)
+{
+    gcMigration & migrationRef = dataStore.GetStructures().GetMigration(m_migrationId);
+    double val;
+    if (newText.ToDouble(&val))
+    {
+        if (val < 0)
+        {
+            throw gc_rate_too_small_error();
+        }
+        else
+        {
+            migrationRef.SetStartValue(val);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcMigrationMethodChoice::gcMigrationMethodChoice(size_t migrationId, migration_method  type)
+    :
+    m_migrationId(migrationId),
+    m_type(type),
+    m_box(NULL)
+{
+}
+
+gcMigrationMethodChoice::~gcMigrationMethodChoice()
+{
+}
+
+void
+gcMigrationMethodChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    m_box->SetLabel(ToWxString(m_type));
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcMigrationMethodChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    // grab the migration
+    gcMigration& migRef = dataStore.GetStructures().GetMigration(m_migrationId);
+
+    // set selection
+    bool isThisOne = (migRef.GetMethod() == m_type );
+    m_box->SetValue( isThisOne ? 1 : 0 );
+
+    m_box->Enable();
+
+}
+
+void
+gcMigrationMethodChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+    if(m_box->GetValue() > 0 )
+    {
+        gcMigration& migRef = dataStore.GetStructures().GetMigration(m_migrationId);
+        migRef.SetMethod(m_type);
+    }
+}
+
+void
+gcMigrationMethodChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+wxWindow *
+gcMigrationMethodChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,wxEmptyString);
+    return m_box;
+}
+
+wxWindow *
+gcMigrationMethodChoice::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcMigrationMethodChoice::GetRelevantId()
+{
+    return m_migrationId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcMigrationProfileChoice::gcMigrationProfileChoice(size_t migrationId, migration_profile type)
+    :
+    m_migrationId(migrationId),
+    m_type(type),
+    m_box(NULL)
+{
+}
+
+gcMigrationProfileChoice::~gcMigrationProfileChoice()
+{
+}
+
+void
+gcMigrationProfileChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    m_box->SetLabel(ToWxString(m_type));
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcMigrationProfileChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    // grab the migration
+    gcMigration& migRef = dataStore.GetStructures().GetMigration(m_migrationId);
+
+    // set selection
+    bool isThisOne = (migRef.GetProfile() == m_type );
+    m_box->SetValue( isThisOne ? 1 : 0 );
+
+    m_box->Enable();
+
+}
+
+void
+gcMigrationProfileChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+    if(m_box->GetValue() > 0 )
+    {
+        gcMigration& migRef = dataStore.GetStructures().GetMigration(m_migrationId);
+        migRef.SetProfile(m_type);
+    }
+}
+
+void
+gcMigrationProfileChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+wxWindow *
+gcMigrationProfileChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,wxEmptyString);
+    return m_box;
+}
+
+wxWindow *
+gcMigrationProfileChoice::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcMigrationProfileChoice::GetRelevantId()
+{
+    return m_migrationId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcMigrationConstraintChoice::gcMigrationConstraintChoice(size_t migrationId, migration_constraint type)
+    :
+    m_migrationId(migrationId),
+    m_type(type),
+    m_box(NULL)
+{
+}
+
+gcMigrationConstraintChoice::~gcMigrationConstraintChoice()
+{
+}
+
+void
+gcMigrationConstraintChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    m_box->SetLabel(ToWxString(m_type));
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcMigrationConstraintChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    // grab the migration
+    gcMigration& migRef = dataStore.GetStructures().GetMigration(m_migrationId);
+
+    // set selection
+    bool isThisOne = (migRef.GetConstraint() == m_type );
+    m_box->SetValue( isThisOne ? 1 : 0 );
+    m_box->Enable();
+
+}
+
+void
+gcMigrationConstraintChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+    if(m_box->GetValue() > 0 )
+    {
+        gcMigration& migRef = dataStore.GetStructures().GetMigration(m_migrationId);
+        migRef.SetConstraint(m_type);
+        if (m_type == migconstraint_SYMMETRIC)
+        {
+            size_t toId = migRef.GetToId();
+            size_t fromId = migRef.GetFromId();
+            gcMigration& symMigRef = dataStore.GetStructures().GetMigration(toId, fromId);
+            symMigRef.SetConstraint(m_type);
+            symMigRef.SetStartValue(migRef.GetStartValue());
+        }
+        else
+        {
+            size_t toId = migRef.GetToId();
+            size_t fromId = migRef.GetFromId();
+            gcMigration& symMigRef = dataStore.GetStructures().GetMigration(toId, fromId);
+            if (symMigRef.GetConstraint() == migconstraint_SYMMETRIC)
+            {
+                symMigRef.SetConstraint(migconstraint_UNCONSTRAINED);
+            }
+        }
+    }
+}
+
+void
+gcMigrationConstraintChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
+    UpdateDataInterim(dataStore);
+}
+
+wxWindow *
+gcMigrationConstraintChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,wxEmptyString);
+    return m_box;
+}
+
+wxWindow *
+gcMigrationConstraintChoice::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcMigrationConstraintChoice::GetRelevantId()
+{
+    return m_migrationId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcMigrationEditDialog::gcMigrationEditDialog(   wxWindow *      parent,
+                                                GCDataStore &   dataStore,
+                                                size_t          migrationId,
+                                                wxString        fromName,
+                                                wxString        toName,
+                                                bool            forJustCreatedObj)
+    :
+    gcUpdatingDialog(   parent,
+                        dataStore,
+                        wxString::Format(gcstr::editMigration, fromName.c_str(), toName.c_str()),
+                        forJustCreatedObj),
+    m_migrationId(migrationId)
+{
+}
+
+gcMigrationEditDialog::~gcMigrationEditDialog()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+bool DoDialogEditMigration( wxWindow *      parent,
+                            GCDataStore &   dataStore,
+                            size_t          migrationId,
+                            bool            forJustCreatedObj)
+{
+    wxLogVerbose("DoDialogEditMigration: migrationID: %i pop? %i",(int)migrationId, dataStore.GetStructures().IsPop(migrationId));  // JMDBG
+
+    wxString fromName;
+    if (dataStore.GetStructures().IsPop(dataStore.GetStructures().GetMigration(migrationId).GetFromId()))
+    {
+        fromName = dataStore.GetStructures().GetPop(
+            dataStore.GetStructures().GetMigration(migrationId).GetFromId()).GetName();
+    }
+    else
+    {
+        fromName = dataStore.GetStructures().GetParent(
+            dataStore.GetStructures().GetMigration(migrationId).GetFromId()).GetName();
+    }
+
+    wxString toName;
+    if (dataStore.GetStructures().IsPop(dataStore.GetStructures().GetMigration(migrationId).GetToId()))
+    {
+        toName = dataStore.GetStructures().GetPop(
+            dataStore.GetStructures().GetMigration(migrationId).GetToId()).GetName();
+    }
+    else
+    {
+        toName = dataStore.GetStructures().GetParent(
+            dataStore.GetStructures().GetMigration(migrationId).GetToId()).GetName();
+    }
+
+    gcMigrationEditDialog dialog(parent,dataStore,migrationId, fromName, toName,forJustCreatedObj);
+
+
+    // migration rate
+    gcTextHelper * migrationRate  = new gcMigrationRate(migrationId);
+    gcUpdatingComponent * rate    = new gcUpdatingTextCtrl( &dialog,
+                                                            gcstr::migLabelRate,
+                                                            migrationRate);
+
+    // method choices
+    std::vector<gcChoiceObject*> methodChoicesDT;
+    wxArrayString migMethods = gcdata::migrationMethods();
+
+    for(size_t i=0; i < migMethods.Count(); i++)
+    {
+        bool usestr = true;
+        if(dataStore.GetStructures().GetDivergenceState())
+        {
+            // shut off FST option which will break divergence
+            if (migMethods[i].Contains("FST"))
+            {
+                usestr = false;
+            }
+        }
+        if (usestr)
+        {
+            wxString dataTypeString = migMethods[i];
+            migration_method type = ProduceMigMethodOrBarf(dataTypeString);
+            methodChoicesDT.push_back(new gcMigrationMethodChoice(migrationId,type));
+        }
+    }
+    gcUpdatingComponent * migMethod = new gcUpdatingChoose(&dialog,
+                                                           gcstr::migLabelMethod,
+                                                           methodChoicesDT);
+
+    // migration profile
+    std::vector<gcChoiceObject*> profileChoicesDT;
+    wxArrayString migProfiles = gcdata::migrationProfiles();
+
+    for(size_t i=0; i < migProfiles.Count(); i++)
+    {
+        wxString dataTypeString = migProfiles[i];
+        migration_profile type = ProduceMigProfileOrBarf(dataTypeString);
+        profileChoicesDT.push_back(new gcMigrationProfileChoice(migrationId,type));
+    }
+    gcUpdatingComponent * migProfile = new gcUpdatingChoose(&dialog,
+                                                            gcstr::migLabelProfile,
+                                                            profileChoicesDT);
+
+    // migration constraint
+    std::vector<gcChoiceObject*> constraintChoicesDT;
+    wxArrayString migConstraints = gcdata::migrationConstraints();
+
+    for(size_t i=0; i < migConstraints.Count(); i++)
+    {
+        wxString dataTypeString = migConstraints[i];
+        migration_constraint type = ProduceMigConstraintOrBarf(dataTypeString);
+        constraintChoicesDT.push_back(new gcMigrationConstraintChoice(migrationId,type));
+    }
+    gcUpdatingComponent * migConstraint = new gcUpdatingChoose(&dialog,
+                                                               gcstr::migLabelConstraint,
+                                                               constraintChoicesDT);
+
+
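+    // Lay out the dialog: rate and constraint on the left, method and profile on
+    // the right, separated by a vertical static line.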
+    // build the dialog
+    gcDialogCreator creator;
+    wxBoxSizer * contentSizer   = new wxBoxSizer(wxHORIZONTAL);
+    wxBoxSizer * leftSizer      = new wxBoxSizer(wxVERTICAL);
+
+    // populate left sizer
+    leftSizer->Add(rate,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+    leftSizer->Add(migConstraint,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+
+
+    creator.AddComponent(dialog,rate);
+    creator.AddComponent(dialog,migConstraint);
+    contentSizer->Add(leftSizer,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+
+
+    //////////////////////////////////////////////////////
+    contentSizer->Add(new wxStaticLine(&dialog,-1,wxDefaultPosition,wxDefaultSize,wxLI_VERTICAL),
+                      0,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+
+    //////////////////////////////////////////////////////
+    wxBoxSizer * rightSizer = new wxBoxSizer(wxVERTICAL);
+
+    // populate right sizer
+    rightSizer->Add(migMethod,
+                    0,
+                    wxALL | wxALIGN_CENTER | wxEXPAND,
+                    gclayout::borderSizeSmall);
+    rightSizer->Add(migProfile,
+                    0,
+                    wxALL | wxALIGN_CENTER | wxEXPAND,
+                    gclayout::borderSizeSmall);
+
+
+    creator.AddComponent(dialog,migMethod);
+    creator.AddComponent(dialog,migProfile);
+
+    contentSizer->Add(rightSizer,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+
+    creator.PlaceContent(dialog,contentSizer);
+    return dialog.Go();
+}
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_MigrationEdit::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    wxLogVerbose("gcActor_MigrationEdit::OperateOn m_migrationId: %i",(int)m_migrationId);  // JMDBG
+    return DoDialogEditMigration(parent,dataStore,m_migrationId,false);
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_migration_dialogs.h b/src/guiconv/gc_migration_dialogs.h
new file mode 100644
index 0000000..0a523e7
--- /dev/null
+++ b/src/guiconv/gc_migration_dialogs.h
@@ -0,0 +1,154 @@
+// $Id: gc_migration_dialogs.h,v 1.2 2011/12/30 22:50:10 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_MIGRATION_DIALOGS_H
+#define GC_MIGRATION_DIALOGS_H
+
+#include "gc_dialog.h"
+#include "gc_quantum.h"
+#include "gc_validators.h"
+
+class GCDataStore;
+class wxWindow;
+
+//------------------------------------------------------------------------------------
+
+class gcMigrationRate : public gcTextHelper
+{
+  private:
+    gcMigrationRate();       // undefined
+    size_t                  m_migrationId;
+  protected:
+  public:
+    gcMigrationRate(size_t migrationId);
+    ~gcMigrationRate();
+
+    wxString    FromDataStore(GCDataStore &);
+    void        ToDataStore(GCDataStore &, wxString newText);
+};
+
+//------------------------------------------------------------------------------------
+
+class gcMigrationMethodChoice : public gcChoiceObject
+{
+  private:
+    gcMigrationMethodChoice();        // undefined
+    size_t                          m_migrationId;
+    migration_method                m_type;
+    wxCheckBox *                    m_box;
+  protected:
+  public:
+    gcMigrationMethodChoice(size_t forThisId, migration_method type);
+    ~gcMigrationMethodChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+
+    size_t      GetRelevantId();
+};
+
+//------------------------------------------------------------------------------------
+
+class gcMigrationProfileChoice : public gcChoiceObject
+{
+  private:
+    gcMigrationProfileChoice();        // undefined
+    size_t                          m_migrationId;
+    migration_profile               m_type;
+    wxCheckBox *                    m_box;
+  protected:
+  public:
+    gcMigrationProfileChoice(size_t forThisId, migration_profile type);
+    ~gcMigrationProfileChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+
+    size_t      GetRelevantId();
+};
+
+//------------------------------------------------------------------------------------
+
+class gcMigrationConstraintChoice : public gcChoiceObject
+{
+  private:
+    gcMigrationConstraintChoice();        // undefined
+    size_t                          m_migrationId;
+    migration_constraint            m_type;
+    wxCheckBox *                    m_box;
+  protected:
+  public:
+    gcMigrationConstraintChoice(size_t forThisId, migration_constraint type);
+    ~gcMigrationConstraintChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+
+    size_t      GetRelevantId();
+};
+
+//------------------------------------------------------------------------------------
+
+class gcMigrationEditDialog : public gcUpdatingDialog
+{
+  private:
+  protected:
+    size_t          m_migrationId;
+    void    DoDelete() {};  // satisfy compiler, now does the same as cancel
+  public:
+    gcMigrationEditDialog(  wxWindow *      parentWindow,
+                            GCDataStore &   dataStore,
+                            size_t          migrationId,
+                            wxString        fromName,
+                            wxString        toName,
+                            bool            forJustCreatedObj);
+    virtual ~gcMigrationEditDialog();
+};
+
+//------------------------------------------------------------------------------------
+
+bool DoDialogEditMigration( wxWindow *      parentWindow,
+                            GCDataStore &   dataStore,
+                            size_t          migrationId,
+                            bool            forJustCreatedObj);
+
+
+
+//------------------------------------------------------------------------------------
+
+class gcActor_MigrationEdit : public gcEventActor
+{
+  private:
+    gcActor_MigrationEdit();       // undefined
+    size_t                      m_migrationId;
+
+  public:
+    gcActor_MigrationEdit(size_t migrationId) : m_migrationId(migrationId) {};
+    virtual ~gcActor_MigrationEdit() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+#endif  // GC_MIGRATION_DIALOGS_H
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_migtab.cpp b/src/guiconv/gc_migtab.cpp
new file mode 100644
index 0000000..52f84f0
--- /dev/null
+++ b/src/guiconv/gc_migtab.cpp
@@ -0,0 +1,455 @@
+// $Id: gc_migtab.cpp,v 1.7 2014/08/29 18:14:55 mkkuhner Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/* NOTE:  8/29/2014 Mary edited gc_migtab.cpp to reverse the sense
+of "from" and "to" in the migration matrix, since it was producing
+XML files that were backward relative to what LAMARC assumes.  She did NOT
+change variable names or anything else to match, so there are probably
+misleading variable names in this part of the converter and in the
+corresponding XML output routines.  Maintainers beware!  DEBUG */
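+/* Judging by the corner-cell labels below ("From >" across the columns, "To v" down
+   the rows), each value cell is indexed as (column = source, row = destination) as
+   displayed; given the note above, treat the from/to variable names with caution. */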
+
+#include <cassert>
+#include <stdio.h>
+
+#include "gc_matrix_display.h"
+#include "gc_event_publisher.h"
+#include "gc_logic.h"
+#include "gc_migration_dialogs.h"
+#include "gc_migtab.h"
+#include "gc_strings.h"
+#include "gc_default.h"
+#include "gc_migration.h"
+#include "gc_population.h"
+
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+gcMigCell::gcMigCell(wxWindow * parent, GCStructures & st_var, const size_t cellId, matrix_cell_type cellType)
+    :   gcClickCell(parent,""),
+        m_cellId(gcdefault::badIndex),
+        m_cellType(cellType)
+{
+    wxString dispstr;
+    switch (cellType){
+        case matrixcelltype_EMPTY:
+            dispstr = "";
+            AddText(dispstr);
+            Disable();
+            break;
+
+        case matrixcelltype_INVALID:
+            dispstr = "invalid";
+            AddText(dispstr);
+            Disable();
+            break;
+
+        case matrixcelltype_CORNER:
+            dispstr = "From >";
+            AddText(dispstr);
+            dispstr = "";
+            AddText(dispstr);
+            dispstr = "To v";
+            AddText(dispstr);
+            Disable();
+            break;
+
+        case matrixcelltype_LABEL:
+            dispstr = st_var.GetPop(cellId).GetName();
+            AddText(dispstr);
+            Disable();
+            break;
+
+        case matrixcelltype_VALUE:
+            m_cellId = cellId;
+            AddText(wxString::Format(gcstr::migRate, st_var.GetMigration(m_cellId).GetStartValueString().c_str()));
+            AddText(wxString::Format(gcstr::migMethod, st_var.GetMigration(m_cellId).GetMethodString().c_str()));
+            AddText(wxString::Format(gcstr::migProfile, st_var.GetMigration(m_cellId).GetProfileAsString().c_str()));
+            AddText(wxString::Format(gcstr::migConstraint, st_var.GetMigration(m_cellId).GetConstraintString().c_str()));
+            break;
+
+        default:
+            dispstr = "undefined";
+    }
+    FinishSizing();
+}
+
+gcMigCell::~gcMigCell()
+{
+}
+
+void
+gcMigCell::ToDataStore(GCStructures & st_var, wxString newValue)
+{
+    //gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    //dataStore.GetStructures().Rename(locusRef,newName);
+}
+
+size_t
+gcMigCell::GetCellId()
+{
+    return m_cellId;
+}
+
+matrix_cell_type
+gcMigCell::GetCellType()
+{
+    return m_cellType;
+}
+
+void
+gcMigCell::NotifyLeftDClick()
+{
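+    // Double-clicking a value cell publishes a screen event that opens the
+    // migration edit dialog for this cell's migration id.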
+    wxLogVerbose(" Migration cell %i pushed", (int)m_cellId);  // JMDBG
+    if (m_cellType == matrixcelltype_VALUE)
+    {
+        gcEventActor * migrationActor = new gcActor_MigrationEdit(m_cellId);
+        PublishScreenEvent(GetEventHandler(),migrationActor);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcDivCell::gcDivCell(wxWindow * parent, GCStructures & st_var, const size_t cellId, bool isParent, matrix_cell_type cellType)
+    :   gcClickCell(parent,""),
+        m_cellId(gcdefault::badIndex),
+        m_cellType(cellType)
+{
+    wxString dispstr;
+    switch (cellType){
+        case matrixcelltype_EMPTY:
+            dispstr = "";
+            AddText(dispstr);
+            Disable();
+            break;
+
+        case matrixcelltype_INVALID:
+            dispstr = "invalid";
+            AddText(dispstr);
+            Disable();
+            break;
+
+        case matrixcelltype_CORNER:
+            dispstr = "From >";
+            AddText(dispstr);
+            dispstr = "";
+            AddText(dispstr);
+            dispstr = "To v";
+            AddText(dispstr);
+            Disable();
+            break;
+
+        case matrixcelltype_LABEL:
+            if (!isParent)
+            {
+                dispstr = st_var.GetPop(cellId).GetName();
+            }
+            else
+            {
+                dispstr = st_var.GetParent(cellId).GetName();
+            }
+            AddText(dispstr);
+            Disable();
+            break;
+
+        case matrixcelltype_VALUE:
+            m_cellId = cellId;
+            AddText(wxString::Format(gcstr::migRate, st_var.GetMigration(m_cellId).GetStartValueString().c_str()));
+            AddText(wxString::Format(gcstr::migMethod, st_var.GetMigration(m_cellId).GetMethodString().c_str()));
+            AddText(wxString::Format(gcstr::migProfile, st_var.GetMigration(m_cellId).GetProfileAsString().c_str()));
+            AddText(wxString::Format(gcstr::migConstraint, st_var.GetMigration(m_cellId).GetConstraintString().c_str()));
+            break;
+
+        default:
+            dispstr = "undefined";
+    }
+    FinishSizing();
+}
+
+gcDivCell::~gcDivCell()
+{
+}
+
+void
+gcDivCell::ToDataStore(GCStructures & st_var, wxString newValue)
+{
+    //gcLocus & locusRef = dataStore.GetStructures().GetLocus(m_locusId);
+    //dataStore.GetStructures().Rename(locusRef,newName);
+}
+
+size_t
+gcDivCell::GetCellId()
+{
+    return m_cellId;
+}
+
+matrix_cell_type
+gcDivCell::GetCellType()
+{
+    return m_cellType;
+}
+
+void
+gcDivCell::NotifyLeftDClick()
+{
+    wxLogVerbose(" Migration cell %i pushed", (int)m_cellId);  // JMDBG
+    if (m_cellType == matrixcelltype_VALUE)
+    {
+        gcEventActor * migrationActor = new gcActor_MigrationEdit(m_cellId);
+        PublishScreenEvent(GetEventHandler(),migrationActor);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcMigTab::gcMigTab( wxWindow * parent, GCLogic & logic)
+    :
+    gcInfoPane(parent, logic, gcstr::migrationMatrix)
+    ,
+    m_parent(parent)
+{
+}
+
+gcMigTab::~gcMigTab()
+{
+}
+
+wxPanel *
+gcMigTab::MakeContent()
+{
+    wxPanel * newPanel = new wxPanel(   m_scrolled,
+                                        -1,
+                                        wxDefaultPosition,
+                                        wxDefaultSize,
+                                        wxTAB_TRAVERSAL);
+
+
+    GCMatrixDisplaySizer * mds = new GCMatrixDisplaySizer();
+    GCStructures & st_var = m_logic.GetStructures();
+
+    // convenience to make the code easier to read
+    gcDisplayOrder popids = st_var.GetDisplayablePopIds();
+    gcDisplayOrder parids = st_var.GetParentIds();
+    //objVector popsToDisplay    =  st_var.GetDisplayablePops();
+
+    if (st_var.GetDivergenceState() &&
+        (st_var.GetUnusedPopCount() + st_var.GetUnusedParentCount() < 2))
+    {
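+        // Build the divergence migration matrix: row 0 and column 0 hold the labels
+        // (populations first, then parents); every other cell is either a migration
+        // value cell or an 'invalid' placeholder when no migration exists.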
+        // make divergence migration matrix
+        st_var.MakeMigrationMatrix();
+
+        // Display Divergence Migration Matrix
+        int matrixDim = st_var.GetPopCount();
+        int ncell = 0; // for debug JRM
+
+        // first the corner square
+        mds->AddCell(new gcDivCell(newPanel, st_var, gcdefault::badIndex, false, matrixcelltype_CORNER),0,0);
+        ncell++;
+
+        // the left name column
+        size_t nrow = 0;
+        size_t ncol = 0;
+
+        // population names
+        for(gcDisplayOrder::iterator iter=popids.begin(); iter != popids.end(); iter++)
+        {
+            nrow++;
+            mds->AddCell(new gcDivCell(newPanel, st_var, *iter, false, matrixcelltype_LABEL), ncol, nrow);
+            ncell++;
+        }
+
+        // parent names
+        for(gcDisplayOrder::iterator iter=parids.begin(); iter != parids.end(); iter++)
+        {
+            nrow++;
+            mds->AddCell(new gcDivCell(newPanel, st_var, *iter, true, matrixcelltype_LABEL), ncol, nrow);
+            ncell++;
+        }
+
+        // now the rest of the matrix
+
+        // population columns
+        for(gcDisplayOrder::iterator iter=popids.begin(); iter != popids.end(); iter++)
+        {
+            ncol++;
+            nrow = 0;
+
+            // label
+            mds->AddCell(new gcDivCell(newPanel, st_var, *iter, false, matrixcelltype_LABEL), ncol, nrow);
+            ncell++;
+
+            // population data
+            for(gcDisplayOrder::iterator jter=popids.begin(); jter != popids.end(); jter++)
+            {
+                nrow++;
+                if (st_var.HasMigration(*iter,*jter))
+                {
+                    // hack to shut off FST if the user defined it because it will kill divergence
+                    if(st_var.GetMigration(*iter,*jter).GetMethod() == migmethod_FST)
+                    {
+                        st_var.GetMigration(*iter,*jter).SetMethod(migmethod_USER);
+                    }
+
+                    mds->AddCell(new gcDivCell(newPanel, st_var, st_var.GetMigration(*iter,*jter).GetId(), false, matrixcelltype_VALUE), ncol, nrow);
+                }
+                else
+                {
+                    mds->AddCell(new gcDivCell(newPanel, st_var,  gcdefault::badIndex, false, matrixcelltype_INVALID), ncol, nrow);
+                }
+                ncell++;
+            }
+
+            // parent data
+            for(gcDisplayOrder::iterator jter=parids.begin(); jter != parids.end(); jter++)
+            {
+                nrow++;
+                if (st_var.HasMigration(*iter,*jter))
+                {
+                    // hack to shut off FST if the user defined it because it will kill divergence
+                    // probably not necessary here, but better safe than dead
+                    if(st_var.GetMigration(*iter,*jter).GetMethod() == migmethod_FST)
+                    {
+                        st_var.GetMigration(*iter,*jter).SetMethod(migmethod_USER);
+                    }
+                    mds->AddCell(new gcDivCell(newPanel, st_var, st_var.GetMigration(*iter,*jter).GetId(), true, matrixcelltype_VALUE), ncol, nrow);
+                }
+                else
+                {
+                    mds->AddCell(new gcDivCell(newPanel, st_var,  gcdefault::badIndex, true, matrixcelltype_INVALID), ncol, nrow);
+                }
+                ncell++;
+            }
+
+        }
+
+        // parent columns
+        for(gcDisplayOrder::iterator iter=parids.begin(); iter != parids.end(); iter++)
+        {
+            ncol++;
+            nrow = 0;
+            // label
+            mds->AddCell(new gcDivCell(newPanel, st_var, *iter, true, matrixcelltype_LABEL), ncol, nrow);
+            ncell++;
+
+            // population data
+            for(gcDisplayOrder::iterator jter=popids.begin(); jter != popids.end(); jter++)
+            {
+                nrow++;
+                if (st_var.HasMigration(*iter,*jter))
+                {
+                    // hack to shut off FST if the user defined it because it will kill divergence
+                    if(st_var.GetMigration(*iter,*jter).GetMethod() == migmethod_FST)
+                    {
+                        st_var.GetMigration(*iter,*jter).SetMethod(migmethod_USER);
+                    }
+                    mds->AddCell(new gcDivCell(newPanel, st_var, st_var.GetMigration(*iter,*jter).GetId(), false, matrixcelltype_VALUE), ncol, nrow);
+                }
+                else
+                {
+                    mds->AddCell(new gcDivCell(newPanel, st_var,  gcdefault::badIndex, false, matrixcelltype_INVALID), ncol, nrow);
+                }
+                ncell++;
+            }
+
+            // parent data
+            for(gcDisplayOrder::iterator jter=parids.begin(); jter != parids.end(); jter++)
+            {
+                nrow++;
+                if (st_var.HasMigration(*iter,*jter))
+                {
+                    // hack to shut off FST if the user defined it because it will kill divergence
+                    // probably not necessary here, but better safe than dead
+                    if(st_var.GetMigration(*iter,*jter).GetMethod() == migmethod_FST)
+                    {
+                        st_var.GetMigration(*iter,*jter).SetMethod(migmethod_USER);
+                    }
+                    mds->AddCell(new gcDivCell(newPanel, st_var, st_var.GetMigration(*iter,*jter).GetId(), true, matrixcelltype_VALUE), ncol, nrow);
+                }
+                else
+                {
+                    mds->AddCell(new gcDivCell(newPanel, st_var,  gcdefault::badIndex, true, matrixcelltype_INVALID), ncol, nrow);
+                }
+                ncell++;
+            }
+
+        }
+
+        //        }
+        st_var.SetMigMatrixDefined(false);
+        st_var.SetDivMigMatrixDefined(true);
+    }
+    else
+    {
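+        // No divergence: build the plain population-by-population migration matrix,
+        // with the diagonal marked invalid.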
+        if (popids.size() > 0)
+        {
+            // make migration matrix
+            st_var.MakeMigrationMatrix();
+
+            // Display Migration Matrix
+            int matrixDim = st_var.GetPopCount();
+            int ncell = 0; // for debug JRM
+
+            // first the corner square
+            mds->AddCell(new gcMigCell(newPanel, st_var, gcdefault::badIndex, matrixcelltype_CORNER),0,0);
+            ncell++;
+
+            // the left name column
+            popids = st_var.GetDisplayablePopIds();
+            size_t nrow = 0;
+            size_t ncol = 0;
+            for(gcDisplayOrder::iterator iter=popids.begin(); iter != popids.end(); iter++)
+            {
+                nrow++;
+                mds->AddCell(new gcMigCell(newPanel, st_var, *iter, matrixcelltype_LABEL), ncol, nrow);
+                ncell++;
+            }
+
+            // now the rest of the matrix
+            for(gcDisplayOrder::iterator iter=popids.begin(); iter != popids.end(); iter++)
+            {
+                ncol++;
+                nrow = 0;
+
+                // first the label
+                mds->AddCell(new gcMigCell(newPanel, st_var, *iter, matrixcelltype_LABEL), ncol, nrow);
+                ncell++;
+
+                // now the data
+                for(gcDisplayOrder::iterator jter=popids.begin(); jter != popids.end(); jter++)
+                {
+                    nrow++;
+                    if (*iter == *jter)
+                    {
+                        mds->AddCell(new gcMigCell(newPanel, st_var,  gcdefault::badIndex, matrixcelltype_INVALID), ncol, nrow);
+                        ncell++;
+                    }
+                    else
+                    {
+                        mds->AddCell(new gcMigCell(newPanel, st_var, st_var.GetMigration(*iter,*jter).GetId(), matrixcelltype_VALUE), ncol, nrow);
+                        ncell++;
+                    }
+                }
+            }
+        }
+        st_var.SetMigMatrixDefined(true);
+        st_var.SetDivMigMatrixDefined(false);
+    }
+    newPanel->SetSizerAndFit(mds);
+    return newPanel;
+
+}
+
+wxString
+gcMigTab::MakeLabel()
+{
+    return m_panelLabelFmt;
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_migtab.h b/src/guiconv/gc_migtab.h
new file mode 100644
index 0000000..ca4c8c6
--- /dev/null
+++ b/src/guiconv/gc_migtab.h
@@ -0,0 +1,84 @@
+// $Id: gc_migtab.h,v 1.3 2014/08/29 18:14:55 mkkuhner Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/* NOTE:  8/29/2014 Mary edited gc_migtab.cpp to reverse the sense
+of "from" and "to" in the migration matrix, since it was producing
+XML files that were backward relative to what LAMARC assumes.  She did NOT
+change variable names or anything else to match, so there are probably
+misleading variable names in this part of the converter and in the
+corresponding XML output routines.  Maintainers beware!  DEBUG */
+
+#ifndef GC_MIGTAB_H
+#define GC_MIGTAB_H
+
+#include "gc_clickpanel.h"
+#include "gc_gridpanel.h"
+
+class wxWindow;
+
+
+class gcMigCell : public gcClickCell
+{
+  private:
+    gcMigCell();            // undefined
+    size_t                  m_cellId;
+    matrix_cell_type        m_cellType;
+
+  protected:
+  public:
+    gcMigCell(wxWindow * parent, GCStructures & st_var, const size_t cellId, matrix_cell_type cellType);
+    ~gcMigCell();
+
+    void        ToDataStore(GCStructures & st_var, wxString newText);
+
+    size_t       GetCellId();
+    matrix_cell_type   GetCellType();
+
+    void    NotifyLeftDClick();
+};
+
+class gcDivCell : public gcClickCell
+{
+  private:
+    gcDivCell();            // undefined
+    size_t                  m_cellId;
+    matrix_cell_type        m_cellType;
+
+  protected:
+  public:
+    gcDivCell(wxWindow * parent, GCStructures & st_var, const size_t cellId, bool isParent, matrix_cell_type cellType);
+    ~gcDivCell();
+
+    void        ToDataStore(GCStructures & st_var, wxString newText);
+
+    size_t       GetCellId();
+    matrix_cell_type   GetCellType();
+
+    void    NotifyLeftDClick();
+};
+
+class gcMigTab : public gcInfoPane
+{
+  private:
+    gcMigTab();        // undefined
+    wxWindow *         m_parent;
+
+  protected:
+    wxPanel *   MakeContent();
+    wxString    MakeLabel();
+
+  public:
+    gcMigTab(wxWindow * parent, GCLogic & logic);
+    virtual ~gcMigTab();
+};
+
+#endif  // GC_MIGTAB_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_panel_dialogs.cpp b/src/guiconv/gc_panel_dialogs.cpp
new file mode 100644
index 0000000..706322d
--- /dev/null
+++ b/src/guiconv/gc_panel_dialogs.cpp
@@ -0,0 +1,176 @@
+// $Id: gc_panel_dialogs.cpp,v 1.2 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_datastore.h"
+#include "gc_dialog.h"
+#include "gc_errhandling.h"
+#include "gc_layout.h"
+#include "gc_panel_dialogs.h"
+#include "gc_strings.h"
+
+#include "wx/checkbox.h"
+#include "wx/log.h"
+#include "wx/sizer.h"
+#include "wx/statbox.h"
+#include "wx/statline.h"
+#include "wx/textctrl.h"
+
+//------------------------------------------------------------------------------------
+
+gcPanelRename::gcPanelRename(size_t panelId)
+    :
+    m_panelId(panelId)
+{
+}
+
+gcPanelRename::~gcPanelRename()
+{
+}
+
+wxString
+gcPanelRename::FromDataStore(GCDataStore & dataStore)
+{
+    gcPanel & panelRef = dataStore.GetStructures().GetPanel(m_panelId);
+    return panelRef.GetName();
+}
+
+void
+gcPanelRename::ToDataStore(GCDataStore & dataStore, wxString newText)
+{
+    gcPanel & panelRef = dataStore.GetStructures().GetPanel(m_panelId);
+    dataStore.GetStructures().Rename(panelRef,newText);
+}
+
+//------------------------------------------------------------------------------------
+
+gcPanelMemberCount::gcPanelMemberCount(size_t panelId)
+    :
+    m_panelId(panelId)
+{
+}
+
+gcPanelMemberCount::~gcPanelMemberCount()
+{
+}
+
+wxString
+gcPanelMemberCount::FromDataStore(GCDataStore & dataStore)
+{
+    gcPanel & panelRef = dataStore.GetStructures().GetPanel(m_panelId);
+    return wxString::Format("%i",(int)panelRef.GetNumPanels());
+}
+
+void
+gcPanelMemberCount::ToDataStore(GCDataStore & dataStore, wxString newText)
+{
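+    // Only update the panel when the text parses as an integer; a successful edit
+    // also marks the panel as blessed.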
+    long longVal;
+    bool gotLong = newText.ToLong(&longVal);
+    if(gotLong)
+    {
+        gcPanel & panelRef = dataStore.GetStructures().GetPanel(m_panelId);
+        panelRef.SetNumPanels(longVal);
+        panelRef.SetBlessed(true);
+    }
+}
+
+const wxValidator &
+gcPanelMemberCount::GetValidator()
+{
+    return m_validator;
+}
+
+wxString
+gcPanelMemberCount::InitialString()
+{
+    return wxString::Format("0");
+}
+
+//------------------------------------------------------------------------------------
+
+gcPanelEditDialog::gcPanelEditDialog( wxWindow *      parent,
+                                      GCDataStore &   dataStore,
+                                      size_t          panelId,
+                                      bool            forJustCreatedObj)
+    :
+    gcUpdatingDialog(   parent,
+                        dataStore,
+                        forJustCreatedObj
+                        ?
+                        gcstr::addPanel
+                        :
+                        wxString::Format(gcstr::editPanel,
+                                         dataStore.GetStructures().GetPanel(panelId).GetName().c_str()),
+                        forJustCreatedObj),
+    m_panelId(panelId)
+{
+}
+
+gcPanelEditDialog::~gcPanelEditDialog()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+bool DoDialogEditPanel(wxWindow *      parent,
+                       GCDataStore &   dataStore,
+                       size_t          panelId,
+                       bool            forJustCreatedObj)
+{
+    wxLogVerbose("DoDialogEditPanel: %i",(int)panelId);  // JMDBG
+    gcPanelEditDialog dialog(parent,dataStore,panelId,forJustCreatedObj);
+
+    gcDialogCreator creator;
+    wxBoxSizer * contentSizer   = new wxBoxSizer(wxHORIZONTAL);
+    wxBoxSizer * leftSizer      = new wxBoxSizer(wxVERTICAL);
+
+    // for renaming
+    gcTextHelper * panelRenameHelp  = new gcPanelRename(panelId);
+    gcUpdatingComponent * rename    = new gcUpdatingTextCtrl(&dialog,
+                                                             gcstr::panelRename,
+                                                             panelRenameHelp);
+
+    // change count
+    gcTextHelper * memberCountHelp = new gcPanelMemberCount(panelId);
+    gcUpdatingComponent * memberCount    = new gcUpdatingTextCtrl(&dialog,
+                                                                  gcstr::panelMemberCount,
+                                                                  memberCountHelp);
+
+    leftSizer->Add(rename,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+
+    leftSizer->Add(memberCount,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+
+    creator.AddComponent(dialog,rename);
+    creator.AddComponent(dialog,memberCount);
+    contentSizer->Add(leftSizer,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+
+    creator.PlaceContent(dialog,contentSizer);
+    return dialog.Go();
+}
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_PanelEdit::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    wxLogVerbose("gcActor_PanelEdit::OperateOn m_panelId: %i",(int)m_panelId);  // JMDBG
+    return DoDialogEditPanel(parent,dataStore,m_panelId,false);
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_panel_dialogs.h b/src/guiconv/gc_panel_dialogs.h
new file mode 100644
index 0000000..cdf7fec
--- /dev/null
+++ b/src/guiconv/gc_panel_dialogs.h
@@ -0,0 +1,94 @@
+// $Id: gc_panel_dialogs.h,v 1.2 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PANEL_DIALOGS_H
+#define GC_PANEL_DIALOGS_H
+
+#include "gc_dialog.h"
+#include "gc_quantum.h"
+#include "gc_validators.h"
+
+class GCDataStore;
+class wxWindow;
+
+//------------------------------------------------------------------------------------
+
+class gcPanelRename : public gcTextHelper
+{
+  private:
+    gcPanelRename();       // undefined
+    size_t                  m_panelId;
+  protected:
+  public:
+    gcPanelRename(size_t panelId);
+    ~gcPanelRename();
+
+    wxString    FromDataStore(GCDataStore &);
+    void        ToDataStore(GCDataStore &, wxString newText);
+};
+
+//------------------------------------------------------------------------------------
+
+class gcPanelMemberCount : public gcTextHelper
+{
+  private:
+    gcPanelMemberCount();       // undefined
+    size_t                      m_panelId;
+    GCPositiveFloatValidator    m_validator;
+  protected:
+  public:
+    gcPanelMemberCount(size_t panelId);
+    ~gcPanelMemberCount();
+
+    wxString            FromDataStore(GCDataStore &);
+    void                ToDataStore(GCDataStore &, wxString newText);
+    const wxValidator & GetValidator();
+    wxString            InitialString();
+};
+//------------------------------------------------------------------------------------
+
+class gcPanelEditDialog : public gcUpdatingDialog
+{
+  private:
+  protected:
+    size_t          m_panelId;
+  public:
+    gcPanelEditDialog( wxWindow *      parentWindow,
+                       GCDataStore &   dataStore,
+                       size_t          panelId,
+                       bool            forJustCreatedObj);
+    virtual ~gcPanelEditDialog();
+};
+
+//------------------------------------------------------------------------------------
+
+bool DoDialogEditPanel(wxWindow *      parentWindow,
+                       GCDataStore &   dataStore,
+                       size_t          panelId,
+                       bool            forJustCreatedObj);
+
+
+
+//------------------------------------------------------------------------------------
+
+class gcActor_PanelEdit : public gcEventActor
+{
+  private:
+    gcActor_PanelEdit();       // undefined
+    size_t                      m_panelId;
+
+  public:
+    gcActor_PanelEdit(size_t panelId) : m_panelId(panelId) {};
+    virtual ~gcActor_PanelEdit() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+#endif  // GC_PANEL_DIALOGS_H
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_parent_dialogs.cpp b/src/guiconv/gc_parent_dialogs.cpp
new file mode 100644
index 0000000..823aef9
--- /dev/null
+++ b/src/guiconv/gc_parent_dialogs.cpp
@@ -0,0 +1,618 @@
+// $Id: gc_parent_dialogs.cpp,v 1.3 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_datastore.h"
+#include "gc_dialog.h"
+#include "gc_errhandling.h"
+#include "gc_layout.h"
+#include "gc_parent_dialogs.h"
+#include "gc_strings.h"
+#include "gc_structures_err.h"
+#include "gc_event_ids.h"
+#include "gc_event_publisher.h"
+
+#include "wx/checkbox.h"
+#include "wx/log.h"
+#include "wx/sizer.h"
+#include "wx/statbox.h"
+#include "wx/statline.h"
+#include "wx/textctrl.h"
+
+//------------------------------------------------------------------------------------
+
+gcUpdatingChooseTwo::gcUpdatingChooseTwo(
+    wxWindow *                      parent,
+    wxString                        instructions,
+    std::vector<gcChoiceObject*>    choices)
+    :
+    gcUpdatingChooseMulti(parent,instructions,choices)
+{
+}
+
+gcUpdatingChooseTwo::~gcUpdatingChooseTwo()
+{
+}
+
+void
+gcUpdatingChooseTwo::BuildDisplay(GCDataStore & dataStore)
+{
+    gcUpdatingChoose::BuildDisplay(dataStore);
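+    // With more than two candidates, offer an 'Unselect All' button; with two or
+    // fewer, pre-check every enabled box, since both must be chosen anyway.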
+
+    wxBoxSizer * buttonSizer = new wxBoxSizer(wxHORIZONTAL);
+    buttonSizer->AddStretchSpacer(1);
+    wxLogVerbose("gcUpdatingChooseTwo::BuildDisplay m_choices.size(): %i", (int)m_choices.size());  // JMDBG
+    if (m_choices.size() > 2)
+    {
+        buttonSizer->Add(new wxButton(this,GC_UnselectAll,gcstr::buttonUnselectAll),
+                         0,
+                         wxALL | wxALIGN_CENTER ,
+                         gclayout::borderSizeSmall);
+    }
+    else
+    {
+        for(size_t index=0; index < m_choices.size(); index++)
+        {
+            gcChoiceObject * choice = m_choices[index];
+            wxWindow * window = choice->FetchWindow();
+            wxCheckBox * cb = dynamic_cast<wxCheckBox*>(window);
+            if(cb->IsEnabled())
+            {
+                cb->SetValue(1);
+            }
+        }
+    }
+
+    m_statBoxSizer->AddStretchSpacer(1);
+    m_statBoxSizer->Add(buttonSizer,
+                        0,
+                        wxALL | wxALIGN_LEFT | wxALIGN_TOP | wxEXPAND ,
+                        gclayout::borderSizeSmall);
+}
+
+void
+gcUpdatingChooseTwo::OnCheck(wxCommandEvent & event)
+{
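+    // Enforce the choose-exactly-two rule: once two boxes are checked, disable the
+    // remaining unchecked boxes; when the count drops below two, re-enable them all.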
+    wxLogVerbose("gcUpdatingChooseTwo::OnCheck called");  // JMDBG
+    wxObject * obj = event.GetEventObject();
+    int npicked = 0;
+    for(size_t index=0; index < m_choices.size(); index++)
+    {
+        gcChoiceObject * choice = m_choices[index];
+        wxWindow * window = choice->FetchWindow();
+        wxCheckBox * cb = dynamic_cast<wxCheckBox*>(window);
+        assert(cb != NULL);
+        if(cb->GetValue() > 0)
+        {
+            npicked++;
+        }
+    }
+
+    if(event.IsChecked())
+    {
+        if (npicked >= 2)
+        {
+            for(size_t index=0; index < m_choices.size(); index++)
+            {
+                gcChoiceObject * choice = m_choices[index];
+                wxWindow * window = choice->FetchWindow();
+                wxCheckBox * cb = dynamic_cast<wxCheckBox*>(window);
+                assert(cb != NULL);
+                if(cb->GetValue() <= 0)
+                {
+                    cb->Enable(false);
+                }
+            }
+        }
+    }
+    else
+    {
+        if (npicked < 2)
+        {
+            for(size_t index=0; index < m_choices.size(); index++)
+            {
+                gcChoiceObject * choice = m_choices[index];
+                wxWindow * window = choice->FetchWindow();
+                wxCheckBox * cb = dynamic_cast<wxCheckBox*>(window);
+                assert(cb != NULL);
+                cb->Enable(true);
+            }
+        }
+    }
+}
+
+void
+gcUpdatingChooseTwo::UpdateDataFinal(GCDataStore & dataStore)
+{
+    gcUpdatingChoose::UpdateDataFinal(dataStore);
+
+    gcIdVec checkedIds;
+    for(size_t index=0; index < m_choices.size(); index++)
+    {
+        gcChoiceObject * choice = m_choices[index];
+        wxWindow * window = choice->FetchWindow();
+        wxCheckBox * cb = dynamic_cast<wxCheckBox*>(window);
+        assert(cb != NULL);
+        if(cb->GetValue() > 0)
+        {
+            checkedIds.push_back(choice->GetRelevantId());
+        }
+    }
+
+    wxLogVerbose("****gcUpdatingChooseTwo::DoDataFinal checkedIds.size(): %i", (int)checkedIds.size());  // JMDBG
+    if(checkedIds.size() != 2)
+    {
+        throw gc_wrong_divergence_error();
+    }
+    else
+    {
+        DoFinalForMulti(dataStore, checkedIds);
+    }
+}
+
+void
+gcUpdatingChooseTwo::OnUnselectAll(wxCommandEvent & event)
+{
+    wxLogVerbose("gcUpdatingChooseTwo::OnUnselectAll m_choices.size(): %i", (int)m_choices.size());  // JMDBG
+    for(size_t index=0; index < m_choices.size(); index++)
+    {
+        gcChoiceObject * ckbox = m_choices[index];
+        wxWindow * window = ckbox->FetchWindow();
+        wxCheckBox * cb = dynamic_cast<wxCheckBox*>(window);
+        cb->SetValue(0);
+        cb->Enable(true);
+    }
+}
+
+void
+gcUpdatingChooseTwo::DoFinalForMulti(GCDataStore & dataStore, gcIdVec checkedIds)
+{
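+    // Record the two checked ids as the children of the parent being edited, give
+    // any population children a display order, and recurse via ParentLevelUp() when
+    // a child is itself a parent.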
+    wxLogVerbose("****gcUpdatingChooseTwo::DoFinalForMulti parentId: %i", (int)m_childParentId);  // JMDBG
+    gcParent & parent = dataStore.GetStructures().GetParent(m_childParentId);
+
+    // find how many populations already have display order set
+    int nPopsHaveOrder = 0;
+    gcDisplayOrder popids = dataStore.GetStructures().GetDisplayablePopIds();
+    for(gcDisplayOrder::iterator iter=popids.begin(); iter != popids.end(); iter++)
+    {
+        size_t id = *iter;
+        if (dataStore.GetStructures().GetPop(*iter).HasDispOrder())
+        {
+            nPopsHaveOrder++;
+        }
+    }
+
+    // set child 1 parent
+    gcIdVec::iterator iter = checkedIds.begin();
+    size_t child1Id = *iter;
+    //wxLogVerbose("    Child1Id: %i", (int)child1Id);  // JMDBG
+    parent.SetChild1Id(child1Id);
+    int nPopsFound = 0;
+    if (dataStore.GetStructures().IsPop(child1Id))
+    {
+        nPopsFound++;
+        dataStore.GetStructures().GetPop(child1Id).SetParentId(m_childParentId);
+        nPopsHaveOrder++;
+        dataStore.GetStructures().GetPop(child1Id).SetDispOrder(nPopsHaveOrder);
+    }
+
+    // set child 2 parent
+    iter++;
+    size_t child2Id = *iter;
+    //wxLogVerbose("    Child2Id: %i", (int)child2Id);  // JMDBG
+    parent.SetChild2Id(child2Id);
+    if (dataStore.GetStructures().IsPop(child2Id))
+    {
+        nPopsFound++;
+        dataStore.GetStructures().GetPop(child2Id).SetParentId(m_childParentId);
+        nPopsHaveOrder++;
+        dataStore.GetStructures().GetPop(child2Id).SetDispOrder(nPopsHaveOrder);
+    }
+
+    // at least one child is itself a parent, so recurse down the tree
+    if (nPopsFound < 2)
+    {
+        //wxLogVerbose("Call ParentLevelUp");
+        ParentLevelUp(dataStore, m_childParentId);
+    }
+
+    wxLogVerbose("****gcUpdatingChooseTwo::DoFinalForMulti parent: %s", parent.GetName().c_str());  // JMDBG
+    wxLogVerbose("    child1: %s child2: %s parentId: %i level: %i", parent.GetChild1IdString().c_str(), parent.GetChild2IdString().c_str(), (int)parent.GetParentId(), parent.GetDispLevel());  // JMDBG
+
+    int nunused = dataStore.GetStructures().GetUnusedPopCount() + dataStore.GetStructures().GetUnusedParentCount();
+    //wxLogVerbose("*==*gcUpdatingChooseTwo::DoFinalForMulti nunused = %i", nunused);  // JMDBG
+}
+
+void
+gcUpdatingChooseTwo::ParentLevelUp(GCDataStore & dataStore, size_t parentId)
+{
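+    // Walk down the tree: any child that is itself a parent gets its parent id set
+    // and its display level incremented, then we recurse into it.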
+    if (!dataStore.GetStructures().IsPop(dataStore.GetStructures().GetParent(parentId).GetChild1Id()))
+    {
+        size_t child1Id = dataStore.GetStructures().GetParent(parentId).GetChild1Id();
+        dataStore.GetStructures().GetParent(child1Id).SetParentId(parentId);
+        dataStore.GetStructures().GetParent(child1Id).SetDispLevel( dataStore.GetStructures().GetParent(child1Id).GetDispLevel() + 1);
+        //wxLogVerbose("  ParentLevelUp Child 1 parent: %s new level: %i",  dataStore.GetStructures().GetParent(child1Id).GetName().c_str(), dataStore.GetStructures().GetParent(child1Id).GetDispLevel());  // JMDBG
+        ParentLevelUp(dataStore, child1Id);
+    }
+
+    if (!dataStore.GetStructures().IsPop(dataStore.GetStructures().GetParent(parentId).GetChild2Id()))
+    {
+        size_t child2Id = dataStore.GetStructures().GetParent(parentId).GetChild2Id();
+        dataStore.GetStructures().GetParent(child2Id).SetParentId(parentId);
+        dataStore.GetStructures().GetParent(child2Id).SetDispLevel( dataStore.GetStructures().GetParent(child2Id).GetDispLevel() + 1);
+        //wxLogVerbose("  ParentLevelUp Child 2 parent: %s new level: %i", dataStore.GetStructures().GetParent(child2Id).GetName().c_str(), dataStore.GetStructures().GetParent(child2Id).GetDispLevel());  // JMDBG
+        ParentLevelUp(dataStore, child2Id);
+    }
+    return;
+}
+
+void
+gcUpdatingChooseTwo::SetChildParentId(size_t parentId)
+{
+    m_childParentId = parentId;
+}
+
+size_t
+gcUpdatingChooseTwo::GetChildParentId()
+{
+    return m_childParentId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcParentChildChoice::gcParentChildChoice(size_t popId, size_t parId)
+    :
+    m_popId(popId),
+    m_parId(parId)
+{
+}
+
+gcParentChildChoice::~gcParentChildChoice()
+{
+}
+
+wxString
+gcParentChildChoice::GetLabel(GCDataStore & dataStore)
+{
+    gcPopulation & popRef = dataStore.GetStructures().GetPop(m_popId);
+    return popRef.GetName();
+}
+
+bool
+gcParentChildChoice::GetEnabled(GCDataStore & dataStore)
+{
+    return true;
+}
+
+void
+gcParentChildChoice::SetEnabled(GCDataStore & dataStore, bool value)
+{
+}
+
+bool
+gcParentChildChoice::GetSelected(GCDataStore & dataStore)
+{
+    return true;
+}
+
+void
+gcParentChildChoice::SetSelected(GCDataStore & dataStore, bool value)
+{
+}
+
+void
+gcParentChildChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    assert(m_box != NULL);
+
+    // if it parsed, it's a legal choice
+    m_box->Enable(true);
+
+    // display settings next to check box
+    if (m_popId != gcdefault::badIndex)
+    {
+        const gcPopulation & popRef = dataStore.GetStructures().GetPop(m_popId);
+        m_box->SetLabel(popRef.GetName());
+    }
+
+    if (m_parId != gcdefault::badIndex)
+    {
+        const gcParent & parRef = dataStore.GetStructures().GetParent(m_parId);
+        m_box->SetLabel(parRef.GetName());
+    }
+
+    int nbox = dataStore.GetStructures().GetUnusedPopCount() + dataStore.GetStructures().GetUnusedParentCount();
+    //int nbox = dataStore.GetStructures().GetPopCount();
+    //wxLogVerbose("UpdateDisplayInitial Unused npop: %i",dataStore.GetStructures().GetUnusedPopCount());  // JMDBG
+    //wxLogVerbose("UpdateDisplayInitial Unused npar: %i",dataStore.GetStructures().GetUnusedParentCount());  // JMDBG
+    wxLogVerbose("UpdateDisplayInitial nbox: %i",nbox);  // JMDBG
+
+    // start unchecked when more than two candidates remain; with two or fewer, pre-check the box
+    if (nbox > 2)
+    {
+        m_box->SetValue(0);
+    }
+    else
+    {
+        m_box->SetValue(1);
+    }
+}
+
+void
+gcParentChildChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+// no changes needed since all pop merges are always legal
+{
+    wxLogVerbose("gcParentChildChoice::UpdateDisplayInterim called");  // JMDBG
+}
+
+void
+gcParentChildChoice::UpdateDataInterim(GCDataStore & dataStore)
+// all action happens at the enclosing set of choices
+// and only at final update
+{
+    wxLogVerbose("gcParentChildChoice::UpdateDataInterim called");  // JMDBG
+}
+
+void
+gcParentChildChoice::UpdateDataFinal(GCDataStore & dataStore)
+// all action happens at the enclosing set of choices
+{
+    wxLogVerbose("gcParentChildChoice::UpdateDataFinal called");  // JMDBG
+}
+
+wxWindow *
+gcParentChildChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,"");
+    return m_box;
+}
+
+wxWindow *
+gcParentChildChoice::FetchWindow()
+{
+    return m_box;
+}
+
+size_t
+gcParentChildChoice::GetRelevantId()
+{
+    if (m_popId != gcdefault::badIndex)
+    {
+        return m_popId;
+    }
+    return m_parId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcParentChild::gcParentChild( wxWindow *                    parent,
+                              size_t                          parentId,
+                              std::vector<gcChoiceObject*>    choices)
+    :
+    gcUpdatingChooseTwo(parent, gcstr::divergeInstructions, choices)
+{
+    SetChildParentId(parentId);
+}
+
+gcParentChild::~gcParentChild()
+{
+}
+
+wxString
+gcParentChild::NoChoicesText() const
+{
+    return gcstr::noChoicePopulation;
+}
+
+//------------------------------------------------------------------------------------
+
+gcParentRename::gcParentRename(size_t parentId)
+    :
+    m_parentId(parentId)
+{
+}
+
+gcParentRename::~gcParentRename()
+{
+}
+
+wxString
+gcParentRename::FromDataStore(GCDataStore & dataStore)
+{
+    gcParent & parentRef = dataStore.GetStructures().GetParent(m_parentId);
+    return parentRef.GetName();
+}
+
+void
+gcParentRename::ToDataStore(GCDataStore & dataStore, wxString newText)
+{
+    newText.Replace(" ","_");
+    gcParent & parentRef = dataStore.GetStructures().GetParent(m_parentId);
+    wxLogVerbose("gcParentRename::ToDataStore: %i",(int)m_parentId);  // JMDBG
+    wxLogVerbose("old name: %s \nnew name: %s", parentRef.GetName().c_str(), newText.c_str());  // JMDBG
+    dataStore.GetStructures().Rename(parentRef,newText);
+    wxLogVerbose("after rename: %s ", parentRef.GetName().c_str());  // JMDBG
+
+}
+//------------------------------------------------------------------------------------
+
+//BEGIN_EVENT_TABLE(gcParentEditDialog, gcUpdatingDialog)
+//EVT_BUTTON( wxID_APPLY,        gcParentEditDialog::OnApply )
+//END_EVENT_TABLE()
+
+gcParentEditDialog::gcParentEditDialog( wxWindow *      parent,
+                                        GCDataStore &   dataStore,
+                                        size_t          parentId,
+                                        wxString        title,
+                                        bool            forJustCreatedObj)
+    :
+    gcUpdatingDialog(   parent,
+                        dataStore,
+                        forJustCreatedObj
+                        ?
+                        title
+                        :
+                        gcstr::editParent,
+                        forJustCreatedObj),
+    m_parentId(parentId),
+    m_dataStore(dataStore)
+{
+}
+
+gcParentEditDialog::~gcParentEditDialog()
+{
+}
+
+#if 0
+void
+gcParentEditDialog::OnApply(wxCommandEvent & event)
+{
+    int eventId = event.GetId();
+    wxLogVerbose("*==*in gcParentEditDialog::OnApply event: %i", eventId);  // JMDBG
+
+    // m_dataStore is a reference member, so no NULL check is needed here.
+    int nunused = m_dataStore.GetStructures().GetUnusedPopCount() + m_dataStore.GetStructures().GetUnusedParentCount();
+    wxLogVerbose("*==*gcUpdatingChooseTwo::OnApply nunused = %i", nunused);  // JMDBG
+
+    event.Skip();
+}
+#endif
+
+//------------------------------------------------------------------------------------
+
+bool DoDialogEditParent(wxWindow *      parent,
+                        GCDataStore &   dataStore,
+                        size_t          parentId,
+                        bool            forJustCreatedObj)
+{
+    wxLogVerbose("DoDialogEditParent: %i",(int)parentId);  // JMDBG
+    gcParentEditDialog dialog(parent,dataStore,parentId,gcstr::createParent2Child,forJustCreatedObj);
+
+    // build the dialog
+    gcDialogCreator creator;
+    wxBoxSizer * contentSizer   = new wxBoxSizer(wxHORIZONTAL);
+    //wxBoxSizer * leftSizer      = new wxBoxSizer(wxVERTICAL);
+
+    // for renaming
+    gcTextHelper * parentRenameHelp  = new gcParentRename(parentId);
+    gcUpdatingComponent * rename    = new gcUpdatingTextCtrl(&dialog,
+                                                             gcstr::parentRename,
+                                                             parentRenameHelp);
+    contentSizer->Add(rename,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    creator.AddComponent(dialog,rename);
+
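+    // For a freshly created parent, also offer the candidate children: every
+    // population and parent that does not yet have a parent of its own.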
+    if (forJustCreatedObj)
+    {
+        // show potential children from populations
+        std::vector<gcChoiceObject*> parChoices;
+        gcDisplayOrder popids = dataStore.GetStructures().GetDisplayablePopIds();
+        for(gcDisplayOrder::iterator iter=popids.begin(); iter != popids.end(); iter++)
+        {
+            size_t id = *iter;
+            if (!dataStore.GetStructures().GetPop(id).HasParent())
+            {
+                wxLogVerbose("DoDialogEditParent add to list popid: %i",(int)id);  // JMDBG
+                gcParentChildChoice * choice = new gcParentChildChoice(id, gcdefault::badIndex);
+                parChoices.push_back(choice);
+            }
+        }
+
+        gcDisplayOrder parids = dataStore.GetStructures().GetParentIds();
+        for(gcDisplayOrder::iterator iter=parids.begin(); iter != parids.end(); iter++)
+        {
+            size_t id = *iter;
+            if (id != parentId)
+            {
+                if (!dataStore.GetStructures().GetParent(id).HasParent())
+                {
+                    wxLogVerbose("DoDialogEditParent add to list parid: %i",(int)id);  // JMDBG
+                    gcParentChildChoice * choice = new gcParentChildChoice(gcdefault::badIndex, id);
+                    parChoices.push_back(choice);
+                }
+            }
+        }
+
+
+        // add potential children from parents
+        gcUpdatingComponent * right = new gcParentChild( &dialog,parentId,parChoices);
+        contentSizer->Add(new wxStaticLine(&dialog,-1,wxDefaultPosition,wxDefaultSize,wxLI_VERTICAL),
+                          0,
+                          wxALL | wxALIGN_CENTER | wxEXPAND,
+                          gclayout::borderSizeSmall);
+        contentSizer->Add(right,
+                          1,
+                          wxALL | wxALIGN_CENTER | wxEXPAND,
+                          gclayout::borderSizeSmall);
+        creator.AddComponent(dialog,right);
+    }
+
+    creator.PlaceContent(dialog,contentSizer);
+    return dialog.Go();
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_ParentNew::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    wxLogVerbose("gcActor_ParentNew::OperateOn");  // JMDBG
+    bool state = true;
+    int nunused = dataStore.GetStructures().GetUnusedPopCount() + dataStore.GetStructures().GetUnusedParentCount();
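+    // Toggle behaviour: if divergence is already on and fewer than two unattached
+    // populations/parents remain, turn divergence off and discard all parents;
+    // otherwise create the next parent and open its edit dialog.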
+
+    if (dataStore.GetStructures().GetDivergenceState())
+    {
+        if (nunused < 2)
+        {
+            wxLogVerbose("gcActor_ParentNew::OperateOn Divergence State = true");  // JMDBG
+            // delete all parents
+            dataStore.GetStructures().SetDivergenceState(false);
+            dataStore.GetStructures().RemoveParents();
+            dataStore.GetStructures().ClearPopDisplayOrder();
+            return state;
+        }
+        else
+        {
+            wxString parName = wxString::Format("Parent_%i",(int)(dataStore.GetStructures().GetParentCount()+1));
+            gcParent nextParent = dataStore.GetStructures().MakeParent(parName);
+            wxLogVerbose("repeat gcActor_ParentNew::OperateOn nunused: %i Calling DoDialogEditParent parent: %s ParId: %i window: %i", nunused, parName.c_str(), (int)nextParent.GetId(), parent->GetId());  // JMDBG
+
+            return DoDialogEditParent(parent,dataStore,nextParent.GetId(),true);
+        }
+    }
+    else
+    {
+        wxLogVerbose("gcActor_ParentNew::OperateOn Divergence State = false");  // JMDBG
+        dataStore.GetStructures().SetDivergenceState(true);
+        // make parent
+        wxString parName = wxString::Format("Parent_%i",dataStore.GetStructures().GetParentCount()+1);
+        gcParent nextParent = dataStore.GetStructures().MakeParent(parName);
+        wxLogVerbose("first gcActor_ParentNew::OperateOn nunused: %i Calling DoDialogEditParent parent: %s ParId: %i window: %i", nunused, parName.c_str(), (int)nextParent.GetId(), parent->GetId());
+        return DoDialogEditParent(parent,dataStore,nextParent.GetId(),true);
+
+    }
+    return state;
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_ParentEdit::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    wxLogVerbose("gcActor_ParentEdit::OperateOn m_parentId: %i",(int)m_parentId);  // JMDBG
+    return DoDialogEditParent(parent,dataStore,m_parentId,false);
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_parent_dialogs.h b/src/guiconv/gc_parent_dialogs.h
new file mode 100644
index 0000000..e73c820
--- /dev/null
+++ b/src/guiconv/gc_parent_dialogs.h
@@ -0,0 +1,157 @@
+// $Id: gc_parent_dialogs.h,v 1.2 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_PARENT_DIALOGS_H
+#define GC_PARENT_DIALOGS_H
+
+#include "gc_dialog.h"
+#include "gc_quantum.h"
+#include "gc_validators.h"
+#include "gc_default.h"
+
+class GCDataStore;
+class wxWindow;
+
+//------------------------------------------------------------------------------------
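+// Multi-selection choice list used when picking a divergence parent's children
+// (the name suggests the selection is limited to two); it also records which
+// parent currently owns the chosen children.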
+class gcUpdatingChooseTwo : public gcUpdatingChooseMulti
+{
+  private:
+    size_t m_childParentId;
+  public:
+    gcUpdatingChooseTwo(  wxWindow *                      parent,
+                          wxString                        instructions,
+                          std::vector<gcChoiceObject*>    choices);
+    virtual ~gcUpdatingChooseTwo();
+    virtual void    BuildDisplay(GCDataStore &);
+    virtual void    OnCheck(wxCommandEvent &);
+    virtual void    UpdateDataFinal(GCDataStore &);
+    virtual void    DoFinalForMulti(GCDataStore &, gcIdVec checkedIds);
+    virtual void    OnUnselectAll(wxCommandEvent &);
+    void    SetChildParentId(size_t childParentId);
+    void    ParentLevelUp(GCDataStore & dataStore, size_t parentId);
+    size_t  GetChildParentId ();
+};
+
+//------------------------------------------------------------------------------------
+
+class gcParentRename : public gcTextHelper
+{
+  private:
+    size_t m_parentId;
+  public:
+    gcParentRename(size_t parentId);
+    ~gcParentRename();
+
+    wxString    FromDataStore(GCDataStore &);
+    void        ToDataStore(GCDataStore &, wxString newText);
+};
+
+//------------------------------------------------------------------------------------
+
+class gcParentChildChoice : public gcChoiceObject
+{
+  private:
+    gcParentChildChoice();         // undefined
+    size_t                      m_popId;
+    size_t                      m_parId;
+    wxCheckBox *                m_box;
+  protected:
+  public:
+    gcParentChildChoice(size_t popId, size_t parentId);
+    ~gcParentChildChoice();
+
+    wxString    GetLabel(GCDataStore &);
+    bool        GetEnabled(GCDataStore &);
+    void        SetEnabled(GCDataStore &, bool value);
+    bool        GetSelected(GCDataStore &);
+    void        SetSelected(GCDataStore &, bool value);
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent);
+    wxWindow *  FetchWindow();
+
+    size_t      GetRelevantId();
+
+};
+
+//------------------------------------------------------------------------------------
+
+class gcParentChild : public gcUpdatingChooseTwo
+{
+  public:
+    gcParentChild(wxWindow *                      parent,
+                  size_t                          parentId,
+                  std::vector<gcChoiceObject*>    choices);
+    ~gcParentChild();
+
+    wxString    NoChoicesText() const;
+};
+
+//------------------------------------------------------------------------------------
+
+class gcParentEditDialog : public gcUpdatingDialog //public wxDialog
+{
+  private:
+  protected:
+    size_t          m_parentId;
+    GCDataStore &   m_dataStore;  //needed so it can decide whether to recurse or exit in OnButton
+  public:
+    gcParentEditDialog( wxWindow *      parentWindow,
+                        GCDataStore &   dataStore,
+                        size_t          parentId,
+                        wxString        title,
+                        bool            forJustCreatedObj);
+    virtual ~gcParentEditDialog();
+    //virtual void    OnApply(wxCommandEvent & event);
+
+    //DECLARE_EVENT_TABLE()
+};
+
+//------------------------------------------------------------------------------------
+
+bool DoDialogEditParent(wxWindow *      parentWindow,
+                        GCDataStore &   dataStore,
+                        size_t          parentId,
+                        bool            forJustCreatedObj);
+
+
+
+//------------------------------------------------------------------------------------
+
+class gcActor_ParentNew : public gcEventActor
+{
+  private:
+
+  public:
+    gcActor_ParentNew() {};
+    virtual ~gcActor_ParentNew() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+//------------------------------------------------------------------------------------
+
+class gcActor_ParentEdit : public gcEventActor
+{
+  private:
+    size_t                      m_parentId;
+
+  public:
+    gcActor_ParentEdit(size_t parentId) : m_parentId(parentId) {};
+    //gcActor_ParentEdit(parentId) : m_parentId(parentId) {};
+    virtual ~gcActor_ParentEdit() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+#endif  // GC_PARENT_DIALOGS_H
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_poptab.cpp b/src/guiconv/gc_poptab.cpp
new file mode 100644
index 0000000..ea140f4
--- /dev/null
+++ b/src/guiconv/gc_poptab.cpp
@@ -0,0 +1,81 @@
+// $Id: gc_poptab.cpp,v 1.9 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_event_publisher.h"
+#include "gc_poptab.h"
+#include "gc_logic.h"
+#include "gc_population_dialogs.h"
+#include "gc_strings.h"
+
+#include "wx/log.h"
+
+//------------------------------------------------------------------------------------
+
+gcPopPane::gcPopPane(wxWindow * parent)
+    :
+    gcGridPane(parent,1,0)
+{
+}
+
+gcPopPane::~gcPopPane()
+{
+}
+
+void
+gcPopPane::NotifyLeftDClick(size_t row, size_t col)
+{
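+    // A double-click on a row publishes an edit event for the population
+    // displayed in that row.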
+    assert(row < m_objVec.size());
+    size_t popId = m_objVec[row];
+    gcEventActor * popEditActor = new gcActor_Pop_Edit(popId);
+    PublishScreenEvent(GetEventHandler(),popEditActor);
+}
+
+//------------------------------------------------------------------------------------
+
+gcPopTab::gcPopTab( wxWindow * parent, GCLogic & logic)
+    :
+    gcInfoPane(parent, logic, gcstr::popTabTitle)
+{
+}
+
+gcPopTab::~gcPopTab()
+{
+}
+
+wxPanel *
+gcPopTab::MakeContent()
+{
+    gcGridPane * pane = new gcPopPane(m_scrolled);
+    objVector pops = m_logic.GetStructures().GetDisplayablePops();
+    for(objVector::iterator iter=pops.begin(); iter != pops.end(); iter++)
+        // for each population
+    {
+        GCQuantum * quantumP = *iter;
+        GCPopulation * popP = dynamic_cast<GCPopulation*>(quantumP);
+        assert(popP != NULL);
+        wxString popName = popP->GetName();
+        wxArrayString labels;
+        labels.Add(popName);
+        pane->AddRow(quantumP->GetId(),labels);
+    }
+    pane->Finish();
+    return pane;
+
+}
+
+wxString
+gcPopTab::MakeLabel()
+{
+    return wxString::Format(m_panelLabelFmt,(int)m_logic.GetStructures().GetDisplayablePopIds().size());
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_poptab.h b/src/guiconv/gc_poptab.h
new file mode 100644
index 0000000..e148a9d
--- /dev/null
+++ b/src/guiconv/gc_poptab.h
@@ -0,0 +1,46 @@
+// $Id: gc_poptab.h,v 1.7 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_POPTAB_H
+#define GC_POPTAB_H
+
+#include "gc_gridpanel.h"
+
+class wxWindow;
+
+class gcPopPane : public gcGridPane
+{
+  private:
+    gcPopPane();            // undefined
+  protected:
+  public:
+    gcPopPane(wxWindow * parent);
+    virtual ~gcPopPane();
+
+    virtual void NotifyLeftDClick(size_t row, size_t col);
+};
+
+class gcPopTab : public gcInfoPane
+{
+  private:
+    gcPopTab();        // undefined
+
+  protected:
+    wxPanel *   MakeContent();
+    wxString    MakeLabel();
+
+  public:
+    gcPopTab(wxWindow * parent, GCLogic & logic);
+    virtual ~gcPopTab();
+};
+
+#endif  // GC_POPTAB_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_population_dialogs.cpp b/src/guiconv/gc_population_dialogs.cpp
new file mode 100644
index 0000000..db99e32
--- /dev/null
+++ b/src/guiconv/gc_population_dialogs.cpp
@@ -0,0 +1,285 @@
+// $Id: gc_population_dialogs.cpp,v 1.22 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_datastore.h"
+#include "gc_errhandling.h"
+#include "gc_event_ids.h"
+#include "gc_dialog.h"
+#include "gc_layout.h"
+#include "gc_population.h"
+#include "gc_population_dialogs.h"
+#include "gc_strings.h"
+#include "gc_structures_err.h"
+
+#include "wx/checkbox.h"
+#include "wx/log.h"
+#include "wx/sizer.h"
+#include "wx/statbox.h"
+#include "wx/statline.h"
+#include "wx/textctrl.h"
+
+//------------------------------------------------------------------------------------
+
+gcPopMergeChoice::gcPopMergeChoice(size_t popId)
+    :
+    m_popId(popId),
+    m_box(NULL)
+{
+}
+
+gcPopMergeChoice::~gcPopMergeChoice()
+{
+}
+
+#if 0
+
+wxString
+gcPopMergeChoice::GetLabel(GCDataStore & dataStore)
+{
+    gcPopulation & popRef = dataStore.GetStructures().GetPop(m_popId);
+    return popRef.GetName();
+}
+
+bool
+gcPopMergeChoice::GetEnabled(GCDataStore & dataStore)
+{
+    return true;
+}
+
+bool
+gcPopMergeChoice::GetSelected(GCDataStore & dataStore)
+{
+    return true;
+}
+
+void
+gcPopMergeChoice::SetSelected(GCDataStore & dataStore, bool value)
+{
+}
+
+#endif
+
+void
+gcPopMergeChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    assert(m_box != NULL);
+
+    // if it parsed, it's a legal choice
+    m_box->Enable(true);
+
+    // display settings next to check box
+    const gcPopulation & popRef = dataStore.GetStructures().GetPop(m_popId);
+    m_box->SetLabel(popRef.GetName());
+
+    // we start with nothing checked
+    m_box->SetValue(false);
+}
+
+void
+gcPopMergeChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+// no changes needed since all pop merges are always legal
+{
+}
+
+void
+gcPopMergeChoice::UpdateDataInterim(GCDataStore & dataStore)
+// all action happens at the enclosing set of choices
+// and only at final update
+{
+}
+
+void
+gcPopMergeChoice::UpdateDataFinal(GCDataStore & dataStore)
+// all action happens at the enclosing set of choices
+{
+}
+
+wxWindow *
+gcPopMergeChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,"");
+    return m_box;
+}
+
+wxWindow *
+gcPopMergeChoice::FetchWindow()
+{
+    return m_box;
+}
+
+size_t
+gcPopMergeChoice::GetRelevantId()
+{
+    return m_popId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcPopMerge::gcPopMerge( wxWindow *                      parent,
+                        size_t                          popId,
+                        std::vector<gcChoiceObject*>    choices)
+    :
+    gcUpdatingChooseMulti(parent,gcstr::mergePopsInstructions,choices),
+    m_popId(popId)
+{
+}
+
+gcPopMerge::~gcPopMerge()
+{
+}
+
+void
+gcPopMerge::DoFinalForMulti(GCDataStore & dataStore, gcIdVec chosens)
+{
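+    // Put the population this dialog was opened for at the front of the list
+    // before handing the whole set to MergePops().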
+    chosens.insert(chosens.begin(),m_popId);
+    dataStore.GetStructures().MergePops(chosens);
+}
+
+wxString
+gcPopMerge::NoChoicesText() const
+{
+    return gcstr::noChoicePopulation;
+}
+
+//------------------------------------------------------------------------------------
+
+gcPopRename::gcPopRename(size_t popId)
+    :
+    m_popId(popId)
+{
+}
+
+gcPopRename::~gcPopRename()
+{
+}
+
+wxString
+gcPopRename::FromDataStore(GCDataStore & dataStore)
+{
+    gcPopulation & popRef = dataStore.GetStructures().GetPop(m_popId);
+    return popRef.GetName();
+}
+
+void
+gcPopRename::ToDataStore(GCDataStore & dataStore, wxString newText)
+{
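+    // Spaces in the new name are converted to underscores before the rename
+    // is applied.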
+    newText.Replace(" ","_");
+    gcPopulation & popRef = dataStore.GetStructures().GetPop(m_popId);
+    dataStore.GetStructures().Rename(popRef,newText);
+}
+
+//------------------------------------------------------------------------------------
+
+gcPopEditDialog::gcPopEditDialog(   wxWindow *      parent,
+                                    GCDataStore &   dataStore,
+                                    size_t          popId,
+                                    bool            forJustCreatedObj)
+    :
+    gcUpdatingDialog(   parent,
+                        dataStore,
+                        forJustCreatedObj
+                        ?
+                        gcstr::addPop
+                        :
+                        wxString::Format(gcstr::editPop,
+                                         dataStore.GetStructures().GetPop(popId).GetName().c_str()),
+                        forJustCreatedObj),
+    m_popId(popId)
+{
+}
+
+gcPopEditDialog::~gcPopEditDialog()
+{
+}
+
+void
+gcPopEditDialog::DoDelete()
+{
+    gcPopulation & popRef = m_dataStore.GetStructures().GetPop(m_popId);
+    m_dataStore.GetStructures().RemovePop(popRef);
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+DoDialogEditPop(wxWindow *      parentWindow,
+                GCDataStore &   dataStore,
+                size_t          popId,
+                bool            forJustCreatedObj)
+{
+    gcPopEditDialog dialog(parentWindow,dataStore,popId,forJustCreatedObj);
+
+    // build the dialog
+    gcDialogCreator creator;
+    wxBoxSizer * contentSizer = new wxBoxSizer(wxHORIZONTAL);
+
+    // editing the name always ok
+    gcTextHelper * popRenameHelp = new gcPopRename(popId);
+    gcUpdatingComponent * rename = new gcUpdatingTextCtrl(&dialog,
+                                                          forJustCreatedObj ? gcstr::populationNewName : gcstr::populationRename,
+                                                          popRenameHelp);
+    contentSizer->Add(rename,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+    creator.AddComponent(dialog,rename);
+
+    // merging pops only for existing pops
+    if(!forJustCreatedObj)
+    {
+        std::vector<gcChoiceObject*> popChoices;
+        gcDisplayOrder ids = dataStore.GetStructures().GetDisplayablePopIds();
+        for(gcDisplayOrder::iterator iter=ids.begin(); iter != ids.end(); iter++)
+        {
+            size_t id = *iter;
+            if(id != popId)
+            {
+                gcPopMergeChoice * choice = new gcPopMergeChoice(id);
+                popChoices.push_back(choice);
+            }
+        }
+
+        gcUpdatingComponent * right = new gcPopMerge( &dialog, popId,popChoices);
+        contentSizer->Add(new wxStaticLine(&dialog,-1,wxDefaultPosition,wxDefaultSize,wxLI_VERTICAL),
+                          0,
+                          wxALL | wxALIGN_CENTER | wxEXPAND,
+                          gclayout::borderSizeSmall);
+        contentSizer->Add(right,
+                          1,
+                          wxALL | wxALIGN_CENTER | wxEXPAND,
+                          gclayout::borderSizeSmall);
+        creator.AddComponent(dialog,right);
+    }
+
+    creator.PlaceContent(dialog,contentSizer);
+
+    // invoke the dialog
+    return dialog.Go();
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_PopAdd::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    gcPopulation & popRef = dataStore.GetStructures().MakePop("",true);
+    return DoDialogEditPop(parent,dataStore,popRef.GetId(),true);
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_Pop_Edit::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    return DoDialogEditPop(parent,dataStore,m_popId,false);
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_population_dialogs.h b/src/guiconv/gc_population_dialogs.h
new file mode 100644
index 0000000..873df3f
--- /dev/null
+++ b/src/guiconv/gc_population_dialogs.h
@@ -0,0 +1,118 @@
+// $Id: gc_population_dialogs.h,v 1.16 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_POPULATION_DIALOGS_H
+#define GC_POPULATION_DIALOGS_H
+
+#include "gc_quantum.h"
+#include "gc_dialog.h"
+
+class GCDataStore;
+class gcPopulation;
+class wxWindow;
+
+class gcPopMergeChoice : public gcChoiceObject
+{
+  private:
+    gcPopMergeChoice();         // undefined
+    size_t                      m_popId;
+    wxCheckBox *                m_box;
+  protected:
+  public:
+    gcPopMergeChoice(size_t popId);
+    ~gcPopMergeChoice();
+
+#if 0
+    wxString    GetLabel(GCDataStore &);
+    bool        GetEnabled(GCDataStore &);
+    bool        GetSelected(GCDataStore &);
+    void        SetSelected(GCDataStore &, bool value);
+#endif
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent);
+    wxWindow *  FetchWindow();
+
+    size_t      GetRelevantId();
+
+};
+
+class gcPopMerge : public gcUpdatingChooseMulti
+{
+  protected:
+    size_t          m_popId;
+  public:
+    gcPopMerge( wxWindow *                      parent,
+                size_t                          popId,
+                std::vector<gcChoiceObject*>    choices);
+    virtual ~gcPopMerge();
+
+    void DoFinalForMulti(GCDataStore & dataStore, gcIdVec chosenPops);
+    wxString    NoChoicesText() const;
+};
+
+class gcPopRename : public gcTextHelper
+{
+  private:
+  protected:
+    size_t          m_popId;
+  public:
+    gcPopRename(size_t popId);
+    ~gcPopRename();
+
+    wxString    FromDataStore(GCDataStore &);
+    void        ToDataStore(GCDataStore &, wxString newText);
+};
+
+class gcPopEditDialog : public gcUpdatingDialog
+{
+  private:
+  protected:
+    size_t          m_popId;
+    void DoDelete();
+  public:
+    gcPopEditDialog(wxWindow *      parentWindow,
+                    GCDataStore &   dataStore,
+                    size_t          popId,
+                    bool            forJustCreatedObj);
+    virtual ~gcPopEditDialog();
+};
+
+bool DoDialogEditPop(   wxWindow *      parentWindow,
+                        GCDataStore &   dataStore,
+                        size_t          popId,
+                        bool            forJustCreatedObj);
+
+class gcActor_PopAdd : public gcEventActor
+{
+  public:
+    gcActor_PopAdd() {};
+    virtual ~gcActor_PopAdd() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+class gcActor_Pop_Edit : public gcEventActor
+{
+  private:
+    gcActor_Pop_Edit();     // undefined
+    size_t                  m_popId;
+  public:
+    gcActor_Pop_Edit(size_t popId) : m_popId(popId) {};
+    virtual ~gcActor_Pop_Edit() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+#endif  // GC_POPULATION_DIALOGS_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_quantum.cpp b/src/guiconv/gc_quantum.cpp
new file mode 100644
index 0000000..f0e79d8
--- /dev/null
+++ b/src/guiconv/gc_quantum.cpp
@@ -0,0 +1,103 @@
+// $Id: gc_quantum.cpp,v 1.15 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_default.h"
+#include "gc_quantum.h"
+#include "wx/log.h"
+
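+// Shared counter used to hand out a unique id to every GCQuantum instance.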
+size_t GCQuantum::s_objCount = 0;
+
+GCQuantum::GCQuantum()
+    :
+    m_objId(s_objCount++),
+    m_name(gcdefault::unnamedObject),
+    m_selected(false)
+{
+}
+
+#if 0
+GCQuantum::GCQuantum(wxString name)
+    :
+    m_objId(s_objCount++),
+    m_name(name),
+    m_selected(false)
+{
+}
+#endif
+
+GCQuantum::~GCQuantum()
+{
+}
+
+size_t
+GCQuantum::GetId() const
+{
+    return m_objId;
+}
+
+#if 0
+
+bool
+GCQuantum::GetSelected() const
+{
+    return m_selected;
+}
+
+void
+GCQuantum::SetSelected(bool selected)
+{
+    m_selected=selected;
+}
+
+#endif
+
+void
+GCQuantum::DebugDump(wxString prefix) const
+{
+    wxLogDebug("%sid:%d",prefix.c_str(),(int)m_objId);  // EWDUMPOK
+}
+
+wxString
+GCQuantum::GetName() const
+{
+    return m_name;
+}
+
+void
+GCQuantum::SetName(wxString name)
+{
+    m_name = name;
+}
+
+void
+GCQuantum::ReportMax()
+{
+    wxLogDebug("created %d numbered objects",(int)s_objCount);  // EWDUMPOK
+}
+
+//------------------------------------------------------------------------------------
+
+GCClientData::GCClientData(gcEventActor * myActor)
+    :   m_myActor(myActor)
+{
+}
+
+GCClientData::~GCClientData()
+{
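+    // The client data owns its actor and deletes it on destruction.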
+    delete m_myActor;
+}
+
+gcEventActor *
+GCClientData::GetActor()
+{
+    return m_myActor;
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_quantum.h b/src/guiconv/gc_quantum.h
new file mode 100644
index 0000000..3174b27
--- /dev/null
+++ b/src/guiconv/gc_quantum.h
@@ -0,0 +1,71 @@
+// $Id: gc_quantum.h,v 1.15 2012/06/30 01:32:41 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_QUANTUM_H
+#define GC_QUANTUM_H
+
+#include "wx/clntdata.h"
+#include "wx/string.h"
+
+class GCQuantum
+{
+  private:
+    static size_t       s_objCount;
+  protected:
+    size_t              m_objId;
+    wxString            m_name;
+    bool                m_selected;
+  public:
+    GCQuantum();
+
+#if 0
+    GCQuantum(wxString name);
+#endif
+
+    virtual ~GCQuantum();
+
+    size_t      GetId()                         const;
+    void        DebugDump(wxString prefix=wxEmptyString)   const;
+    virtual wxString    GetName()                       const;
+    virtual void        SetName(wxString name);
+
+#if 0
+    virtual bool        GetSelected()                   const;
+    virtual void        SetSelected(bool selected);
+#endif
+
+    static  void        ReportMax();
+};
+
+class wxWindow;
+class GCDataStore;
+
+class gcEventActor
+{
+  public:
+    gcEventActor() {};
+    virtual ~gcEventActor() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & store) = 0;
+};
+
+class GCClientData : public wxClientData
+{
+  private:
+    GCClientData();         // undefined
+    gcEventActor *          m_myActor;
+  public:
+    GCClientData(gcEventActor * myActor);
+    virtual ~GCClientData();
+    gcEventActor *  GetActor() ;
+};
+
+#endif  // GC_QUANTUM_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_region_dialogs.cpp b/src/guiconv/gc_region_dialogs.cpp
new file mode 100644
index 0000000..13ab694
--- /dev/null
+++ b/src/guiconv/gc_region_dialogs.cpp
@@ -0,0 +1,418 @@
+// $Id: gc_region_dialogs.cpp,v 1.24 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_datastore.h"
+#include "gc_dialog.h"
+#include "gc_errhandling.h"
+#include "gc_layout.h"
+#include "gc_region_dialogs.h"
+#include "gc_strings.h"
+
+#include "wx/checkbox.h"
+#include "wx/log.h"
+#include "wx/sizer.h"
+#include "wx/statbox.h"
+#include "wx/statline.h"
+#include "wx/textctrl.h"
+
+//------------------------------------------------------------------------------------
+
+gcRegionRename::gcRegionRename(size_t regionId)
+    :
+    m_regionId(regionId)
+{
+}
+
+gcRegionRename::~gcRegionRename()
+{
+}
+
+wxString
+gcRegionRename::FromDataStore(GCDataStore & dataStore)
+{
+    gcRegion & regionRef = dataStore.GetStructures().GetRegion(m_regionId);
+    return regionRef.GetName();
+}
+
+void
+gcRegionRename::ToDataStore(GCDataStore & dataStore, wxString newText)
+{
+    gcRegion & regionRef = dataStore.GetStructures().GetRegion(m_regionId);
+    dataStore.GetStructures().Rename(regionRef,newText);
+}
+
+//------------------------------------------------------------------------------------
+
+gcRegionEffPopSize::gcRegionEffPopSize(size_t regionId)
+    :
+    m_regionId(regionId)
+{
+}
+
+gcRegionEffPopSize::~gcRegionEffPopSize()
+{
+}
+
+wxString
+gcRegionEffPopSize::FromDataStore(GCDataStore & dataStore)
+{
+    gcRegion & regionRef = dataStore.GetStructures().GetRegion(m_regionId);
+    if(regionRef.HasEffectivePopulationSize())
+    {
+        return wxString::Format("%f",(double)regionRef.GetEffectivePopulationSize());
+    }
+    return gcstr::unsetValueRegionEffectivePopulationSize;
+}
+
+void
+gcRegionEffPopSize::ToDataStore(GCDataStore & dataStore, wxString newText)
+{
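+    // Only update the stored size when the text parses as a double; otherwise
+    // leave the previous value untouched.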
+    double doubleVal;
+    bool gotFloat = newText.ToDouble(&doubleVal);
+    if(gotFloat)
+    {
+        gcRegion & regionRef = dataStore.GetStructures().GetRegion(m_regionId);
+        regionRef.SetEffectivePopulationSize(doubleVal);
+    }
+}
+
+const wxValidator &
+gcRegionEffPopSize::GetValidator()
+{
+    return m_validator;
+}
+
+wxString
+gcRegionEffPopSize::InitialString()
+{
+    return gcstr::unsetValueRegionEffectivePopulationSize;
+}
+
+//------------------------------------------------------------------------------------
+
+gcRegionMergeChoice::gcRegionMergeChoice(   size_t  choiceRegionId,
+                                            size_t  dialogRegionId)
+    :
+    m_choiceRegionId(choiceRegionId),
+    m_dialogRegionId(dialogRegionId),
+    m_box(NULL)
+{
+}
+
+gcRegionMergeChoice::~gcRegionMergeChoice()
+{
+}
+
+void
+gcRegionMergeChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    gcRegion & regionRef = dataStore.GetStructures().GetRegion(m_choiceRegionId);
+
+    m_box->SetLabel(regionRef.GetName());
+    m_box->SetValue(false);
+
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcRegionMergeChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
+    gcRegion & choiceRegion = dataStore.GetStructures().GetRegion(m_choiceRegionId);
+    gcRegion & dialogRegion = dataStore.GetStructures().GetRegion(m_dialogRegionId);
+    m_box->Enable(choiceRegion.CanMergeWith(dialogRegion));
+}
+
+void
+gcRegionMergeChoice::UpdateDataInterim(GCDataStore & dataStore)
+// nothing to do until the end at the enclosing level
+{
+}
+
+void
+gcRegionMergeChoice::UpdateDataFinal(GCDataStore & dataStore)
+// nothing to do until the end at the enclosing level
+{
+}
+
+wxWindow *
+gcRegionMergeChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,wxEmptyString);
+    return m_box;
+}
+
+wxWindow *
+gcRegionMergeChoice::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcRegionMergeChoice::GetRelevantId()
+{
+    return m_choiceRegionId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcRegionMerge::gcRegionMerge(   wxWindow *                      parent,
+                                size_t                          regionId,
+                                std::vector<gcChoiceObject*>    choices)
+    :
+    gcUpdatingChooseMulti(parent,gcstr::mergeLinkGInstructions,choices),
+    m_regionId(regionId)
+{
+}
+
+gcRegionMerge::~gcRegionMerge()
+{
+}
+
+void
+gcRegionMerge::DoFinalForMulti(GCDataStore & dataStore, gcIdVec chosens)
+{
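+    // As with population merges, the region this dialog edits is placed first
+    // in the list passed to MergeRegions().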
+    chosens.insert(chosens.begin(),m_regionId);
+    dataStore.GetStructures().MergeRegions(chosens);
+}
+
+wxString
+gcRegionMerge::NoChoicesText() const
+{
+    return gcstr::noChoiceRegion;
+}
+
+//------------------------------------------------------------------------------------
+
+gcRegionFragmentChoice::gcRegionFragmentChoice(size_t regionId)
+    :   m_regionId(regionId),
+        m_box(NULL)
+{
+}
+
+gcRegionFragmentChoice::~gcRegionFragmentChoice()
+{
+}
+
+void
+gcRegionFragmentChoice::UpdateDisplayInitial(GCDataStore & dataStore)
+{
+    m_box->SetLabel(gcstr::fragmentRegion);
+    m_box->SetValue(false);
+    UpdateDisplayInterim(dataStore);
+}
+
+void
+gcRegionFragmentChoice::UpdateDisplayInterim(GCDataStore & dataStore)
+{
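+    // Fragmenting only makes sense when the region holds more than one locus.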
+    gcRegion & region = dataStore.GetStructures().GetRegion(m_regionId);
+    m_box->Enable(region.GetLocusCount() > 1);
+}
+
+void
+gcRegionFragmentChoice::UpdateDataInterim(GCDataStore & dataStore)
+{
+}
+
+void
+gcRegionFragmentChoice::UpdateDataFinal(GCDataStore & dataStore)
+{
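+    // Fragment only when the box is both checked and enabled (it is disabled
+    // for single-locus regions).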
+    if(m_box->GetValue() && m_box->IsEnabled())
+    {
+        dataStore.GetStructures().FragmentRegion(m_regionId);
+    }
+}
+
+wxWindow *
+gcRegionFragmentChoice::MakeWindow(wxWindow * parent)
+{
+    m_box = new wxCheckBox(parent,-1,wxEmptyString);
+    return m_box;
+}
+
+wxWindow *
+gcRegionFragmentChoice::FetchWindow()
+{
+    assert(m_box != NULL);
+    return m_box;
+}
+
+size_t
+gcRegionFragmentChoice::GetRelevantId()
+{
+    return m_regionId;
+}
+
+//------------------------------------------------------------------------------------
+
+gcRegionFragmenter::gcRegionFragmenter( wxWindow *                      parent,
+                                        size_t                          regionId,
+                                        std::vector<gcChoiceObject*>    choices)
+
+    :
+    gcUpdatingChoose(parent,wxEmptyString,choices),
+    m_regionId(regionId)
+{
+}
+
+gcRegionFragmenter::~gcRegionFragmenter()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+gcRegionEditDialog::gcRegionEditDialog( wxWindow *      parent,
+                                        GCDataStore &   dataStore,
+                                        size_t          regionId,
+                                        bool            forJustCreatedObj)
+    :
+    gcUpdatingDialog(   parent,
+                        dataStore,
+                        forJustCreatedObj
+                        ?
+                        gcstr::addRegion
+                        :
+                        wxString::Format(gcstr::editRegion,
+                                         dataStore.GetStructures().GetRegion(regionId).GetName().c_str()),
+                        forJustCreatedObj),
+    m_regionId(regionId)
+{
+}
+
+gcRegionEditDialog::~gcRegionEditDialog()
+{
+}
+
+void
+gcRegionEditDialog::DoDelete()
+{
+    gcRegion & regionRef = m_dataStore.GetStructures().GetRegion(m_regionId);
+    m_dataStore.GetStructures().RemoveRegion(regionRef);
+}
+
+//------------------------------------------------------------------------------------
+
+bool DoDialogEditRegion(wxWindow *      parent,
+                        GCDataStore &   dataStore,
+                        size_t          regionId,
+                        bool            forJustCreatedObj)
+{
+    gcRegionEditDialog dialog(parent,dataStore,regionId,forJustCreatedObj);
+
+    gcDialogCreator creator;
+    wxBoxSizer * contentSizer   = new wxBoxSizer(wxHORIZONTAL);
+    wxBoxSizer * leftSizer      = new wxBoxSizer(wxVERTICAL);
+
+    // for renaming
+    gcTextHelper * regionRenameHelp = new gcRegionRename(regionId);
+    gcUpdatingComponent * rename    = new gcUpdatingTextCtrl(&dialog,
+                                                             forJustCreatedObj ? gcstr::regionNewName : gcstr::regionRename,
+                                                             regionRenameHelp);
+
+    gcTextHelper * effPopHelp = new gcRegionEffPopSize(regionId);
+    gcUpdatingComponent * effPop    = new gcUpdatingTextCtrl(&dialog,
+                                                             gcstr::regionEffPopSize,
+                                                             effPopHelp);
+
+    leftSizer->Add(rename,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+
+    leftSizer->Add(effPop,
+                   0,
+                   wxALL | wxALIGN_CENTER | wxEXPAND,
+                   gclayout::borderSizeSmall);
+
+    creator.AddComponent(dialog,rename);
+    creator.AddComponent(dialog,effPop);
+    contentSizer->Add(leftSizer,
+                      1,
+                      wxALL | wxALIGN_CENTER | wxEXPAND,
+                      gclayout::borderSizeSmall);
+
+    if(!forJustCreatedObj)
+    {
+        wxBoxSizer * rightSizer    = new wxBoxSizer(wxVERTICAL);
+
+        // everything for merging
+        std::vector<gcChoiceObject*> regionChoices;
+        gcDisplayOrder ids = dataStore.GetStructures().GetDisplayableRegionIds();
+        for(gcDisplayOrder::iterator iter=ids.begin(); iter != ids.end(); iter++)
+        {
+            size_t id = *iter;
+            if(id != regionId)
+            {
+                const gcRegion & regionChoice = dataStore.GetStructures().GetRegion(id);
+                if(regionChoice.GetLocusCount() > 1  || dataStore.GetStructures().RegionHasAnyLinkedLoci(id))
+                    // don't put up single locus regions containing unlinked data only
+                {
+                    gcRegionMergeChoice * choice = new gcRegionMergeChoice(id,regionId);
+                    regionChoices.push_back(choice);
+                }
+            }
+        }
+        gcUpdatingComponent * merging = new gcRegionMerge(&dialog,regionId,regionChoices);
+
+        // for fragmenting
+        std::vector<gcChoiceObject*> fragmentChoices;
+        fragmentChoices.push_back(new gcRegionFragmentChoice(regionId));
+        gcUpdatingComponent * fragmenting = new gcRegionFragmenter(&dialog,regionId,fragmentChoices);
+
+        rightSizer->Add(merging,
+                        1,
+                        wxALL | wxALIGN_CENTER | wxEXPAND,
+                        gclayout::borderSizeSmall);
+        rightSizer->Add(new wxStaticLine(&dialog,-1,wxDefaultPosition,wxDefaultSize,wxLI_HORIZONTAL),
+                        0,
+                        wxALL | wxALIGN_CENTER | wxEXPAND,
+                        gclayout::borderSizeSmall);
+        rightSizer->Add(fragmenting,
+                        0,
+                        wxALL | wxALIGN_CENTER | wxEXPAND,
+                        gclayout::borderSizeSmall);
+        creator.AddComponent(dialog,merging);
+        creator.AddComponent(dialog,fragmenting);
+
+        contentSizer->Add(new wxStaticLine(&dialog,-1,wxDefaultPosition,wxDefaultSize,wxLI_VERTICAL),
+                          0,
+                          wxALL | wxALIGN_CENTER | wxEXPAND,
+                          gclayout::borderSizeSmall);
+
+        contentSizer->Add(rightSizer,
+                          1,
+                          wxALL | wxALIGN_CENTER | wxEXPAND,
+                          gclayout::borderSizeSmall);
+    }
+
+    creator.PlaceContent(dialog,contentSizer);
+    return dialog.Go();
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_RegionAdd::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    gcRegion & regionRef = dataStore.GetStructures().MakeRegion("",true);
+    return DoDialogEditRegion(parent,dataStore,regionRef.GetId(),true);
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+gcActor_RegionEdit::OperateOn(wxWindow * parent, GCDataStore & dataStore)
+{
+    return DoDialogEditRegion(parent,dataStore,m_regionId,false);
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_region_dialogs.h b/src/guiconv/gc_region_dialogs.h
new file mode 100644
index 0000000..3b10c40
--- /dev/null
+++ b/src/guiconv/gc_region_dialogs.h
@@ -0,0 +1,202 @@
+// $Id: gc_region_dialogs.h,v 1.14 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_REGION_DIALOGS_H
+#define GC_REGION_DIALOGS_H
+
+#include "gc_dialog.h"
+#include "gc_quantum.h"
+#include "gc_validators.h"
+
+class GCDataStore;
+class wxWindow;
+
+//------------------------------------------------------------------------------------
+
+class gcRegionRename : public gcTextHelper
+{
+  private:
+    gcRegionRename();       // undefined
+    size_t                  m_regionId;
+  protected:
+  public:
+    gcRegionRename(size_t regionId);
+    ~gcRegionRename();
+
+    wxString    FromDataStore(GCDataStore &);
+    void        ToDataStore(GCDataStore &, wxString newText);
+};
+
+//------------------------------------------------------------------------------------
+
+class gcRegionEffPopSize : public gcTextHelper
+{
+  private:
+    gcRegionEffPopSize();       // undefined
+    size_t                      m_regionId;
+    GCPositiveFloatValidator    m_validator;
+  protected:
+  public:
+    gcRegionEffPopSize(size_t regionId);
+    ~gcRegionEffPopSize();
+
+    wxString            FromDataStore(GCDataStore &);
+    void                ToDataStore(GCDataStore &, wxString newText);
+    const wxValidator & GetValidator();
+    wxString            InitialString();
+};
+
+//------------------------------------------------------------------------------------
+
+class gcRegionSamples : public gcTextHelper
+{
+  private:
+    gcRegionSamples();       // undefined
+    size_t                      m_regionId;
+    GCPositiveFloatValidator    m_validator;
+  protected:
+  public:
+    gcRegionSamples(size_t regionId);
+    ~gcRegionSamples();
+
+    wxString            FromDataStore(GCDataStore &);
+    void                ToDataStore(GCDataStore &, wxString newText);
+    const wxValidator & GetValidator();
+    wxString            InitialString();
+};
+
+//------------------------------------------------------------------------------------
+
+class gcRegionMergeChoice : public gcChoiceObject
+{
+  private:
+    gcRegionMergeChoice();      // undefined
+    size_t                      m_choiceRegionId;
+    size_t                      m_dialogRegionId;
+    wxCheckBox *                m_box;
+  protected:
+  public:
+    gcRegionMergeChoice(size_t choiceRegionId, size_t dialogRegionId);
+    ~gcRegionMergeChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &) ;
+    void        UpdateDisplayInterim    (GCDataStore &) ;
+    void        UpdateDataInterim       (GCDataStore &) ;
+    void        UpdateDataFinal         (GCDataStore &) ;
+
+    wxWindow *  MakeWindow(wxWindow * parent)           ;
+    wxWindow *  FetchWindow()                           ;
+
+    size_t      GetRelevantId();
+};
+
+//------------------------------------------------------------------------------------
+
+class gcRegionMerge : public gcUpdatingChooseMulti
+{
+  private:
+    size_t          m_regionId;
+  protected:
+  public:
+    gcRegionMerge(  wxWindow *                      parent,
+                    size_t                          regionId,
+                    std::vector<gcChoiceObject*>    choices);
+    ~gcRegionMerge();
+
+    void    DoFinalForMulti(GCDataStore & dataStore, gcIdVec selectedChoices);
+    wxString    NoChoicesText() const;
+};
+
+//------------------------------------------------------------------------------------
+
+class gcRegionFragmentChoice : public gcChoiceObject
+{
+  private:
+    gcRegionFragmentChoice();
+  protected:
+    size_t                      m_regionId;
+    wxCheckBox *                m_box;
+  public:
+    gcRegionFragmentChoice(size_t regionId);
+    ~gcRegionFragmentChoice();
+
+    void        UpdateDisplayInitial    (GCDataStore &);
+    void        UpdateDisplayInterim    (GCDataStore &);
+    void        UpdateDataInterim       (GCDataStore &);
+    void        UpdateDataFinal         (GCDataStore &);
+
+    wxWindow *  MakeWindow(wxWindow * parent);
+    wxWindow *  FetchWindow();
+
+    size_t      GetRelevantId();
+};
+
+class gcRegionFragmenter : public gcUpdatingChoose
+{
+  private:
+    size_t          m_regionId;
+  protected:
+  public:
+    gcRegionFragmenter( wxWindow *                      parent,
+                        size_t                          regionId,
+                        std::vector<gcChoiceObject*>    choices);
+    ~gcRegionFragmenter();
+};
+
+//------------------------------------------------------------------------------------
+
+class gcRegionEditDialog : public gcUpdatingDialog
+{
+  private:
+  protected:
+    size_t          m_regionId;
+    void DoDelete();
+  public:
+    gcRegionEditDialog( wxWindow *      parentWindow,
+                        GCDataStore &   dataStore,
+                        size_t          regionId,
+                        bool            forJustCreatedObj);
+    virtual ~gcRegionEditDialog();
+};
+
+//------------------------------------------------------------------------------------
+
+bool DoDialogEditRegion(wxWindow *      parentWindow,
+                        GCDataStore &   dataStore,
+                        size_t          regionId,
+                        bool            forJustCreatedObj);
+
+//------------------------------------------------------------------------------------
+
+class gcActor_RegionAdd : public gcEventActor
+{
+  public:
+    gcActor_RegionAdd() {};
+    virtual ~gcActor_RegionAdd() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+//------------------------------------------------------------------------------------
+
+class gcActor_RegionEdit : public gcEventActor
+{
+  private:
+    gcActor_RegionEdit();       // undefined
+    size_t                      m_regionId;
+
+  public:
+    gcActor_RegionEdit(size_t regionId) : m_regionId(regionId) {};
+    virtual ~gcActor_RegionEdit() {};
+    virtual bool OperateOn(wxWindow * parent, GCDataStore & dataStore);
+};
+
+#endif  // GC_REGION_DIALOGS_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_regiontab.cpp b/src/guiconv/gc_regiontab.cpp
new file mode 100644
index 0000000..c8435fc
--- /dev/null
+++ b/src/guiconv/gc_regiontab.cpp
@@ -0,0 +1,93 @@
+// $Id: gc_regiontab.cpp,v 1.43 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_event_publisher.h"
+#include "gc_logic.h"
+#include "gc_regiontab.h"
+#include "gc_region_dialogs.h"
+#include "gc_strings.h"
+#include "gc_strings_region.h"
+#include "gc_structures.h"
+
+//------------------------------------------------------------------------------------
+
+gcRegionPane::gcRegionPane(wxWindow * parent)
+    :
+    gcGridPane(parent,3,0)  // EWFIX.P3 -- constant
+{
+}
+
+gcRegionPane::~gcRegionPane()
+{
+}
+
+void
+gcRegionPane::NotifyLeftDClick(size_t row, size_t col)
+{
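+    // A double-click on a row publishes an edit event for the region displayed
+    // in that row.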
+    assert(row < m_objVec.size());
+    size_t regionId = m_objVec[row];
+    gcEventActor * regionEditActor = new gcActor_RegionEdit(regionId);
+    PublishScreenEvent(GetEventHandler(),regionEditActor);
+}
+
+//------------------------------------------------------------------------------------
+
+gcRegionTab::gcRegionTab( wxWindow * parent, GCLogic & logic)
+    :
+    gcInfoPane(parent, logic, gcstr_region::tabTitle)
+{
+}
+
+gcRegionTab::~gcRegionTab()
+{
+}
+
+wxPanel *
+gcRegionTab::MakeContent()
+{
+    gcGridPane * pane = new gcRegionPane(m_scrolled);
+
+    objVector regions = m_logic.GetStructures().GetDisplayableRegions();
+    for(objVector::iterator iter=regions.begin(); iter != regions.end(); iter++)
+    {
+        GCQuantum * quantumP = *iter;
+        gcRegion * regionP = dynamic_cast<gcRegion*>(quantumP);
+        assert(regionP != NULL);
+
+        wxArrayString labels;
+
+        labels.Add(regionP->GetName());
+        labels.Add(wxString::Format(gcstr_region::numLoci,(int)(regionP->GetLocusCount())));
+        if(regionP->HasEffectivePopulationSize())
+        {
+            labels.Add(wxString::Format(gcstr_region::effPopSize,regionP->GetEffectivePopulationSize()));
+        }
+        else
+        {
+            labels.Add("");
+        }
+
+        pane->AddRow(quantumP->GetId(),labels);
+    }
+
+    pane->Finish();
+    return pane;
+
+}
+
+wxString
+gcRegionTab::MakeLabel()
+{
+    return wxString::Format(m_panelLabelFmt,(int)m_logic.GetStructures().GetDisplayableRegionIds().size());
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_regiontab.h b/src/guiconv/gc_regiontab.h
new file mode 100644
index 0000000..eb98a3a
--- /dev/null
+++ b/src/guiconv/gc_regiontab.h
@@ -0,0 +1,46 @@
+// $Id: gc_regiontab.h,v 1.29 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_REGIONTAB_H
+#define GC_REGIONTAB_H
+
+#include "gc_gridpanel.h"
+
+class wxWindow;
+
+class gcRegionPane : public gcGridPane
+{
+  private:
+    gcRegionPane();            // undefined
+  protected:
+  public:
+    gcRegionPane(wxWindow * parent);
+    virtual ~gcRegionPane();
+
+    virtual void NotifyLeftDClick(size_t row, size_t col);
+};
+
+class gcRegionTab : public gcInfoPane
+{
+  private:
+    gcRegionTab();        // undefined
+
+  protected:
+    wxPanel *   MakeContent();
+    wxString    MakeLabel();
+
+  public:
+    gcRegionTab(wxWindow * parent, GCLogic & logic);
+    virtual ~gcRegionTab();
+};
+
+#endif  // GC_REGIONTAB_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_trait_dialogs.cpp b/src/guiconv/gc_trait_dialogs.cpp
new file mode 100644
index 0000000..94a22d5
--- /dev/null
+++ b/src/guiconv/gc_trait_dialogs.cpp
@@ -0,0 +1,39 @@
+// $Id: gc_trait_dialogs.cpp,v 1.10 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_datastore.h"
+#include "gc_strings.h"
+#include "gc_structures_err.h"
+#include "gc_trait_dialogs.h"
+
+#include "wx/textdlg.h"
+
+void
+DoDialogAddTrait(wxWindow * parentWindow, GCDataStore & dataStore)
+{
+    wxString newTraitName
+        = wxGetTextFromUser(gcstr::traitEnterNewName,
+                            gcstr::traitEnterNewName,  // EWFIX.P3 LATER
+                            wxEmptyString,
+                            parentWindow);
+    if(!(newTraitName.IsEmpty()))
+    {
+        try
+        {
+            dataStore.AddNewTrait(newTraitName);
+        }
+        catch(const duplicate_name_error& e)
+        {
+            dataStore.GCInfo(e.what());
+        }
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_trait_dialogs.h b/src/guiconv/gc_trait_dialogs.h
new file mode 100644
index 0000000..1dcb667
--- /dev/null
+++ b/src/guiconv/gc_trait_dialogs.h
@@ -0,0 +1,21 @@
+// $Id: gc_trait_dialogs.h,v 1.5 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_TRAIT_DIALOGS_H
+#define GC_TRAIT_DIALOGS_H
+
+class GCDataStore;
+class wxWindow;
+
+void DoDialogAddTrait(wxWindow * parentWindow, GCDataStore & dataStore);
+
+#endif  //GC_TRAIT_DIALOGS_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_unit_dialogs.cpp b/src/guiconv/gc_unit_dialogs.cpp
new file mode 100644
index 0000000..faa71b5
--- /dev/null
+++ b/src/guiconv/gc_unit_dialogs.cpp
@@ -0,0 +1,34 @@
+// $Id: gc_unit_dialogs.cpp,v 1.8 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_strings.h"
+#include "gc_unit_dialogs.h"
+#include "wx/filedlg.h"
+
+//------------------------------------------------------------------------------------
+
+wxString DoDialogSelectHapFile(wxWindow * parentWindow,GCDataStore & dataStore)
+{
+    wxString hapFileName = wxEmptyString;
+    wxFileDialog hapFileDialog( parentWindow,
+                                gcstr::hapFileSelect,
+                                wxEmptyString,  // default directory == current
+                                wxEmptyString,  // default file = none
+                                gcstr::globAll, // show all files
+                                wxOPEN);
+    if(hapFileDialog.ShowModal() == wxID_OK)
+    {
+        hapFileName = hapFileDialog.GetPath();
+    }
+
+    return hapFileName;
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/gc_unit_dialogs.h b/src/guiconv/gc_unit_dialogs.h
new file mode 100644
index 0000000..5cde26f
--- /dev/null
+++ b/src/guiconv/gc_unit_dialogs.h
@@ -0,0 +1,21 @@
+// $Id: gc_unit_dialogs.h,v 1.5 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_UNIT_DIALOGS_H
+#define GC_UNIT_DIALOGS_H
+
+class GCDataStore;
+class wxWindow;
+
+wxString DoDialogSelectHapFile(wxWindow * parent,GCDataStore &);
+
+#endif  //GC_UNIT_DIALOGS_H
+
+//____________________________________________________________________________________
diff --git a/src/guiconv/guiconverter.cpp b/src/guiconv/guiconverter.cpp
new file mode 100644
index 0000000..5116914
--- /dev/null
+++ b/src/guiconv/guiconverter.cpp
@@ -0,0 +1,163 @@
+// $Id: guiconverter.cpp,v 1.35 2014/08/29 18:14:55 mkkuhner Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/* NOTE:  8/29/2014 Mary edited gc_migtab.cpp to reverse the sense
+of "from" and "to" in the migration matrix, since it was producing
+XML files that were backwards to what LAMARC assumes.  She did NOT
+change variable names or anything else to match, so there are probably
+misleading variable names in this part of the converter and in the
+corresponding XML output routines.  Maintainers beware!  DEBUG */
+
+#include "guiconverter.h"
+#include "gc_cmdline.h"
+#include "gc_errhandling.h"
+#include "gc_event_ids.h"
+#include "gc_event_publisher.h"
+#include "gc_frame.h"
+#include "gc_layout.h"
+#include "gc_strings.h"
+
+#include "giraffe32.xpm"
+
+#include "tinyxml.h"
+
+#include "wx/cmdline.h"
+#include "wx/icon.h"
+#include "wx/log.h"
+
+#ifdef LAMARC_COMPILE_MSWINDOWS
+#include "wincon.h"
+#endif
+
+GuiConverterApp::GuiConverterApp()
+{
+}
+
+GuiConverterApp::~GuiConverterApp()
+{
+}
+
+IMPLEMENT_APP(GuiConverterApp)
+
+void
+GuiConverterApp::OnInitCmdLine(wxCmdLineParser& parser)
+{
+    wxApp::OnInitCmdLine(parser);
+    GCCmdLineManager::AddOptions(parser);
+}
+
+bool
+GuiConverterApp::OnCmdLineParsed(wxCmdLineParser& parser)
+{
+    bool parentReturned = wxApp::OnCmdLineParsed(parser);
+    GCCmdLineManager::ExtractValues(parser);
+    return parentReturned;
+}
+
+bool
+GuiConverterApp::OnInit()
+{
+    // use the parent's OnInit because it includes command line parsing
+    if(wxApp::OnInit())
+    {
+        if(m_batchOnly)
+        {
+#ifdef LAMARC_COMPILE_MSWINDOWS
+            AttachConsole(-1);
+            // this should be better but mingw needs patching
+            // AttachConsole(ATTACH_PARENT_PROCESS);
+#else
+            wxLog::SetActiveTarget(new wxLogStderr());
+#endif
+            GCCmdLineManager::ProcessCommandLineAndCommandFile(m_logic);
+            GCCmdLineManager::DoExport(m_logic);
+            wxExit();
+            return false;
+        }
+        else
+        {
+            if(wxApp::OnInitGui())
+            {
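+                // Clamp the initial frame size to a fixed fraction of the
+                // current display so the window still fits on small screens.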
+                int appWidth  = gclayout::appWidth;
+                int appHeight = gclayout::appHeight;
+                wxSize maxSize = wxGetDisplaySize();
+                int maxWidth = maxSize.GetWidth()  * gclayout::appWidthPercent  / 100;
+                int maxHeight= maxSize.GetHeight() * gclayout::appHeightPercent / 100;
+                if(appWidth > maxWidth)
+                {
+                    appWidth = maxWidth;
+                }
+                if(appHeight > maxHeight)
+                {
+                    appHeight = maxHeight;
+                }
+
+                wxSize bestSize(appWidth,appHeight);
+
+                m_mainFrame = new GCFrame(gcstr::converterTitle,m_logic);
+                m_logic.SetDisplayParent(m_mainFrame);
+                m_mainFrame->SetSize(bestSize);
+                m_mainFrame->CentreOnScreen();
+                m_mainFrame->Show(true);
+                SetTopWindow(m_mainFrame);
+
+                m_mainFrame->SetIcon( wxICON( giraffe32) );
+
+                GCCmdLineManager::ProcessCommandLineAndCommandFile(m_logic);
+                PublishDataEvent(m_mainFrame->GetEventHandler(),D2S_UserInteractionPhaseEnd);
+
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+int
+GuiConverterApp::OnRun()
+{
+    try
+    {
+        int result = wxApp::OnRun();
+        return result;
+    }
+    catch(const gc_fatal_error& e)
+    {
+        wxLogError(wxString::Format(gcerr::fatalError));
+        return 2;
+    }
+    catch(const std::exception& f)
+    {
+        wxLogError(wxString::Format(gcerr::uncaughtException,f.what()));
+        return 2;
+    }
+    return 3;       // EWFIX.P3 -- what should this be?
+}
+
+int
+GuiConverterApp::OnExit()
+{
+    if(m_doDebugDump)
+    {
+        m_logic.DebugDump();
+    }
+    if(! m_batchOutName.IsEmpty())
+    {
+        TiXmlDocument * doc = m_logic.ExportBatch();
+        m_logic.WriteBatchFile(doc,m_batchOutName);
+        delete doc;
+    }
+
+    m_logic.NukeContents();
+
+    return wxApp::OnExit();
+}
+
+//____________________________________________________________________________________
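
A note on the "from"/"to" reversal flagged at the top of guiconverter.cpp: gc_migtab.cpp now swaps the sense of the migration matrix at output time, while the variable names that feed it were left alone. The standalone sketch below shows how a one-line index swap at emission flips the reported direction without touching the storage convention; the 2x2 matrix, the rates, and the output format are illustrative assumptions, not the converter's actual code.

    #include <cstdio>

    // Illustrative only: m[i][j] is stored with row i as the source population and
    // column j as the destination.  Writing each entry with the indices swapped
    // reverses the reported direction while leaving the (now misleading) storage
    // convention in place -- the same kind of output-time flip the NOTE describes.
    int main()
    {
        const double m[2][2] = { { 0.0, 1.5 },    // stored: population 0 -> 1 at rate 1.5
                                 { 2.5, 0.0 } };  // stored: population 1 -> 0 at rate 2.5
        for (int i = 0; i < 2; ++i)
        {
            for (int j = 0; j < 2; ++j)
            {
                if (i == j) continue;
                // The entry stored as i -> j is reported as a rate from j into i.
                std::printf("from=%d to=%d rate=%.1f\n", j, i, m[i][j]);
            }
        }
        return 0;
    }
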
diff --git a/src/guiconv/guiconverter.h b/src/guiconv/guiconverter.h
new file mode 100644
index 0000000..99803bd
--- /dev/null
+++ b/src/guiconv/guiconverter.h
@@ -0,0 +1,48 @@
+// $Id: guiconverter.h,v 1.14 2014/08/29 18:14:55 mkkuhner Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GUICONVERTER_H
+#define GUICONVERTER_H
+
+/* NOTE:  8/29/2014 Mary edited gc_migtab.cpp to reverse the sense
+of "from" and "to" in the migration matrix, since it was producing
+XML files that were backwards to what LAMARC assumes.  She did NOT
+change variable names or anything else to match, so there are probably
+misleading variable names in this part of the converter and in the
+corresponding XML output routines.  Maintainers beware!  DEBUG */
+
+#include "wx/wx.h"
+#include "gc_cmdline.h"
+#include "gc_logic.h"
+
+class wxCmdLineParser;
+class GCFrame;
+
+class GuiConverterApp: public wxApp, public GCCmdLineManager
+                       // main gui converter application
+{
+  protected:
+    GCLogic         m_logic;
+    GCFrame     *   m_mainFrame;
+
+  public:
+    GuiConverterApp();
+    virtual ~GuiConverterApp();
+
+    virtual bool    OnCmdLineParsed(wxCmdLineParser&);
+    virtual int     OnExit();
+    virtual bool    OnInit();
+    virtual void    OnInitCmdLine(wxCmdLineParser&);
+    virtual int     OnRun();
+};
+
+#endif  // GUICONVERTER_H
+
+//____________________________________________________________________________________
diff --git a/src/guiutil/gc_clickpanel.cpp b/src/guiutil/gc_clickpanel.cpp
new file mode 100644
index 0000000..00d8f6d
--- /dev/null
+++ b/src/guiutil/gc_clickpanel.cpp
@@ -0,0 +1,291 @@
+// $Id: gc_clickpanel.cpp,v 1.12 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "gc_color.h"
+#include "gc_clickpanel.h"
+
+//------------------------------------------------------------------------------------
+
+BEGIN_EVENT_TABLE(gcClickText,wxStaticText)
+EVT_MOUSE_EVENTS(gcClickText::OnMouse)
+END_EVENT_TABLE()
+
+gcClickText::gcClickText(gcClickPanel * parent, wxString label)
+:
+wxStaticText(parent,-1,label),
+    m_parent(parent)
+{
+}
+
+gcClickText::~gcClickText()
+{
+}
+
+void
+gcClickText::OnMouse(wxMouseEvent & event)
+{
+    // according to the wxWidgets documentation, calling event.Skip()
+    // here might be necessary because it might be handling more basic
+    // functionality (such as bringing the window to the front).
+    event.Skip();
+
+    // send this event on to the containing object. We do
+    // this because which window (gcClickText, gcClickPanel, gcClickCell)
+    // responds to the mouse events we're interested in does not
+    // appear to be consistent across platforms.
+    event.ResumePropagation(1);
+}
+
+//------------------------------------------------------------------------------------
+
+BEGIN_EVENT_TABLE(gcClickPanel,wxPanel)
+EVT_MOUSE_EVENTS(gcClickPanel::OnMouse)
+END_EVENT_TABLE()
+
+gcClickPanel::gcClickPanel(gcClickCell * parent)
+:   wxPanel(parent,-1),
+    m_sizer(NULL)
+{
+    m_sizer = new wxBoxSizer(wxVERTICAL);
+}
+
+gcClickPanel::~gcClickPanel()
+{
+}
+
+void
+gcClickPanel::FinishSizing()
+{
+    SetSizerAndFit(m_sizer);
+}
+
+void
+gcClickPanel::AddText(wxString text)
+{
+    gcClickText* newtext = new gcClickText(this,text);
+
+    if (text.Find("???") != wxNOT_FOUND)
+    {
+        newtext->SetForegroundColour(wxTheColourDatabase->Find("RED"));
+    }
+
+    RecursiveSetColour(wxTheColourDatabase->Find("WHITE"));
+    m_sizer->Add(newtext,1,wxEXPAND);
+}
+
+void
+gcClickPanel::CenterText(wxString text)
+{
+    gcClickText* newtext = new gcClickText(this,text);
+
+    if (text.Find("???") != wxNOT_FOUND)
+    {
+        newtext->SetForegroundColour(wxTheColourDatabase->Find("RED"));
+    }
+
+    RecursiveSetColour(wxTheColourDatabase->Find("WHITE"));
+    m_sizer->Add(newtext,1,wxALIGN_CENTER);
+}
+
+void
+gcClickPanel::OnMouse(wxMouseEvent & event)
+{
+    // according to the wxWidgets documentation, calling event.Skip()
+    // here might be necessary because it might be handling more basic
+    // functionality (such as bringing the window to the front).
+    event.Skip();
+
+    // send this event on to the containing object. We do
+    // this because which window (gcClickText, gcClickPanel, gcClickCell)
+    // responds to the mouse events we're interested in does not
+    // appear to be consistent across platforms.
+    event.ResumePropagation(1);
+}
+
+void
+gcClickPanel::RecursiveSetColour(wxColour c)
+{
+
+    SetBackgroundColour(c);
+    wxWindowList & kids = GetChildren();
+
+    for ( wxWindowList::Node *node = kids.GetFirst(); node; node = node->GetNext() )
+    {
+        wxWindow * current = node->GetData();
+        current->SetBackgroundColour(c);
+    }
+
+}
+
+//------------------------------------------------------------------------------------
+
+BEGIN_EVENT_TABLE(gcClickCell,wxPanel)
+EVT_MOUSE_EVENTS(gcClickCell::OnMouse)
+END_EVENT_TABLE()
+
+gcClickCell::gcClickCell(wxWindow * parent, wxString label)
+:   wxPanel(parent,-1,wxDefaultPosition,wxDefaultSize,wxTAB_TRAVERSAL),
+    m_sizer(NULL),
+    m_clickPanel(NULL),
+    m_mouseInCell(false)
+{
+    m_sizer = new wxStaticBoxSizer(wxVERTICAL,this,label);
+    m_clickPanel = new gcClickPanel(this);
+    m_sizer->Add(m_clickPanel,1,wxEXPAND);
+    RecursiveSetColour(wxTheColourDatabase->Find("WHITE"));
+}
+
+gcClickCell::~gcClickCell()
+{
+}
+
+void
+gcClickCell::FinishSizing()
+{
+    m_clickPanel->FinishSizing();
+    SetSizerAndFit(m_sizer);
+}
+
+void
+gcClickCell::AddText(wxString text)
+{
+    assert(m_clickPanel != NULL);
+    m_clickPanel->AddText(text);
+}
+
+void
+gcClickCell::CenterText(wxString text)
+{
+    assert(m_clickPanel != NULL);
+    m_clickPanel->CenterText(text);
+}
+
+void
+gcClickCell::OnMouse(wxMouseEvent & event)
+{
+    event.Skip();
+    if( !event.Moving())
+    {
+        // we need to process events from here, but
+        // we're currently only using the events
+        // from the wxStaticText objects that are
+        // children of this cell. This isn't quite
+        // right, but each of Linux/GTK, OS X, and
+        // MSW treat the events for the gcClickCell
+        // itself differently, so we're punting for
+        // now with this.
+        if(this == event.GetEventObject()) return;
+
+        if ( event.Entering() )
+        {
+            m_mouseInCell = true;
+#ifdef LAMARC_COMPILE_MACOSX
+            m_clickCount = 0;// HACK.HACK.HACK - 10.7 counts things wrong apparently
+#endif
+            NotifyEntering();
+        }
+        if ( event.Leaving() )
+        {
+            m_mouseInCell = false;
+#ifdef LAMARC_COMPILE_MACOSX
+            m_clickCount = 0;// HACK.HACK.HACK - 10.7 counts things wrong apparently
+#endif
+            NotifyLeaving();
+        }
+        if ( event.LeftDClick() )
+        {
+            wxLogVerbose(" event.LeftDClick detected");  // JMDBG
+
+            m_mouseInCell = true;
+#ifdef LAMARC_COMPILE_MACOSX
+            if (m_clickCount > 1)// HACK.HACK.HACK - 10.7 counts things wrong apparently
+            {
+#endif
+                NotifyLeftDClick();
+#ifdef LAMARC_COMPILE_MACOSX
+                m_clickCount = 0;// HACK.HACK.HACK - 10.7 counts things wrong apparently
+            }
+#endif
+        }
+        if ( event.LeftDown() )
+        {
+            wxLogVerbose(" event.LeftDown detected");  // JMDBG
+            m_mouseInCell = true;
+#ifdef LAMARC_COMPILE_MACOSX
+            m_clickCount++;// HACK.HACK.HACK - 10.7 counts things wrong apparently
+#endif
+
+            NotifyLeftDown();
+        }
+        if ( event.LeftUp() )
+        {
+            // doesn't change m_mouseInCell because this cell
+            // will get the event if the corresponding down
+            // happened in the cell
+            NotifyLeftUp();
+        }
+    }
+}
+
+void
+gcClickCell::RecursiveSetColour(wxColour c)
+{
+    // EWFIX.P3 -- not calling SetBackgroundColour because
+    // we don't appear to get click events until we pass
+    // into the enclosed gcClickPanel. Turned it off since
+    // it otherwise gives a false clue, but boy is this ugly.
+    // SetBackgroundColour(c);
+
+    m_clickPanel->RecursiveSetColour(c);
+    Refresh();  // for MSW ??
+}
+
+void
+gcClickCell::NotifyEntering()
+{
+    RecursiveSetColour(gccolor::enteredObject());
+}
+
+void
+gcClickCell::NotifyLeaving()
+{
+    RecursiveSetColour(wxTheColourDatabase->Find("WHITE"));
+}
+
+void
+gcClickCell::NotifyLeftDClick()
+{
+    wxLogDebug("Implementation Error: override NotifyLeftDClick");
+}
+
+void
+gcClickCell::NotifyLeftDown()
+{
+    RecursiveSetColour(gccolor::activeObject());
+}
+
+void
+gcClickCell::NotifyLeftUp()
+{
+    // EWFIX.P3 -- there should be a better way to do this
+    if(m_mouseInCell)
+    {
+        RecursiveSetColour(gccolor::enteredObject());
+
+    }
+    else
+    {
+        RecursiveSetColour(wxTheColourDatabase->Find("WHITE"));
+    }
+}
+
+//____________________________________________________________________________________
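
gcClickText, gcClickPanel, and gcClickCell all forward their mouse events upward with event.Skip() plus event.ResumePropagation(1), because mouse events do not propagate to parent windows by default and the window that actually receives them varies by platform. A stripped-down sketch of the same wxWidgets pattern, with hypothetical class names, is:

    #include <wx/wx.h>

    // Child label: let default processing run, then allow the event to bubble one
    // level so the enclosing panel's handler sees clicks anywhere on the label.
    class ForwardingText : public wxStaticText
    {
      public:
        ForwardingText(wxWindow * parent, const wxString & label)
            : wxStaticText(parent, wxID_ANY, label) {}
        void OnMouse(wxMouseEvent & event)
        {
            event.Skip();                  // keep the default handling
            event.ResumePropagation(1);    // mouse events stay put unless asked to bubble
        }
        DECLARE_EVENT_TABLE()
    };

    BEGIN_EVENT_TABLE(ForwardingText, wxStaticText)
    EVT_MOUSE_EVENTS(ForwardingText::OnMouse)
    END_EVENT_TABLE()

    // Parent panel: receives the forwarded events and recolours itself on entry/exit.
    class HighlightPanel : public wxPanel
    {
      public:
        explicit HighlightPanel(wxWindow * parent) : wxPanel(parent, wxID_ANY)
        {
            wxBoxSizer * sizer = new wxBoxSizer(wxVERTICAL);
            sizer->Add(new ForwardingText(this, wxT("click me")), 1, wxEXPAND);
            SetSizerAndFit(sizer);
        }
        void OnMouse(wxMouseEvent & event)
        {
            event.Skip();
            if (event.Entering()) SetBackgroundColour(*wxLIGHT_GREY);
            if (event.Leaving())  SetBackgroundColour(*wxWHITE);
            Refresh();
        }
        DECLARE_EVENT_TABLE()
    };

    BEGIN_EVENT_TABLE(HighlightPanel, wxPanel)
    EVT_MOUSE_EVENTS(HighlightPanel::OnMouse)
    END_EVENT_TABLE()
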
diff --git a/src/guiutil/gc_clickpanel.h b/src/guiutil/gc_clickpanel.h
new file mode 100644
index 0000000..c3fc52d
--- /dev/null
+++ b/src/guiutil/gc_clickpanel.h
@@ -0,0 +1,88 @@
+// $Id: gc_clickpanel.h,v 1.8 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_CLICKPANEL_H
+#define GC_CLICKPANEL_H
+
+#include "wx/sizer.h"
+#include "wx/stattext.h"
+#include "wx/wx.h"
+
+class gcClickCell;
+
+class gcClickPanel : public wxPanel
+{
+  private:
+  protected:
+    wxBoxSizer *        m_sizer;
+  public:
+    gcClickPanel(gcClickCell * parent);
+    virtual ~gcClickPanel();
+
+    void AddText(wxString text);
+    void CenterText(wxString text);
+    void OnMouse(wxMouseEvent & event);
+    void RecursiveSetColour(wxColour c);
+
+    void        FinishSizing();
+
+    DECLARE_EVENT_TABLE()
+};
+
+class gcClickCell : public wxPanel
+{
+  private:
+    gcClickCell();         // undefined
+  protected:
+    wxStaticBoxSizer *      m_sizer;
+    gcClickPanel *          m_clickPanel;
+    bool                    m_mouseInCell;
+#ifdef LAMARC_COMPILE_MACOSX
+    int                     m_clickCount;  // HACK.HACK.HACK - 10.7 counts things wrong apparently
+#endif
+
+    void        FinishSizing();
+  public:
+    gcClickCell(wxWindow * parent, wxString label);
+    virtual ~gcClickCell();
+
+    void OnMouse(wxMouseEvent & event);
+    void RecursiveSetColour(wxColour c);
+
+    virtual void NotifyEntering     ();
+    virtual void NotifyLeaving      ();
+    virtual void NotifyLeftDClick   ();
+    virtual void NotifyLeftDown     ();
+    virtual void NotifyLeftUp       ();
+
+    void AddText(wxString text);
+    void CenterText(wxString text);
+
+    DECLARE_EVENT_TABLE()
+};
+
+class gcClickText : public wxStaticText
+{
+  private:
+    gcClickText();       // undefined
+  protected:
+    gcClickPanel *     m_parent;
+  public:
+    gcClickText(gcClickPanel * parent,wxString label);
+    virtual ~gcClickText();
+
+    void OnMouse(wxMouseEvent & event);
+
+    DECLARE_EVENT_TABLE()
+};
+
+#endif  // GC_CLICKPANEL_H
+
+//____________________________________________________________________________________
diff --git a/src/guiutil/gc_gridpanel.cpp b/src/guiutil/gc_gridpanel.cpp
new file mode 100644
index 0000000..6b62200
--- /dev/null
+++ b/src/guiutil/gc_gridpanel.cpp
@@ -0,0 +1,297 @@
+// $Id: gc_gridpanel.cpp,v 1.30 2012/03/09 22:55:20 jmcgill Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <stdio.h>
+
+#include "wx/sizer.h"
+#include "gc_color.h"
+#include "gc_event_ids.h"
+#include "gc_event_publisher.h"
+#include "gc_gridpanel.h"
+#include "gc_layout.h"
+#include "gc_logic.h"
+#include "gc_strings.h"
+#include "gc_types.h"
+
+//------------------------------------------------------------------------------------
+
+gcGridPane::gcGridPane(wxWindow * parent, size_t cols, size_t growCol)
+    :
+    wxPanel(parent,-1,wxDefaultPosition,wxDefaultSize,wxTAB_TRAVERSAL),
+    m_sizer(NULL),
+    m_rows(0),
+    m_cols(cols)
+{
+    //(void) new wxStaticText( this, wxID_ANY, wxT("In gcGridPane Create."), wxPoint(10, 10) );
+
+    m_sizer = new wxFlexGridSizer(cols);
+    m_sizer->AddGrowableCol(growCol);
+    SetSizerAndFit(m_sizer);
+}
+
+gcGridPane::~gcGridPane()
+{
+}
+
+void
+gcGridPane::NotifyEntering(size_t row, size_t col)
+{
+    for(size_t c = 0; c < m_cols; c++)
+    {
+        cellIndices indices(row,c);
+        cellMap::iterator iter = m_cells.find(indices);
+        assert(iter != m_cells.end());
+        wxWindow * obj = (*iter).second;
+        obj->SetBackgroundColour(gccolor::enteredObject());
+    }
+}
+
+void
+gcGridPane::NotifyLeaving(size_t row, size_t col)
+{
+    for(size_t c = 0; c < m_cols; c++)
+    {
+        cellIndices indices(row,c);
+        cellMap::iterator iter = m_cells.find(indices);
+        assert(iter != m_cells.end());
+        wxWindow * obj = (*iter).second;
+        obj->SetBackgroundColour(wxNullColour);
+    }
+}
+
+void
+gcGridPane::NotifyLeftDClick(size_t row, size_t col)
+{
+    wxLogDebug("Implementation Error: override NotifyLeftDClick");
+}
+
+void
+gcGridPane::NotifyLeftDown(size_t row, size_t col)
+{
+    for(size_t c = 0; c < m_cols; c++)
+    {
+        cellIndices indices(row,c);
+        cellMap::iterator iter = m_cells.find(indices);
+        assert(iter != m_cells.end());
+        wxWindow * obj = (*iter).second;
+        obj->SetBackgroundColour(gccolor::activeObject());
+    }
+}
+
+void
+gcGridPane::NotifyLeftUp(size_t row, size_t col)
+{
+
+    for(size_t c = 0; c < m_cols; c++)
+    {
+        cellIndices indices(row,c);
+        cellMap::iterator iter = m_cells.find(indices);
+        assert(iter != m_cells.end());
+        wxWindow * obj = (*iter).second;
+        obj->SetBackgroundColour(gccolor::enteredObject());
+    }
+
+}
+
+void
+gcGridPane::AddRow(size_t objId, wxArrayString labels)
+{
+    assert(labels.size() == m_cols);
+    for(size_t i=0; i < labels.size(); i++)
+    {
+        wxWindow * cell = new gcGridCell(this,labels[i],m_rows,i);
+        m_sizer->Add(cell,
+                     1,
+                     wxALL | wxALIGN_LEFT | wxALIGN_CENTER_VERTICAL | wxEXPAND,
+                     gclayout::borderSizeNone);
+
+        m_cells[cellIndices(m_rows,i)] = cell;
+    }
+    m_objVec.push_back(objId);
+    m_rows++;
+}
+
+void
+gcGridPane::Finish()
+// This is necesary to get the scrollbars to be created correctly
+// EWFIX.P4 -- is there a better way to do this?
+{
+    SetSizerAndFit(m_sizer);
+}
+
+//------------------------------------------------------------------------------------
+
+BEGIN_EVENT_TABLE(gcGridText,wxStaticText)
+EVT_MOUSE_EVENTS(gcGridText::OnMouse)
+END_EVENT_TABLE()
+
+gcGridText::gcGridText(gcGridCell * parent, wxString label)
+:
+wxStaticText(parent,-1,label)
+{
+}
+
+gcGridText::~gcGridText()
+{
+}
+
+void
+gcGridText::OnMouse(wxMouseEvent & event)
+{
+    // according to the wxWidgets documentation, calling event.Skip()
+    // here might be necessary because it might be handling more basic
+    // functionality (such as bringing the window to the front).
+    event.Skip();
+
+    // send this event on to the containing gcGridCell object. We do
+    // this because which window (i.e. this gcGridText or the parent
+    // gcGridCell) responds to the mouse events we're interested in
+    // does not appear to be consistent across platforms.
+    event.ResumePropagation(1);
+
+}
+
+//------------------------------------------------------------------------------------
+
+BEGIN_EVENT_TABLE(gcGridCell,wxPanel)
+EVT_MOUSE_EVENTS(gcGridCell::OnMouse)
+END_EVENT_TABLE()
+
+gcGridCell::gcGridCell( gcGridPane *    parent,
+                        wxString        label,
+                        size_t          rowIndex,
+                        size_t          colIndex)
+:
+wxPanel(parent),
+    m_rowIndex(rowIndex),
+    m_colIndex(colIndex)
+{
+    wxBoxSizer * sizer = new wxBoxSizer(wxHORIZONTAL);
+    sizer->Add(new gcGridText(this,label),
+               1,
+               wxALL | wxALIGN_LEFT | wxALIGN_CENTER_VERTICAL | wxEXPAND,
+               gclayout::borderSizeSmall);
+    SetSizerAndFit(sizer);
+}
+
+gcGridCell::~gcGridCell()
+{
+}
+
+void
+gcGridCell::OnMouse(wxMouseEvent & event)
+{
+    gcGridPane * gridPane = dynamic_cast<gcGridPane*>(GetParent());
+    assert(gridPane != NULL);
+
+    event.Skip();
+    if( !event.Moving())
+    {
+        if ( event.Entering() )
+        {
+            gridPane->NotifyEntering(m_rowIndex,m_colIndex);
+        }
+        if ( event.Leaving() )
+        {
+            gridPane->NotifyLeaving(m_rowIndex,m_colIndex);
+        }
+        if ( event.LeftDClick() )
+        {
+            gridPane->NotifyLeftDClick(m_rowIndex,m_colIndex);
+        }
+        if ( event.LeftDown() )
+        {
+            gridPane->NotifyLeftDown(m_rowIndex,m_colIndex);
+        }
+        if ( event.LeftUp() )
+        {
+            gridPane->NotifyLeftUp(m_rowIndex,m_colIndex);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+gcInfoPane::gcInfoPane(wxWindow * parent, GCLogic & logic, wxString title)
+    :
+    wxPanel(parent,
+            -1,
+            wxDefaultPosition,
+            wxDefaultSize,
+            wxTAB_TRAVERSAL | wxSUNKEN_BORDER),
+    m_scrolled(NULL),
+    m_contentSizer(NULL),
+    m_innerPanel(NULL),
+    m_topSizer(NULL),
+    m_panelLabelFmt(title),
+    m_panelLabel(NULL),
+    m_logic(logic)
+{
+    //(void) new wxStaticText( this, wxID_ANY, wxT("In gcInfoPane Create."), wxPoint(10, 10) );
+
+    m_scrolled = new wxScrolledWindow(this, -1,
+                                      wxDefaultPosition,
+                                      wxDefaultSize,
+                                      wxRAISED_BORDER | wxTAB_TRAVERSAL);
+
+    m_innerPanel = new wxPanel(m_scrolled,-1);
+    (void) new wxStaticText( m_innerPanel, wxID_ANY, wxT("In m_innerPanel Create."), wxPoint(10, 10) );
+
+    m_contentSizer = new wxBoxSizer(wxVERTICAL);
+    m_contentSizer->Add(m_innerPanel,
+                        1,
+                        wxALL | wxALIGN_LEFT | wxALIGN_CENTER_VERTICAL | wxEXPAND,
+                        gclayout::borderSizeSmall);
+
+    m_scrolled->SetScrollRate(5,5); // if removed, scrolling doesn't work
+    m_scrolled->SetSizerAndFit(m_contentSizer);
+
+    m_topSizer = new wxBoxSizer(wxVERTICAL);
+    m_panelLabel = new wxStaticText(this,-1,wxEmptyString);
+    m_topSizer->Add(m_panelLabel,
+                    0,
+                    wxALL | wxALIGN_LEFT | wxALIGN_CENTER_VERTICAL | wxEXPAND,
+                    gclayout::borderSizeSmall);
+    m_topSizer->Add( m_scrolled,
+                     1,
+                     wxALL | wxALIGN_LEFT | wxALIGN_CENTER_VERTICAL | wxEXPAND,
+                     gclayout::borderSizeSmall);
+    SetSizerAndFit(m_topSizer);
+
+    assert(m_contentSizer != NULL);
+
+}
+
+gcInfoPane::~gcInfoPane()
+{
+}
+
+void
+gcInfoPane::UpdateUserCues()
+{
+    wxPanel * newContent = MakeContent();
+    m_contentSizer->Detach(m_innerPanel);
+    m_panelLabel->SetLabel(MakeLabel());
+    m_innerPanel->Destroy();
+    m_innerPanel = newContent;
+    m_contentSizer->Add(m_innerPanel,
+                        1,
+                        wxALL | wxALIGN_LEFT | wxALIGN_CENTER_VERTICAL | wxEXPAND,
+                        gclayout::borderSizeSmall);
+    m_innerPanel->Layout();
+    m_contentSizer->Layout();
+    m_scrolled->FitInside();
+    //m_scrolled->Layout();
+    m_topSizer->Layout();
+    Layout();
+}
+
+//____________________________________________________________________________________
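
gcInfoPane::UpdateUserCues refreshes its display by detaching the old inner panel from the content sizer, destroying it, adding the freshly built replacement, and then re-laying-out both sizers and calling FitInside() so the scrolled window recomputes its virtual size (and hence its scrollbars). A generic sketch of that swap sequence, with hypothetical names and a fixed border width, is:

    #include <wx/wx.h>
    #include <wx/scrolwin.h>

    // Swap the child panel of a scrolled window for newly built content.
    void ReplaceScrolledContent(wxScrolledWindow * scrolled,
                                wxSizer * contentSizer,
                                wxPanel *& current,
                                wxPanel * replacement)
    {
        contentSizer->Detach(current);   // unhook the old panel from the sizer
        current->Destroy();              // schedule the old panel for deletion
        current = replacement;
        contentSizer->Add(current, 1, wxALL | wxEXPAND, 2);
        current->Layout();
        contentSizer->Layout();
        scrolled->FitInside();           // recompute the virtual size so the scrollbars match
    }
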
diff --git a/src/guiutil/gc_gridpanel.h b/src/guiutil/gc_gridpanel.h
new file mode 100644
index 0000000..256e5d2
--- /dev/null
+++ b/src/guiutil/gc_gridpanel.h
@@ -0,0 +1,113 @@
+// $Id: gc_gridpanel.h,v 1.23 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_GRIDPANEL_H
+#define GC_GRIDPANEL_H
+
+#include <map>
+#include <vector>
+
+#include "gc_event_ids.h"
+#include "gc_layout.h"
+#include "wx/scrolwin.h"
+#include "wx/sizer.h"
+#include "wx/stattext.h"
+#include "wx/wx.h"
+
+class GCLogic;
+class wxSizer;
+
+typedef std::pair<size_t,size_t>        cellIndices;
+typedef std::map<cellIndices,wxWindow*> cellMap;
+typedef std::vector<size_t>             objIdsByRow;
+
+class gcGridPane : public wxPanel
+{
+  private:
+
+  protected:
+    wxFlexGridSizer *   m_sizer;
+    size_t              m_rows;
+    size_t              m_cols;
+    cellMap             m_cells;
+    objIdsByRow         m_objVec;
+
+  public:
+
+    gcGridPane(wxWindow * parent, size_t cols, size_t growCol);
+    virtual ~gcGridPane();
+
+    virtual void NotifyEntering     (size_t row, size_t col);
+    virtual void NotifyLeaving      (size_t row, size_t col);
+    virtual void NotifyLeftDClick   (size_t row, size_t col);
+    virtual void NotifyLeftDown     (size_t row, size_t col);
+    virtual void NotifyLeftUp       (size_t row, size_t col);
+
+    // EWFIX.P4 -- move to protected and inherit
+    void        AddRow(size_t objId, wxArrayString labels);
+
+    void        Finish();
+};
+
+class gcGridCell : public wxPanel
+{
+  private:
+    gcGridCell();                // undefined
+  protected:
+    size_t      m_rowIndex;
+    size_t      m_colIndex;
+  public:
+    gcGridCell( gcGridPane *    parent,
+                wxString        label,
+                size_t          rowIndex,
+                size_t          colIndex);
+    virtual ~gcGridCell();
+
+    void OnMouse(wxMouseEvent & event);
+
+    DECLARE_EVENT_TABLE()
+};
+
+class gcGridText : public wxStaticText
+{
+  private:
+    gcGridText();       // undefined
+  protected:
+  public:
+    gcGridText(gcGridCell * parent,wxString label);
+    virtual ~gcGridText();
+    void OnMouse(wxMouseEvent & event);
+
+    DECLARE_EVENT_TABLE()
+};
+
+class gcInfoPane : public wxPanel
+{
+  private:
+    gcInfoPane();              // undefined
+  protected:
+    wxScrolledWindow *  m_scrolled;
+    wxSizer *           m_contentSizer;
+    wxPanel *           m_innerPanel;
+    wxSizer *           m_topSizer;
+    const wxString      m_panelLabelFmt;
+    wxStaticText *      m_panelLabel;
+    GCLogic &           m_logic;
+    virtual wxPanel *   MakeContent() = 0;
+  public:
+    gcInfoPane(wxWindow * parent, GCLogic & logic, wxString title);
+    virtual ~gcInfoPane();
+    void UpdateUserCues();
+    virtual wxString MakeLabel() = 0;
+};
+
+#endif  // GC_GRIDPANEL_H
+
+//____________________________________________________________________________________
diff --git a/src/guiutil/gc_text_ctrl.cpp b/src/guiutil/gc_text_ctrl.cpp
new file mode 100644
index 0000000..444e043
--- /dev/null
+++ b/src/guiutil/gc_text_ctrl.cpp
@@ -0,0 +1,52 @@
+// $Id: gc_text_ctrl.cpp,v 1.8 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_text_ctrl.h"
+#include "gc_validators.h"
+#include "wx/log.h"
+#include "wx/event.h"
+
+GCTextInput::GCTextInput(wxWindow * parentWindow,const wxValidator& validator)
+    :
+    wxTextCtrl( parentWindow,
+                -1,
+                wxEmptyString,
+                wxDefaultPosition,
+                wxDefaultSize,
+                wxTAB_TRAVERSAL | wxTE_RIGHT | wxTE_PROCESS_ENTER,
+                validator)
+{
+}
+
+GCTextInput::~GCTextInput()
+{
+}
+
+GCIntegerInput::GCIntegerInput(wxWindow * parentWindow)
+    :
+    GCTextInput(parentWindow,GCIntegerValidator())
+{
+}
+
+GCIntegerInput::~GCIntegerInput()
+{
+}
+
+GCNonNegativeIntegerInput::GCNonNegativeIntegerInput(wxWindow * parentWindow)
+    :
+    GCTextInput(parentWindow,GCNonNegativeIntegerValidator())
+{
+}
+
+GCNonNegativeIntegerInput::~GCNonNegativeIntegerInput()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/guiutil/gc_text_ctrl.h b/src/guiutil/gc_text_ctrl.h
new file mode 100644
index 0000000..782a5cb
--- /dev/null
+++ b/src/guiutil/gc_text_ctrl.h
@@ -0,0 +1,46 @@
+// $Id: gc_text_ctrl.h,v 1.6 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_TEXT_CTRL_H
+#define GC_TEXT_CTRL_H
+
+#include "wx/textctrl.h"
+
+class GCTextInput : public wxTextCtrl
+{
+
+  private:
+    GCTextInput();       // undefined
+  public:
+    GCTextInput(wxWindow * parentWindow,const wxValidator& validator);
+    virtual ~GCTextInput();
+};
+
+class GCIntegerInput : public GCTextInput
+{
+  private:
+    GCIntegerInput();       // undefined
+  public:
+    GCIntegerInput(wxWindow * parentWindow);
+    virtual ~GCIntegerInput();
+};
+
+class GCNonNegativeIntegerInput : public GCTextInput
+{
+  private:
+    GCNonNegativeIntegerInput();       // undefined
+  public:
+    GCNonNegativeIntegerInput(wxWindow * parentWindow);
+    virtual ~GCNonNegativeIntegerInput();
+};
+
+#endif  // GC_TEXT_CTRL_H
+
+//____________________________________________________________________________________
diff --git a/src/guiutil/gc_validators.cpp b/src/guiutil/gc_validators.cpp
new file mode 100644
index 0000000..f84e193
--- /dev/null
+++ b/src/guiutil/gc_validators.cpp
@@ -0,0 +1,66 @@
+// $Id: gc_validators.cpp,v 1.6 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "gc_data.h"
+#include "gc_validators.h"
+
+//------------------------------------------------------------------------------------
+
+GCIntegerValidator::GCIntegerValidator()
+    :
+    wxTextValidator(wxFILTER_INCLUDE_CHAR_LIST)
+{
+    SetIncludes(gcdata::integerList());
+}
+
+GCIntegerValidator::~GCIntegerValidator()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+GCIntegerListValidator::GCIntegerListValidator()
+    :
+    wxTextValidator(wxFILTER_INCLUDE_CHAR_LIST)
+{
+    SetIncludes(gcdata::integerListWithSpaces());
+}
+
+GCIntegerListValidator::~GCIntegerListValidator()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+GCNonNegativeIntegerValidator::GCNonNegativeIntegerValidator()
+    :
+    wxTextValidator(wxFILTER_INCLUDE_CHAR_LIST)
+{
+    SetIncludes(gcdata::nonNegativeIntegerList());
+}
+
+GCNonNegativeIntegerValidator::~GCNonNegativeIntegerValidator()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+GCPositiveFloatValidator::GCPositiveFloatValidator()
+    :
+    wxTextValidator(wxFILTER_INCLUDE_CHAR_LIST)
+{
+    SetIncludes(gcdata::positiveFloatChars());
+}
+
+GCPositiveFloatValidator::~GCPositiveFloatValidator()
+{
+}
+
+//____________________________________________________________________________________
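
Each validator above whitelists keystrokes: it passes wxFILTER_INCLUDE_CHAR_LIST to wxTextValidator and supplies the permitted characters through SetIncludes(), with the character lists coming from gcdata helpers defined elsewhere. A minimal sketch of the same mechanism, using a hard-coded digit list as a stand-in for gcdata::nonNegativeIntegerList(), is:

    #include <wx/wx.h>
    #include <wx/valtext.h>

    // A validator that only lets the user type the characters of a non-negative integer.
    class DigitsOnlyValidator : public wxTextValidator
    {
      public:
        DigitsOnlyValidator() : wxTextValidator(wxFILTER_INCLUDE_CHAR_LIST)
        {
            wxArrayString allowed;
            const wxString digits = wxT("0123456789");
            for (size_t i = 0; i < digits.length(); ++i)
            {
                allowed.Add(digits.Mid(i, 1));     // one entry per permitted character
            }
            SetIncludes(allowed);                  // keystrokes outside the list are rejected
        }
    };

    // Typical use, mirroring GCTextInput's constructor:
    //   new wxTextCtrl(parent, wxID_ANY, wxEmptyString, wxDefaultPosition,
    //                  wxDefaultSize, 0, DigitsOnlyValidator());
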
diff --git a/src/guiutil/gc_validators.h b/src/guiutil/gc_validators.h
new file mode 100644
index 0000000..aa8f0d6
--- /dev/null
+++ b/src/guiutil/gc_validators.h
@@ -0,0 +1,54 @@
+// $Id: gc_validators.h,v 1.6 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GC_VALIDATORS_H
+#define GC_VALIDATORS_H
+
+#include "wx/valtext.h"
+
+class GCIntegerValidator : public wxTextValidator
+{
+  private:
+    wxString                    *   m_stringVar;
+  public:
+    GCIntegerValidator();
+    virtual ~GCIntegerValidator();
+};
+
+class GCIntegerListValidator : public wxTextValidator
+{
+  private:
+    wxString                    *   m_stringVar;
+  public:
+    GCIntegerListValidator();
+    virtual ~GCIntegerListValidator();
+};
+
+class GCNonNegativeIntegerValidator : public wxTextValidator
+{
+  private:
+    wxString                    *   m_stringVar;
+  public:
+    GCNonNegativeIntegerValidator();
+    virtual ~GCNonNegativeIntegerValidator();
+};
+
+class GCPositiveFloatValidator : public wxTextValidator
+{
+  private:
+    wxString                    *   m_stringVar;
+  public:
+    GCPositiveFloatValidator();
+    virtual ~GCPositiveFloatValidator();
+};
+
+#endif  // GC_VALIDATORS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/coalmenus.cpp b/src/lamarcmenus/coalmenus.cpp
new file mode 100644
index 0000000..57915e2
--- /dev/null
+++ b/src/lamarcmenus/coalmenus.cpp
@@ -0,0 +1,98 @@
+// $Id: coalmenus.cpp,v 1.15 2010/03/17 17:25:58 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "constants.h"
+#include "constraintmenus.h"
+#include "coalmenus.h"
+#include "forcesummary.h"
+#include "lamarc_strings.h"
+#include "menu_strings.h"
+#include "priormenus.h"
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "profilemenus.h"
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+SetAllThetasMenuItem::SetAllThetasMenuItem(string myKey, UIInterface & ui)
+    : SetMenuItemId(myKey,ui,uistr::globalTheta, UIId(force_COAL, uiconst::GLOBAL_ID))
+{
+}
+
+SetAllThetasMenuItem::~SetAllThetasMenuItem()
+{
+}
+
+bool SetAllThetasMenuItem::IsVisible()
+{
+    return (ui.doGetLong(uistr::crossPartitionCount) > 1);
+}
+
+string SetAllThetasMenuItem::GetVariableText()
+{
+    return "";
+}
+
+/////////
+
+SetThetasFstMenuItem::SetThetasFstMenuItem(string key,UIInterface & ui)
+    : ToggleMenuItemNoId(key,ui,uistr::fstSetTheta)
+{
+}
+
+SetThetasFstMenuItem::~SetThetasFstMenuItem()
+{
+}
+
+bool SetThetasFstMenuItem::IsVisible()
+{
+    return (ui.doGetLong(uistr::crossPartitionCount) > 1);
+}
+
+/////////
+
+SetMenuItemThetas::SetMenuItemThetas(UIInterface & myui)
+    : SetMenuItemGroup(myui,uistr::userSetTheta)
+{
+}
+
+SetMenuItemThetas::~SetMenuItemThetas()
+{
+}
+
+vector<UIId> SetMenuItemThetas::GetVisibleIds()
+{
+    return ui.doGetUIIdVec1d(uistr::validParamsForForce,UIId(force_COAL));
+}
+
+/////////
+
+CoalescenceMenu::CoalescenceMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::coalTitle,lamarcmenu::coalInfo)
+{
+    UIId id(force_COAL);
+    AddMenuItem(new SubMenuConstraintsForOneForce("C",ui,id));
+    AddMenuItem(new SubMenuProfileForOneForce("P",ui,id));
+    AddMenuItem(new SubMenuPriorForOneForce("B",ui,id));
+    AddMenuItem(new SetAllThetasMenuItem("G",ui));
+    AddMenuItem(new SetThetasFstMenuItem("F",ui));
+    AddMenuItem(new ToggleMenuItemNoId("W",ui,uistr::wattersonSetTheta));
+    AddMenuItem(new SetMenuItemThetas(ui));
+}
+
+CoalescenceMenu::~CoalescenceMenu ()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/coalmenus.h b/src/lamarcmenus/coalmenus.h
new file mode 100644
index 0000000..57f60ec
--- /dev/null
+++ b/src/lamarcmenus/coalmenus.h
@@ -0,0 +1,67 @@
+// $Id: coalmenus.h,v 1.15 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef COALMENUS_H
+#define COALMENUS_H
+
+#include <string>
+#include <vector>
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+#include "menutypedefs.h"
+
+class UIInterface;
+
+class SetAllThetasMenuItem : public SetMenuItemId
+{
+  public:
+    SetAllThetasMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~SetAllThetasMenuItem();
+    virtual bool IsVisible();
+    virtual std::string GetVariableText();
+};
+
+class SetThetasFstMenuItem : public ToggleMenuItemNoId
+{
+  public:
+    SetThetasFstMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~SetThetasFstMenuItem();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemThetas : public SetMenuItemGroup
+{
+  public:
+    SetMenuItemThetas(UIInterface & ui);
+    virtual ~SetMenuItemThetas();
+    virtual std::vector<UIId> GetVisibleIds();
+};
+
+class CoalescenceMenu : public NewMenu
+{
+  public:
+    CoalescenceMenu(UIInterface & myui);
+    virtual ~CoalescenceMenu();
+};
+
+class CoalescenceMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    CoalescenceMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~CoalescenceMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new CoalescenceMenu(ui));};
+};
+
+#endif  // COALMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/constraintmenus.cpp b/src/lamarcmenus/constraintmenus.cpp
new file mode 100644
index 0000000..b4b9878
--- /dev/null
+++ b/src/lamarcmenus/constraintmenus.cpp
@@ -0,0 +1,418 @@
+// $Id: constraintmenus.cpp,v 1.11 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <string>
+
+#include "display.h"
+#include "lamarc_strings.h"
+#include "newmenuitems.h"
+#include "constraintmenus.h"
+#include "togglemenuitem.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+ConstraintMenuForOneForce::ConstraintMenuForOneForce(UIInterface& ui,UIId id)
+    : NewMenu(ui,lamarcmenu::forceConstraintTitle+ToString(id.GetForceType()),
+              lamarcmenu::forceConstraintInfo)
+{
+    AddMenuItem(new AddToGroupedConstraintsMenuItem("A", ui, id));
+    AddMenuItem(new RemoveFromGroupedConstraintsMenuItem("R", ui, id));
+    AddMenuItem(new ToggleMenuItemUngroupedConstraints(ui,id));
+    AddMenuItem(new ToggleMenuItemGroupedConstraints(ui, id));
+}
+
+ConstraintMenuForOneForce::~ConstraintMenuForOneForce()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+SubMenuConstraintsForOneForce::SubMenuConstraintsForOneForce(std::string key, UIInterface& ui, UIId id)
+    : ForceSubMenuItem(key, ui, new ConstraintsMenuForOneForceCreator(ui, id), id)
+{
+}
+
+SubMenuConstraintsForOneForce::~SubMenuConstraintsForOneForce()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+AddToGroupedConstraintsMenuItem::AddToGroupedConstraintsMenuItem
+(std::string key, UIInterface& ui, UIId id)
+    : SetMenuItemId(key, ui, uistr::addParamToGroup, id)
+{
+}
+
+AddToGroupedConstraintsMenuItem::~AddToGroupedConstraintsMenuItem()
+{
+}
+
+MenuInteraction_ptr
+AddToGroupedConstraintsMenuItem::GetHandler(std::string input)
+{
+    assert(Handles(input));
+    return MenuInteraction_ptr(new GetIdAndAddDialog(ui, menuKey, id));
+}
+
+//------------------------------------------------------------------------------------
+
+GetIdAndAddDialog::GetIdAndAddDialog(UIInterface& ui, string menuKey, UIId id)
+    : SetDialog(ui, menuKey, id),
+      outputId()
+{
+}
+
+GetIdAndAddDialog::~GetIdAndAddDialog()
+{
+}
+
+menu_return_type GetIdAndAddDialog::InvokeMe(Display& display)
+{
+    display.DisplayDialogOutput(beforeLoopOutputString());
+    bool success = false;
+    for(int i=0;(i<maxTries() && success==false);i++)
+    {
+        display.DisplayDialogOutput(inLoopOutputString());
+        std::string result = display.GetUserInput();
+        if (handleInput(result))
+        {
+            display.DisplayDialogOutput(afterLoopSuccessOutputString());
+            success = true;
+        }
+        else
+        {
+            display.DisplayDialogOutput(inLoopFailureOutputString());
+        }
+    }
+    if (success)
+    {
+        for(int i=0;i<maxTries();i++)
+        {
+            display.DisplayDialogOutput(inLoopOutputString2());
+            std::string result = display.GetUserInput();
+            if (handleInput2(result))
+            {
+                display.DisplayDialogOutput(afterLoopSuccessOutputString());
+                return menu_REDISPLAY;
+            }
+            else
+            {
+                display.DisplayDialogOutput(inLoopFailureOutputString());
+            }
+        }
+    }
+    display.DisplayDialogOutput(afterLoopFailureOutputString());
+    doFailure();
+    return menu_REDISPLAY;
+}
+
+bool GetIdAndAddDialog::handleInput2(std::string input)
+{
+    haveError = false;
+    try
+    {
+        stuffVal2IntoUI(input);
+    }
+    catch (data_error& e)
+    {
+        currError = e.what();
+        haveError = true;
+        return false;
+    }
+    return true;
+}
+
+void GetIdAndAddDialog::stuffValIntoUI(std::string val)
+{
+    long pindex = ProduceLongOrBarf(val) - 1;
+    UIIdVec1d ungroupedIds=ui.doGetUIIdVec1d(uistr::ungroupedParamsForForce,id);
+    bool matched = false;
+    //The following code basically reiterates Group::Handles, below.
+    for (UIIdVec1d::iterator thisId = ungroupedIds.begin();
+         thisId != ungroupedIds.end(); thisId++)
+    {
+        UIId& visibleId = *thisId;
+        if(visibleId.GetIndex1() == pindex)
+        {
+            matched = true;
+        }
+    }
+    if (!matched)
+    {
+        throw data_error("That number is not the index of an ungrouped parameter.");
+    }
+
+    //Set Index2 of the UIId equal to the pindex.  Since we don't know the
+    // gindex yet, just set it to zero.
+    long gindex = 0;
+    UIId newid(id.GetForceType(), gindex, pindex);
+    outputId = newid;
+};
+
+void GetIdAndAddDialog::stuffVal2IntoUI(std::string val)
+{
+    if (CaselessStrCmp("N", val))
+    {
+        ui.doSet(uistr::addParamToNewGroup, ToString(noval_none), outputId);
+        return;
+    }
+    long gindex = ProduceLongOrBarf(val) - 1;
+    UIIdVec2d groupedIds=ui.doGetUIIdVec2d(uistr::groupedParamsForForce,id);
+    if (0 > gindex || gindex >= static_cast<long>(groupedIds.size()))
+    {
+        throw data_error("Please enter a valid group index or 'N'.");
+    }
+    UIId newId(outputId.GetForceType(), gindex, outputId.GetIndex2());
+    ui.doSet(menuKey,ToString(noval_none),newId);
+};
+
+string GetIdAndAddDialog::inLoopOutputString()
+{
+    return "Select a parameter to add to a group.\n";
+}
+
+string GetIdAndAddDialog::inLoopOutputString2()
+{
+    return "Select a group to add this parameter to (or N for a new group).\n";
+}
+
+//------------------------------------------------------------------------------------
+
+RemoveFromGroupedConstraintsMenuItem::RemoveFromGroupedConstraintsMenuItem
+(std::string key, UIInterface& ui, UIId id)
+    : SetMenuItemId(key, ui, uistr::removeParamFromGroup, id)
+{
+}
+
+RemoveFromGroupedConstraintsMenuItem::~RemoveFromGroupedConstraintsMenuItem()
+{
+}
+
+MenuInteraction_ptr
+RemoveFromGroupedConstraintsMenuItem::GetHandler(std::string input)
+{
+    assert(Handles(input));
+    return MenuInteraction_ptr(new GetIdAndRemoveDialog(ui, menuKey, id));
+}
+
+//------------------------------------------------------------------------------------
+
+GetIdAndRemoveDialog::GetIdAndRemoveDialog(UIInterface& ui, string menuKey,
+                                           UIId id)
+    : SetDialog(ui, menuKey, id)
+{
+}
+
+GetIdAndRemoveDialog::~GetIdAndRemoveDialog()
+{
+}
+
+void GetIdAndRemoveDialog::stuffValIntoUI(std::string val)
+{
+    long pindex = ProduceLongOrBarf(val) - 1;
+    UIIdVec2d groupIds = ui.doGetUIIdVec2d(uistr::groupedParamsForForce,id);
+    bool matched = false;
+    //The following code basically reiterates 2dGroup::Handles, below.
+    UIIdVec2d::iterator group;
+    for(group = groupIds.begin(); group != groupIds.end(); group++)
+    {
+        for (UIIdVec1d::iterator id = (*group).begin();
+             id != (*group).end(); id++)
+        {
+            UIId& visibleId = *id;
+            if(visibleId.GetIndex2() == pindex)
+            {
+                matched = true;
+            }
+        }
+    }
+    if (!matched)
+    {
+        throw data_error("That number is not the index of a grouped parameter.");
+    }
+    UIId newid(id.GetForceType(), pindex);
+    ui.doSet(menuKey,ToString(noval_none),newid);
+};
+
+string GetIdAndRemoveDialog::inLoopOutputString()
+{
+    return "Enter the index of the parameter you wish to remove from a group.\n";
+}
+
+//------------------------------------------------------------------------------------
+
+ToggleMenuItemUngroupedConstraints::ToggleMenuItemUngroupedConstraints(UIInterface & ui,UIId id)
+    : ToggleMenuItemGroup(ui,uistr::constraintType), m_id(id)
+{
+}
+
+ToggleMenuItemUngroupedConstraints::~ToggleMenuItemUngroupedConstraints()
+{
+}
+
+string ToggleMenuItemUngroupedConstraints::GetGroupDescription()
+{
+    return "Ungrouped parameters:";
+}
+
+string ToggleMenuItemUngroupedConstraints::GetEmptyDescription()
+{
+    return "  **None**";
+}
+
+UIIdVec1d
+ToggleMenuItemUngroupedConstraints::GetVisibleIds()
+{
+    return ui.doGetUIIdVec1d(uistr::ungroupedParamsForForce,m_id);
+}
+
+//------------------------------------------------------------------------------------
+
+ToggleMenuItem2dGroup::ToggleMenuItem2dGroup(UIInterface & myui,
+                                             string myMenuKey)
+    : ui(myui), menuKey(myMenuKey)
+{
+}
+
+UIId ToggleMenuItem2dGroup::GetGroupIdFromLocalId(UIId localId)
+{
+    UIIdVec2d allvisibles = GetVisibleIds();
+    long gindex = 0;
+    for(UIIdVec2d::iterator visibleset = allvisibles.begin();
+        visibleset != allvisibles.end(); visibleset++, gindex++)
+    {
+        for(UIIdVec1d::iterator id = (*visibleset).begin();
+            id != (*visibleset).end(); id++)
+        {
+            UIId& visibleId = *id;
+            if (visibleId.GetIndex2() == localId.GetIndex1())
+            {
+                return visibleId;
+            }
+        }
+    }
+    return UIId(FLAGLONG);
+}
+
+UIId ToggleMenuItem2dGroup::GetIdFromKey(string key)
+{
+    UIId localId(keyToIndex(key));
+    return GetGroupIdFromLocalId(localId);
+}
+
+string ToggleMenuItem2dGroup::GetKey(UIId id)
+{
+    // There is no guarantee that this key
+    // has a unique index1, but most will
+    // so this is the default implementation.
+    // If this is causing you trouble, override
+    // this virtual method
+    return indexToKey(id.GetIndex2());
+}
+
+string ToggleMenuItem2dGroup::GetText(UIId id)
+{
+    return ui.doGetDescription(menuKey,id);
+}
+
+string ToggleMenuItem2dGroup::GetVariableText(UIId id)
+{
+    return ui.doGetPrintString(menuKey,id);
+}
+
+string ToggleMenuItem2dGroup::GetGroupDescription(long gid)
+{
+    return "\n";
+}
+
+string ToggleMenuItem2dGroup::GetEmptyDescription(long gid)
+{
+    return "\n";
+}
+
+bool ToggleMenuItem2dGroup::Handles(string input)
+{
+    // kinda wasteful, but avoids some unpleasant-to-code
+    // error checking
+    UIIdVec2d allvisibles = GetVisibleIds();
+    UIIdVec2d::iterator group;
+    for(group = allvisibles.begin(); group != allvisibles.end(); group++)
+    {
+        for (UIIdVec1d::iterator id = (*group).begin();
+             id != (*group).end(); id++)
+        {
+            UIId& visibleId = *id;
+            if(CaselessStrCmp(GetKey(visibleId),input))
+            {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+MenuInteraction_ptr ToggleMenuItem2dGroup::GetHandler(string input)
+{
+    UIId groupId = GetIdFromKey(input);
+    if (groupId.GetIndex1() != FLAGLONG)
+    {
+        return MakeOneHandler(groupId);
+    }
+    else
+    {
+        return MenuInteraction_ptr(new DoNothingHandler());
+    }
+}
+
+MenuInteraction_ptr ToggleMenuItem2dGroup::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new NewToggleHandler(ui,menuKey,id));
+}
+
+void ToggleMenuItem2dGroup::DisplayItemOn(Display & display)
+{
+    display.ShowMenuDisplay2dGroup(*this);
+}
+
+//------------------------------------------------------------------------------------
+
+ToggleMenuItemGroupedConstraints::ToggleMenuItemGroupedConstraints
+(UIInterface & ui,UIId id)
+    : ToggleMenuItem2dGroup(ui,uistr::groupConstraintType),
+      m_id(id)
+{
+}
+
+ToggleMenuItemGroupedConstraints::~ToggleMenuItemGroupedConstraints()
+{
+}
+
+UIIdVec2d ToggleMenuItemGroupedConstraints::GetVisibleIds()
+{
+    return ui.doGetUIIdVec2d(uistr::groupedParamsForForce,m_id);
+}
+
+string ToggleMenuItemGroupedConstraints::GetGroupDescription(long gid)
+{
+    return ("Group " + ToString(gid+1));
+}
+
+string ToggleMenuItemGroupedConstraints::GetEmptyDescription(long gid)
+{
+    return ("  **No parameters in this group**");
+}
+
+//____________________________________________________________________________________
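
GetIdAndAddDialog::InvokeMe gives the user a fixed number of attempts at each of two prompts, converts the 1-based index the user sees into the 0-based index stored internally, and rejects answers that do not correspond to a known parameter or group. A stripped-down, console-only sketch of that bounded-retry pattern is below; the prompt text, the range check, and the exception are placeholders rather than LAMARC's UIInterface calls.

    #include <cstdlib>
    #include <iostream>
    #include <stdexcept>
    #include <string>

    // Ask up to maxTries times for an index between 1 and count (as the user sees it)
    // and return it converted to the 0-based value the code stores.
    long PromptForIndex(const std::string & prompt, long count, int maxTries = 3)
    {
        for (int attempt = 0; attempt < maxTries; ++attempt)
        {
            std::cout << prompt << " (1-" << count << "): ";
            std::string line;
            if (!std::getline(std::cin, line)) break;
            char * end = 0;
            long value = std::strtol(line.c_str(), &end, 10) - 1;   // 1-based -> 0-based
            if (end != line.c_str() && *end == '\0' && value >= 0 && value < count)
            {
                return value;
            }
            std::cout << "That is not a valid index.\n";
        }
        throw std::runtime_error("no valid index entered");
    }
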
diff --git a/src/lamarcmenus/constraintmenus.h b/src/lamarcmenus/constraintmenus.h
new file mode 100644
index 0000000..a47c7ef
--- /dev/null
+++ b/src/lamarcmenus/constraintmenus.h
@@ -0,0 +1,163 @@
+// $Id: constraintmenus.h,v 1.8 2012/02/29 00:29:58 ewalkup Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef CONSTRAINTMENUS_H
+#define CONSTRAINTMENUS_H
+
+#include <string>
+#include "forcesmenus.h"
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+
+class UIInterface;
+
+class ConstraintMenuForOneForce : public NewMenu
+{
+  public:
+    ConstraintMenuForOneForce(UIInterface &ui,UIId id);
+    virtual ~ConstraintMenuForOneForce();
+};
+
+//These two classes are used for the individual forces' menus.
+class SubMenuConstraintsForOneForce : public ForceSubMenuItem
+{
+  private:
+    SubMenuConstraintsForOneForce(); //undefined
+    UIId id;
+  public:
+    SubMenuConstraintsForOneForce(std::string key, UIInterface& ui, UIId id);
+    virtual ~SubMenuConstraintsForOneForce();
+};
+
+class ConstraintsMenuForOneForceCreator : public NewMenuCreator
+{
+  private:
+    ConstraintsMenuForOneForceCreator(); //undefined
+    UIInterface& ui;
+    UIId id;
+  public:
+    ConstraintsMenuForOneForceCreator(UIInterface& myui, UIId myid) :
+        ui(myui), id(myid) {};
+    virtual ~ConstraintsMenuForOneForceCreator() {};
+    virtual NewMenu_ptr Create() { return NewMenu_ptr(new ConstraintMenuForOneForce(ui, id));};
+};
+
+class AddToGroupedConstraintsMenuItem : public SetMenuItemId
+{
+  public:
+    AddToGroupedConstraintsMenuItem(std::string key, UIInterface& ui,
+                                    UIId id);
+    virtual ~AddToGroupedConstraintsMenuItem();
+    virtual MenuInteraction_ptr GetHandler(std::string input);
+};
+
+class GetIdAndAddDialog : public SetDialog
+{
+  private:
+    UIId outputId;
+  protected:
+    virtual void stuffValIntoUI(std::string val);
+    virtual void stuffVal2IntoUI(std::string val);
+  public:
+    GetIdAndAddDialog(UIInterface& ui, string menuKey, UIId id);
+    virtual ~GetIdAndAddDialog();
+    virtual menu_return_type InvokeMe(Display& display);
+    virtual bool handleInput2(std::string input);
+    virtual string inLoopOutputString();
+    virtual string inLoopOutputString2();
+
+};
+
+class RemoveFromGroupedConstraintsMenuItem : public SetMenuItemId
+{
+  public:
+    RemoveFromGroupedConstraintsMenuItem(std::string key, UIInterface& ui,
+                                         UIId id);
+    virtual ~RemoveFromGroupedConstraintsMenuItem();
+    virtual MenuInteraction_ptr GetHandler(std::string input);
+};
+
+class GetIdAndRemoveDialog : public SetDialog
+{
+  protected:
+    virtual void stuffValIntoUI(std::string val);
+  public:
+    GetIdAndRemoveDialog(UIInterface& ui, string menuKey, UIId id);
+    virtual ~GetIdAndRemoveDialog();
+    virtual string inLoopOutputString();
+
+};
+
+class ToggleMenuItemUngroupedConstraints : public ToggleMenuItemGroup
+{
+  private:
+    UIId    m_id;
+  public:
+    ToggleMenuItemUngroupedConstraints(UIInterface & ui,UIId id);
+    virtual ~ToggleMenuItemUngroupedConstraints();
+    virtual std::string GetGroupDescription();
+    virtual std::string GetEmptyDescription();
+    virtual UIIdVec1d GetVisibleIds();
+};
+
+class ToggleMenuItem2dGroup : public MenuDisplayQuantaWithHandler
+{
+  private:
+    ToggleMenuItem2dGroup(); //undefined
+  protected:
+    UIInterface & ui;
+    std::string menuKey;
+    virtual UIId GetIdFromKey(std::string key);
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+    virtual UIId GetGroupIdFromLocalId(UIId localId);
+  public:
+    ToggleMenuItem2dGroup(UIInterface& ui, string menuKey);
+    virtual ~ToggleMenuItem2dGroup() {};
+    virtual UIIdVec2d GetVisibleIds() = 0;
+    // single alphanumeric to invoke this line
+    virtual std::string GetKey(UIId id);
+    // description of this item
+    virtual std::string GetText(UIId id);
+    // current value of this item if any
+    virtual std::string GetVariableText(UIId id);
+    virtual std::string GetGroupDescription() {return "\n";};
+    virtual std::string GetGroupDescription(long groupId);
+    virtual std::string GetEmptyDescription() {return "\n";};
+    virtual std::string GetEmptyDescription(long groupId);
+    virtual bool Handles(std::string input);
+    virtual MenuInteraction_ptr GetHandler(std::string input);
+    // not virtual because we're assuming all inheritors should
+    // use the canned routine given in class Display
+    void DisplayItemOn(Display & display);
+
+    // stuff for multi-line items
+    virtual bool HasMultiLineItems() {return false;};
+    virtual std::vector<std::string> GetExtraText(UIId id) {return std::vector<std::string>();};
+    virtual std::vector<std::string> GetExtraVariableText(UIId id) {return std::vector<std::string>();};
+
+};
+
+class ToggleMenuItemGroupedConstraints : public ToggleMenuItem2dGroup
+{
+  private:
+    UIId    m_id;
+  public:
+    ToggleMenuItemGroupedConstraints(UIInterface & ui,UIId id);
+    virtual ~ToggleMenuItemGroupedConstraints();
+    virtual UIIdVec2d GetVisibleIds();
+    // description of the group
+    virtual std::string GetGroupDescription(long groupId);
+    virtual std::string GetEmptyDescription(long groupId);
+};
+
+#endif  // CONSTRAINTMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/datafilenamedialog.cpp b/src/lamarcmenus/datafilenamedialog.cpp
new file mode 100644
index 0000000..3348ba0
--- /dev/null
+++ b/src/lamarcmenus/datafilenamedialog.cpp
@@ -0,0 +1,102 @@
+// $Id: datafilenamedialog.cpp,v 1.14 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include <iostream>
+
+#include "dialogrepeat.h"
+#include "datafilenamedialog.h"
+#include "errhandling.h"
+#include "menudefs.h"
+#include "stringx.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "xml.h"
+
+DataFileNameDialog::DataFileNameDialog(XmlParser & parser)
+    : DialogRepeat() , m_dataFileName(parser.GetFileName()), m_parser(parser)
+{
+}
+
+DataFileNameDialog::~DataFileNameDialog()
+{
+}
+
+long DataFileNameDialog::maxTries()
+{
+    return 3;
+}
+
+std::string DataFileNameDialog::displayFileName()
+{
+    std::string displayFileName;
+    if(m_dataFileName == "")
+    {
+        displayFileName = "[No default set]\n";
+    }
+    else
+    {
+        displayFileName = "[Default: "+ m_dataFileName + "]\n";
+    }
+    return displayFileName;
+}
+
+std::string DataFileNameDialog::beforeLoopOutputString()
+{
+    return "";
+}
+
+std::string DataFileNameDialog::inLoopOutputString()
+{
+    return "Enter the location of the data file\n"+displayFileName()+"\n";
+}
+
+std::string DataFileNameDialog::inLoopFailureOutputString()
+{
+    return " \n \n" + m_errmsg + "\n";
+}
+
+std::string DataFileNameDialog::afterLoopSuccessOutputString()
+{
+    std::string message = "Data file was read successfully\n\n";
+    message += "Calculating starting values; please be patient";
+    return message;
+}
+
+std::string DataFileNameDialog::afterLoopFailureOutputString()
+{
+    return "Unable to read or find your file in "+ToString(maxTries())
+        + " attempts.\n";
+}
+
+bool DataFileNameDialog::handleInput(std::string input)
+{
+    if (input.size () != 0)
+    {
+        m_dataFileName = input;
+    }
+    try
+    {
+        m_parser.ParseFileData(m_dataFileName);
+    }
+    catch (const data_error& e)
+    {
+        m_errmsg = e.whatString();
+        return false;
+    }
+    return true;
+}
+
+void DataFileNameDialog::doFailure()
+{
+    throw data_error("To create a LAMARC input file, please run the converter (lam_conv).");
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/datafilenamedialog.h b/src/lamarcmenus/datafilenamedialog.h
new file mode 100644
index 0000000..7cecdd3
--- /dev/null
+++ b/src/lamarcmenus/datafilenamedialog.h
@@ -0,0 +1,44 @@
+// $Id: datafilenamedialog.h,v 1.8 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef DATAFILENAMEDIALOG_H
+#define DATAFILENAMEDIALOG_H
+
+#include <string>
+#include "dialogrepeat.h"
+
+class XmlParser;
+
+class DataFileNameDialog : public DialogRepeat
+{
+  private:
+    std::string    m_dataFileName;
+    XmlParser &    m_parser;
+    std::string    m_errmsg;
+
+  protected:
+    virtual long maxTries();
+    virtual std::string beforeLoopOutputString();
+    virtual std::string inLoopOutputString();
+    virtual std::string inLoopFailureOutputString();
+    virtual std::string afterLoopSuccessOutputString();
+    virtual std::string afterLoopFailureOutputString();
+    virtual bool handleInput(std::string input);
+    virtual std::string displayFileName();
+    virtual void doFailure();
+
+  public:
+    DataFileNameDialog(XmlParser &);
+    virtual ~DataFileNameDialog();
+};
+
+#endif // DATAFILENAMEDIALOG_H
+
+//____________________________________________________________________________________
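
DataFileNameDialog only fills in hook methods (maxTries(), the prompt and failure strings, handleInput(), doFailure()) and leaves the actual prompt/retry loop to DialogRepeat, which is not shown in this hunk. The sketch below is a minimal, self-contained rendering of that template-method pattern; class and method names are stand-ins, not LAMARC's real DialogRepeat interface.

    // Illustrative only: the retry-dialog shape DataFileNameDialog's overrides assume.
    #include <iostream>
    #include <string>

    class RetryDialogSketch
    {
      protected:
        virtual long maxTries() = 0;
        virtual std::string prompt() = 0;                 // cf. inLoopOutputString()
        virtual bool handleInput(std::string input) = 0;  // true when input was usable
        virtual void doFailure() = 0;                     // called after the last failed try
      public:
        virtual ~RetryDialogSketch() {}
        bool Run()
        {
            for (long attempt = 0; attempt < maxTries(); ++attempt)
            {
                std::cout << prompt();
                std::string input;
                std::getline(std::cin, input);
                if (handleInput(input)) return true;
            }
            doFailure();                                  // e.g. throw, as DataFileNameDialog does
            return false;
        }
    };

    class FileNameDialogSketch : public RetryDialogSketch
    {
        std::string m_name;
      protected:
        virtual long maxTries()      { return 3; }
        virtual std::string prompt() { return "Enter the location of the data file:\n"; }
        virtual bool handleInput(std::string input)
        {
            if (!input.empty()) m_name = input;           // keep the default otherwise
            return !m_name.empty();                       // stand-in for ParseFileData()
        }
        virtual void doFailure()     { std::cerr << "Unable to read a file name.\n"; }
      public:
        std::string Name() const     { return m_name; }
    };

    int main()
    {
        FileNameDialogSketch dialog;
        if (dialog.Run())
            std::cout << "Using data file: " << dialog.Name() << "\n";
        return 0;
    }
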
diff --git a/src/lamarcmenus/datamodelmenu.cpp b/src/lamarcmenus/datamodelmenu.cpp
new file mode 100644
index 0000000..da6b783
--- /dev/null
+++ b/src/lamarcmenus/datamodelmenu.cpp
@@ -0,0 +1,788 @@
+// $Id: datamodelmenu.cpp,v 1.42 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <string>
+
+#include "datamodelmenu.h"
+#include "lamarc_strings.h"
+#include "menu_strings.h"
+#include "menuitem.h"
+#include "overviewmenus.h"
+#include "popsizemenu.h"
+#include "setmenuitem.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+AssignToGlobalMenuItem::AssignToGlobalMenuItem(
+    string myKey,
+    UIInterface & myui,
+    UIId myid)
+    : ToggleMenuItemId(myKey,myui,uistr::useGlobalDataModelForOne,myid)
+{
+}
+
+AssignToGlobalMenuItem::~AssignToGlobalMenuItem()
+{
+}
+
+bool AssignToGlobalMenuItem::IsVisible()
+{
+    long index = GetId().GetIndex1();
+    return (index != uiconst::GLOBAL_DATAMODEL_NUC_ID &&
+            index != uiconst::GLOBAL_DATAMODEL_MSAT_ID &&
+            index != uiconst::GLOBAL_DATAMODEL_KALLELE_ID);
+}
+
+//------------------------------------------------------------------------------------
+
+TTRatioMenuItem::TTRatioMenuItem(
+    string myKey,
+    UIInterface & myui,
+    UIId myid)
+    : SetMenuItemId(myKey,myui,uistr::TTRatio,myid)
+{
+}
+
+TTRatioMenuItem::~TTRatioMenuItem()
+{
+}
+
+bool TTRatioMenuItem::IsVisible()
+{
+    model_type mt = ui.doGetModelType(uistr::dataModel,id);
+    return mt == F84;
+}
+
+//------------------------------------------------------------------------------------
+
+DataUncertaintyMenuItem::DataUncertaintyMenuItem(
+    string myKey,
+    UIInterface & myui,
+    UIId myid)
+    : SetMenuItemId(myKey,myui,uistr::perBaseErrorRate,myid)
+{
+}
+
+DataUncertaintyMenuItem::~DataUncertaintyMenuItem()
+{
+}
+
+bool DataUncertaintyMenuItem::IsVisible()
+{
+    // visible only if this is a nucleotide data type
+    data_type dt = ui.doGetDataType(uistr::dataType,id);
+    return (dt == dtype_DNA || dt == dtype_SNP);
+}
+
+//------------------------------------------------------------------------------------
+
+AlphaMenuItem::AlphaMenuItem(
+    string myKey,
+    UIInterface & myui,
+    UIId myid)
+    : SetMenuItemId(myKey,myui,uistr::alpha,myid)
+{
+}
+
+AlphaMenuItem::~AlphaMenuItem()
+{
+}
+
+bool AlphaMenuItem::IsVisible()
+{
+    model_type mt = ui.doGetModelType(uistr::dataModel,id);
+    return mt == MixedKS;
+}
+
+//------------------------------------------------------------------------------------
+
+OptimizeAlphaMenuItem::OptimizeAlphaMenuItem(
+    string myKey,
+    UIInterface & myui,
+    UIId myid)
+    : ToggleMenuItemId(myKey,myui,uistr::optimizeAlpha,myid)
+{
+}
+
+OptimizeAlphaMenuItem::~OptimizeAlphaMenuItem()
+{
+}
+
+bool OptimizeAlphaMenuItem::IsVisible()
+{
+    model_type mt = ui.doGetModelType(uistr::dataModel,id);
+    return mt == MixedKS;
+}
+
+//------------------------------------------------------------------------------------
+
+GTRRatesMenu::GTRRatesMenu(UIInterface& myui, UIId myid)
+    : NewMenu(myui,menustr::emptyString) , id(myid)
+{
+    AddMenuItem(new SetMenuItemId("1",ui,uistr::gtrRateAC,id));
+    AddMenuItem(new SetMenuItemId("2",ui,uistr::gtrRateAG,id));
+    AddMenuItem(new SetMenuItemId("3",ui,uistr::gtrRateAT,id));
+    AddMenuItem(new SetMenuItemId("4",ui,uistr::gtrRateCG,id));
+    AddMenuItem(new SetMenuItemId("5",ui,uistr::gtrRateCT,id));
+    AddMenuItem(new SetMenuItemId("6",ui,uistr::gtrRateGT,id));
+}
+
+GTRRatesMenu::~GTRRatesMenu()
+{
+}
+
+string GTRRatesMenu::Title()
+{
+    string title = lamarcmenu::gtrRatesTitle;
+    if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_NUC_ID)
+    {
+        return title + lamarcmenu::forGlobalNuc;
+    }
+    assert (id.GetIndex1() != uiconst::GLOBAL_DATAMODEL_MSAT_ID);
+    assert (id.GetIndex1() != uiconst::GLOBAL_DATAMODEL_KALLELE_ID);
+
+    return title + ui.doGetString(uistr::regionName,id);
+}
+
+GTRRatesSubMenuItem::GTRRatesSubMenuItem(string myKey, UIInterface & myui, UIId myid)
+    : SubMenuItem(myKey,myui,NULL), id(myid)
+{
+}
+
+GTRRatesSubMenuItem::~GTRRatesSubMenuItem()
+{
+}
+
+bool GTRRatesSubMenuItem::IsVisible()
+{
+    model_type mt = ui.doGetModelType(uistr::dataModel,id);
+    return (mt == GTR);
+}
+
+string GTRRatesSubMenuItem::GetText()
+{
+    return uistr::gtrRates;
+}
+
+string GTRRatesSubMenuItem::GetVariableText()
+{
+    return ui.doGetPrintString(uistr::gtrRates,id);
+}
+
+MenuInteraction_ptr GTRRatesSubMenuItem::GetHandler(std::string inputKey)
+{
+    NewMenu * gtrRateMenu = new GTRRatesMenu(ui,id);
+    return MenuInteraction_ptr(gtrRateMenu);
+}
+
+//------------------------------------------------------------------------------------
+
+FreqsFromDataMenuItem::FreqsFromDataMenuItem(string myKey, UIInterface & myui, UIId myid)
+    : ToggleMenuItemId(myKey,myui,uistr::freqsFromData,myid)
+{
+}
+
+FreqsFromDataMenuItem::~FreqsFromDataMenuItem()
+{
+}
+
+bool FreqsFromDataMenuItem::IsVisible()
+{
+    model_type mt = ui.doGetModelType(uistr::dataModel,id);
+    return (mt == F84);
+    //If the model is GTR, we require that you set the frequencies by hand
+}
+
+//------------------------------------------------------------------------------------
+
+SetFrequencyMenuItemId::SetFrequencyMenuItemId(string myKey,
+                                               UIInterface& myui,
+                                               std::string myVariable,
+                                               UIId myid)
+    : SetMenuItemId(myKey,myui,myVariable,myid)
+{
+}
+
+SetFrequencyMenuItemId::~SetFrequencyMenuItemId()
+{
+}
+
+string
+SetFrequencyMenuItemId::GetText()
+{
+    string text = SetMenuItemId::GetText();
+    if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_NUC_ID)
+    {
+        text += lamarcmenu::wordFor + lamarcmenu::forGlobalNuc;
+    }
+    assert(id.GetIndex1() != uiconst::GLOBAL_DATAMODEL_MSAT_ID);
+    assert(id.GetIndex1() != uiconst::GLOBAL_DATAMODEL_KALLELE_ID);
+    return text;
+}
+
+bool
+SetFrequencyMenuItemId::IsVisible()
+{
+    if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_NUC_ID)
+    {
+        return !(ui.doGetBool(uistr::freqsFromData,id));
+    }
+    assert(id.GetIndex1() != uiconst::GLOBAL_DATAMODEL_MSAT_ID);
+    assert(id.GetIndex1() != uiconst::GLOBAL_DATAMODEL_KALLELE_ID);
+    return true;
+}
+
+BaseFrequenciesMenu::BaseFrequenciesMenu(UIInterface& myui, UIId myid)
+    : NewMenu(myui,menustr::emptyString) , id(myid)
+{
+    // Calculate base frequencies for F84
+    AddMenuItem(new FreqsFromDataMenuItem("F",ui,id));
+    AddMenuItem(new SetFrequencyMenuItemId("A",ui,uistr::baseFrequencyA,id));
+    AddMenuItem(new SetFrequencyMenuItemId("C",ui,uistr::baseFrequencyC,id));
+    AddMenuItem(new SetFrequencyMenuItemId("G",ui,uistr::baseFrequencyG,id));
+    AddMenuItem(new SetFrequencyMenuItemId("T",ui,uistr::baseFrequencyT,id));
+}
+
+BaseFrequenciesMenu::~BaseFrequenciesMenu()
+{
+}
+
+string BaseFrequenciesMenu::Title()
+{
+    string title = lamarcmenu::baseFrequenciesTitle;
+    if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_NUC_ID)
+    {
+        return title + lamarcmenu::forGlobalNuc;
+    }
+    assert(id.GetIndex1() != uiconst::GLOBAL_DATAMODEL_MSAT_ID);
+    assert(id.GetIndex1() != uiconst::GLOBAL_DATAMODEL_KALLELE_ID);
+    return title + ui.doGetString(uistr::regionName,id);
+}
+
+BaseFrequenciesSubMenuItem::BaseFrequenciesSubMenuItem(string myKey, UIInterface & myui, UIId myid)
+    : SubMenuItem(myKey,myui,NULL), id(myid)
+{
+}
+
+BaseFrequenciesSubMenuItem::~BaseFrequenciesSubMenuItem()
+{
+}
+
+bool BaseFrequenciesSubMenuItem::IsVisible()
+{
+    model_type mt = ui.doGetModelType(uistr::dataModel,id);
+    return (mt == F84 || mt == GTR);
+}
+
+string BaseFrequenciesSubMenuItem::GetText()
+{
+    return uistr::baseFrequencies;
+}
+
+string BaseFrequenciesSubMenuItem::GetVariableText()
+{
+    return ui.doGetPrintString(uistr::baseFrequencies,id);
+}
+
+MenuInteraction_ptr BaseFrequenciesSubMenuItem::GetHandler(std::string inputKey)
+{
+    NewMenu * baseFreqMenu = new BaseFrequenciesMenu(ui,id);
+    return MenuInteraction_ptr(baseFreqMenu);
+}
+
+//------------------------------------------------------------------------------------
+
+CategoryMenu::CategoryMenu(UIInterface& myui, UIId myid)
+    : NewMenu(myui,menustr::emptyString), id(myid)
+{
+    AddMenuItem(new SetMenuItemId("R",myui,uistr::categoryRate,myid));
+    AddMenuItem(new SetMenuItemId("P",myui,uistr::categoryProbability,myid));
+}
+
+CategoryMenu::~CategoryMenu()
+{
+}
+
+string CategoryMenu::Title()
+{
+    string titleString = lamarcmenu::categoryTitle
+        + ToString(indexToKey(id.GetIndex3()))
+        + lamarcmenu::wordFor;
+    if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_NUC_ID)
+    {
+        titleString += lamarcmenu::forGlobalNuc;
+    }
+    else if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_MSAT_ID)
+    {
+        titleString += lamarcmenu::forGlobalMsat;
+    }
+    else if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_KALLELE_ID)
+    {
+        titleString += lamarcmenu::forGlobalKAllele;
+    }
+    else
+    {
+        titleString += ui.doGetString(uistr::regionName,id);
+    }
+    return titleString;
+}
+
+MenuInteraction_ptr
+CategoriesMenuGroup::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new CategoryMenu(ui,id));
+}
+
+CategoriesMenuGroup::CategoriesMenuGroup(UIInterface& myui, UIId myid)
+    : MenuDisplayGroupBaseImplementation(myui,""), id(myid)
+{
+}
+
+CategoriesMenuGroup::~CategoriesMenuGroup()
+{
+}
+
+string
+CategoriesMenuGroup::GetKey(UIId id)
+{
+    return indexToKey(id.GetIndex3());
+}
+
+string
+CategoriesMenuGroup::GetText(UIId id)
+{
+    return string("Mutation Rate pair: (rate, probability)");
+}
+
+string
+CategoriesMenuGroup::GetVariableText(UIId id)
+{
+    string rate = ui.doGetPrintString(uistr::categoryRate,id);
+    string prob = ui.doGetPrintString(uistr::categoryProbability,id);
+    return string("(")+ToString(rate)+","+ToString(prob)+")";
+}
+
+vector<UIId>
+CategoriesMenuGroup::GetVisibleIds()
+{
+    vector<UIId> visibleIds;
+    long numCats = ui.doGetLong(uistr::categoryCount,id);
+    for(long i=0; i < numCats; i++)
+    {
+        visibleIds.push_back(UIId(id.GetIndex1(),id.GetIndex2(),i));
+    }
+    return visibleIds;
+}
+
+CategoriesMenu::CategoriesMenu(UIInterface& myui, UIId myid)
+    : NewMenu(myui,menustr::emptyString, lamarcmenu::categoriesInfo) , id(myid)
+{
+    AddMenuItem(new SetMenuItemId("N",myui,uistr::categoryCount,myid));
+    AddMenuItem(new CategoriesMenuGroup(ui,id));
+}
+
+CategoriesMenu::~CategoriesMenu()
+{
+}
+
+string CategoriesMenu::Title()
+{
+    string title = lamarcmenu::categoriesTitle;
+    if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_NUC_ID)
+    {
+        return title + lamarcmenu::forGlobalNuc;
+    }
+    if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_MSAT_ID)
+    {
+        return title + lamarcmenu::forGlobalMsat;
+    }
+    if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_KALLELE_ID)
+    {
+        return title + lamarcmenu::forGlobalKAllele;
+    }
+    return title + ui.doGetString(uistr::regionName,id);
+}
+
+CategoriesSubMenuItem::CategoriesSubMenuItem(string myKey, UIInterface & myui, UIId myid)
+    : SubMenuItem(myKey,myui,NULL), id(myid)
+{
+}
+
+CategoriesSubMenuItem::~CategoriesSubMenuItem()
+{
+}
+
+string CategoriesSubMenuItem::GetText()
+{
+    return ui.doGetDescription(uistr::categoryCount,id);
+}
+
+string CategoriesSubMenuItem::GetVariableText()
+{
+    return ui.doGetPrintString(uistr::categoryCount,id);
+}
+
+MenuInteraction_ptr CategoriesSubMenuItem::GetHandler(std::string inputKey)
+{
+    NewMenu * catsFreqMenu = new CategoriesMenu(ui,id);
+    return MenuInteraction_ptr(catsFreqMenu);
+}
+
+//------------------------------------------------------------------------------------
+
+RegionDataModelMenu::RegionDataModelMenu(
+    UIInterface & myui,
+    UIId myid)
+    : NewMenu(myui,menustr::emptyString) , id(myid)
+{
+    AddMenuItem(new LocusDataModels(ui,id));
+}
+
+RegionDataModelMenu::~RegionDataModelMenu()
+{
+}
+
+string RegionDataModelMenu::Title()
+{
+    return lamarcmenu::regionDataModelTitle + ui.doGetString(uistr::regionName,id);
+}
+
+string RegionDataModelMenu::Info()
+{
+    return menustr::emptyString;
+}
+
+//------------------------------------------------------------------------------------
+
+RegionDataModels::RegionDataModels(UIInterface & myui)
+    : MenuDisplayGroupBaseImplementation(myui,menustr::emptyString)
+{
+}
+
+RegionDataModels::~RegionDataModels()
+{
+}
+
+vector<UIId> RegionDataModels::GetVisibleIds()
+{
+    vector<UIId> visibleIds;
+    LongVec1d regionNumbers = ui.doGetLongVec1d(uistr::regionNumbers);
+    LongVec1d::iterator i;
+    for(i=regionNumbers.begin(); i != regionNumbers.end(); i++)
+    {
+        visibleIds.push_back(UIId(*i));
+    }
+    return visibleIds;
+}
+
+MenuInteraction_ptr RegionDataModels::MakeOneHandler(UIId id)
+{
+    // if there is only one locus for the region, skip directly
+    // to the locus menu
+    long numLoci = ui.doGetLong(uistr::lociCount,id);
+    if(numLoci > 1)
+    {
+        return MenuInteraction_ptr(new RegionDataModelMenu(ui,id));
+    }
+    else
+    {
+        UIId locusId(id.GetIndex1(),0);
+        return MenuInteraction_ptr(new LocusDataModelMenu(ui,locusId));
+    }
+}
+
+string RegionDataModels::GetText(UIId id)
+{
+    return lamarcmenu::regionDataModelTitle + ui.doGetString(uistr::regionName,id);
+}
+
+string RegionDataModels::GetVariableText(UIId id)
+{
+    long lociCount = ui.doGetLong(uistr::lociCount,id);
+    if(lociCount == 1)
+    {
+        UIId idForFirstLocus = UIId(id.GetIndex1(),0);
+        string text = ui.doGetPrintString(uistr::dataModel,idForFirstLocus);
+        if(ui.doGetBool(uistr::useGlobalDataModelForOne,idForFirstLocus))
+        {
+            text += lamarcmenu::globalModel;
+        }
+        return text;
+    }
+    else
+    {
+        return lamarcmenu::multiLocusCount_0
+            +ToString(lociCount)
+            +lamarcmenu::multiLocusCount_1;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+LocusDataModelMenu::LocusDataModelMenu(
+    UIInterface & myui,
+    UIId myid)
+    : NewMenu(myui,menustr::emptyString) , id(myid)
+{
+    AddMenuItem(new MenuDisplayDataType(ui, id));
+    AddMenuItem(new ToggleMenuItemId("M",ui,uistr::dataModel,id));
+    AddMenuItem(new AssignToGlobalMenuItem("D",ui,id));
+    // rate categories & probs for F84, GTR, stepwise, brownian
+    AddMenuItem(new CategoriesSubMenuItem("C",ui,id));
+    // autocorrelation for F84, GTR, stepwise, brownian
+    AddMenuItem(new SetMenuItemId("A",ui,uistr::autoCorrelation,id));
+    // tt ratio for F84
+    AddMenuItem(new TTRatioMenuItem("T",ui,id));
+    // GTR rates for GTR
+    AddMenuItem(new GTRRatesSubMenuItem("G",ui,id));
+    // base frequencies for F84, GTR
+    AddMenuItem(new BaseFrequenciesSubMenuItem("B",ui,id));
+    // per base error rate
+    AddMenuItem(new DataUncertaintyMenuItem("E",ui,id));
+    // for MixedKS, alpha
+    AddMenuItem(new AlphaMenuItem("L",ui,id));
+    AddMenuItem(new OptimizeAlphaMenuItem("O",ui,id));
+    //We need to let people set the relative mu rate because it can be
+    // overridden by inopportune use of the global data model.
+    // Ultimately, we probably want to take the relative mu rate *out* of
+    // the data model, and put it somewhere else so it can be constant.
+    AddMenuItem(new SetMenuItemId("R",ui,uistr::relativeMuRate, id));
+    //LS DEBUG!  Figure out what to do with this for sure before release.
+#ifndef NDEBUG
+    if (id.GetIndex1() >= 0 && id.GetIndex2() >= 0)
+    {
+        //Not a global region and/or locus menu
+        AddMenuItem(new ToggleMenuItemId("S",ui,uistr::simulateData,id));
+    }
+#endif
+}
+
+LocusDataModelMenu::~LocusDataModelMenu()
+{
+}
+
+string LocusDataModelMenu::Title()
+{
+    if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_NUC_ID)
+    {
+        return lamarcmenu::globalDataModelNuc;
+    }
+    if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_MSAT_ID)
+    {
+        return lamarcmenu::globalDataModelMsat;
+    }
+    if(id.GetIndex1() == uiconst::GLOBAL_DATAMODEL_KALLELE_ID)
+    {
+        return lamarcmenu::globalDataModelKAllele;
+    }
+    return lamarcmenu::dataModelTitle + ui.doGetString(uistr::locusName,id);
+}
+
+string LocusDataModelMenu::Info()
+{
+    return menustr::emptyString;
+}
+
+//------------------------------------------------------------------------------------
+
+LocusDataModels::LocusDataModels(UIInterface & myui, UIId myid)
+    : MenuDisplayGroupBaseImplementation(myui,menustr::emptyString), id(myid)
+{
+}
+
+LocusDataModels::~LocusDataModels()
+{
+}
+
+string
+LocusDataModels::GetKey(UIId id)
+{
+    return indexToKey(id.GetIndex2());
+}
+
+vector<UIId> LocusDataModels::GetVisibleIds()
+{
+    vector<UIId> visibleIds;
+    LongVec1d lociNumbers = ui.doGetLongVec1d(uistr::lociNumbers,id);
+    LongVec1d::iterator i;
+    for(i=lociNumbers.begin(); i != lociNumbers.end(); i++)
+    {
+        visibleIds.push_back(UIId(id.GetIndex1(),*i));
+    }
+    return visibleIds;
+}
+
+MenuInteraction_ptr LocusDataModels::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new LocusDataModelMenu(ui,id));
+}
+
+string LocusDataModels::GetText(UIId id)
+{
+    return lamarcmenu::dataModelTitle + ui.doGetString(uistr::locusName,id);
+}
+
+string LocusDataModels::GetVariableText(UIId id)
+{
+    string text = ui.doGetPrintString(uistr::dataModel,id);
+    if(ui.doGetBool(uistr::useGlobalDataModelForOne,id))
+    {
+        text += lamarcmenu::globalModel;
+    }
+    return text;
+}
+
+//------------------------------------------------------------------------------------
+
+NewDataModelMenu::NewDataModelMenu(UIInterface & myui)
+    : NewMenu(myui,lamarcmenu::dataTitle,menustr::emptyString)
+{
+    AddMenuItem(new ToggleMenuItemNoId("C", ui,uistr::systemClock));
+    AddMenuItem(new UseOldSeedMenuItem("R", ui));
+    AddMenuItem(new SetMenuItemNoId(   "S", ui,uistr::randomSeed));
+    AddMenuItem(new EffectivePopSizeSubMenuItem("E", ui));
+    AddMenuItem(new GlobalDataModelNuc("N", ui));
+    AddMenuItem(new GlobalDataModelMsat("M", ui));
+    AddMenuItem(new GlobalDataModelKAllele("K", ui));
+    AddMenuItem(new ToggleMenuItemNoId("D", ui,uistr::useGlobalDataModelForAll));
+    AddMenuItem(new RegionDataModels(ui));
+}
+
+NewDataModelMenu::~NewDataModelMenu()
+{
+}
+
+GlobalDataModelNuc::GlobalDataModelNuc(string myKey, UIInterface& myUI)
+    : SubMenuItem(myKey, myUI, new LocusDataModelMenuCreator
+                  (myUI, UIId(uiconst::GLOBAL_DATAMODEL_NUC_ID,0)))
+{
+}
+
+GlobalDataModelNuc::~GlobalDataModelNuc()
+{
+}
+
+bool GlobalDataModelNuc::IsVisible()
+{
+    return ui.GetCurrentVars().datapackplus.HasSomeNucleotideData();
+}
+
+GlobalDataModelMsat::GlobalDataModelMsat(string myKey, UIInterface& myUI)
+    : SubMenuItem(myKey, myUI, new LocusDataModelMenuCreator(myUI, UIId(uiconst::GLOBAL_DATAMODEL_MSAT_ID,0)))
+{
+}
+
+GlobalDataModelMsat::~GlobalDataModelMsat()
+{
+}
+
+bool GlobalDataModelMsat::IsVisible()
+{
+    return ui.GetCurrentVars().datapackplus.HasSomeMsatData();
+}
+
+GlobalDataModelKAllele::GlobalDataModelKAllele(string myKey, UIInterface& myUI)
+    : SubMenuItem(myKey, myUI, new LocusDataModelMenuCreator(myUI, UIId(uiconst::GLOBAL_DATAMODEL_KALLELE_ID,0)))
+{
+}
+
+GlobalDataModelKAllele::~GlobalDataModelKAllele()
+{
+}
+
+bool GlobalDataModelKAllele::IsVisible()
+{
+    return ui.GetCurrentVars().datapackplus.HasSomeKAlleleData();
+}
+
+//------------------------------------------------------------------------------------
+
+MenuDisplayDataType::MenuDisplayDataType(UIInterface& ui, UIId id)
+    : MenuDisplayLine(),
+      m_regId(id, ui.GetCurrentVars())
+{
+}
+
+MenuDisplayDataType::~MenuDisplayDataType()
+{
+}
+
+string MenuDisplayDataType::GetKey()
+{
+    return "";
+}
+
+string MenuDisplayDataType::GetText()
+{
+    return uistr::dataType;
+}
+
+string MenuDisplayDataType::GetVariableText()
+{
+    return ToString(m_regId.GetDataType());
+}
+
+bool MenuDisplayDataType::IsVisible()
+{
+    return (m_regId.GetRegion() != uiconst::GLOBAL_ID);
+}
+
+//------------------------------------------------------------------------------------
+
+EffectivePopSizeSubMenuItem::EffectivePopSizeSubMenuItem(string myKey, UIInterface& ui)
+    : SubMenuItem(myKey, ui, new EffectivePopSizeMenuCreator(ui))
+{
+}
+
+EffectivePopSizeSubMenuItem::~EffectivePopSizeSubMenuItem()
+{
+}
+
+bool EffectivePopSizeSubMenuItem::IsVisible()
+{
+    return (ui.GetCurrentVars().datapackplus.GetNumRegions() > 1);
+}
+
+//------------------------------------------------------------------------------------
+
+UseOldSeedMenuItem::UseOldSeedMenuItem(string myKey, UIInterface& ui)
+    : ToggleMenuItemNoId(myKey, ui, uistr::useOldSeedFromClock)
+{
+}
+
+UseOldSeedMenuItem::~UseOldSeedMenuItem()
+{
+}
+
+bool UseOldSeedMenuItem::IsVisible()
+{
+    return (ui.GetCurrentVars().userparams.GetHasOldClockSeed());
+}
+
+std::string UseOldSeedMenuItem::GetText()
+{
+    std::string text =  ToggleMenuItemNoId::GetText();
+    long oldSeed = ui.GetCurrentVars().userparams.GetOldClockSeed();
+    text += lamarcmenu::parenLeft + ToString(oldSeed) + lamarcmenu::parenRight;
+    return text;
+}
+
+std::string UseOldSeedMenuItem::GetVariableText()
+{
+    return menustr::emptyString;
+}
+
+//____________________________________________________________________________________
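
RegionDataModels, LocusDataModels and CategoriesMenuGroup all follow the same group idiom: GetVisibleIds() enumerates the ids to show, GetKey/GetText/GetVariableText label each line, and MakeOneHandler() builds the submenu for the id the user picks. The base class that drives this (MenuDisplayGroupBaseImplementation) is defined elsewhere; the sketch below uses hypothetical names to show the kind of consumption loop those accessors imply.

    // Illustrative only; not LAMARC's actual MenuDisplayGroupBaseImplementation.
    #include <iostream>
    #include <string>
    #include <vector>

    struct IdSketch { long region; };

    class GroupSketch
    {
      public:
        virtual ~GroupSketch() {}
        virtual std::vector<IdSketch> GetVisibleIds() = 0;
        virtual std::string GetKey(IdSketch id) = 0;
        virtual std::string GetText(IdSketch id) = 0;
        virtual std::string GetVariableText(IdSketch id) = 0;
    };

    class RegionGroupSketch : public GroupSketch
    {
        long m_numRegions;
      public:
        explicit RegionGroupSketch(long n) : m_numRegions(n) {}
        virtual std::vector<IdSketch> GetVisibleIds()
        {
            std::vector<IdSketch> ids;
            for (long r = 0; r < m_numRegions; ++r)
            {
                IdSketch id = { r };
                ids.push_back(id);
            }
            return ids;
        }
        virtual std::string GetKey(IdSketch id)  { return std::string(1, static_cast<char>('1' + id.region)); }
        virtual std::string GetText(IdSketch id) { return "Data model for region " + GetKey(id); }
        virtual std::string GetVariableText(IdSketch) { return "F84"; }   // placeholder model name
    };

    int main()
    {
        RegionGroupSketch group(3);
        std::vector<IdSketch> ids = group.GetVisibleIds();
        for (size_t i = 0; i < ids.size(); ++i)
        {
            // one menu line per visible id, as the group accessors suggest
            std::cout << group.GetKey(ids[i]) << "  " << group.GetText(ids[i])
                      << "  [" << group.GetVariableText(ids[i]) << "]\n";
        }
        return 0;
    }
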
diff --git a/src/lamarcmenus/datamodelmenu.h b/src/lamarcmenus/datamodelmenu.h
new file mode 100644
index 0000000..435c50e
--- /dev/null
+++ b/src/lamarcmenus/datamodelmenu.h
@@ -0,0 +1,327 @@
+// $Id: datamodelmenu.h,v 1.28 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef DATAMODELMENU_H
+#define DATAMODELMENU_H
+
+#include <string>
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+#include "ui_constants.h"
+#include "ui_regid.h"
+
+class UIInterface;
+
+class AssignToGlobalMenuItem : public ToggleMenuItemId
+{
+  public:
+    AssignToGlobalMenuItem(std::string , UIInterface &, UIId );
+    virtual ~AssignToGlobalMenuItem();
+    virtual bool IsVisible();
+};
+
+class TTRatioMenuItem : public SetMenuItemId
+{
+  public:
+    TTRatioMenuItem(std::string , UIInterface &, UIId );
+    virtual ~TTRatioMenuItem();
+    virtual bool IsVisible();
+};
+
+class DataUncertaintyMenuItem : public SetMenuItemId
+{
+  public:
+    DataUncertaintyMenuItem(std::string , UIInterface &, UIId );
+    virtual ~DataUncertaintyMenuItem();
+    virtual bool IsVisible();
+};
+
+class AlphaMenuItem : public SetMenuItemId
+{
+  public:
+    AlphaMenuItem(std::string , UIInterface &, UIId );
+    virtual ~AlphaMenuItem();
+    virtual bool IsVisible();
+};
+
+class OptimizeAlphaMenuItem : public ToggleMenuItemId
+{
+  public:
+    OptimizeAlphaMenuItem(std::string , UIInterface &, UIId );
+    virtual ~OptimizeAlphaMenuItem();
+    virtual bool IsVisible();
+};
+
+class GTRRatesMenu : public NewMenu
+{
+  protected:
+    UIId id;
+  public:
+    GTRRatesMenu(UIInterface&,UIId);
+    virtual ~GTRRatesMenu();
+    virtual std::string Title();
+};
+
+class GTRRatesSubMenuItem : public SubMenuItem
+{
+  protected:
+    UIId id;
+  public:
+    GTRRatesSubMenuItem(std::string , UIInterface &, UIId );
+    virtual ~GTRRatesSubMenuItem();
+    virtual bool IsVisible();
+    virtual std::string GetText();
+    virtual std::string GetVariableText();
+    virtual MenuInteraction_ptr GetHandler(std::string);
+};
+
+class FreqsFromDataMenuItem : public ToggleMenuItemId
+{
+  public:
+    FreqsFromDataMenuItem(std::string , UIInterface &, UIId);
+    virtual ~FreqsFromDataMenuItem();
+    virtual bool IsVisible();
+};
+
+class SetFrequencyMenuItemId : public SetMenuItemId
+{
+  public:
+    SetFrequencyMenuItemId(std::string, UIInterface&, std::string, UIId);
+    virtual ~SetFrequencyMenuItemId();
+    virtual std::string GetText();
+    virtual bool IsVisible();
+};
+
+class BaseFrequenciesMenu : public NewMenu
+{
+  protected:
+    UIId id;
+  public:
+    BaseFrequenciesMenu(UIInterface&,UIId);
+    virtual ~BaseFrequenciesMenu();
+    virtual std::string Title();
+};
+
+class BaseFrequenciesSubMenuItem : public SubMenuItem
+{
+  protected:
+    UIId id;
+  public:
+    BaseFrequenciesSubMenuItem(std::string , UIInterface &, UIId);
+    virtual ~BaseFrequenciesSubMenuItem();
+    virtual bool IsVisible();
+    virtual string GetText();
+    virtual string GetVariableText();
+    virtual MenuInteraction_ptr GetHandler(std::string);
+};
+
+class CategoryMenu : public NewMenu
+{
+  protected:
+    UIId id;
+  public:
+    CategoryMenu(UIInterface&,UIId);
+    ~CategoryMenu();
+    virtual std::string Title();
+};
+
+class CategoriesMenuGroup : public MenuDisplayGroupBaseImplementation
+{
+  protected:
+    UIId id;
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+  public:
+    CategoriesMenuGroup(UIInterface & myui,UIId id);
+    virtual ~CategoriesMenuGroup();
+    virtual vector<UIId> GetVisibleIds();
+    virtual std::string GetKey(UIId id);
+    virtual std::string GetText(UIId id);
+    virtual std::string GetVariableText(UIId id);
+};
+class CategoriesMenu : public NewMenu
+{
+  protected:
+    UIId id;
+  public:
+    CategoriesMenu(UIInterface&,UIId);
+    virtual ~CategoriesMenu();
+    virtual std::string Title();
+};
+
+class CategoriesSubMenuItem : public SubMenuItem
+{
+  protected:
+    UIId id;
+  public:
+    CategoriesSubMenuItem(std::string , UIInterface &, UIId);
+    virtual ~CategoriesSubMenuItem();
+    virtual string GetText();
+    virtual string GetVariableText();
+    virtual MenuInteraction_ptr GetHandler(std::string);
+};
+
+class RegionDataModelMenu : public NewMenu
+{
+  protected:
+    UIId id;
+  public:
+    RegionDataModelMenu(UIInterface & myui, UIId myid);
+    virtual ~RegionDataModelMenu();
+    virtual std::string Title();
+    virtual std::string Info();
+};
+
+class RegionDataModelMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+    UIId id;
+  public:
+    RegionDataModelMenuCreator( UIInterface & myui, UIId myid)
+        : ui(myui), id(myid) {};
+    virtual ~RegionDataModelMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new RegionDataModelMenu(ui,id));};
+};
+
+class RegionDataModels : public MenuDisplayGroupBaseImplementation
+{
+  protected:
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+  public:
+    RegionDataModels(UIInterface & myui);
+    virtual ~RegionDataModels();
+    //
+    virtual vector<UIId> GetVisibleIds();
+    virtual std::string GetText(UIId id);
+    virtual std::string GetVariableText(UIId id);
+};
+
+class LocusDataModelMenu : public NewMenu
+{
+  protected:
+    UIId id;
+  public:
+    LocusDataModelMenu(UIInterface & myui, UIId myid);
+    virtual ~LocusDataModelMenu();
+    virtual std::string Title();
+    virtual std::string Info();
+};
+
+class LocusDataModelMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+    UIId id;
+  public:
+    LocusDataModelMenuCreator( UIInterface & myui, UIId myid)
+        : ui(myui), id(myid) {};
+    virtual ~LocusDataModelMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new LocusDataModelMenu(ui,id));};
+};
+
+class LocusDataModels : public MenuDisplayGroupBaseImplementation
+{
+  protected:
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+    UIId id;
+  public:
+    LocusDataModels(UIInterface & myui,UIId id);
+    virtual ~LocusDataModels();
+    //
+    virtual std::string GetKey(UIId id);
+    virtual vector<UIId> GetVisibleIds();
+    virtual std::string GetText(UIId id);
+    virtual std::string GetVariableText(UIId id);
+};
+
+class NewDataModelMenu : public NewMenu
+{
+  public:
+    NewDataModelMenu(UIInterface & myui);
+    ~NewDataModelMenu();
+};
+
+class NewDataModelMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    NewDataModelMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~NewDataModelMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new NewDataModelMenu(ui));};
+};
+
+class GlobalDataModelNuc : public SubMenuItem
+{
+  public:
+    GlobalDataModelNuc(std::string , UIInterface &);
+    virtual ~GlobalDataModelNuc();
+    virtual bool IsVisible();
+};
+
+class GlobalDataModelMsat : public SubMenuItem
+{
+  public:
+    GlobalDataModelMsat(std::string , UIInterface &);
+    virtual ~GlobalDataModelMsat();
+    virtual bool IsVisible();
+};
+
+class GlobalDataModelKAllele : public SubMenuItem
+{
+  public:
+    GlobalDataModelKAllele(std::string , UIInterface &);
+    virtual ~GlobalDataModelKAllele();
+    virtual bool IsVisible();
+};
+
+class MenuDisplayDataType : public MenuDisplayLine
+{
+  private:
+    MenuDisplayDataType();
+    UIRegId m_regId;
+  protected:
+    UIId& GetId();
+  public:
+    MenuDisplayDataType(UIInterface& ui, UIId id);
+    virtual ~MenuDisplayDataType();
+    virtual std::string GetKey();
+    virtual std::string GetText();
+    virtual std::string GetVariableText();
+    virtual bool IsVisible();
+    virtual bool Handles(std::string) {return false;};
+};
+
+class UseOldSeedMenuItem : public ToggleMenuItemNoId
+{
+  private:
+    UseOldSeedMenuItem();
+  public:
+    UseOldSeedMenuItem(std::string, UIInterface& ui);
+    ~UseOldSeedMenuItem();
+    virtual std::string GetText();
+    virtual std::string GetVariableText();
+    virtual bool IsVisible();
+};
+
+class EffectivePopSizeSubMenuItem : public SubMenuItem
+{
+  private:
+    EffectivePopSizeSubMenuItem();
+  public:
+    EffectivePopSizeSubMenuItem(std::string, UIInterface& ui);
+    ~EffectivePopSizeSubMenuItem();
+    virtual bool IsVisible();
+};
+
+#endif  // DATAMODELMENU_H
+
+//____________________________________________________________________________________
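
Each *MenuCreator in this header exists only to defer building a submenu until the corresponding SubMenuItem is actually entered, so the whole menu tree never has to be instantiated up front. A minimal sketch of that deferred-construction idiom follows; names are stand-ins, and the smart-pointer choice is an assumption (NewMenu_ptr itself is defined elsewhere in LAMARC, not in this hunk).

    // Illustrative only: deferred submenu construction via a creator object.
    #include <iostream>
    #include <memory>
    #include <string>

    class MenuSketch
    {
        std::string m_title;
      public:
        explicit MenuSketch(const std::string& title) : m_title(title)
        {
            std::cout << "(building \"" << m_title << "\" now)\n";
        }
        void Show() const { std::cout << "=== " << m_title << " ===\n"; }
    };

    typedef std::shared_ptr<MenuSketch> MenuSketch_ptr;   // assumption: some shared-ownership pointer

    class MenuCreatorSketch
    {
      public:
        virtual ~MenuCreatorSketch() {}
        virtual MenuSketch_ptr Create() = 0;              // called only when the item is entered
    };

    class DataModelMenuCreatorSketch : public MenuCreatorSketch
    {
      public:
        virtual MenuSketch_ptr Create()
        {
            return MenuSketch_ptr(new MenuSketch("Data model menu"));
        }
    };

    int main()
    {
        DataModelMenuCreatorSketch creator;               // cheap to hold in a SubMenuItem
        std::cout << "menu not built yet\n";
        MenuSketch_ptr menu = creator.Create();           // built on demand
        menu->Show();
        return 0;
    }
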
diff --git a/src/lamarcmenus/diseasemenus.cpp b/src/lamarcmenus/diseasemenus.cpp
new file mode 100644
index 0000000..1815151
--- /dev/null
+++ b/src/lamarcmenus/diseasemenus.cpp
@@ -0,0 +1,111 @@
+// $Id: diseasemenus.cpp,v 1.17 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "constants.h"
+#include "constraintmenus.h"
+#include "diseasemenus.h"
+#include "lamarc_strings.h"
+#include "matrixitem.h"
+#include "menu_strings.h"
+#include "newmenuitems.h"
+#include "overviewmenus.h"
+#include "priormenus.h"
+#include "profilemenus.h"
+#include "setmenuitem.h"
+#include "ui_constants.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+SetAllDiseaseRatesMenuItem::SetAllDiseaseRatesMenuItem(string myKey, UIInterface & myui)
+    : SetMenuItemId(myKey,myui,uistr::globalDisease, UIId(force_DISEASE, uiconst::GLOBAL_ID))
+{
+}
+
+SetAllDiseaseRatesMenuItem::~SetAllDiseaseRatesMenuItem()
+{
+}
+
+bool SetAllDiseaseRatesMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::disease);
+}
+
+string SetAllDiseaseRatesMenuItem::GetVariableText()
+{
+    return menustr::emptyString;
+}
+
+//------------------------------------------------------------------------------------
+
+DiseaseMaxEventsMenuItem::DiseaseMaxEventsMenuItem(string myKey,UIInterface & myui)
+    : SetMenuItemNoId(myKey,myui,uistr::diseaseMaxEvents)
+{
+}
+
+DiseaseMaxEventsMenuItem::~DiseaseMaxEventsMenuItem()
+{
+}
+
+bool DiseaseMaxEventsMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::disease);
+}
+
+//------------------------------------------------------------------------------------
+
+DiseaseLocationMenuItem::DiseaseLocationMenuItem(string myKey,UIInterface & myui)
+    : SetMenuItemNoId(myKey,myui,uistr::diseaseLocation)
+{
+}
+
+DiseaseLocationMenuItem::~DiseaseLocationMenuItem()
+{
+}
+
+bool DiseaseLocationMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::disease);
+}
+
+//------------------------------------------------------------------------------------
+
+DiseaseMenu::DiseaseMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::diseaseTitle,lamarcmenu::diseaseInfo)
+{
+    // EWFIX.P5 DISEASE -- replace display only with commented item below
+    // once we resolve how to adjust menu settings when turning
+    // disease on/off changes the number of cross partitions
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::disease,ui));
+    //  AddMenuItem(new ToggleMenuItemNoId("X",ui,uistr::disease));
+    UIId id(force_DISEASE);
+    AddMenuItem(new SubMenuConstraintsForOneForce("C",ui,id));
+    AddMenuItem(new SubMenuProfileForOneForce("P",ui,id));
+    AddMenuItem(new SubMenuPriorForOneForce("B",ui,id));
+    AddMenuItem(new SetAllDiseaseRatesMenuItem("G",ui));
+    AddMenuItem(new MatrixSetMenuItem(ui,
+                                      uistr::diseaseInto,
+                                      uistr::diseaseByID,
+                                      uistr::diseasePartitionCount,
+                                      uistr::disease,
+                                      force_DISEASE));
+    AddMenuItem(new DiseaseMaxEventsMenuItem("M",ui));
+    AddMenuItem(new DiseaseLocationMenuItem("L",ui));
+}
+
+DiseaseMenu::~DiseaseMenu ()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/diseasemenus.h b/src/lamarcmenus/diseasemenus.h
new file mode 100644
index 0000000..6eb6420
--- /dev/null
+++ b/src/lamarcmenus/diseasemenus.h
@@ -0,0 +1,70 @@
+// $Id: diseasemenus.h,v 1.14 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef DISEASEMENUS_H
+#define DISEASEMENUS_H
+
+#include <string>
+#include <vector>
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+
+class UIInterface;
+
+class SetAllDiseaseRatesMenuItem : public SetMenuItemId
+{
+  public:
+    SetAllDiseaseRatesMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~SetAllDiseaseRatesMenuItem();
+    virtual bool IsVisible();
+    virtual std::string GetVariableText();
+};
+
+// Specializes SetMenuItemNoId to be visible only when the
+// disease force is turned on
+class DiseaseMaxEventsMenuItem : public SetMenuItemNoId
+{
+  public:
+    DiseaseMaxEventsMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~DiseaseMaxEventsMenuItem();
+    virtual bool IsVisible();
+};
+
+// Specializes SetMenuItemNoId to be visible only when the
+// disease force is turned on
+class DiseaseLocationMenuItem : public SetMenuItemNoId
+{
+  public:
+    DiseaseLocationMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~DiseaseLocationMenuItem();
+    virtual bool IsVisible();
+};
+
+class DiseaseMenu : public NewMenu
+{
+  public:
+    DiseaseMenu(UIInterface & myui);
+    virtual ~DiseaseMenu();
+};
+
+class DiseaseMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    DiseaseMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~DiseaseMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new DiseaseMenu(ui));};
+};
+
+#endif  // DISEASEMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/divmenus.cpp b/src/lamarcmenus/divmenus.cpp
new file mode 100644
index 0000000..7b681b8
--- /dev/null
+++ b/src/lamarcmenus/divmenus.cpp
@@ -0,0 +1,81 @@
+// $Id: divmenus.cpp,v 1.6 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "constants.h"
+#include "constraintmenus.h"
+#include "lamarc_strings.h"
+#include "menu_strings.h"
+#include "matrixitem.h"
+#include "divmenus.h"
+#include "newmenuitems.h"
+#include "priormenus.h"
+#include "setmenuitem.h"
+#include "ui_constants.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "overviewmenus.h"
+#include "profilemenus.h"
+
+using std::string;
+//------------------------------------------------------------------------------------
+
+SetMenuItemEpochs::SetMenuItemEpochs(UIInterface & myui)
+    : SetMenuItemGroup(myui,uistr::divergenceEpochBoundaryTime)
+{
+}
+
+SetMenuItemEpochs::~SetMenuItemEpochs()
+{
+}
+
+vector<UIId> SetMenuItemEpochs::GetVisibleIds()
+{
+    return ui.doGetUIIdVec1d(uistr::validParamsForForce,UIId(force_DIVERGENCE));
+}
+
+std::vector<std::string> SetMenuItemEpochs::GetExtraText(UIId id)
+{
+    std::vector<std::string> s;
+    string ancestor = "  ";
+    ancestor += ui.doGetDescription(uistr::divergenceEpochAncestor,id);
+    s.push_back(ancestor);
+    string descendents = "  ";
+    descendents += ui.doGetDescription(uistr::divergenceEpochDescendents,id);
+    s.push_back(descendents);
+    return s;
+}
+
+std::vector<std::string> SetMenuItemEpochs::GetExtraVariableText(UIId id)
+{
+    std::vector<std::string> s;
+    s.push_back(ui.doGetPrintString(uistr::divergenceEpochAncestor,id));
+    s.push_back(ui.doGetPrintString(uistr::divergenceEpochDescendents,id));
+    return s;
+}
+
+
+//------------------------------------------------------------------------------------
+
+DivergenceMenu::DivergenceMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::divTitle,lamarcmenu::divInfo)
+{
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::divergence, ui));
+    UIId id(force_DIVERGENCE);
+
+    AddMenuItem(new SetMenuItemEpochs(ui));
+    AddMenuItem(new SubMenuPriorForOneForce("B",ui,id));
+}
+
+DivergenceMenu::~DivergenceMenu ()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/divmenus.h b/src/lamarcmenus/divmenus.h
new file mode 100644
index 0000000..8438171
--- /dev/null
+++ b/src/lamarcmenus/divmenus.h
@@ -0,0 +1,51 @@
+// $Id: divmenus.h,v 1.2 2012/02/29 00:29:58 ewalkup Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef DIVMENUS_H
+#define DIVMENUS_H
+
+#include <string>
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+
+class UIInterface;
+
+class SetMenuItemEpochs : public SetMenuItemGroup
+{
+  public:
+    SetMenuItemEpochs(UIInterface & myui);
+    virtual ~SetMenuItemEpochs();
+    virtual std::vector<UIId> GetVisibleIds();
+    virtual bool HasMultiLineItems() {return true;};
+    std::vector<std::string> GetExtraText(UIId id);
+    std::vector<std::string> GetExtraVariableText(UIId id);
+};
+
+class DivergenceMenu : public NewMenu
+{
+  public:
+    DivergenceMenu(UIInterface & myui);
+    virtual ~DivergenceMenu();
+};
+
+class DivergenceMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    DivergenceMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~DivergenceMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new DivergenceMenu(ui));};
+};
+
+#endif  // DIVMENUS_H
+
+//____________________________________________________________________________________
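
SetMenuItemEpochs is the one item in this batch that answers HasMultiLineItems() with true and supplies parallel extra-text and extra-value vectors (an ancestor line and a descendents line per epoch boundary). How the menu renderer consumes them is not shown here; the sketch below, with hypothetical names and example values, shows the parallel-vector printing those accessors suggest.

    // Illustrative only: printing a multi-line menu item from parallel
    // label/value vectors, as GetExtraText()/GetExtraVariableText() suggest.
    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>

    struct MultiLineItemSketch
    {
        std::string mainText;
        std::string mainValue;
        std::vector<std::string> extraText;     // e.g. "  Ancestor", "  Descendents"
        std::vector<std::string> extraValues;   // e.g. "pop3", "pop1 pop2"
    };

    static void PrintItem(const MultiLineItemSketch& item)
    {
        std::cout << item.mainText << "  " << item.mainValue << "\n";
        size_t n = std::min(item.extraText.size(), item.extraValues.size());
        for (size_t i = 0; i < n; ++i)
            std::cout << item.extraText[i] << "  " << item.extraValues[i] << "\n";
    }

    int main()
    {
        MultiLineItemSketch epoch;
        epoch.mainText  = "Epoch boundary time";
        epoch.mainValue = "0.01";
        epoch.extraText.push_back("  Ancestor");
        epoch.extraValues.push_back("pop3");
        epoch.extraText.push_back("  Descendents");
        epoch.extraValues.push_back("pop1 pop2");
        PrintItem(epoch);
        return 0;
    }
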
diff --git a/src/lamarcmenus/divmigmenus.cpp b/src/lamarcmenus/divmigmenus.cpp
new file mode 100644
index 0000000..7a9bbc0
--- /dev/null
+++ b/src/lamarcmenus/divmigmenus.cpp
@@ -0,0 +1,116 @@
+// $Id: divmigmenus.cpp,v 1.2 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "constants.h"
+#include "constraintmenus.h"
+#include "lamarc_strings.h"
+#include "menu_strings.h"
+#include "matrixitem.h"
+#include "divmigmenus.h"
+#include "newmenuitems.h"
+#include "priormenus.h"
+#include "setmenuitem.h"
+#include "ui_constants.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "overviewmenus.h"
+#include "profilemenus.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+SetAllDivMigsMenuItem::SetAllDivMigsMenuItem(string myKey, UIInterface & myui)
+    : SetMenuItemId(myKey,myui,uistr::globalDivMigration, UIId(force_DIVMIG, uiconst::GLOBAL_ID))
+{
+}
+
+SetAllDivMigsMenuItem::~SetAllDivMigsMenuItem()
+{
+}
+
+bool SetAllDivMigsMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::divmigration);
+}
+
+string SetAllDivMigsMenuItem::GetVariableText()
+{
+    return "";
+}
+//------------------------------------------------------------------------------------
+// don't know how to do this with divergence yet
+
+#if 0
+
+SetDivMigsFstMenuItem::SetDivMigsFstMenuItem(string key,UIInterface & ui)
+    : ToggleMenuItemNoId(key,ui,uistr::fstSetMigration)
+{
+}
+
+SetDivMigsFstMenuItem::~SetDivMigsFstMenuItem()
+{
+}
+
+bool SetDivMigsFstMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::divmigration);
+}
+
+#endif
+
+//------------------------------------------------------------------------------------
+
+DivMigMaxEventsMenuItem::DivMigMaxEventsMenuItem(string myKey,UIInterface & myui)
+    : SetMenuItemNoId(myKey,myui,uistr::divmigrationMaxEvents)
+{
+}
+
+DivMigMaxEventsMenuItem::~DivMigMaxEventsMenuItem()
+{
+}
+
+bool DivMigMaxEventsMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::divmigration);
+}
+
+///
+
+DivMigMenu::DivMigMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::divMigTitle,lamarcmenu::divMigInfo)
+{
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::divmigration, ui));
+    UIId id(force_DIVMIG);
+
+    AddMenuItem(new SubMenuConstraintsForOneForce("C",ui,id));
+    AddMenuItem(new SubMenuProfileForOneForce("P",ui,id));
+    AddMenuItem(new SubMenuPriorForOneForce("B",ui,id));
+    AddMenuItem(new SetAllDivMigsMenuItem("G",ui));
+    //AddMenuItem(new SetDivMigsFstMenuItem("F",ui)); // not currently functional
+
+    AddMenuItem(new MatrixSetMenuItem(ui,
+                                      uistr::divmigrationInto,
+                                      uistr::divmigrationUser,
+                                      uistr::divmigrationPartitionCount,
+                                      uistr::divmigration,
+                                      force_DIVMIG));
+
+    AddMenuItem(new DivMigMaxEventsMenuItem("M",ui));
+
+
+}
+
+DivMigMenu::~DivMigMenu ()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/divmigmenus.h b/src/lamarcmenus/divmigmenus.h
new file mode 100644
index 0000000..6584760
--- /dev/null
+++ b/src/lamarcmenus/divmigmenus.h
@@ -0,0 +1,65 @@
+// $Id: divmigmenus.h,v 1.1 2012/02/15 18:13:42 jmcgill Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef DIVMIGMENUS_H
+#define DIVMIGMENUS_H
+
+#include <string>
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+
+class UIInterface;
+
+class SetAllDivMigsMenuItem : public SetMenuItemId
+{
+  public:
+    SetAllDivMigsMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~SetAllDivMigsMenuItem();
+    virtual bool IsVisible();
+    virtual std::string GetVariableText();
+};
+
+class SetDivMigsFstMenuItem : public ToggleMenuItemNoId
+{
+  public:
+    SetDivMigsFstMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~SetDivMigsFstMenuItem();
+    virtual bool IsVisible();
+};
+
+class DivMigMaxEventsMenuItem : public SetMenuItemNoId
+{
+  public:
+    DivMigMaxEventsMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~DivMigMaxEventsMenuItem();
+    virtual bool IsVisible();
+};
+
+class DivMigMenu : public NewMenu
+{
+  public:
+    DivMigMenu(UIInterface & myui);
+    virtual ~DivMigMenu();
+};
+
+class DivMigMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    DivMigMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~DivMigMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new DivMigMenu(ui));};
+};
+
+#endif  // DIVMIGMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/forcesmenus.cpp b/src/lamarcmenus/forcesmenus.cpp
new file mode 100644
index 0000000..c305ebe
--- /dev/null
+++ b/src/lamarcmenus/forcesmenus.cpp
@@ -0,0 +1,186 @@
+// $Id: forcesmenus.cpp,v 1.30 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <string>
+
+#include "coalmenus.h"
+#include "constraintmenus.h"
+#include "diseasemenus.h"
+#include "forcesmenus.h"
+#include "growthmenus.h"
+#include "lamarc_strings.h"
+#include "logselectmenus.h"
+#include "menu_strings.h"
+#include "migmenus.h"
+#include "divmenus.h"
+#include "divmigmenus.h"
+#include "newmenuitems.h"
+#include "recmenus.h"
+#include "regiongammamenus.h"
+#include "togglemenuitem.h"
+#include "traitmodelmenu.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+
+//------------------------------------------------------------------------------------
+
+ForcesSubMenuItem::ForcesSubMenuItem(
+    std::string myKey,
+    UIInterface & myui,
+    std::string myForce,
+    std::string myForceLegal,
+    NewMenuCreator * mySubMenuCreator)
+    :   SubMenuItem(myKey,myui,mySubMenuCreator),
+        force(myForce),
+        forceLegal(myForceLegal)
+{
+}
+
+ForcesSubMenuItem::~ForcesSubMenuItem()
+{
+}
+
+std::string ForcesSubMenuItem::GetVariableText()
+{
+    return ui.doGetPrintString(force);
+}
+
+bool ForcesSubMenuItem::IsVisible()
+{
+    return ui.doGetBool(forceLegal);
+}
+
+ForcesMenu::ForcesMenu (UIInterface & myui)
+    : NewMenu (myui,lamarcmenu::forcesTitle,menustr::emptyString)
+{
+    AddMenuItem(new ForcesSubMenuItem(string("T"),
+                                      ui,
+                                      uistr::coalescence,
+                                      uistr::coalescenceLegal,
+                                      new CoalescenceMenuCreator(ui)));
+    AddMenuItem(new ForcesSubMenuItem(string("G"),
+                                      ui,
+                                      uistr::growth,
+                                      uistr::growthLegal,
+                                      new GrowthMenuCreator(ui)));
+    AddMenuItem(new ForcesSubMenuItem(string("M"),
+                                      ui,
+                                      uistr::migration,
+                                      uistr::migrationLegal,
+                                      new MigrationMenuCreator(ui)));
+
+    AddMenuItem(new ForcesSubMenuItem(string("I"),
+                                      ui,
+                                      uistr::divergence,
+                                      uistr::divergenceLegal,
+                                      new DivergenceMenuCreator(ui)));
+
+    AddMenuItem(new ForcesSubMenuItem(string("E"),
+                                      ui,
+                                      uistr::divmigration,
+                                      uistr::divmigrationLegal,
+                                      new DivMigMenuCreator(ui)));
+
+    AddMenuItem(new ForcesSubMenuItem(string("R"),
+                                      ui,
+                                      uistr::recombination,
+                                      uistr::recombinationLegal,
+                                      new RecombinationMenuCreator(ui)));
+    AddMenuItem(new ForcesSubMenuItem(string("D"),
+                                      ui,
+                                      uistr::disease,
+                                      uistr::diseaseLegal,
+                                      new DiseaseMenuCreator(ui)));
+    AddMenuItem(new ForcesSubMenuItem(string("V"),
+                                      ui,
+                                      uistr::regionGamma,
+                                      uistr::regionGammaLegal,
+                                      new RegionGammaMenuCreator(ui)));
+#ifndef NDEBUG
+    AddMenuItem(new ForcesSubMenuItem(string("S"),
+                                      ui,
+                                      uistr::logisticSelection,
+                                      uistr::logisticSelectionLegal,
+                                      new LogisticSelectionMenuCreator(ui)));
+#endif
+#if 0
+    AddMenuItem(new ForcesSubMenuItem(string("Z"),
+                                      ui,
+                                      uistr::logSelectStick,
+                                      uistr::logisticSelectionLegal,
+                                      new StochasticSelectionMenuCreator(ui)));
+#endif
+    AddMenuItem(new TraitModelItem(string("A"),ui));
+}
+
+ForcesMenu::~ForcesMenu ()
+{
+}
+
+ForceSubMenuItem::ForceSubMenuItem(std::string myKey,UIInterface & myui, NewMenuCreator * myCreator, UIId myId)
+    : SubMenuItem(myKey, myui, myCreator),
+      m_id(myId)
+{
+}
+
+ForceSubMenuItem::~ForceSubMenuItem()
+{
+}
+
+bool ForceSubMenuItem::IsVisible()
+{
+    switch(m_id.GetForceType())
+    {
+        case force_COAL:
+            return true;
+            break;
+        case force_MIG:
+            return ui.doGetBool(uistr::migration);
+            break;
+        case force_DISEASE:
+            return ui.doGetBool(uistr::disease);
+            break;
+        case force_REC:
+            return ui.doGetBool(uistr::recombination);
+            break;
+        case force_GROW:
+            return ui.doGetBool(uistr::growth);
+            break;
+        case force_LOGISTICSELECTION:
+            return ui.doGetBool(uistr::logisticSelection);
+            break;
+        case force_REGION_GAMMA:
+            return ui.doGetBool(uistr::regionGamma);
+            break;
+        case force_EXPGROWSTICK:
+            return ui.doGetBool(uistr::expGrowStick);
+            break;
+        case force_LOGSELECTSTICK:
+            return ui.doGetBool(uistr::logSelectStick);
+            break;
+        case force_DIVERGENCE:
+            return ui.doGetBool(uistr::divergence);
+            break;
+        case force_DIVMIG:
+            return ui.doGetBool(uistr::divmigration);
+            break;
+        default:
+#if 0
+            string err = "ForceSubMenuItem::IsVisible() did not find force " + m_id.GetForceType();
+            implementation_error e(err);
+            throw e;
+#endif
+            assert(false);              //uncaught force type.
+    }
+    return true;
+}
+
+//____________________________________________________________________________________
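
ForceSubMenuItem::IsVisible() is a switch that maps each force type onto the uistr flag recording whether that force is enabled. As a hedged alternative (not upstream code), the same mapping can be kept in a single lookup table; the enum, flag names and ui accessor below are stand-ins. The upstream switch keeps one advantage the table gives up: compilers can warn when a new force_type value is left unhandled.

    // Sketch only: table-driven force-type -> "is the force on?" lookup.
    #include <iostream>
    #include <map>
    #include <string>

    enum ForceSketch { F_COAL, F_MIG, F_REC, F_GROW };     // stand-in enum

    // stand-in for ui.doGetBool(flagName)
    static bool GetBoolSketch(const std::string& flag) { return flag == "migration"; }

    static bool ForceIsOn(ForceSketch f)
    {
        static std::map<ForceSketch, std::string> flagFor;
        if (flagFor.empty())
        {
            flagFor[F_MIG]  = "migration";
            flagFor[F_REC]  = "recombination";
            flagFor[F_GROW] = "growth";
            // F_COAL intentionally absent: coalescence is always on.
        }
        std::map<ForceSketch, std::string>::const_iterator it = flagFor.find(f);
        if (it == flagFor.end()) return true;              // default: visible
        return GetBoolSketch(it->second);
    }

    int main()
    {
        std::cout << "coalescence visible: " << ForceIsOn(F_COAL) << "\n";
        std::cout << "migration visible:   " << ForceIsOn(F_MIG)  << "\n";
        std::cout << "growth visible:      " << ForceIsOn(F_GROW) << "\n";
        return 0;
    }
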
diff --git a/src/lamarcmenus/forcesmenus.h b/src/lamarcmenus/forcesmenus.h
new file mode 100644
index 0000000..d1323e9
--- /dev/null
+++ b/src/lamarcmenus/forcesmenus.h
@@ -0,0 +1,66 @@
+// $Id: forcesmenus.h,v 1.11 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef FORCESMENUS_H
+#define FORCESMENUS_H
+
+#include <string>
+#include "newmenuitems.h"
+
+class UIInterface;
+
+class ForcesSubMenuItem : public SubMenuItem
+{
+  protected:
+    std::string force;
+    std::string forceLegal;
+  public:
+    ForcesSubMenuItem(std::string myKey,
+                      UIInterface & myui,
+                      std::string myForce,
+                      std::string myForceLegal,
+                      NewMenuCreator * mySubMenuCreator);
+    virtual ~ForcesSubMenuItem();
+    virtual std::string GetVariableText();
+    virtual bool IsVisible();
+};
+
+class ForcesMenu : public NewMenu
+{
+  public:
+    ForcesMenu(UIInterface & myui);
+    ~ForcesMenu();
+};
+
+class ForcesMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    ForcesMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~ForcesMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new ForcesMenu(ui));};
+};
+
+class ForceSubMenuItem : public SubMenuItem
+{
+  private:
+    ForceSubMenuItem(); //undefined
+    UIId m_id;
+  public:
+    ForceSubMenuItem(std::string myKey,UIInterface & myui,
+                     NewMenuCreator * myCreator, UIId id);
+    virtual ~ForceSubMenuItem();
+    virtual bool IsVisible();
+};
+
+#endif  // FORCESMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/growthmenus.cpp b/src/lamarcmenus/growthmenus.cpp
new file mode 100644
index 0000000..fb2534e
--- /dev/null
+++ b/src/lamarcmenus/growthmenus.cpp
@@ -0,0 +1,123 @@
+// $Id: growthmenus.cpp,v 1.16 2010/03/17 17:25:58 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "constants.h"
+#include "constraintmenus.h"
+#include "forcesummary.h"
+#include "growthmenus.h"
+#include "lamarc_strings.h"
+#include "menu_strings.h"
+#include "newmenuitems.h"
+#include "priormenus.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "profilemenus.h"
+
+using std::string;
+
+SetAllGrowthMenuItem::SetAllGrowthMenuItem(string myKey, UIInterface & ui)
+    : SetMenuItemId(myKey,ui,uistr::globalGrowth, UIId(force_GROW, uiconst::GLOBAL_ID))
+{
+}
+
+SetAllGrowthMenuItem::~SetAllGrowthMenuItem()
+{
+}
+
+bool SetAllGrowthMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::growth);
+}
+
+std::string SetAllGrowthMenuItem::GetVariableText()
+{
+    return "";
+}
+
+//------------------------------------------------------------------------------------
+
+ToggleMenuItemGrowthScheme::ToggleMenuItemGrowthScheme(string k, UIInterface & myui)
+    : ToggleMenuItemNoId(k,myui,uistr::growthScheme)
+{
+}
+
+ToggleMenuItemGrowthScheme::~ToggleMenuItemGrowthScheme()
+{
+}
+
+bool ToggleMenuItemGrowthScheme::IsVisible()
+{
+#ifdef NDEBUG
+    return false;
+#else
+    return ui.doGetBool(uistr::growth);
+#endif
+}
+
+//------------------------------------------------------------------------------------
+
+ToggleMenuItemGrowthType::ToggleMenuItemGrowthType(string k, UIInterface & myui)
+    : ToggleMenuItemNoId(k,myui,uistr::growthType)
+{
+}
+
+ToggleMenuItemGrowthType::~ToggleMenuItemGrowthType()
+{
+}
+
+bool ToggleMenuItemGrowthType::IsVisible()
+{
+#ifdef NDEBUG
+    return false;
+#else
+    return ui.doGetBool(uistr::growth);
+#endif
+}
+
+//------------------------------------------------------------------------------------
+
+SetMenuItemGrowths::SetMenuItemGrowths(UIInterface & myui)
+    : SetMenuItemGroup(myui,uistr::growthByID)
+{
+}
+
+SetMenuItemGrowths::~SetMenuItemGrowths()
+{
+}
+
+vector<UIId> SetMenuItemGrowths::GetVisibleIds()
+{
+    return ui.doGetUIIdVec1d(uistr::validParamsForForce,UIId(force_GROW));
+}
+
+//------------------------------------------------------------------------------------
+
+GrowthMenu::GrowthMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::growTitle,lamarcmenu::growInfo)
+{
+    AddMenuItem(new ToggleMenuItemNoId("X",ui,uistr::growth));
+    UIId id(force_GROW);
+    AddMenuItem(new ToggleMenuItemGrowthScheme("S",ui));
+    AddMenuItem(new ToggleMenuItemGrowthType("I",ui));
+    AddMenuItem(new SubMenuConstraintsForOneForce("C",ui,id));
+    AddMenuItem(new SubMenuProfileForOneForce("P",ui,id));
+    AddMenuItem(new SubMenuPriorForOneForce("B",ui,id));
+    AddMenuItem(new SetAllGrowthMenuItem("G",ui));
+    AddMenuItem(new SetMenuItemGrowths(ui));
+}
+
+GrowthMenu::~GrowthMenu ()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/growthmenus.h b/src/lamarcmenus/growthmenus.h
new file mode 100644
index 0000000..9986071
--- /dev/null
+++ b/src/lamarcmenus/growthmenus.h
@@ -0,0 +1,74 @@
+// $Id: growthmenus.h,v 1.16 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef GROWTHMENUS_H
+#define GROWTHMENUS_H
+
+#include <string>
+#include <vector>
+#include "newmenu.h"
+#include "setmenuitem.h"
+#include "newmenuitems.h"
+
+class UIInterface;
+
+class SetAllGrowthMenuItem : public SetMenuItemId
+{
+  public:
+    SetAllGrowthMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~SetAllGrowthMenuItem();
+    virtual bool IsVisible();
+    virtual std::string GetVariableText();
+};
+
+class SetMenuItemGrowths : public SetMenuItemGroup
+{
+  public:
+    SetMenuItemGrowths(UIInterface & ui);
+    virtual ~SetMenuItemGrowths();
+    virtual std::vector<UIId> GetVisibleIds();
+};
+
+class ToggleMenuItemGrowthScheme : public ToggleMenuItemNoId
+{
+  public:
+    ToggleMenuItemGrowthScheme(std::string myKey, UIInterface& myui);
+    virtual ~ToggleMenuItemGrowthScheme();
+    virtual bool IsVisible();
+};
+
+class ToggleMenuItemGrowthType : public ToggleMenuItemNoId
+{
+  public:
+    ToggleMenuItemGrowthType(std::string myKey, UIInterface& myui);
+    virtual ~ToggleMenuItemGrowthType();
+    virtual bool IsVisible();
+};
+
+class GrowthMenu : public NewMenu
+{
+  public:
+    GrowthMenu(UIInterface & myui);
+    virtual ~GrowthMenu();
+};
+
+class GrowthMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    GrowthMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~GrowthMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new GrowthMenu(ui));};
+};
+
+#endif  // GROWTHMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/lamarc_strings.cpp b/src/lamarcmenus/lamarc_strings.cpp
new file mode 100644
index 0000000..38a5a0b
--- /dev/null
+++ b/src/lamarcmenus/lamarc_strings.cpp
@@ -0,0 +1,181 @@
+// $Id: lamarc_strings.cpp,v 1.47 2012/02/15 18:13:42 jmcgill Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <string>
+#include "lamarc_strings.h"
+
+using std::string;
+
+const string lamarcmenu::addRangeDialog  = "Enter a site or a range of sites this trait might map to.  If entering a\n range of sites, use the format \"X:X\" (e.g. 23:57):";
+const string lamarcmenu::baseFrequenciesTitle  = "Base Frequencies for ";
+const string lamarcmenu::bayesianPriorsOverviewTitle = "Bayesian Priors Overview";
+const string lamarcmenu::calcPerLocus     = "<calculate per segment>";
+const string lamarcmenu::calculated       = " <calc>";
+const string lamarcmenu::categoriesTitle  = "Mutation Rate Categories for ";
+const string lamarcmenu::categoriesInfo   =
+    "Each category should have a unique mutation rate--identical rates do "
+    "nothing and waste considerable processing time.";
+const string lamarcmenu::categoryTitle  = "Mutation Rate Category ";
+const string lamarcmenu::coalInfo         =
+    "Values of theta equal the number of inheritable copies of the data in your population times 2mu.  If you have nuclear non-sex chromosomal DNA in your data, this works out to 4Nmu, where N is the effective population size, and mu the mutation rate.  Data from X chromosomes is 3Nmu, and data from Y chromosomes and mitochondrial DNA is 1Nmu.";
+const string lamarcmenu::coalTitle        = "Theta (Effective pop. size * mutation rate)";
+const string lamarcmenu::dataOverviewTitle= "Data Options Overview";
+const string lamarcmenu::dataTitle        = "Data options (tell us about your data)";
+const string lamarcmenu::dataModelTitle   = "Edit data model for ";
+const string lamarcmenu::defaultForParamsFor ="Default prior for ";
+const string lamarcmenu::diseaseInfo      =
+    "Values of D equal d/mu, and thus can be converted to be in terms of "
+    "population size by multiplying by the appropriate Theta.\n"
+    "e.g. if D is 100 and Theta is 0.01 for 4N data, this results in 4Nd=1\n";
+const string lamarcmenu::diseaseTitle     = "Disease parameters and model";
+const string lamarcmenu::divInfo          = "Edit Epoch Boundary Times";
+const string lamarcmenu::divTitle         = "Divergence parameters and model";
+const string lamarcmenu::divMigInfo          =
+    "[information about Divergence Migration here]";
+const string lamarcmenu::divMigTitle         = "Divergence Migration parameters and model";
+const string lamarcmenu::effectivePopSizeTitle = "Effective population size menu";
+const string lamarcmenu::effectivePopSizeInfo =
+    "The effective population size per region is a way to scale the different "
+    "regions so that the theta estimates can be combined.  Whatever region is set "
+    "to 1.0 becomes the reference region, and the theta for all other regions is "
+    "scaled appropriately.  So if, for example, you have both mitochondrial data "
+    "and nuclear-chromosome data, if you set the mitochondrial region to 1 and the "
+    "nuclear-chromosome region(s) to 4, the reported thetas will be scaled to the "
+    "mitochondrial theta.  If you set the mitochondrial region to 0.25 and the "
+    "nuclear-chromosome region(s) to 1, the reported thetas will be scaled to the "
+    "nuclear-chromosome theta.";
+const string lamarcmenu::effectivePopSizeFor = "Effective population size for ";
+const string lamarcmenu::fileOverviewTitle= "File Name Overview";
+const string lamarcmenu::forGlobalNuc     = "the default model for nucleotide data";
+const string lamarcmenu::forGlobalMsat    = "the default model for microsatellite data";
+const string lamarcmenu::forGlobalKAllele = "the default model for K-Allele data";
+const string lamarcmenu::forceProfileTitle = "Profiling for force: ";
+const string lamarcmenu::forceProfileInfo =
+    "Profiling gives you information about the accuracy of your parameter "
+    "estimates and the shape of the likelihood curve.  In a likelihood "
+    "analysis, 'Fixed' profiling is "
+    "faster but less informative, while 'percentile' profiling is slower but "
+    "more informative.  In a bayesian analysis, both types of profiling take "
+    "essentially no extra time to compute.  In either case, constant and invalid "
+    "parameters cannot be profiled.";
+const string lamarcmenu::forceConstraintTitle = "Constraints for force: ";
+const string lamarcmenu::forceConstraintInfo   =
+    "Select a parameter index to change its constraint type.  "
+    "'Unconstrained' parameters vary freely, 'constant' parameters are held at "
+    "their starting value, and 'invalid' parameters are set to zero and ignored "
+    "in future output.  Grouped parameters are set to the same (averaged) starting "
+    "value, then vary as above.  'Identical' grouped parameters are constrained to "
+    "be always be equal to each other.";
+const string lamarcmenu::forcesOverviewTitle="Analysis Overview";
+const string lamarcmenu::forcesTitle      = "Analysis (tell us what you want to know)";
+const string lamarcmenu::globalModel      = " <default>";
+const string lamarcmenu::globalDataModelNuc = "Edit default data model for all Nucleotide data";
+const string lamarcmenu::globalDataModelMsat = "Edit default data model for all Microsat data";
+const string lamarcmenu::globalDataModelKAllele = "Edit default data model for all K-Allele data";
+const string lamarcmenu::growInfo         =
+    "Values of G equal g/mu, where the population size at time t in the past equals the the present population size times exp(-t*g).\n"
+    "A shrinking population might have G = -10, while a rapidly growing population might have G = 200\n";
+const string lamarcmenu::growTitle        = "Growth rate";
+const string lamarcmenu::gtrRatesTitle    = "GTR Rates for ";
+const string lamarcmenu::heatingInfo      =
+    "Higher-temperature searches investigate more possible solutions "
+    "but spend proportionately more time evaluating poor candidates.\n"
+    "Temperatures will be scaled so the lowest is 1.0";
+const string lamarcmenu::heatingTitle     = "Multiple simultaneous searches with heating";
+const string lamarcmenu::logisticSelectionTitle = "Logistic selection coefficient";
+const string lamarcmenu::logisticSelectionInfo =
+    "This force models natural selection at a diallelic marker by "
+    "assuming the subpopulation with favored allele \"A\" grows "
+    "at the expense of the subpopulation with allele \"a\" "
+    "at a logistic rate determined by a selection coefficient \"s.\"  "
+    "Values of s > 0.1 indicate strong selection favoring \"A;\" "
+    "negative values of s (down to a minimum value of -1) indicate "
+    "selection favoring allele \"a.\"";
+const string lamarcmenu::mainTitle        = "Main Menu";
+const string lamarcmenu::migInfo          =
+    "Values of M equal m/mu, and thus can be converted to be in terms of "
+    "population size by multiplying by the appropriate Theta.\n"
+    "e.g. if M is 100 and Theta is 0.01 for 4N data, this results in 4Nm=1";
+const string lamarcmenu::migTitle         = "Migration parameters and model";
+const string lamarcmenu::multiLocusCount_0= "<";
+const string lamarcmenu::multiLocusCount_1= " segments>";
+const string lamarcmenu::outfileTitle     = "Output file options";
+const string lamarcmenu::overviewInfo     = "No changes allowed from this menu.\nTo make changes, use the menu: ";
+const string lamarcmenu::overviewTitle    = "Overview of current settings (what you told us)";
+const string lamarcmenu::parenLeft        = " ( ";
+const string lamarcmenu::parenRight       = " ) ";
+const string lamarcmenu::profileTitle     = "Profile likelihood settings";
+const string lamarcmenu::profileInfo      =
+    "Profiling provides information about the support intervals for your data.  "
+    "In a likelihood run, percentile profiling is most informative, but takes "
+    "longest to compute, while fixed profiling is less informative, but is "
+    "faster.  In a bayesian run, both take a minimal amount of time.";
+const string lamarcmenu::priorTitle       = "Bayesian Priors Menu";
+const string lamarcmenu::priorInfo = "Bayesian priors can be set on a per-force or per-parameter basis.  The type of prior can be set (linear or logarithmic) as well as the allowable bounds.";
+const string lamarcmenu::priorInfoForForce = "Select 'D' to change the default prior for all parameters for this force, or select the particular parameter whose prior you wish to change.  'Constant' and 'invalid' parameters do not use any prior.";
+const string lamarcmenu::priorInfoForParam = "Priors may be either linear or logarithmic.  The lower bound for logarithmic priors must be above zero, in addition to any other constraints an evolutionary force might have.";
+const string lamarcmenu::individualProfileTitle     = "Individual parameter profiling options";
+const string lamarcmenu::rearrangeTitle   = "Rearrangers Menu";
+const string lamarcmenu::rearrangeInfo    =
+    "The values set for each arranger indicate the relative amount of time "
+    "spent performing each type of arrangement.  The Tree-size frequency should "
+    "probably be lower than the the Topology frequency, and in "
+    "Bayesian runs with multiple parameters, the Bayesian frequency will "
+    "probably need to be increased.";
+const string lamarcmenu::recInfo          =
+    "Values of r equal C/mu, and thus can be converted to be in terms of "
+    "population size by multiplying by the appropriate Theta.\n"
+    "e.g. if r is 0.1 and Theta is 0.01 for 4N data, this results in 4NC=0.001\n"
+    "C is the recombination rate per generation per site (*not* per locus!).";
+const string lamarcmenu::recTitle         = "Recombination parameters";
+const string lamarcmenu::regionDataModelTitle   = "Edit data model(s) for ";
+const string lamarcmenu::regionGammaTitle = "Gamma (background mutation rate varies over genomic regions)";
+const string lamarcmenu::regionGammaInfo =
+    "This \"force\" allows the background mutation rate to vary from region\n"
+    "to region, modeled using the gamma distribution.  The \"scale\"\n"
+    "parameter (\"beta\") gets set to the reciprocal of the \"shape\"\n"
+    "parameter (\"alpha\") so that the average background mutation rate is 1.\n"
+    "A low alpha value like 0.5 implies the data is nearly invariable\n"
+    "in most genomic regions, and large at maybe one or two regions.\n"
+    "A medium value like 1.1 implies a spread of mutation rates, most with\n"
+    "low values.  A high value like 15 implies the level of variation is\n"
+    "sizeable, but roughly the same for each genomic region.\n";
+const string lamarcmenu::removeRangeDialog   = "Enter a site or a range of sites you know this trait cannot map to.  If\n entering a range of sites, use the format \"X:X\" (e.g. 23:57):";
+const string lamarcmenu::resultOverviewTitle = "Results and Progress Option Overview";
+const string lamarcmenu::resultTitle      = "Input and Output (tell us where to keep it)";
+const string lamarcmenu::searchTitle      = "Sampling strategy (chains and replicates)";
+//LS DEBUG MAPPING: change after implementation
+const string lamarcmenu::singleTraitModelInfo =
+    "Your data contains information about the following trait.  This trait "
+#ifdef NDEBUG
+    "can be mapped either while collecting trees ('jump'), or after collecting "
+    "trees ('float').";
+#else
+"can be either mapped to the region from which it came, used as data "
+"to better determine other population parameters, or used to partition "
+"your data into sub-populations.";
+#endif
+const string lamarcmenu::strategyOverviewTitle    = "Search Strategy Overview";
+const string lamarcmenu::strategyTitle    = "Search Strategy menu (tell us how to do it)";
+const string lamarcmenu::traitMappingOverviewTitle = "Mapping and Traits Overview";
+const string lamarcmenu::traitModel1  = "Trait model for ";
+const string lamarcmenu::traitModel2  = ".  Range: ";
+const string lamarcmenu::traitRange   = "Range of allowable sites ";
+const string lamarcmenu::traitAnalysisType   = "Type of Analysis ";
+//const string lamarcmenu::traitsTitle = "Trait Data analysis (mapping or partitioning)";
+const string lamarcmenu::traitsTitle = "Trait Data Mapping Analysis";
+const string lamarcmenu::traitsInfo =
+    "Your data contains information about the following traits.  Each trait "
+    "will be mapped within a single region.";
+;
+const string lamarcmenu::wordFor          = " for ";
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/lamarc_strings.h b/src/lamarcmenus/lamarc_strings.h
new file mode 100644
index 0000000..c61693e
--- /dev/null
+++ b/src/lamarcmenus/lamarc_strings.h
@@ -0,0 +1,107 @@
+// $Id: lamarc_strings.h,v 1.32 2012/02/15 18:13:42 jmcgill Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef LAMARC_STRINGS_H
+#define LAMARC_STRINGS_H
+
+#include <string>
+
+class lamarcmenu
+{
+  public:
+    static const std::string addRangeDialog;
+    static const std::string bayesianPriorsOverviewTitle;
+    static const std::string baseFrequenciesTitle;
+    static const std::string calcPerLocus;
+    static const std::string calculated;
+    static const std::string categoriesTitle;
+    static const std::string categoriesInfo;
+    static const std::string categoryTitle;
+    static const std::string coalInfo;
+    static const std::string coalTitle;
+    static const std::string dataTitle;
+    static const std::string dataModelTitle;
+    static const std::string dataOverviewTitle;
+    static const std::string defaultForParamsFor;
+    static const std::string diseaseInfo;
+    static const std::string diseaseTitle;
+    static const std::string divInfo;
+    static const std::string divTitle;
+    static const std::string divMigInfo;
+    static const std::string divMigTitle;
+    static const std::string effectivePopSizeFor;
+    static const std::string effectivePopSizeTitle;
+    static const std::string effectivePopSizeInfo;
+    static const std::string fileOverviewTitle;
+    static const std::string forGlobalNuc;
+    static const std::string forGlobalMsat;
+    static const std::string forGlobalKAllele;
+    static const std::string forcesTitle;
+    static const std::string forcesOverviewTitle;
+    static const std::string forceProfileTitle;
+    static const std::string forceProfileInfo;
+    static const std::string forceConstraintTitle;
+    static const std::string forceConstraintInfo;
+    static const std::string regionGammaInfo;
+    static const std::string regionGammaTitle;
+    static const std::string globalModel;
+    static const std::string globalDataModelNuc;
+    static const std::string globalDataModelMsat;
+    static const std::string globalDataModelKAllele;
+    static const std::string growInfo;
+    static const std::string growTitle;
+    static const std::string gtrRatesTitle;
+    static const std::string heatingInfo;
+    static const std::string heatingTitle;
+    static const std::string logisticSelectionInfo;
+    static const std::string logisticSelectionTitle;
+    static const std::string mainTitle;
+    static const std::string migInfo;
+    static const std::string migTitle;
+    static const std::string multiLocusCount_0;
+    static const std::string multiLocusCount_1;
+    static const std::string outfileTitle;
+    static const std::string overviewInfo;
+    static const std::string overviewTitle;
+    static const std::string parenLeft;
+    static const std::string parenRight;
+    static const std::string profileTitle;
+    static const std::string profileInfo;
+    static const std::string priorTitle;
+    static const std::string priorInfo;
+    static const std::string priorInfoForForce;
+    static const std::string priorInfoForParam;
+    static const std::string individualProfileTitle;
+    static const std::string rearrangeTitle;
+    static const std::string rearrangeInfo;
+    static const std::string recInfo;
+    static const std::string recTitle;
+    static const std::string regionDataModelTitle;
+    static const std::string removeRangeDialog;
+    static const std::string resultOverviewTitle;
+    static const std::string resultTitle;
+    static const std::string searchTitle;
+    static const std::string singleTraitModelInfo;
+    static const std::string strategyOverviewTitle;
+    static const std::string strategyTitle;
+    static const std::string traitAnalysisType;
+    static const std::string traitsTitle;
+    static const std::string traitMappingOverviewTitle;
+    static const std::string traitModel1;
+    static const std::string traitModel2;
+    static const std::string traitRange;
+    static const std::string traitsInfo;
+    static const std::string wordFor;
+};
+
+#endif // LAMARC_STRINGS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/lamarcheaderdialog.cpp b/src/lamarcmenus/lamarcheaderdialog.cpp
new file mode 100644
index 0000000..ae31518
--- /dev/null
+++ b/src/lamarcmenus/lamarcheaderdialog.cpp
@@ -0,0 +1,146 @@
+// $Id: lamarcheaderdialog.cpp,v 1.17 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+
+// Defines various symbols used to control debugging of experimental code blocks.
+#include "local_build.h"
+
+#include "lamarcheaderdialog.h"
+#include "runreport.h"           // Added only for DebuggingOptionsXxxxxx() functions below.
+
+using std::string;
+
+LamarcHeaderDialog::LamarcHeaderDialog()
+    : DialogNoInput()
+{
+}
+
+LamarcHeaderDialog::~LamarcHeaderDialog()
+{
+}
+
+string LamarcHeaderDialog::outputString()
+{
+    string msg(string("       L A M A R C      Likelihood Analysis with               \n")
+               + string("                        Metropolis-Hastings Algorithms         \n")
+               + string("                        using Random Coalescences              \n")
+               + string("                        Version  ") + VERSION
+#ifndef NDEBUG
+               + string(" (Debug)")
+#endif
+               + string("\n")
+               + string("                        Release Date: ") + RELEASE_DATE + string("\n")
+               + string("---------------------------------------------------------------------------\n"));
+
+    // Appends an indication of any debugging options which may differ from the "usual" configuration.
+    if (DebuggingOptionsRunning())
+    {
+        // This variable exists only to give DebuggingOptionsString() something to increment (by reference),
+        // since that function needs to update a linecount when called in the context of printing an output file.
+        unsigned long int current_linecount = 0;
+        msg += DebuggingOptionsString(current_linecount);
+    }
+
+    return msg;
+}
+
+//------------------------------------------------------------------------------------
+// These are tools for marking output (both header printed on screen at LAMARC startup
+// and beginning of output file "outfile.txt") to indicate unambiguously whether any
+// "unusual" debugging options are activated.  If all the relevant pre-processor flags
+// are all in their usual states, no extra strings are printed.
+//
+// "Unusual" options include:
+//     Final Coalescence Optimization is disabled (for comparison with "enabled" mode).
+//       Final Coalescence being enabled is the "normal" state, indicated by no string of its own
+//       and a false boolean (unless some other "unusual" state is active).
+//     STATIONARIES is defined (data are ignored and trees are generated "from scratch").
+//     DENOVO is defined (trees are generated fresh each time rather than being re-arranged).
+//     ALL_ARRANGERS_DENOVO is defined (all arrangers work "de novo"; if OFF, only Bayesian arrangers do so).
+//       This is indicated only in the string output.  The boolean output is already TRUE if DENOVO
+//       is defined, and ALL_ARRANGERS_DENOVO is relevant only when DENOVO is defined.
+//     DEBUG build.
+//
+// These options can be activated/deactivated via the "configure" program in the build process.
+// They can also be set/unset directly in the file "local_build.h" in the config subdirectory
+// of the directory in which LAMARC is being built.
+//
+// If you edit either function, be sure to make parallel changes in the other.
+//
+// Prototypes for these two functions are in "runreport.h" simply because it is already included
+// by almost all files in which these functions appear.
+
+// Returns a TRUE indication if any "unusual" debugging option is activated (logical OR of "unusualness").
+// If all conditions are in their USUAL state, output is FALSE.
+//
+bool DebuggingOptionsRunning()
+{
+    bool unusual_flags(false);
+
+#ifndef NDEBUG
+    unusual_flags = true;
+#endif
+
+#if ! FINAL_COALESCENCE_ON
+    unusual_flags = true;
+#endif
+
+#ifdef STATIONARIES
+    unusual_flags = true;
+#endif
+
+#ifdef DENOVO
+    unusual_flags = true;
+#endif
+
+    return unusual_flags;
+}
+
+// Returns a string describing "unusual" debugging options.
+// The string is multi-line (each terminated by a newline), with one line for each "unusual" condition.
+// The ALL_ARRANGERS_DENOVO string is conditionally present only if the DENOVO state is also active.
+// If NO unusual states are active, the output string is the NULL string.
+//
+string DebuggingOptionsString(unsigned long int & current_linecount)
+{
+    // We want a blank line before the output appears (this function is called only if output WILL appear).
+    // But for some reason, the leading Newline does not appear unless the first line returned is non-empty.
+    string msg(" \n");
+    ++current_linecount;
+
+#ifndef NDEBUG
+    msg += "DEBUG mode is ON.\n";
+    ++current_linecount;
+#endif
+
+#if ! FINAL_COALESCENCE_ON
+    msg += "Final Coalescence optimization is OFF.\n";
+    ++current_linecount;
+#endif
+
+#ifdef STATIONARIES
+    msg += "STATIONARIES is ON.\n";
+    ++current_linecount;
+#endif
+
+#ifdef DENOVO
+    msg += "DENOVO is ON.\n";
+    ++current_linecount;
+#ifdef ALL_ARRANGERS_DENOVO
+    msg += "ALL_ARRANGERS_DENOVO is ON.\n";
+    ++current_linecount;
+#endif
+#endif
+
+    return msg;
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/lamarcheaderdialog.h b/src/lamarcmenus/lamarcheaderdialog.h
new file mode 100644
index 0000000..eac128a
--- /dev/null
+++ b/src/lamarcmenus/lamarcheaderdialog.h
@@ -0,0 +1,28 @@
+// $Id: lamarcheaderdialog.h,v 1.7 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef LAMARCHEADERDIALOG_H
+#define LAMARCHEADERDIALOG_H
+
+#include "dialognoinput.h"
+#include <string>
+
+class LamarcHeaderDialog : public DialogNoInput
+{
+  protected:
+    virtual std::string outputString();
+  public:
+    LamarcHeaderDialog();
+    virtual ~LamarcHeaderDialog();
+};
+
+#endif // LAMARCHEADERDIALOG_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/lamarcmenu.cpp b/src/lamarcmenus/lamarcmenu.cpp
new file mode 100644
index 0000000..eaba680
--- /dev/null
+++ b/src/lamarcmenus/lamarcmenu.cpp
@@ -0,0 +1,47 @@
+// $Id: lamarcmenu.cpp,v 1.22 2010/03/17 17:25:58 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// the base menu is the NON-Interactive "menu"
+// derived menus handle the different windowing systems
+//  (- Curses)
+//  - Scrolling ASCII console
+//  (-graphical GUI etc)
+// Peter Beerli
+
+#include <string>
+#include "datamodelmenu.h"
+#include "forcesmenus.h"
+#include "lamarcmenu.h"
+#include "lamarcmenuitems.h"
+#include "lamarc_strings.h"
+#include "menu_strings.h"
+#include "newmenu.h"
+#include "newmenuitems.h"
+#include "overviewmenus.h"
+#include "ui_interface.h"
+
+//------------------------------------------------------------------------------------
+
+LamarcMainMenu::LamarcMainMenu (UIInterface & myui)
+    : NewMenu (myui,lamarcmenu::mainTitle,menustr::emptyString,true)
+{
+    AddMenuItem(new SubMenuItem(string("D"),ui,new NewDataModelMenuCreator(ui)));
+    AddMenuItem(new SubMenuItem(string("A"),ui,new ForcesMenuCreator(ui)));
+    AddMenuItem(new SubMenuItem(string("S"),ui,new StrategyMenuCreator(ui)));
+    AddMenuItem(new SubMenuItem(string("I"),ui,new ResultMenuCreator(ui)));
+    AddMenuItem(new SubMenuItem(string("O"),ui,new OverviewMenuCreator(ui)));
+
+}
+
+LamarcMainMenu::~LamarcMainMenu ()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/lamarcmenu.h b/src/lamarcmenus/lamarcmenu.h
new file mode 100644
index 0000000..604b012
--- /dev/null
+++ b/src/lamarcmenus/lamarcmenu.h
@@ -0,0 +1,27 @@
+// $Id: lamarcmenu.h,v 1.7 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef LAMARCMENU_H_
+#define LAMARCMENU_H_
+
+#include "newmenu.h"
+
+class UIInterface;
+
+class LamarcMainMenu : public NewMenu
+{
+  public:
+    LamarcMainMenu(UIInterface & myui);
+    ~LamarcMainMenu();
+};
+
+#endif // LAMARCMENU_H_
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/lamarcmenuitems.cpp b/src/lamarcmenus/lamarcmenuitems.cpp
new file mode 100644
index 0000000..7390f69
--- /dev/null
+++ b/src/lamarcmenus/lamarcmenuitems.cpp
@@ -0,0 +1,541 @@
+// $Id: lamarcmenuitems.cpp,v 1.35 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// the base menu is the NON-Interactive "menu"
+// derived menus handle the different windowing systems
+//  (- Curses)
+//  - Scrolling ASCII console
+//  (-graphical GUI etc)
+// Peter Beerli
+
+#include <string>
+
+#include "local_build.h"
+
+#include "datamodelmenu.h"
+#include "forcesmenus.h"
+#include "lamarcmenuitems.h"
+#include "lamarc_strings.h"
+#include "menuitem.h"
+#include "menu_strings.h"
+#include "newmenu.h"
+#include "newmenuitems.h"
+#include "outfilemenus.h"
+#include "overviewmenus.h"
+#include "priormenus.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+#include "treesummenus.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+
+using namespace std;
+
+///// SetMenuItemTempInterval
+
+SetMenuItemTempInterval::SetMenuItemTempInterval(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::tempInterval)
+{
+}
+
+SetMenuItemTempInterval::~SetMenuItemTempInterval()
+{
+}
+
+bool SetMenuItemTempInterval::IsVisible()
+{
+    long numChains = ui.doGetLong(uistr::heatedChainCount);
+    return (numChains > 1);
+}
+
+///// SetMenuItemHapArranger
+
+SetMenuItemHapArranger::SetMenuItemHapArranger(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::hapArranger)
+{
+}
+
+SetMenuItemHapArranger::~SetMenuItemHapArranger()
+{
+}
+
+bool SetMenuItemHapArranger::IsVisible()
+{
+    return ui.doGetBool(uistr::canHapArrange);
+}
+
+///// SetMenuItemProbHapArranger
+
+SetMenuItemProbHapArranger::SetMenuItemProbHapArranger(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::probhapArranger)
+{
+}
+
+SetMenuItemProbHapArranger::~SetMenuItemProbHapArranger()
+{
+}
+
+bool SetMenuItemProbHapArranger::IsVisible()
+{
+    return ui.GetCurrentVars().chains.GetProbHapArrangerPossible();
+}
+
+///// SetMenuItemLocusArranger
+
+SetMenuItemLocusArranger::SetMenuItemLocusArranger(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::locusArranger)
+{
+}
+
+SetMenuItemLocusArranger::~SetMenuItemLocusArranger()
+{
+}
+
+bool
+SetMenuItemLocusArranger::IsVisible()
+{
+    return ui.GetCurrentVars().traitmodels.AnyJumpingAnalyses();
+}
+
+///// SetMenuItemBayesArranger
+
+SetMenuItemBayesArranger::SetMenuItemBayesArranger(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::bayesArranger)
+{
+}
+
+SetMenuItemBayesArranger::~SetMenuItemBayesArranger()
+{
+}
+
+bool
+SetMenuItemBayesArranger::IsVisible()
+{
+    return ui.doGetBool(uistr::bayesian);
+}
+
+///// SetMenuItemEpochArranger
+
+SetMenuItemEpochArranger::SetMenuItemEpochArranger(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::epochSizeArranger)
+{
+}
+
+SetMenuItemEpochArranger::~SetMenuItemEpochArranger()
+{
+}
+
+bool
+SetMenuItemEpochArranger::IsVisible()
+{
+    return ui.GetCurrentVars().forces.GetForceLegal(force_DIVERGENCE);
+}
+
+///// SetMenuItemZilchArranger
+
+SetMenuItemZilchArranger::SetMenuItemZilchArranger(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::zilchArranger)
+{
+}
+
+SetMenuItemZilchArranger::~SetMenuItemZilchArranger()
+{
+}
+
+bool
+SetMenuItemZilchArranger::IsVisible()
+{
+    return ui.GetCurrentVars().datapackplus.AnySimulation();
+}
+
+///// SetMenuItemCurveFilePrefix
+
+SetMenuItemCurveFilePrefix::SetMenuItemCurveFilePrefix(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::curveFilePrefix)
+{
+}
+
+SetMenuItemCurveFilePrefix::~SetMenuItemCurveFilePrefix()
+{
+}
+
+bool
+SetMenuItemCurveFilePrefix::IsVisible()
+{
+    return (ui.doGetBool(uistr::bayesian) && ui.doGetBool(uistr::useCurveFiles));
+}
+
+///// SetMenuItemMapFilePrefix
+
+SetMenuItemMapFilePrefix::SetMenuItemMapFilePrefix(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::mapFilePrefix)
+{
+}
+
+SetMenuItemMapFilePrefix::~SetMenuItemMapFilePrefix()
+{
+}
+
+bool
+SetMenuItemMapFilePrefix::IsVisible()
+{
+    return (ui.GetCurrentVars().traitmodels.GetNumMovableLoci() > 0);
+
+}
+
+
+///// SetMenuItemReclocFiles
+
+SetMenuItemReclocFiles::SetMenuItemReclocFiles(string k, UIInterface & myui)
+    : ToggleMenuItemNoId(k,myui,uistr::useReclocFiles)
+{
+}
+
+SetMenuItemReclocFiles::~SetMenuItemReclocFiles()
+{
+}
+
+bool
+SetMenuItemReclocFiles::IsVisible()
+{
+    return (ui.doGetBool(uistr::recombination));
+}
+
+///// SetMenuItemReclocFilePrefix
+
+SetMenuItemReclocFilePrefix::SetMenuItemReclocFilePrefix(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::reclocFilePrefix)
+{
+}
+
+SetMenuItemReclocFilePrefix::~SetMenuItemReclocFilePrefix()
+{
+}
+
+bool
+SetMenuItemReclocFilePrefix::IsVisible()
+{
+    return (ui.doGetBool(uistr::useReclocFiles) && ui.doGetBool(uistr::recombination));
+}
+
+///// SetMenuItemTraceFilePrefix
+
+SetMenuItemTraceFilePrefix::SetMenuItemTraceFilePrefix(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::traceFilePrefix)
+{
+}
+
+SetMenuItemTraceFilePrefix::~SetMenuItemTraceFilePrefix()
+{
+}
+
+bool
+SetMenuItemTraceFilePrefix::IsVisible()
+{
+    return (ui.doGetBool(uistr::useTraceFiles));
+}
+
+///// SetMenuItemNewickTreeFilePrefix
+
+SetMenuItemNewickTreeFilePrefix::SetMenuItemNewickTreeFilePrefix(string k, UIInterface & myui)
+    : SetMenuItemNoId(k,myui,uistr::newickTreeFilePrefix)
+{
+}
+
+SetMenuItemNewickTreeFilePrefix::~SetMenuItemNewickTreeFilePrefix()
+{
+}
+
+bool
+SetMenuItemNewickTreeFilePrefix::IsVisible()
+{
+    return (ui.doGetBool(uistr::useNewickTreeFiles));
+}
+
+///// ToggleMenuItemTempAdapt
+
+ToggleMenuItemTempAdapt::ToggleMenuItemTempAdapt(string k, UIInterface & myui)
+    : ToggleMenuItemNoId(k,myui,uistr::tempAdapt)
+{
+}
+
+ToggleMenuItemTempAdapt::~ToggleMenuItemTempAdapt()
+{
+}
+
+bool ToggleMenuItemTempAdapt::IsVisible()
+{
+    long numChains = ui.doGetLong(uistr::heatedChainCount);
+    return (numChains > 1);
+}
+
+//------------------------------------------------------------------------------------
+
+///// ToggleMenuItemUseCurveFiles
+
+ToggleMenuItemUseCurveFiles::ToggleMenuItemUseCurveFiles(string k, UIInterface & myui)
+    : ToggleMenuItemNoId(k,myui,uistr::useCurveFiles)
+{
+}
+
+ToggleMenuItemUseCurveFiles::~ToggleMenuItemUseCurveFiles()
+{
+}
+
+bool ToggleMenuItemUseCurveFiles::IsVisible()
+{
+    return ui.doGetBool(uistr::bayesian);
+}
+
+//------------------------------------------------------------------------------------
+
+StrategyMenu::StrategyMenu (UIInterface & myui)
+    : NewMenu (myui,lamarcmenu::strategyTitle,menustr::emptyString)
+{
+    AddMenuItem(new ToggleMenuItemNoId("P",ui,uistr::bayesian));
+    AddMenuItem(new PriorSubMenuItem("B",ui));
+    AddMenuItem(new SubMenuItem("R",ui,new RearrangeMenuCreator(ui)));
+    AddMenuItem(new SubMenuItem("S",ui,new SearchMenuCreator(ui)));
+    AddMenuItem(new HeatingSubMenuItem("M",ui));
+}
+
+StrategyMenu::~StrategyMenu()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+ChainTemperatureMenuItemGroup::ChainTemperatureMenuItemGroup(UIInterface & myui)
+    : SetMenuItemGroup(myui,uistr::heatedChain)
+{
+}
+
+ChainTemperatureMenuItemGroup::~ChainTemperatureMenuItemGroup()
+{
+}
+
+vector<UIId> ChainTemperatureMenuItemGroup::GetVisibleIds()
+{
+    vector<UIId> temperatureIds;
+    long nTemps = ui.doGetLong(uistr::heatedChainCount);
+    if (nTemps > 1)
+    {
+        for(long i=0; i < nTemps; i++)
+        {
+            temperatureIds.push_back(UIId(i));
+        }
+    }
+    return temperatureIds;
+}
+
+string ChainTemperatureMenuItemGroup::GetText(UIId id)
+{
+    return uistr::heatedChain + ToString(id.GetIndex1() + 1);
+}
+
+HeatingMenu::HeatingMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::heatingTitle,lamarcmenu::heatingInfo)
+{
+    AddMenuItem(new SetMenuItemNoId("S",ui,uistr::heatedChainCount));
+    AddMenuItem(new ToggleMenuItemTempAdapt(string("A"),ui));
+    AddMenuItem(new SetMenuItemTempInterval("I",ui));
+    AddMenuItem(new ChainTemperatureMenuItemGroup(ui));
+}
+
+HeatingMenu::~HeatingMenu ()
+{
+}
+
+HeatingSubMenuItem::HeatingSubMenuItem(string mykey, UIInterface& myui)
+    : SubMenuItem(mykey,myui,new HeatingMenuCreator(myui))
+{
+}
+
+HeatingSubMenuItem::~HeatingSubMenuItem()
+{
+}
+
+string
+HeatingSubMenuItem::GetVariableText()
+{
+    long temps = ui.doGetLong(uistr::heatedChainCount,GetId());
+    if(temps < 2)
+    {
+        return "none";
+    }
+    else
+    {
+        return ToString(temps)+" chains";
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+PriorSubMenuItem::PriorSubMenuItem(string mykey, UIInterface& myui)
+    : SubMenuItem(mykey,myui,new PriorMenuCreator(myui))
+{
+}
+
+PriorSubMenuItem::~PriorSubMenuItem()
+{
+}
+
+bool PriorSubMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::bayesian);
+}
+
+//------------------------------------------------------------------------------------
+
+RearrangeMenu::RearrangeMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::rearrangeTitle,lamarcmenu::rearrangeInfo)
+
+{
+    AddMenuItem(new SetMenuItemNoId("T",ui,uistr::dropArranger));
+    AddMenuItem(new SetMenuItemNoId("S",ui,uistr::sizeArranger));
+    AddMenuItem(new SetMenuItemHapArranger("H",ui));
+    AddMenuItem(new SetMenuItemProbHapArranger("M",ui));
+    AddMenuItem(new SetMenuItemBayesArranger("B",ui));
+    AddMenuItem(new SetMenuItemEpochArranger("E",ui));
+    AddMenuItem(new SetMenuItemLocusArranger("L",ui));
+    AddMenuItem(new SetMenuItemZilchArranger("N",ui));
+}
+
+RearrangeMenu::~RearrangeMenu ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+SearchMenu::SearchMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::searchTitle,menustr::emptyString)
+{
+    AddMenuItem(new SetMenuItemNoId("R",ui,uistr::replicates));
+    AddMenuItem(new BlankMenuItem());
+    AddMenuItem(new OutputOnlyMenuItem("Initial Chains"));
+    AddMenuItem(new SetMenuItemNoId("1",ui,uistr::initialChains));
+    AddMenuItem(new SetMenuItemNumSamples("2",ui,uistr::initialSamples));
+    AddMenuItem(new SetMenuItemNoId("3",ui,uistr::initialInterval));
+    AddMenuItem(new SetMenuItemNoId("4",ui,uistr::initialDiscard));
+    AddMenuItem(new DividerMenuItem());
+    AddMenuItem(new OutputOnlyMenuItem("Final Chains"));
+    AddMenuItem(new SetMenuItemNoId("5",ui,uistr::finalChains));
+    AddMenuItem(new SetMenuItemNumSamples("6",ui,uistr::finalSamples));
+    AddMenuItem(new SetMenuItemNoId("7",ui,uistr::finalInterval));
+    AddMenuItem(new SetMenuItemNoId("8",ui,uistr::finalDiscard));
+}
+
+SearchMenu::~SearchMenu ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+ResultMenu::ResultMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::resultTitle,menustr::emptyString)
+{
+    AddMenuItem(new ToggleMenuItemNoId(string("V"),ui,uistr::progress));
+    AddMenuItem(new OutfileSubMenuItem(string("O"),ui));
+    AddMenuItem(new SetMenuItemNoId("M",ui,uistr::xmlOutFileName));
+    AddMenuItem(new TreeSumOutSubMenuItem(string("W"),ui));
+    AddMenuItem(new TreeSumInSubMenuItem(string("R"),ui));
+    AddMenuItem(new ToggleMenuItemUseCurveFiles(string("B"),ui));
+    AddMenuItem(new SetMenuItemCurveFilePrefix("C",ui));
+    AddMenuItem(new SetMenuItemMapFilePrefix("I",ui));
+    AddMenuItem(new SetMenuItemReclocFiles("L",ui));
+    AddMenuItem(new SetMenuItemReclocFilePrefix("F",ui));
+    AddMenuItem(new ToggleMenuItemNoId("A",ui,uistr::useTraceFiles));
+    AddMenuItem(new SetMenuItemTraceFilePrefix("T",ui));
+    AddMenuItem(new ToggleMenuItemNoId("K",ui,uistr::useNewickTreeFiles));
+    AddMenuItem(new SetMenuItemNewickTreeFilePrefix("N",ui));
+    AddMenuItem(new SetMenuItemNoId("X",ui,uistr::xmlReportFileName));
+    AddMenuItem(new SetMenuItemNoId("P",ui,uistr::profileprefix));
+#ifdef LAMARC_QA_TREE_DUMP
+    AddMenuItem(new ToggleMenuItemNoId("G",ui,uistr::useArgFiles));
+    AddMenuItem(new SetMenuItemNoId(string("H"),ui,uistr::argFilePrefix));
+    AddMenuItem(new ToggleMenuItemNoId("Z",ui,uistr::manyArgFiles));
+#endif // LAMARC_QA_TREE_DUMP
+}
+
+ResultMenu::~ResultMenu ()
+{
+}
+
+OverviewMenu::OverviewMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::overviewTitle)
+{
+    AddMenuItem(new SubMenuItem(string("D"),ui,new DataOverviewMenuCreator(ui)));
+    AddMenuItem(new SubMenuItem(string("A"),ui,new ForceOverviewMenuCreator(ui)));
+    AddMenuItem(new SubMenuItemBayesPriors(string("B"),ui));
+    AddMenuItem(new SubMenuItemTraitMappingOverview(string("M"), ui));
+    AddMenuItem(new SubMenuItem(string("S"),ui,new SearchOverviewMenuCreator(ui)));
+    AddMenuItem(new SubMenuItem(string("F"),ui,new FileOverviewMenuCreator(ui)));
+    AddMenuItem(new SubMenuItem(string("R"),ui,new ResultsOverviewMenuCreator(ui)));
+}
+
+OverviewMenu::~OverviewMenu()
+{
+}
+
+SubMenuItemBayesPriors::SubMenuItemBayesPriors(string myKey, UIInterface& myui)
+    : SubMenuItem(myKey, myui, new BayesianPriorsOverviewMenuCreator(myui))
+{
+}
+
+SubMenuItemBayesPriors::~SubMenuItemBayesPriors()
+{
+}
+
+bool SubMenuItemBayesPriors::IsVisible()
+{
+    return ui.doGetBool(uistr::bayesian);
+}
+
+SubMenuItemTraitMappingOverview::SubMenuItemTraitMappingOverview(string myKey, UIInterface& myui)
+    : SubMenuItem(myKey, myui, new TraitMappingOverviewMenuCreator(myui))
+{
+}
+
+SubMenuItemTraitMappingOverview::~SubMenuItemTraitMappingOverview()
+{
+}
+
+bool SubMenuItemTraitMappingOverview::IsVisible()
+{
+    return (ui.GetCurrentVars().traitmodels.GetNumMovableLoci() > 0);
+}
+
+SetMenuItemNumSamples::SetMenuItemNumSamples(string myKey, UIInterface& myui, string myMenuKey)
+    : SetMenuItemNoId(myKey, myui, myMenuKey)
+{
+}
+
+SetMenuItemNumSamples::~SetMenuItemNumSamples()
+{
+}
+
+string SetMenuItemNumSamples::GetText()
+{
+    if (ui.GetCurrentVars().chains.GetDoBayesianAnalysis())
+    {
+        if (menuKey == uistr::initialSamples)
+        {
+            return uistr::initialSamplesBayes;
+        }
+        else if (menuKey == uistr::finalSamples)
+        {
+            return uistr::finalSamplesBayes;
+        }
+    }
+    return SetMenuItemNoId::GetText();
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/lamarcmenuitems.h b/src/lamarcmenus/lamarcmenuitems.h
new file mode 100644
index 0000000..5300f9d
--- /dev/null
+++ b/src/lamarcmenus/lamarcmenuitems.h
@@ -0,0 +1,296 @@
+// $Id: lamarcmenuitems.h,v 1.24 2012/05/15 06:21:48 ewalkup Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef LAMARCMENUITEMS_H_
+#define LAMARCMENUITEMS_H_
+
+#include "newmenu.h"
+#include "setmenuitem.h"
+#include "ui_id.h"
+#include "ui_strings.h"
+
+class UIInterface;
+
+class SetMenuItemTempInterval : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemTempInterval(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemTempInterval();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemHapArranger : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemHapArranger(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemHapArranger();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemProbHapArranger : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemProbHapArranger(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemProbHapArranger();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemBayesArranger : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemBayesArranger(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemBayesArranger();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemEpochArranger : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemEpochArranger(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemEpochArranger();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemLocusArranger : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemLocusArranger(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemLocusArranger();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemZilchArranger : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemZilchArranger(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemZilchArranger();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemCurveFilePrefix : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemCurveFilePrefix(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemCurveFilePrefix();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemMapFilePrefix : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemMapFilePrefix(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemMapFilePrefix();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemReclocFiles : public ToggleMenuItemNoId
+{
+  public:
+    SetMenuItemReclocFiles(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemReclocFiles();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemReclocFilePrefix : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemReclocFilePrefix(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemReclocFilePrefix();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemTraceFilePrefix : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemTraceFilePrefix(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemTraceFilePrefix();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemNewickTreeFilePrefix : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemNewickTreeFilePrefix(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemNewickTreeFilePrefix();
+    virtual bool IsVisible();
+};
+
+class ToggleMenuItemTempAdapt : public ToggleMenuItemNoId
+{
+  public:
+    ToggleMenuItemTempAdapt(std::string myKey, UIInterface & myui);
+    virtual ~ToggleMenuItemTempAdapt();
+    virtual bool IsVisible();
+};
+
+class ToggleMenuItemUseCurveFiles : public ToggleMenuItemNoId
+{
+  public:
+    ToggleMenuItemUseCurveFiles(std::string myKey, UIInterface & myui);
+    virtual ~ToggleMenuItemUseCurveFiles();
+    virtual bool IsVisible();
+};
+
+class ChainTemperatureMenuItemGroup : public SetMenuItemGroup
+{
+  public:
+    ChainTemperatureMenuItemGroup(UIInterface&);
+    ~ChainTemperatureMenuItemGroup();
+    virtual vector<UIId> GetVisibleIds();
+    virtual string GetText(UIId id);
+};
+
+class HeatingMenu : public NewMenu
+{
+  public:
+    HeatingMenu(UIInterface & myui);
+    ~HeatingMenu();
+};
+
+class HeatingSubMenuItem : public SubMenuItem
+{
+  public:
+    HeatingSubMenuItem(std::string myKey, UIInterface& myui);
+    virtual ~HeatingSubMenuItem();
+    virtual std::string GetVariableText();
+};
+
+class HeatingMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    HeatingMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~HeatingMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new HeatingMenu(ui));};
+};
+
+class PriorSubMenuItem : public SubMenuItem
+{
+  private:
+    PriorSubMenuItem(); //undefined.
+  public:
+    PriorSubMenuItem(std::string myKey, UIInterface& myui);
+    virtual ~PriorSubMenuItem();
+    virtual bool IsVisible();
+};
+
+class RearrangeMenu : public NewMenu
+{
+  public:
+    RearrangeMenu(UIInterface & myui);
+    ~RearrangeMenu();
+};
+
+class RearrangeMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    RearrangeMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~RearrangeMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new RearrangeMenu(ui));};
+};
+
+class SearchMenu : public NewMenu
+{
+  public:
+    SearchMenu(UIInterface & myui);
+    virtual ~SearchMenu();
+};
+
+class SearchMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    SearchMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~SearchMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new SearchMenu(ui));};
+};
+
+class StrategyMenu : public NewMenu
+{
+  public:
+    StrategyMenu(UIInterface & myui);
+    ~StrategyMenu();
+};
+
+class StrategyMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    StrategyMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~StrategyMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new StrategyMenu(ui));};
+};
+
+class ResultMenu : public NewMenu
+{
+  public:
+    ResultMenu(UIInterface & myui);
+    ~ResultMenu();
+};
+
+class ResultMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    ResultMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~ResultMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new ResultMenu(ui));};
+};
+
+class OverviewMenu : public NewMenu
+{
+  public:
+    OverviewMenu(UIInterface & myui);
+    ~OverviewMenu();
+};
+
+class OverviewMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    OverviewMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~OverviewMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new OverviewMenu(ui));};
+};
+
+class SubMenuItemBayesPriors: public SubMenuItem
+{
+  public:
+    SubMenuItemBayesPriors(string myKey, UIInterface& myui);
+    ~SubMenuItemBayesPriors();
+    virtual bool IsVisible();
+};
+
+class SubMenuItemTraitMappingOverview: public SubMenuItem
+{
+  public:
+    SubMenuItemTraitMappingOverview(string myKey, UIInterface& myui);
+    ~SubMenuItemTraitMappingOverview();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemNumSamples: public SetMenuItemNoId
+{
+  public:
+    SetMenuItemNumSamples(string myKey, UIInterface& myui, string myMenuKey);
+    ~SetMenuItemNumSamples();
+    virtual string GetText();
+};
+
+#endif // LAMARCMENUITEMS_H_
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/logselectmenus.cpp b/src/lamarcmenus/logselectmenus.cpp
new file mode 100644
index 0000000..fae88bb
--- /dev/null
+++ b/src/lamarcmenus/logselectmenus.cpp
@@ -0,0 +1,52 @@
+// $Id: logselectmenus.cpp,v 1.4 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "constants.h"
+#include "constraintmenus.h"
+#include "lamarc_strings.h"
+#include "newmenuitems.h"
+#include "forcesmenus.h"
+#include "logselectmenus.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "profilemenus.h"
+
+LogisticSelectionCoefficientMenuItem::LogisticSelectionCoefficientMenuItem(string myKey, UIInterface & myui)
+    : SetMenuItemId(myKey,myui,uistr::logisticSelectionCoefficient, UIId(force_LOGISTICSELECTION, uiconst::GLOBAL_ID))
+{
+}
+
+LogisticSelectionCoefficientMenuItem::~LogisticSelectionCoefficientMenuItem()
+{
+}
+
+bool LogisticSelectionCoefficientMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::logisticSelection);
+}
+
+LogisticSelectionMenu::LogisticSelectionMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::logisticSelectionTitle,lamarcmenu::logisticSelectionInfo)
+{
+    AddMenuItem(new ToggleMenuItemNoId("X",ui,uistr::logisticSelection));
+    UIId id(force_LOGISTICSELECTION);
+    AddMenuItem(new SubMenuConstraintsForOneForce("C",ui,id));
+    AddMenuItem(new SubMenuProfileForOneForce("P",ui,id));
+    AddMenuItem(new LogisticSelectionCoefficientMenuItem("S",ui));
+}
+
+LogisticSelectionMenu::~LogisticSelectionMenu ()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/logselectmenus.h b/src/lamarcmenus/logselectmenus.h
new file mode 100644
index 0000000..819de2b
--- /dev/null
+++ b/src/lamarcmenus/logselectmenus.h
@@ -0,0 +1,47 @@
+// $Id: logselectmenus.h,v 1.4 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef LOGSELECTMENUS_H
+#define LOGSELECTMENUS_H
+
+#include <string>
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+
+class UIInterface;
+
+class LogisticSelectionCoefficientMenuItem : public SetMenuItemId
+{
+  public:
+    LogisticSelectionCoefficientMenuItem(std::string myKey, UIInterface & myui);
+    ~LogisticSelectionCoefficientMenuItem();
+    bool IsVisible();
+};
+
+class LogisticSelectionMenu : public NewMenu
+{
+  public:
+    LogisticSelectionMenu(UIInterface & myui);
+    virtual ~LogisticSelectionMenu();
+};
+
+class LogisticSelectionMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    LogisticSelectionMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~LogisticSelectionMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new LogisticSelectionMenu(ui));};
+};
+
+#endif  // LOGSELECTMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/migmenus.cpp b/src/lamarcmenus/migmenus.cpp
new file mode 100644
index 0000000..e31bc03
--- /dev/null
+++ b/src/lamarcmenus/migmenus.cpp
@@ -0,0 +1,108 @@
+// $Id: migmenus.cpp,v 1.13 2010/03/17 17:25:58 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "constants.h"
+#include "constraintmenus.h"
+#include "lamarc_strings.h"
+#include "menu_strings.h"
+#include "matrixitem.h"
+#include "migmenus.h"
+#include "newmenuitems.h"
+#include "priormenus.h"
+#include "setmenuitem.h"
+#include "ui_constants.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "overviewmenus.h"
+#include "profilemenus.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+SetAllMigrationsMenuItem::SetAllMigrationsMenuItem(string myKey, UIInterface & myui)
+    : SetMenuItemId(myKey,myui,uistr::globalMigration, UIId(force_MIG, uiconst::GLOBAL_ID))
+{
+}
+
+SetAllMigrationsMenuItem::~SetAllMigrationsMenuItem()
+{
+}
+
+bool SetAllMigrationsMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::migration);
+}
+
+string SetAllMigrationsMenuItem::GetVariableText()
+{
+    return "";
+}
+
+//------------------------------------------------------------------------------------
+
+SetMigrationsFstMenuItem::SetMigrationsFstMenuItem(string key,UIInterface & ui)
+    : ToggleMenuItemNoId(key,ui,uistr::fstSetMigration)
+{
+}
+
+SetMigrationsFstMenuItem::~SetMigrationsFstMenuItem()
+{
+}
+
+bool SetMigrationsFstMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::migration);
+}
+
+//------------------------------------------------------------------------------------
+
+MigrationMaxEventsMenuItem::MigrationMaxEventsMenuItem(string myKey,UIInterface & myui)
+    : SetMenuItemNoId(myKey,myui,uistr::migrationMaxEvents)
+{
+}
+
+MigrationMaxEventsMenuItem::~MigrationMaxEventsMenuItem()
+{
+}
+
+bool MigrationMaxEventsMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::migration);
+}
+
+//------------------------------------------------------------------------------------
+
+MigrationMenu::MigrationMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::migTitle,lamarcmenu::migInfo)
+{
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::migration, ui));
+    UIId id(force_MIG);
+    AddMenuItem(new SubMenuConstraintsForOneForce("C",ui,id));
+    AddMenuItem(new SubMenuProfileForOneForce("P",ui,id));
+    AddMenuItem(new SubMenuPriorForOneForce("B",ui,id));
+    AddMenuItem(new SetAllMigrationsMenuItem("G",ui));
+    AddMenuItem(new SetMigrationsFstMenuItem("F",ui));
+    AddMenuItem(new MatrixSetMenuItem(ui,
+                                      uistr::migrationInto,
+                                      uistr::migrationUser,
+                                      uistr::migrationPartitionCount,
+                                      uistr::migration,
+                                      force_MIG));
+
+    AddMenuItem(new MigrationMaxEventsMenuItem("M",ui));
+}
+
+MigrationMenu::~MigrationMenu ()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/migmenus.h b/src/lamarcmenus/migmenus.h
new file mode 100644
index 0000000..6f9355a
--- /dev/null
+++ b/src/lamarcmenus/migmenus.h
@@ -0,0 +1,65 @@
+// $Id: migmenus.h,v 1.13 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef MIGMENUS_H
+#define MIGMENUS_H
+
+#include <string>
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+
+class UIInterface;
+
+class SetAllMigrationsMenuItem : public SetMenuItemId
+{
+  public:
+    SetAllMigrationsMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~SetAllMigrationsMenuItem();
+    virtual bool IsVisible();
+    virtual std::string GetVariableText();
+};
+
+class SetMigrationsFstMenuItem : public ToggleMenuItemNoId
+{
+  public:
+    SetMigrationsFstMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~SetMigrationsFstMenuItem();
+    virtual bool IsVisible();
+};
+
+class MigrationMaxEventsMenuItem : public SetMenuItemNoId
+{
+  public:
+    MigrationMaxEventsMenuItem(std::string myKey, UIInterface & myui);
+    virtual ~MigrationMaxEventsMenuItem();
+    virtual bool IsVisible();
+};
+
+class MigrationMenu : public NewMenu
+{
+  public:
+    MigrationMenu(UIInterface & myui);
+    virtual ~MigrationMenu();
+};
+
+class MigrationMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    MigrationMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~MigrationMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new MigrationMenu(ui));};
+};
+
+#endif  // MIGMENUS_H
+
+//____________________________________________________________________________________
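Each of these headers pairs a NewMenu subclass with a NewMenuCreator: the creator only stores a UIInterface reference, and its Create() builds the menu on demand, so a submenu is constructed only when its entry is actually chosen. The recurring shape, sketched with placeholder names (ExampleMenu and ExampleMenuCreator are not upstream identifiers):

    class ExampleMenu : public NewMenu
    {
      public:
        ExampleMenu(UIInterface & myui);
        virtual ~ExampleMenu();
    };

    class ExampleMenuCreator : public NewMenuCreator
    {
      protected:
        UIInterface & ui;
      public:
        ExampleMenuCreator(UIInterface & myui) : ui(myui) {}
        virtual ~ExampleMenuCreator() {}
        NewMenu_ptr Create() { return NewMenu_ptr(new ExampleMenu(ui)); }
    };

    // A creator is typically handed to a SubMenuItem, as outfilemenus.cpp
    // (below) does with ProfileMenuCreator:
    //   AddMenuItem(new SubMenuItem("E", ui, new ExampleMenuCreator(ui)));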
diff --git a/src/lamarcmenus/nomenufilereaddialog.cpp b/src/lamarcmenus/nomenufilereaddialog.cpp
new file mode 100644
index 0000000..3d470b5
--- /dev/null
+++ b/src/lamarcmenus/nomenufilereaddialog.cpp
@@ -0,0 +1,50 @@
+// $Id: nomenufilereaddialog.cpp,v 1.4 2010/03/17 17:25:58 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+
+#include "defaults.h"
+#include "nomenufilereaddialog.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "xml.h"
+
+using std::string;
+
+NoMenuFileReadDialog::NoMenuFileReadDialog(XmlParser & parser)
+    : DialogNoInput(), m_parser(parser)
+{
+}
+
+NoMenuFileReadDialog::~NoMenuFileReadDialog()
+{
+}
+
+string NoMenuFileReadDialog::outputString()
+{
+    return
+        string("---------------------------------------------------------------------------\n")
+        +string("** Menu-less version generated with \"configure --disable-menu\"\n")
+        +string("** For menu re-run \"configure --enable-menu\" and recompile\n")
+        +string("---------------------------------------------------------------------------\n")
+        +string("** Reading data from \"")
+        +m_parser.GetFileName()
+        +string("\" and writing to its default outfile\n")
+        +string("---------------------------------------------------------------------------\n");
+
+}
+
+void
+NoMenuFileReadDialog::performAction()
+{
+    m_parser.ParseFileData(m_parser.GetFileName());
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/nomenufilereaddialog.h b/src/lamarcmenus/nomenufilereaddialog.h
new file mode 100644
index 0000000..c83089d
--- /dev/null
+++ b/src/lamarcmenus/nomenufilereaddialog.h
@@ -0,0 +1,32 @@
+// $Id: nomenufilereaddialog.h,v 1.5 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef NOMENUDIALOG_H
+#define NOMENUDIALOG_H
+
+#include <string>
+#include "dialognoinput.h"
+
+class XmlParser;
+
+class NoMenuFileReadDialog : public DialogNoInput
+{
+  protected:
+    XmlParser &     m_parser;
+    virtual std::string outputString();
+    virtual void performAction();
+  public:
+    NoMenuFileReadDialog(XmlParser & parser);
+    virtual ~NoMenuFileReadDialog();
+};
+
+#endif // NOMENUDIALOG_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/outfilemenus.cpp b/src/lamarcmenus/outfilemenus.cpp
new file mode 100644
index 0000000..33a42b0
--- /dev/null
+++ b/src/lamarcmenus/outfilemenus.cpp
@@ -0,0 +1,43 @@
+// $Id: outfilemenus.cpp,v 1.21 2010/03/17 17:25:59 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "lamarc_strings.h"
+#include "newmenuitems.h"
+#include "outfilemenus.h"
+#include "profilemenus.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+
+using std::string;
+
+OutfileMenu::OutfileMenu(UIInterface & myui)
+    : NewMenu(myui,lamarcmenu::outfileTitle)
+{
+    AddMenuItem(new SetMenuItemNoId("N",ui,uistr::resultsFileName));
+    AddMenuItem(new ToggleMenuItemNoId("V",ui,uistr::verbosity));
+    AddMenuItem(new SubMenuItem("P", ui,new ProfileMenuCreator(ui)));
+}
+
+OutfileMenu::~OutfileMenu()
+{
+}
+
+OutfileSubMenuItem::OutfileSubMenuItem(std::string myKey, UIInterface & myui)
+    : SubMenuItem(myKey, myui,new OutfileMenuCreator(myui))
+{
+}
+
+std::string OutfileSubMenuItem::GetVariableText()
+{
+    return ui.doGetPrintString(uistr::resultsFileName,GetId());
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/outfilemenus.h b/src/lamarcmenus/outfilemenus.h
new file mode 100644
index 0000000..76ea242
--- /dev/null
+++ b/src/lamarcmenus/outfilemenus.h
@@ -0,0 +1,46 @@
+// $Id: outfilemenus.h,v 1.15 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef OUTFILEMENUS_H
+#define OUTFILEMENUS_H
+
+#include <string>
+#include "newmenuitems.h"
+
+class UIInterface;
+
+class OutfileMenu : public NewMenu
+{
+  public:
+    OutfileMenu(UIInterface & myui);
+    ~OutfileMenu();
+};
+
+class OutfileMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    OutfileMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~OutfileMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new OutfileMenu(ui));};
+};
+
+// menu line giving access to OutfileMenu
+class OutfileSubMenuItem : public SubMenuItem
+{
+  public:
+    OutfileSubMenuItem(std::string myKey,UIInterface & myUI);
+    virtual std::string GetVariableText();
+};
+
+#endif  // OUTFILEMENUS_H
+
+//____________________________________________________________________________________
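A submenu entry can also echo the current value of the setting it leads to, as OutfileSubMenuItem does above by overriding GetVariableText(). A sketch reusing the hypothetical ExampleMenuCreator from the earlier note (ExampleSubMenuItem is a placeholder; uistr::verbosity is an existing key, chosen here arbitrarily):

    class ExampleSubMenuItem : public SubMenuItem
    {
      public:
        ExampleSubMenuItem(std::string myKey, UIInterface & myui)
            : SubMenuItem(myKey, myui, new ExampleMenuCreator(myui)) {}
        // The menu line shows the setting's current value next to the entry.
        virtual std::string GetVariableText()
            { return ui.doGetPrintString(uistr::verbosity, GetId()); }
    };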
diff --git a/src/lamarcmenus/overviewmenus.cpp b/src/lamarcmenus/overviewmenus.cpp
new file mode 100644
index 0000000..e3331b4
--- /dev/null
+++ b/src/lamarcmenus/overviewmenus.cpp
@@ -0,0 +1,782 @@
+// $Id: overviewmenus.cpp,v 1.33 2011/08/19 23:30:31 ewalkup Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <string>
+#include <iostream>
+
+#include "display.h"
+#include "lamarc_strings.h"
+#include "menuitem.h"
+#include "menu_strings.h"
+#include "overviewmenus.h"
+#include "profilemenus.h"
+#include "ui_regid.h"
+#include "stringx.h"
+#include "twodtable.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+string ChainTable::Title(UIInterface & ui)
+{
+    return "Chain parameters:";
+}
+
+long ChainTable::ColCount(UIInterface & ui)
+{
+    return 3;
+}
+
+long ChainTable::RowCount(UIInterface & ui)
+{
+    return 4;
+}
+
+string ChainTable::ColLabel(UIInterface & ui, long index)
+{
+    if (index == 0) return menustr::emptyString;
+    if (index == 1) return menustr::initial;
+    if (index == 2) return menustr::final;
+    assert(false); //Uncaught index.
+    return menustr::emptyString;
+}
+
+string ChainTable::RowLabel(UIInterface & ui, long index)
+{
+    if(index == 0) return menustr::chains;
+    if(index == 1) return menustr::discard;
+    if(index == 2) return menustr::interval;
+    if(index == 3) return menustr::samples;
+    assert(false); //Uncaught index.
+    return menustr::emptyString;
+}
+
+string ChainTable::Cell(UIInterface & ui, long rowIndex, long colIndex)
+{
+    if (colIndex == 0) return menustr::emptyString;
+    if (colIndex == 1)
+    {
+        if(rowIndex == 0) return ui.doGetPrintString(uistr::initialChains);
+        if(rowIndex == 1) return ui.doGetPrintString(uistr::initialDiscard);
+        if(rowIndex == 2) return ui.doGetPrintString(uistr::initialInterval);
+        if(rowIndex == 3) return ui.doGetPrintString(uistr::initialSamples);
+        assert(false); //Uncaught index.
+    }
+    if (colIndex==2)
+    {
+        if(rowIndex == 0) return ui.doGetPrintString(uistr::finalChains);
+        if(rowIndex == 1) return ui.doGetPrintString(uistr::finalDiscard);
+        if(rowIndex == 2) return ui.doGetPrintString(uistr::finalInterval);
+        if(rowIndex == 3) return ui.doGetPrintString(uistr::finalSamples);
+        assert(false); //Uncaught index.
+    }
+    assert(false); //Uncaught index.
+    return menustr::emptyString;
+}
+
+ChainTable::ChainTable(UIInterface & myui)
+    : TwoDTable(myui)
+{
+}
+
+ChainTable::~ChainTable()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+const string ForceEventTable::forceName(long i)
+{
+    static const string forceNames[6] = {
+        uistr::coalescence,
+        uistr::disease,
+        uistr::growth,
+        uistr::migration,
+        uistr::recombination,
+        uistr::logisticSelection
+    };
+    if(i < 0) return menustr::emptyString;
+    if(i > 5) return menustr::emptyString;
+    return forceNames[i];
+}
+
+const string ForceEventTable::forceMaxEvents(long i)
+{
+    static const string forceMaxEventCount[6] = {
+        uistr::coalescenceMaxEvents,
+        uistr::diseaseMaxEvents,
+        uistr::growthMaxEvents,
+        uistr::migrationMaxEvents,
+        uistr::recombinationMaxEvents,
+        uistr::logisticSelectionMaxEvents
+    };
+    if(i < 0) return menustr::emptyString;
+    if(i > 5) return menustr::emptyString;
+    return forceMaxEventCount[i];
+}
+
+string ForceEventTable::Title(UIInterface & ui)
+{
+    return "Enabled Forces:";
+}
+
+long ForceEventTable::ColCount(UIInterface & ui)
+{
+    return 1;
+}
+
+long ForceEventTable::RowCount(UIInterface & ui)
+{
+    return (ui.GetCurrentVars().forces.GetActiveForces().size());
+}
+
+string ForceEventTable::ColLabel(UIInterface & ui, long index)
+{
+    if(index == 0) return uistr::maxEvents;
+    assert(false); //Uncaught index.
+    return menustr::emptyString;
+}
+
+long ForceEventTable::getForceIndex(UIInterface & ui,long index)
+{
+    long countDown = index;
+    for(long i=0; i < 6; i++)
+    {
+        if(ui.doGetBool(forceName(i))) countDown--;
+        if(countDown < 0) return i;
+    }
+    return -1;
+}
+
+string ForceEventTable::RowLabel(UIInterface & ui, long index)
+{
+    long forceIndex = getForceIndex(ui,index);
+    if(forceIndex < 0) return menustr::emptyString;
+    if(forceIndex > 5) return menustr::emptyString;
+    return ui.doGetDescription(forceName(forceIndex));
+}
+
+string ForceEventTable::Cell(UIInterface & ui, long rowIndex, long colIndex)
+{
+    if(colIndex != 0) return menustr::emptyString;
+    long forceIndex = getForceIndex(ui,rowIndex);
+    if(forceIndex < 0) return menustr::emptyString;
+    if(forceIndex > 5) return menustr::emptyString;
+    return ui.doGetPrintString(forceMaxEvents(forceIndex));
+}
+
+ForceEventTable::ForceEventTable(UIInterface & myui)
+    : TwoDTable(myui)
+{
+}
+
+ForceEventTable::~ForceEventTable()
+{
+}
+
+//------------------------------------------------------------------------------------
+// assuming only one disease present
+
+string StartParamTable::Title(UIInterface & ui)
+{
+    return "Start parameters";
+}
+
+string StartParamTable::RowHeader(UIInterface & ui)
+{
+    return "Population";
+}
+
+long StartParamTable::ColCount(UIInterface & ui)
+{
+    return 6;
+}
+
+long StartParamTable::RowCount(UIInterface & ui)
+{
+    return ui.doGetLong(uistr::crossPartitionCount);
+}
+
+string StartParamTable::ColLabel(UIInterface & ui, long index)
+{
+    if(index == 0)
+    {
+        if(ui.doGetBool(uistr::disease))
+        {
+            return "disease status";
+        }
+    }
+    if(index == 1) return "Theta";
+    if(index == 2)
+    {
+        if(ui.doGetBool(uistr::growth))
+        {
+            return "Growth";
+        }
+    }
+    if(index == 3)
+    {
+        if(ui.doGetBool(uistr::migration) && (RowCount(ui) > 0))
+        {
+            return "M=m/mu";
+        }
+    }
+    if(index == 4)
+    {
+        if(ui.doGetBool(uistr::disease))
+        {
+            return "disease Mu rate";
+        }
+    }
+    return menustr::emptyString;
+}
+
+string StartParamTable::RowLabel(UIInterface & ui, long index)
+{
+    long diseaseDivisor = ui.doGetLong(uistr::diseasePartitionCount);
+    if (diseaseDivisor == 0) diseaseDivisor = 1;
+    if(index % diseaseDivisor == 0)
+    {
+        return ui.doGetPrintString(uistr::migrationPartitionName,index / diseaseDivisor);
+    }
+    return menustr::emptyString;
+}
+
+string StartParamTable::Cell(UIInterface & ui, long rowIndex, long colIndex)
+{
+    if(colIndex == 0)
+    {
+        if(ui.doGetBool(uistr::disease))
+        {
+            long disIndex = rowIndex % ui.doGetLong(uistr::diseasePartitionCount);
+            return ui.doGetString(uistr::diseasePartitionName,disIndex);
+        }
+    }
+    if(colIndex == 1)
+    {
+        return Pretty(ui.doGetDouble(uistr::userSetTheta,rowIndex), 8);
+    }
+    if((colIndex == 2) && ui.doGetBool(uistr::growth))
+    {
+        return ui.doGetPrintString(uistr::growthByID,rowIndex);
+    }
+    if((colIndex == 3) && ui.doGetBool(uistr::migration))
+    {
+        long diseaseDivisor = ui.doGetLong(uistr::diseasePartitionCount);
+        if (diseaseDivisor == 0) diseaseDivisor = 1;
+        if(rowIndex % diseaseDivisor == 0)
+        {
+            return ui.doGetPrintString(uistr::migrationInto,rowIndex / diseaseDivisor);
+        }
+    }
+    if((colIndex == 4) && ui.doGetBool(uistr::disease))
+    {
+        if(rowIndex < ui.doGetLong(uistr::diseasePartitionCount))
+        {
+            return ui.doGetPrintString(uistr::diseaseInto,rowIndex);
+        }
+    }
+    return menustr::emptyString;
+}
+
+StartParamTable::StartParamTable(UIInterface & myui)
+    : TwoDTable(myui)
+{
+}
+
+StartParamTable::~StartParamTable()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+long TemperatureTable::ColCount(UIInterface & ui)
+{
+    return 1;
+}
+
+long TemperatureTable::RowCount(UIInterface & ui)
+{
+    return 4;
+}
+
+string TemperatureTable::Title(UIInterface & ui)
+{
+    return "Parallel Markov Chain info:";
+}
+
+string TemperatureTable::ColLabel(UIInterface & ui, long index)
+{
+    return menustr::emptyString;
+}
+
+string TemperatureTable::RowLabel(UIInterface & ui, long index)
+{
+    if(index == 0) return ui.doGetDescription(uistr::heatedChainCount);
+    if(index == 1) return ui.doGetDescription(uistr::heatedChains);
+    if(index == 2) return ui.doGetDescription(uistr::tempInterval);
+    if(index == 3) return ui.doGetDescription(uistr::tempAdapt);
+    assert(false); //Uncaught index.
+    return menustr::emptyString;
+}
+
+string TemperatureTable::Cell(UIInterface & ui, long rowIndex, long colIndex)
+{
+    if(colIndex != 0) return menustr::emptyString;
+    if(rowIndex == 0) return ui.doGetPrintString(uistr::heatedChainCount);
+    if(rowIndex == 1)
+    {
+        string retstring = ui.doGetPrintString(uistr::heatedChains);
+        while (retstring.find(" ") == 0)
+        {
+            retstring.erase(0,1);
+        }
+        return retstring;
+    }
+    if(rowIndex == 2) return ui.doGetPrintString(uistr::tempInterval);
+    if(rowIndex == 3) return ui.doGetPrintString(uistr::tempAdapt);
+    assert(false); //Uncaught index.
+    return menustr::emptyString;
+}
+
+TemperatureTable::TemperatureTable(UIInterface & myui)
+    : TwoDTable(myui)
+{
+}
+
+TemperatureTable::~TemperatureTable()
+{
+}
+
+bool TemperatureTable::IsVisible()
+{
+    return (ui.doGetLong(uistr::heatedChainCount) > 1);
+}
+
+//------------------------------------------------------------------------------------
+
+DisplayOnlyMenuItem::DisplayOnlyMenuItem(const string & uiVariable,UIInterface & myui)
+    : OutputOnlyMenuItem(uiVariable),ui(myui),keyId(NO_ID())
+{
+}
+
+DisplayOnlyMenuItem::DisplayOnlyMenuItem(const string & uiVariable,UIInterface & myui, UIId myKeyId)
+    : OutputOnlyMenuItem(uiVariable),ui(myui),keyId(myKeyId)
+{
+}
+
+DisplayOnlyMenuItem::~DisplayOnlyMenuItem()
+{
+}
+
+UIId DisplayOnlyMenuItem::GetKeyid()
+{
+    return keyId;
+}
+
+string DisplayOnlyMenuItem::GetText()
+{
+    return ui.doGetDescription(displayString,GetKeyid());
+}
+
+string DisplayOnlyMenuItem::GetVariableText()
+{
+    return ui.doGetPrintString(displayString,GetKeyid());
+}
+
+//------------------------------------------------------------------------------------
+
+DisplayOnlyMenuItemIfTrue::DisplayOnlyMenuItemIfTrue(
+    const string & uiVariable,
+    const string & myguard,
+    UIInterface & myui, UIId myKeyId)
+    : DisplayOnlyMenuItem(uiVariable,myui,myKeyId), guard(myguard)
+{
+}
+
+DisplayOnlyMenuItemIfTrue::DisplayOnlyMenuItemIfTrue(
+    const string & uiVariable,
+    const string & myguard,
+    UIInterface & myui)
+    : DisplayOnlyMenuItem(uiVariable,myui), guard(myguard)
+{
+}
+
+DisplayOnlyMenuItemIfTrue::~DisplayOnlyMenuItemIfTrue()
+{
+}
+
+bool DisplayOnlyMenuItemIfTrue::IsVisible()
+{
+    return ui.doGetBool(guard);
+}
+
+//------------------------------------------------------------------------------------
+
+DisplayOnlyMenuItemIfNonZero::DisplayOnlyMenuItemIfNonZero(
+    const string & uiVariable,
+    UIInterface & myui)
+    : DisplayOnlyMenuItem(uiVariable,myui)
+{
+}
+
+DisplayOnlyMenuItemIfNonZero::~DisplayOnlyMenuItemIfNonZero()
+{
+}
+
+bool DisplayOnlyMenuItemIfNonZero::IsVisible()
+{
+    return (ui.doGetDouble(displayString) != 0.0);
+}
+
+//------------------------------------------------------------------------------------
+
+DisplayOnlyMenu::DisplayOnlyMenu(
+    UIInterface & myui, const string & myTitle, const string & myInfo)
+    : NewMenu(myui,myTitle,string(lamarcmenu::overviewInfo+myInfo))
+{
+}
+
+DisplayOnlyMenu::~DisplayOnlyMenu()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+FileOverviewMenu::FileOverviewMenu(UIInterface & myui)
+    : DisplayOnlyMenu(
+        myui,
+        lamarcmenu::fileOverviewTitle,
+        lamarcmenu::resultTitle)
+{
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::dataFileName,myui));
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::resultsFileName,myui));
+    AddMenuItem(new DisplayOnlyMenuItemIfTrue(uistr::treeSumInFileName,uistr::treeSumInFileEnabled, myui));
+    AddMenuItem(new DisplayOnlyMenuItemIfTrue(uistr::treeSumOutFileName,uistr::treeSumOutFileEnabled, myui));
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::xmlOutFileName,myui));
+    if (ui.doGetBool(uistr::bayesian))
+    {
+        AddMenuItem(new DisplayOnlyMenuItemIfTrue(uistr::curveFilePrefix,uistr::useCurveFiles,myui));
+    }
+    AddMenuItem(new DisplayOnlyMenuItemIfTrue(uistr::reclocFilePrefix,uistr::useReclocFiles,myui));
+    AddMenuItem(new DisplayOnlyMenuItemIfTrue(uistr::traceFilePrefix,uistr::useTraceFiles,myui));
+    AddMenuItem(new DisplayOnlyMenuItemIfTrue(uistr::newickTreeFilePrefix,uistr::useNewickTreeFiles,myui));
+
+}
+
+FileOverviewMenu::~FileOverviewMenu()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+RegionDataOverviewMenu::RegionDataOverviewMenu(UIInterface& ui, UIId id)
+    : DisplayOnlyMenu(ui,lamarcmenu::dataOverviewTitle,lamarcmenu::dataTitle), m_id(id)
+{
+    AddMenuItem(new DataOverviewMenuItem(ui,m_id));
+}
+
+RegionDataOverviewMenu::~RegionDataOverviewMenu()
+{
+}
+
+MenuInteraction_ptr
+DataOverviewMenuItemGroup::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new RegionDataOverviewMenu(ui,id));
+}
+
+DataOverviewMenuItemGroup::DataOverviewMenuItemGroup(UIInterface & ui)
+    : MenuDisplayGroupBaseImplementation(ui,menustr::emptyString)
+{
+}
+
+DataOverviewMenuItemGroup::~DataOverviewMenuItemGroup()
+{
+}
+
+vector<UIId>
+DataOverviewMenuItemGroup::GetVisibleIds()
+{
+    vector<UIId> visibleIds;
+    LongVec1d regionNumbers = ui.doGetLongVec1d(uistr::regionNumbers);
+    LongVec1d::iterator i;
+    for(i=regionNumbers.begin(); i != regionNumbers.end(); i++)
+    {
+        visibleIds.push_back(UIId(*i));
+    }
+    return visibleIds;
+}
+
+string
+DataOverviewMenuItemGroup::GetText(UIId id)
+{
+    string retString("Model and parameters for ");
+    retString += ui.doGetString(uistr::regionName,id.GetIndex1());
+    return retString;
+}
+
+string
+DataOverviewMenuItemGroup::GetVariableText(UIId id)
+{
+    //return ToString(ui.doGetModelType(uistr::dataModel,id));
+    return menustr::emptyString;
+}
+
+string DataOverviewMenuItem::GetText()
+{
+    string strings;
+
+    long regionNumber = m_id.GetIndex1();
+    string regionName = ui.doGetString(uistr::regionName,regionNumber);
+
+    LongVec1d lociNumbers = ui.doGetLongVec1d(uistr::lociNumbers,regionNumber);
+    LongVec1d::iterator j;
+    for(j=lociNumbers.begin(); j != lociNumbers.end(); j++)
+    {
+
+        UIId thisId(regionNumber,*j);
+        model_type dataModel = ui.doGetModelType(uistr::dataModel,thisId);
+        string locusName = ui.doGetString(uistr::locusName,thisId);
+        strings += "Parameters of a ";
+        strings += ToString(dataModel);
+        strings += " model for segment ";
+        strings += locusName;
+        strings += "\n";
+        StringVec1d report =
+            ui.doGetStringVec1d(uistr::dataModelReport,thisId);
+        StringVec1d::iterator i;
+        for(i=report.begin(); i != report.end(); i++)
+        {
+            strings += "    " + *i + "\n";
+        }
+    }
+    return strings;
+}
+
+DataOverviewMenuItem::DataOverviewMenuItem(UIInterface & myui,UIId id)
+    : OutputOnlyMenuItem(menustr::emptyString), ui(myui), m_id(id)
+{
+}
+
+DataOverviewMenuItem::~DataOverviewMenuItem()
+{
+}
+
+DataOverviewMenu::DataOverviewMenu(UIInterface & myui)
+    : DisplayOnlyMenu(
+        myui,
+        lamarcmenu::dataOverviewTitle,
+        lamarcmenu::dataTitle)
+{
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::randomSeed, myui));
+    AddMenuItem(new DataOverviewMenuItemGroup(ui));
+}
+
+DataOverviewMenu::~DataOverviewMenu()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+ForceOverviewMenu::ForceOverviewMenu(UIInterface & myui)
+    : DisplayOnlyMenu(
+        myui,
+        lamarcmenu::forcesOverviewTitle,
+        lamarcmenu::forcesTitle)
+{
+    AddMenuItem(new ForceEventTable(myui));
+    AddMenuItem(new BlankMenuItem());
+    AddMenuItem(new StartParamTable(myui));
+    AddMenuItem(new BlankMenuItem());
+    AddMenuItem(new DisplayOnlyMenuItemIfTrue(
+                    uistr::recombinationRate,uistr::recombination,
+                    myui, UIId(force_REC, uiconst::GLOBAL_ID)));
+    AddMenuItem(new DisplayOnlyMenuItemIfTrue(
+                    uistr::regionGammaShape,uistr::regionGamma,
+                    myui, UIId(force_REGION_GAMMA, uiconst::GLOBAL_ID)));
+
+}
+
+ForceOverviewMenu::~ForceOverviewMenu()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+ReplicatesMenuItemIfMoreThanOne::ReplicatesMenuItemIfMoreThanOne(UIInterface & myui)
+    : DisplayOnlyMenuItem(uistr::replicates,myui)
+{
+}
+
+ReplicatesMenuItemIfMoreThanOne::~ReplicatesMenuItemIfMoreThanOne()
+{
+}
+
+bool ReplicatesMenuItemIfMoreThanOne::IsVisible()
+{
+    return (ui.doGetLong(uistr::replicates) > 1);
+}
+
+SearchOverviewMenu::SearchOverviewMenu(UIInterface & myui)
+    : DisplayOnlyMenu(
+        myui,
+        lamarcmenu::strategyOverviewTitle,
+        lamarcmenu::strategyTitle)
+{
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::bayesian, myui));
+    AddMenuItem(new DisplayOnlyMenuItemIfNonZero(uistr::dropArranger, myui));
+    AddMenuItem(new DisplayOnlyMenuItemIfNonZero(uistr::sizeArranger, myui));
+    AddMenuItem(new DisplayOnlyMenuItemIfNonZero(uistr::hapArranger, myui));
+    AddMenuItem(new DisplayOnlyMenuItemIfNonZero(uistr::bayesArranger, myui));
+    AddMenuItem(new ReplicatesMenuItemIfMoreThanOne(myui));
+    AddMenuItem(new BlankMenuItem());
+    AddMenuItem(new ChainTable(myui));
+    AddMenuItem(new BlankMenuItem());
+    AddMenuItem(new TemperatureTable(myui));
+
+}
+
+SearchOverviewMenu::~SearchOverviewMenu()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+ResultsOverviewMenu::ResultsOverviewMenu(UIInterface & myui)
+    : DisplayOnlyMenu(
+        myui,
+        lamarcmenu::resultOverviewTitle,
+        lamarcmenu::resultTitle)
+{
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::progress,myui));
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::verbosity,myui));
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::treeSumInFileEnabled, myui));
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::treeSumOutFileEnabled, myui));
+    AddMenuItem(new DisplayOnlyGroupWrapper(new ProfileByForceMenuItemGroup(myui)));
+
+}
+
+ResultsOverviewMenu::~ResultsOverviewMenu()
+{
+}
+
+BayesianPriorsOverviewMenu::BayesianPriorsOverviewMenu(UIInterface & myui)
+    : DisplayOnlyMenu(
+        myui,
+        lamarcmenu::bayesianPriorsOverviewTitle,
+        lamarcmenu::strategyTitle)
+{
+    AddMenuItem(new DisplayOnlyGroupWrapper(new BayesianPriorsOverviewMenuItemGroup(myui)));
+}
+
+BayesianPriorsOverviewMenu::~BayesianPriorsOverviewMenu()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+BayesianPriorsOverviewMenuItemGroup::BayesianPriorsOverviewMenuItemGroup(UIInterface & ui)
+    : MenuDisplayGroupBaseImplementation(ui,menustr::emptyString)
+{
+}
+
+BayesianPriorsOverviewMenuItemGroup::~BayesianPriorsOverviewMenuItemGroup()
+{
+}
+
+vector<UIId>
+BayesianPriorsOverviewMenuItemGroup::GetVisibleIds()
+{
+    vector<UIId> allParams;
+    vector<UIId> forceIds = ui.doGetUIIdVec1d(uistr::validForces);
+    for (vector<UIId>::iterator force  = forceIds.begin(); force != forceIds.end(); force++)
+    {
+        vector<UIId> paramIds = ui.doGetUIIdVec1d(uistr::validParamsForForce,*force);
+        allParams.insert(allParams.end(), paramIds.begin(), paramIds.end());
+    }
+    return allParams;
+}
+
+string
+BayesianPriorsOverviewMenuItemGroup::GetText(UIId id)
+{
+    return ui.GetCurrentVars().GetParamNameWithConstraint(id.GetForceType(),id.GetIndex1());
+}
+
+string
+BayesianPriorsOverviewMenuItemGroup::GetVariableText(UIId id)
+{
+    return ui.GetCurrentVars().forces.GetPriorTypeSummaryDescription(id.GetForceType(),id.GetIndex1(), false);
+}
+
+MenuInteraction_ptr BayesianPriorsOverviewMenuItemGroup::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new DoNothingHandler());
+}
+
+//------------------------------------------------------------------------------------
+
+TraitMappingOverviewMenu::TraitMappingOverviewMenu(UIInterface & myui)
+    : DisplayOnlyMenu(
+        myui,
+        lamarcmenu::traitMappingOverviewTitle,
+        lamarcmenu::forcesTitle)
+{
+    AddMenuItem(new DisplayOnlyGroupWrapper(new TraitMappingOverviewMenuItemGroup(myui)));
+}
+
+TraitMappingOverviewMenu::~TraitMappingOverviewMenu()
+{
+}
+
+TraitMappingOverviewMenuItemGroup::TraitMappingOverviewMenuItemGroup(UIInterface & ui)
+    : MenuDisplayGroupBaseImplementation(ui,menustr::emptyString)
+{
+}
+
+TraitMappingOverviewMenuItemGroup::~TraitMappingOverviewMenuItemGroup()
+{
+}
+
+vector<UIId>
+TraitMappingOverviewMenuItemGroup::GetVisibleIds()
+{
+    return ui.doGetUIIdVec1d(uistr::validMovingLoci,UIId());
+}
+
+string
+TraitMappingOverviewMenuItemGroup::GetText(UIId id)
+{
+    UIRegId regId(id, ui.GetCurrentVars());
+    return lamarcmenu::traitModel1 + ui.doGetString(uistr::locusName, id)
+        + lamarcmenu::traitModel2
+        + ToString(ui.GetCurrentVars().traitmodels.GetRange(regId));
+}
+
+string
+TraitMappingOverviewMenuItemGroup::GetVariableText(UIId id)
+{
+    UIRegId regId(id, ui.GetCurrentVars());
+    return ToString(ui.GetCurrentVars().traitmodels.GetAnalysisType(regId));
+}
+
+MenuInteraction_ptr TraitMappingOverviewMenuItemGroup::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new DoNothingHandler());
+}
+
+//____________________________________________________________________________________
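Every table in overviewmenus.cpp follows the same recipe: subclass TwoDTable and override Title, ColCount, RowCount, ColLabel, RowLabel and Cell; the base class takes care of layout and rendering. A minimal hypothetical subclass (ExampleTable is a placeholder; the menustr/uistr keys are ones already used by ChainTable above):

    class ExampleTable : public TwoDTable
    {
      protected:
        virtual std::string Title(UIInterface & ui)  { return "Example:"; }
        virtual long ColCount(UIInterface & ui)      { return 2; }
        virtual long RowCount(UIInterface & ui)      { return 1; }
        virtual std::string ColLabel(UIInterface & ui, long index)
            { return (index == 0) ? menustr::initial : menustr::final; }
        virtual std::string RowLabel(UIInterface & ui, long index)
            { return menustr::chains; }
        virtual std::string Cell(UIInterface & ui, long rowIndex, long colIndex)
            { return (colIndex == 0) ? ui.doGetPrintString(uistr::initialChains)
                                     : ui.doGetPrintString(uistr::finalChains); }
      public:
        ExampleTable(UIInterface & myui) : TwoDTable(myui) {}
        virtual ~ExampleTable() {}
    };

    // Added to an overview menu like the built-in tables:
    //   AddMenuItem(new ExampleTable(myui));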
diff --git a/src/lamarcmenus/overviewmenus.h b/src/lamarcmenus/overviewmenus.h
new file mode 100644
index 0000000..75d3e2f
--- /dev/null
+++ b/src/lamarcmenus/overviewmenus.h
@@ -0,0 +1,322 @@
+// $Id: overviewmenus.h,v 1.16 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef OVERVIEWMENUS_H
+#define OVERVIEWMENUS_H
+
+#include <string>
+#include "menudefs.h"
+#include "newmenu.h"
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "twodtable.h"
+
+class Display;
+class UIInterface;
+
+class ChainTable : public TwoDTable
+{
+  protected:
+    virtual long ColCount(UIInterface & ui);
+    virtual long RowCount(UIInterface & ui);
+    virtual std::string Title(UIInterface & ui);
+    virtual std::string ColLabel(UIInterface & ui, long index);
+    virtual std::string RowLabel(UIInterface & ui, long index);
+    virtual std::string Cell(UIInterface & ui, long rowIndex, long colIndex);
+  public:
+    ChainTable(UIInterface &);
+    virtual ~ChainTable();
+};
+
+class ForceEventTable : public TwoDTable
+{
+  protected:
+    virtual const std::string forceName(long);
+    virtual const std::string forceMaxEvents(long);
+    virtual long getForceIndex(UIInterface & ui, long index);
+    virtual long ColCount(UIInterface & ui);
+    virtual long RowCount(UIInterface & ui);
+    virtual std::string Title(UIInterface & ui);
+    virtual std::string ColLabel(UIInterface & ui, long index);
+    virtual std::string RowLabel(UIInterface & ui, long index);
+    virtual std::string Cell(UIInterface & ui, long rowIndex, long colIndex);
+  public:
+    ForceEventTable(UIInterface &);
+    virtual ~ForceEventTable();
+};
+
+class StartParamTable : public TwoDTable
+{
+  protected:
+    virtual long ColCount(UIInterface & ui);
+    virtual long RowCount(UIInterface & ui);
+    virtual std::string Title(UIInterface & ui);
+    virtual std::string RowHeader(UIInterface & ui);
+    virtual std::string ColLabel(UIInterface & ui, long index);
+    virtual std::string RowLabel(UIInterface & ui, long index);
+    virtual std::string Cell(UIInterface & ui, long rowIndex, long colIndex);
+  public:
+    StartParamTable(UIInterface &);
+    virtual ~StartParamTable();
+};
+
+class TemperatureTable : public TwoDTable
+{
+  protected:
+    virtual long ColCount(UIInterface & ui);
+    virtual long RowCount(UIInterface & ui);
+    virtual std::string Title(UIInterface & ui);
+    virtual std::string ColLabel(UIInterface & ui, long index);
+    virtual std::string RowLabel(UIInterface & ui, long index);
+    virtual std::string Cell(UIInterface & ui, long rowIndex, long colIndex);
+  public:
+    TemperatureTable(UIInterface &);
+    virtual ~TemperatureTable();
+    virtual bool IsVisible();
+};
+
+class DisplayOnlyMenuItem : public OutputOnlyMenuItem
+{
+  protected:
+    UIInterface & ui;
+    UIId keyId;
+  public:
+    DisplayOnlyMenuItem(const std::string &,UIInterface&);
+    DisplayOnlyMenuItem(const std::string &,UIInterface&,UIId);
+    virtual ~DisplayOnlyMenuItem();
+    virtual UIId GetKeyid();
+    virtual std::string GetText();
+    virtual std::string GetVariableText();
+};
+
+class DisplayOnlyMenuItemIfTrue : public DisplayOnlyMenuItem
+{
+  protected:
+    std::string guard;
+  public:
+    DisplayOnlyMenuItemIfTrue(
+        const std::string &,
+        const std::string &,
+        UIInterface &);
+    DisplayOnlyMenuItemIfTrue(
+        const std::string &,
+        const std::string &,
+        UIInterface &,
+        UIId);
+    virtual bool IsVisible();
+    virtual ~DisplayOnlyMenuItemIfTrue();
+};
+
+class DisplayOnlyMenuItemIfNonZero : public DisplayOnlyMenuItem
+{
+  public:
+    DisplayOnlyMenuItemIfNonZero(
+        const std::string &,
+        UIInterface &);
+    virtual ~DisplayOnlyMenuItemIfNonZero();
+    virtual bool IsVisible();
+};
+
+class DisplayOnlyMenu : public NewMenu
+{
+  public:
+    DisplayOnlyMenu(UIInterface & myui, const std::string &,const std::string &);
+    virtual ~DisplayOnlyMenu();
+};
+
+class FileOverviewMenu : public DisplayOnlyMenu
+{
+  public:
+    FileOverviewMenu(UIInterface & myui);
+    virtual ~FileOverviewMenu();
+};
+
+class FileOverviewMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    FileOverviewMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~FileOverviewMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new FileOverviewMenu(ui));};
+};
+
+class DataOverviewMenuItem : public OutputOnlyMenuItem
+{
+  protected:
+    UIInterface & ui;
+    UIId            m_id;
+  public:
+    DataOverviewMenuItem(UIInterface & myui,UIId id);
+    virtual ~DataOverviewMenuItem();
+    virtual std::string GetText();
+};
+
+class RegionDataOverviewMenu : public DisplayOnlyMenu
+{
+  protected:
+    UIId    m_id;
+  public:
+    RegionDataOverviewMenu(UIInterface & ui,UIId id);
+    virtual ~RegionDataOverviewMenu();
+};
+
+class DataOverviewMenuItemGroup :   public MenuDisplayGroupBaseImplementation
+{
+  protected:
+    MenuInteraction_ptr MakeOneHandler(UIId id);
+  public:
+    DataOverviewMenuItemGroup(UIInterface & ui);
+    ~DataOverviewMenuItemGroup();
+    vector<UIId> GetVisibleIds();
+    std::string GetText(UIId id);
+    std::string GetVariableText(UIId id);
+};
+
+class DataOverviewMenu : public DisplayOnlyMenu
+{
+  public:
+    DataOverviewMenu(UIInterface & ui);
+    virtual ~DataOverviewMenu();
+};
+
+class DataOverviewMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    DataOverviewMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~DataOverviewMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new DataOverviewMenu(ui));};
+};
+
+class ForceOverviewMenu : public DisplayOnlyMenu
+{
+  public:
+    ForceOverviewMenu(UIInterface & myui);
+    virtual ~ForceOverviewMenu();
+};
+
+class ForceOverviewMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    ForceOverviewMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~ForceOverviewMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new ForceOverviewMenu(ui));};
+};
+
+class ReplicatesMenuItemIfMoreThanOne : public DisplayOnlyMenuItem
+{
+  public:
+    ReplicatesMenuItemIfMoreThanOne(UIInterface&);
+    virtual ~ReplicatesMenuItemIfMoreThanOne();
+    virtual bool IsVisible();
+};
+
+class SearchOverviewMenu : public DisplayOnlyMenu
+{
+  public:
+    SearchOverviewMenu(UIInterface & myui);
+    virtual ~SearchOverviewMenu();
+};
+
+class SearchOverviewMenuCreator : public NewMenuCreator {
+  protected:
+    UIInterface & ui;
+  public:
+    SearchOverviewMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~SearchOverviewMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new SearchOverviewMenu(ui));};
+};
+
+class ResultsOverviewMenu : public DisplayOnlyMenu
+{
+  public:
+    ResultsOverviewMenu(UIInterface & myui);
+    virtual ~ResultsOverviewMenu();
+};
+
+class ResultsOverviewMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    ResultsOverviewMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~ResultsOverviewMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new ResultsOverviewMenu(ui));};
+};
+
+class BayesianPriorsOverviewMenu : public DisplayOnlyMenu
+{
+  public:
+    BayesianPriorsOverviewMenu(UIInterface & myui);
+    ~BayesianPriorsOverviewMenu();
+};
+
+class BayesianPriorsOverviewMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    BayesianPriorsOverviewMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~BayesianPriorsOverviewMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new BayesianPriorsOverviewMenu(ui));};
+};
+
+//------------------------------------------------------------------------------------
+
+class BayesianPriorsOverviewMenuItemGroup : public MenuDisplayGroupBaseImplementation
+{
+  public:
+    BayesianPriorsOverviewMenuItemGroup(UIInterface& ui);
+    virtual ~BayesianPriorsOverviewMenuItemGroup();
+    virtual vector<UIId> GetVisibleIds();
+    virtual string GetText(UIId id);
+    virtual string GetVariableText(UIId id);
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+};
+
+//------------------------------------------------------------------------------------
+
+class TraitMappingOverviewMenu : public DisplayOnlyMenu
+{
+  public:
+    TraitMappingOverviewMenu(UIInterface & myui);
+    ~TraitMappingOverviewMenu();
+};
+
+class TraitMappingOverviewMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    TraitMappingOverviewMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~TraitMappingOverviewMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new TraitMappingOverviewMenu(ui));};
+};
+
+//------------------------------------------------------------------------------------
+
+class TraitMappingOverviewMenuItemGroup : public MenuDisplayGroupBaseImplementation
+{
+  public:
+    TraitMappingOverviewMenuItemGroup(UIInterface& ui);
+    virtual ~TraitMappingOverviewMenuItemGroup();
+    virtual vector<UIId> GetVisibleIds();
+    virtual string GetText(UIId id);
+    virtual string GetVariableText(UIId id);
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+};
+
+#endif  // OVERVIEWMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/popsizemenu.cpp b/src/lamarcmenus/popsizemenu.cpp
new file mode 100644
index 0000000..1d87671
--- /dev/null
+++ b/src/lamarcmenus/popsizemenu.cpp
@@ -0,0 +1,57 @@
+// $Id: popsizemenu.cpp,v 1.5 2010/03/17 17:25:59 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "lamarc_strings.h"
+#include "popsizemenu.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+
+using std::string;
+
+EffectivePopSizeMenu::EffectivePopSizeMenu(UIInterface & myui)
+    : NewMenu(myui,lamarcmenu::effectivePopSizeTitle,
+              lamarcmenu::effectivePopSizeInfo)
+{
+    AddMenuItem(new EffectivePopSizeGroup(ui));
+}
+
+EffectivePopSizeMenu::~EffectivePopSizeMenu()
+{
+}
+
+EffectivePopSizeGroup::EffectivePopSizeGroup(UIInterface& myui)
+    : SetMenuItemGroup(myui,uistr::effectivePopSize)
+{
+}
+
+EffectivePopSizeGroup::~EffectivePopSizeGroup()
+{
+}
+
+UIIdVec1d
+EffectivePopSizeGroup::GetVisibleIds()
+{
+    vector<UIId> visibleIds;
+    LongVec1d regionNumbers = ui.doGetLongVec1d(uistr::regionNumbers);
+    LongVec1d::iterator i;
+    for(i=regionNumbers.begin(); i != regionNumbers.end(); i++)
+    {
+        visibleIds.push_back(UIId(*i));
+    }
+    return visibleIds;
+}
+
+string EffectivePopSizeGroup::GetText(UIId id)
+{
+    return lamarcmenu::effectivePopSizeFor + ui.doGetString(uistr::regionName,id);
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/popsizemenu.h b/src/lamarcmenus/popsizemenu.h
new file mode 100644
index 0000000..198aec1
--- /dev/null
+++ b/src/lamarcmenus/popsizemenu.h
@@ -0,0 +1,50 @@
+// $Id: popsizemenu.h,v 1.4 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef POPSIZEMENU_H
+#define POPSIZEMENU_H
+
+#include <string>
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+
+class UIInterface;
+
+class EffectivePopSizeMenu : public NewMenu
+{
+  private:
+    EffectivePopSizeMenu(); //undefined
+  public:
+    EffectivePopSizeMenu(UIInterface& ui);
+    virtual ~EffectivePopSizeMenu();
+};
+
+class EffectivePopSizeMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    EffectivePopSizeMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~EffectivePopSizeMenuCreator() {};
+    virtual NewMenu_ptr Create() { return NewMenu_ptr(new EffectivePopSizeMenu(ui));};
+};
+
+class EffectivePopSizeGroup : public SetMenuItemGroup
+{
+  public:
+    EffectivePopSizeGroup(UIInterface& myui);
+    virtual ~EffectivePopSizeGroup();
+    virtual UIIdVec1d GetVisibleIds();
+    virtual string GetText(UIId id);
+};
+
+#endif  // POPSIZEMENU_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/priormenus.cpp b/src/lamarcmenus/priormenus.cpp
new file mode 100644
index 0000000..febaa5b
--- /dev/null
+++ b/src/lamarcmenus/priormenus.cpp
@@ -0,0 +1,201 @@
+// $Id: priormenus.cpp,v 1.13 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <string>
+
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+
+#include "lamarc_strings.h"
+#include "newmenuitems.h"
+#include "priormenus.h"
+#include "togglemenuitem.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+PriorMenu::PriorMenu(UIInterface & myui)
+    : NewMenu(myui,lamarcmenu::priorTitle)
+{
+    AddMenuItem(new PriorByForceMenuItemGroup(ui));
+}
+
+PriorMenu::~PriorMenu()
+{
+}
+
+PriorByForceMenuItemGroup::PriorByForceMenuItemGroup(UIInterface& ui)
+    : MenuDisplayGroupBaseImplementation(ui,uistr::priorByForce)
+{
+}
+
+PriorByForceMenuItemGroup::~PriorByForceMenuItemGroup()
+{
+}
+
+MenuInteraction_ptr
+PriorByForceMenuItemGroup::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new PriorMenuForOneForce(ui,id));
+}
+
+UIIdVec1d
+PriorByForceMenuItemGroup::GetVisibleIds()
+{
+    return ui.doGetUIIdVec1d(uistr::validForces);
+}
+
+string
+PriorByForceMenuItemGroup::GetKey(UIId id)
+{
+    switch(id.GetForceType())
+    {
+        case force_COAL:
+            return "T";
+            break;
+        case force_DISEASE:
+            return "D";
+            break;
+        case force_GROW:
+            return "G";
+            break;
+        case force_MIG:
+        case force_DIVMIG:
+            return "M";
+            break;
+        case force_REC:
+            return "R";
+            break;
+        case force_REGION_GAMMA:
+            return "L";
+            break;
+        case force_EXPGROWSTICK:
+            return "X";
+            break;
+        case force_LOGISTICSELECTION:
+            return "S";
+            break;
+        case force_LOGSELECTSTICK:
+            return "H";
+            break;
+        case force_DIVERGENCE:
+            return "V";
+            break;
+        default:
+            assert(false);              //uncaught force type.
+    }
+    throw implementation_error("force_type enum missing case in PriorByForceMenuItemGroup::GetKey");
+}
+
+PriorMenuForOneForce::PriorMenuForOneForce(UIInterface& ui,UIId id)
+    : NewMenu(ui,lamarcmenu::priorTitle + lamarcmenu::wordFor
+              +ToString(id.GetForceType()), lamarcmenu::priorInfoForForce)
+{
+    AddMenuItem(new ToggleMenuItemId("U",ui,uistr::useDefaultPriorsForForce,id));
+    AddMenuItem(new DefaultPriorForForce("D", ui, id));
+    AddMenuItem(new PriorByParameterMenuItemGroup(ui,id));
+}
+
+PriorMenuForOneForce::~PriorMenuForOneForce()
+{
+}
+
+SubMenuPriorForOneForce::SubMenuPriorForOneForce(std::string key, UIInterface& ui, UIId id)
+    : ForceSubMenuItem(key, ui, new PriorMenuForOneForceCreator(ui, id), id)
+{
+}
+
+SubMenuPriorForOneForce::~SubMenuPriorForOneForce()
+{
+}
+
+bool SubMenuPriorForOneForce::IsVisible()
+{
+    return (ForceSubMenuItem::IsVisible() && ui.doGetBool(uistr::bayesian));
+}
+
+PriorByParameterMenuItemGroup::PriorByParameterMenuItemGroup(UIInterface & ui,UIId id)
+    : MenuDisplayGroupBaseImplementation(ui,uistr::priorByID),
+      m_id(id)
+{
+}
+
+PriorByParameterMenuItemGroup::~PriorByParameterMenuItemGroup()
+{
+}
+
+UIIdVec1d
+PriorByParameterMenuItemGroup::GetVisibleIds()
+{
+    return ui.doGetUIIdVec1d(uistr::validParamsForForce,m_id);
+}
+
+MenuInteraction_ptr
+PriorByParameterMenuItemGroup::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new PriorMenuForOneParameter(ui,id));
+}
+
+DefaultPriorForForce::DefaultPriorForForce(string myKey, UIInterface& myUI,
+                                           UIId myId)
+    : SubMenuItem(myKey, myUI, new PriorMenuForOneParameterCreator
+                  (myUI, UIId(myId.GetForceType(), uiconst::GLOBAL_ID))),
+      m_id(myId)
+{
+}
+
+DefaultPriorForForce::~DefaultPriorForForce()
+{
+}
+
+string DefaultPriorForForce::GetVariableText()
+{
+    return ui.GetCurrentVars().forces.GetPriorTypeSummaryDescription(m_id.GetForceType(), uiconst::GLOBAL_ID);
+}
+
+// One possible parameter is the 'default' parameter for a particular force (index uiconst::GLOBAL_ID).
+PriorMenuForOneParameter::PriorMenuForOneParameter(UIInterface& ui, UIId id)
+    : NewMenu(ui, lamarcmenu::priorTitle, lamarcmenu::priorInfoForParam),
+      m_id(id)
+{
+    if (id.GetIndex1() != uiconst::GLOBAL_ID)
+    {
+        AddMenuItem( new ToggleMenuItemId("D",ui,uistr::priorUseDefault,id));
+    }
+    AddMenuItem(new ToggleMenuItemId("S",ui,uistr::priorType,id));
+    AddMenuItem(new SetMenuItemId("U",ui,uistr::priorUpperBound,id));
+    AddMenuItem(new SetMenuItemId("L",ui,uistr::priorLowerBound,id));
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    AddMenuItem(new SetMenuItemId("R",ui,uistr::relativeSampleRate,id));
+#endif
+}
+
+PriorMenuForOneParameter::~PriorMenuForOneParameter()
+{
+}
+
+string PriorMenuForOneParameter::Title()
+{
+    if (m_id.GetIndex1() == uiconst::GLOBAL_ID)
+    {
+        return lamarcmenu::defaultForParamsFor + ToString(m_id.GetForceType());
+    }
+    else
+    {
+        return lamarcmenu::priorTitle + lamarcmenu::wordFor +
+            ui.GetCurrentVars().GetParamNameWithConstraint(m_id.GetForceType(), m_id.GetIndex1());
+    }
+}
+
+//____________________________________________________________________________________
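The prior menus nest three levels deep: PriorMenu lists the active forces, PriorMenuForOneForce lists that force's parameters plus a force-wide default, and PriorMenuForOneParameter offers the "D" (use default) toggle only when editing a specific parameter rather than the GLOBAL_ID default. A hypothetical illustration (sketch_prior_menus and the parameter index 2 are placeholders, not upstream code):

    void sketch_prior_menus(UIInterface & ui)
    {
        // Force-wide default prior: the GLOBAL_ID sentinel makes the constructor
        // above skip the "D" toggle and offer only prior type and bounds.
        PriorMenuForOneParameter defaultPrior(ui, UIId(force_MIG, uiconst::GLOBAL_ID));

        // A specific parameter additionally gets "D" to fall back on that default.
        PriorMenuForOneParameter onePrior(ui, UIId(force_MIG, 2));
    }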
diff --git a/src/lamarcmenus/priormenus.h b/src/lamarcmenus/priormenus.h
new file mode 100644
index 0000000..967470c
--- /dev/null
+++ b/src/lamarcmenus/priormenus.h
@@ -0,0 +1,135 @@
+// $Id: priormenus.h,v 1.3 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef PRIORMENUS_H
+#define PRIORMENUS_H
+
+#include <string>
+#include "forcesmenus.h"
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+
+class UIInterface;
+
+class PriorMenu : public NewMenu
+{
+  private:
+    PriorMenu(); //undefined
+  public:
+    PriorMenu(UIInterface& ui);
+    virtual ~PriorMenu();
+};
+
+class PriorMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    PriorMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~PriorMenuCreator() {};
+    virtual NewMenu_ptr Create() { return NewMenu_ptr(new PriorMenu(ui));};
+};
+
+class PriorByForceMenuItemGroup : public MenuDisplayGroupBaseImplementation
+{
+  private:
+    PriorByForceMenuItemGroup(); //undefined
+  public:
+    PriorByForceMenuItemGroup(UIInterface& ui);
+    virtual ~PriorByForceMenuItemGroup();
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+    virtual UIIdVec1d           GetVisibleIds();
+    virtual string              GetKey(UIId id);
+};
+
+class PriorMenuForOneForce : public NewMenu
+{
+  private:
+    PriorMenuForOneForce(); //undefined
+  public:
+    PriorMenuForOneForce(UIInterface& ui, UIId id);
+    virtual ~PriorMenuForOneForce();
+};
+
+//These two classes are used for the individual forces' menus.
+class SubMenuPriorForOneForce : public ForceSubMenuItem
+{
+  private:
+    SubMenuPriorForOneForce(); //undefined
+    UIId id;
+  public:
+    SubMenuPriorForOneForce(std::string key, UIInterface& ui, UIId id);
+    virtual ~SubMenuPriorForOneForce();
+    virtual bool IsVisible();
+};
+
+class PriorMenuForOneForceCreator : public NewMenuCreator
+{
+  private:
+    PriorMenuForOneForceCreator(); //undefined
+    UIInterface& ui;
+    UIId id;
+  public:
+    PriorMenuForOneForceCreator(UIInterface& myui, UIId myid) :
+        ui(myui), id(myid) {};
+    virtual ~PriorMenuForOneForceCreator() {};
+    virtual NewMenu_ptr Create() { return NewMenu_ptr(new PriorMenuForOneForce(ui, id));};
+};
+
+class PriorByParameterMenuItemGroup : public MenuDisplayGroupBaseImplementation
+{
+  private:
+    PriorByParameterMenuItemGroup(); //undefined
+    UIId m_id;
+  public:
+    PriorByParameterMenuItemGroup(UIInterface& ui, UIId id);
+    virtual ~PriorByParameterMenuItemGroup();
+    virtual UIIdVec1d  GetVisibleIds();
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+};
+
+class DefaultPriorForForce: public SubMenuItem
+{
+  private:
+    DefaultPriorForForce();
+    UIId m_id;
+  public:
+    DefaultPriorForForce(string myKey, UIInterface& myUI, UIId myId);
+    virtual ~DefaultPriorForForce();
+    virtual string GetVariableText();
+};
+
+class PriorMenuForOneParameter : public NewMenu
+{
+  private:
+    PriorMenuForOneParameter();
+    UIId m_id;
+  public:
+    PriorMenuForOneParameter(UIInterface& ui, UIId id);
+    virtual ~PriorMenuForOneParameter();
+    virtual string Title();
+};
+
+class PriorMenuForOneParameterCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+    UIId id;
+  public:
+    PriorMenuForOneParameterCreator(UIInterface & myui, UIId myid) :
+        ui(myui), id(myid) {};
+    virtual ~PriorMenuForOneParameterCreator() {};
+    virtual NewMenu_ptr Create() { return NewMenu_ptr(new PriorMenuForOneParameter(ui, id));};
+};
+
+#endif  // PRIORMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/profilemenus.cpp b/src/lamarcmenus/profilemenus.cpp
new file mode 100644
index 0000000..9c461ed
--- /dev/null
+++ b/src/lamarcmenus/profilemenus.cpp
@@ -0,0 +1,135 @@
+// $Id: profilemenus.cpp,v 1.15 2013/10/25 17:00:53 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include <string>
+#include "lamarc_strings.h"
+#include "profilemenus.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+
+using std::string;
+
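+// Top-level profiling menu: global on/off/percentile/fixed toggles plus one submenu entry per active force.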
+ProfileMenu::ProfileMenu(UIInterface & myui)
+    : NewMenu(myui,lamarcmenu::profileTitle, lamarcmenu::profileInfo)
+{
+    AddMenuItem(new ToggleMenuItemNoId("A",ui,uistr::allProfilesOn));
+    AddMenuItem(new ToggleMenuItemNoId("X",ui,uistr::allProfilesOff));
+    AddMenuItem(new ToggleMenuItemNoId("P",ui,uistr::allProfilesPercentile));
+    AddMenuItem(new ToggleMenuItemNoId("F",ui,uistr::allProfilesFixed));
+    AddMenuItem(new ProfileByForceMenuItemGroup(ui));
+}
+
+ProfileMenu::~ProfileMenu()
+{
+}
+
+ProfileByForceMenuItemGroup::ProfileByForceMenuItemGroup(UIInterface& ui)
+    : MenuDisplayGroupBaseImplementation(ui,uistr::profileByForce)
+{
+}
+
+ProfileByForceMenuItemGroup::~ProfileByForceMenuItemGroup()
+{
+}
+
+MenuInteraction_ptr
+ProfileByForceMenuItemGroup::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new ProfileMenuForOneForce(ui,id));
+}
+
+UIIdVec1d
+ProfileByForceMenuItemGroup::GetVisibleIds()
+{
+    return ui.doGetUIIdVec1d(uistr::validForces);
+}
+
+string
+ProfileByForceMenuItemGroup::GetKey(UIId id)
+{
+    // MDEBUG:  is the absence of the Divergence family a problem here?
+    // User testing suggests it's okay (you get Migration menu entries for DivMig)
+    // but this hasn't been extensively tested.
+    switch(id.GetForceType())
+    {
+        case force_COAL:
+            return "T";
+            break;
+        case force_DISEASE:
+            return "D";
+            break;
+        case force_GROW:
+            return "G";
+            break;
+        case force_MIG:
+            return "M";
+            break;
+        case force_REC:
+            return "R";
+            break;
+        case force_REGION_GAMMA:
+            return "L";
+            break;
+        case force_EXPGROWSTICK:
+            return "ES";
+            break;
+        case force_LOGISTICSELECTION:
+            return "S";
+            break;
+        case force_LOGSELECTSTICK:
+            return "LS";
+            break;
+        default:
+            assert(false);              //uncaught force type.
+    }
+    throw implementation_error("force_type enum missing case in ProfileByForceMenuItemGroup::GetKey");
+}
+
+ProfileMenuForOneForce::ProfileMenuForOneForce(UIInterface& ui,UIId id)
+    : NewMenu(ui,lamarcmenu::forceProfileTitle+ToString(id.GetForceType()),
+              lamarcmenu::forceProfileInfo)
+{
+    AddMenuItem(new ToggleMenuItemId("A",ui,uistr::oneForceProfilesOn,id));
+    AddMenuItem(new ToggleMenuItemId("X",ui,uistr::oneForceProfilesOff,id));
+    AddMenuItem(new ToggleMenuItemId("P",ui,uistr::oneForceProfileType,id));
+    AddMenuItem(new ToggleMenuItemGroupProfiles(ui,id));
+}
+
+ProfileMenuForOneForce::~ProfileMenuForOneForce()
+{
+}
+
+SubMenuProfileForOneForce::SubMenuProfileForOneForce(std::string key, UIInterface& ui, UIId id)
+    : ForceSubMenuItem(key, ui, new ProfileMenuForOneForceCreator(ui, id), id)
+{
+}
+
+SubMenuProfileForOneForce::~SubMenuProfileForOneForce()
+{
+}
+
+ToggleMenuItemGroupProfiles::ToggleMenuItemGroupProfiles(UIInterface & ui,UIId id)
+    : ToggleMenuItemGroup(ui,uistr::profileByID), m_id(id)
+{
+}
+
+ToggleMenuItemGroupProfiles::~ToggleMenuItemGroupProfiles()
+{
+}
+
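+// One toggle entry per valid parameter of the force identified by m_id.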
+UIIdVec1d
+ToggleMenuItemGroupProfiles::GetVisibleIds()
+{
+    return ui.doGetUIIdVec1d(uistr::validParamsForForce,m_id);
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/profilemenus.h b/src/lamarcmenus/profilemenus.h
new file mode 100644
index 0000000..d083e67
--- /dev/null
+++ b/src/lamarcmenus/profilemenus.h
@@ -0,0 +1,94 @@
+// $Id: profilemenus.h,v 1.4 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef PROFILEMENUS_H
+#define PROFILEMENUS_H
+
+#include <string>
+#include "forcesmenus.h"
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+
+class UIInterface;
+
+// choices for profile type and on/off for individual quantities
+class ProfileMenu : public NewMenu
+{
+  public:
+    ProfileMenu(UIInterface & myui);
+    ~ProfileMenu();
+};
+
+class ProfileMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    ProfileMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~ProfileMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new ProfileMenu(ui));};
+};
+
+class ProfileByForceMenuItemGroup : public MenuDisplayGroupBaseImplementation
+{
+  protected:
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+  public:
+    ProfileByForceMenuItemGroup(UIInterface& ui);
+    virtual ~ProfileByForceMenuItemGroup();
+    virtual UIIdVec1d GetVisibleIds();
+    virtual std::string GetKey(UIId id);
+};
+
+class ProfileMenuForOneForce : public NewMenu
+{
+  public:
+    ProfileMenuForOneForce(UIInterface &ui,UIId id);
+    virtual ~ProfileMenuForOneForce();
+};
+
+//These two classes are used for the individual forces' menus.
+class SubMenuProfileForOneForce : public ForceSubMenuItem
+{
+  private:
+    SubMenuProfileForOneForce(); //undefined
+    UIId id;
+  public:
+    SubMenuProfileForOneForce(std::string key, UIInterface& ui, UIId id);
+    virtual ~SubMenuProfileForOneForce();
+};
+
+class ProfileMenuForOneForceCreator : public NewMenuCreator
+{
+  private:
+    ProfileMenuForOneForceCreator(); //undefined
+    UIInterface& ui;
+    UIId id;
+  public:
+    ProfileMenuForOneForceCreator(UIInterface& myui, UIId myid) :
+        ui(myui), id(myid) {};
+    virtual ~ProfileMenuForOneForceCreator() {};
+    virtual NewMenu_ptr Create() { return NewMenu_ptr(new ProfileMenuForOneForce(ui, id));};
+};
+
+class ToggleMenuItemGroupProfiles : public ToggleMenuItemGroup
+{
+  private:
+    UIId    m_id;
+  public:
+    ToggleMenuItemGroupProfiles(UIInterface & ui,UIId id);
+    virtual ~ToggleMenuItemGroupProfiles();
+    virtual UIIdVec1d GetVisibleIds();
+};
+
+#endif  // PROFILEMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/recmenus.cpp b/src/lamarcmenus/recmenus.cpp
new file mode 100644
index 0000000..610db5d
--- /dev/null
+++ b/src/lamarcmenus/recmenus.cpp
@@ -0,0 +1,68 @@
+// $Id: recmenus.cpp,v 1.11 2010/03/17 17:25:59 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "constants.h"
+#include "constraintmenus.h"
+#include "lamarc_strings.h"
+#include "newmenuitems.h"
+#include "priormenus.h"
+#include "recmenus.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "profilemenus.h"
+
+RecombineMaxEventsMenuItem::RecombineMaxEventsMenuItem(string myKey, UIInterface & myui)
+    : SetMenuItemNoId(myKey,myui,uistr::recombinationMaxEvents)
+{
+}
+
+RecombineMaxEventsMenuItem::~RecombineMaxEventsMenuItem()
+{
+}
+
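+// Visible only while recombination is enabled (the rate item below uses the same guard).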
+bool RecombineMaxEventsMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::recombination);
+}
+
+RecombineRateMenuItem::RecombineRateMenuItem(string myKey, UIInterface & myui)
+    : SetMenuItemId(myKey,myui,uistr::recombinationRate, UIId(force_REC, uiconst::GLOBAL_ID))
+{
+}
+
+RecombineRateMenuItem::~RecombineRateMenuItem()
+{
+}
+
+bool RecombineRateMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::recombination);
+}
+
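+// Recombination menu: enable/disable toggle plus constraint, profile, and prior submenus, and the rate and max-events settings.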
+RecombinationMenu::RecombinationMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::recTitle,lamarcmenu::recInfo)
+{
+    AddMenuItem(new ToggleMenuItemNoId("X",ui,uistr::recombination));
+    UIId id(force_REC);
+    AddMenuItem(new SubMenuConstraintsForOneForce("C",ui,id));
+    AddMenuItem(new SubMenuProfileForOneForce("P",ui,id));
+    AddMenuItem(new SubMenuPriorForOneForce("B",ui,id));
+    AddMenuItem(new RecombineRateMenuItem("S",ui));
+    AddMenuItem(new RecombineMaxEventsMenuItem("M",ui));
+}
+
+RecombinationMenu::~RecombinationMenu ()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/recmenus.h b/src/lamarcmenus/recmenus.h
new file mode 100644
index 0000000..1a9ff6d
--- /dev/null
+++ b/src/lamarcmenus/recmenus.h
@@ -0,0 +1,55 @@
+// $Id: recmenus.h,v 1.12 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef RECMENUS_H
+#define RECMENUS_H
+
+#include <string>
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+
+class UIInterface;
+
+class RecombineMaxEventsMenuItem : public SetMenuItemNoId
+{
+  public:
+    RecombineMaxEventsMenuItem(std::string myKey, UIInterface & myui);
+    ~RecombineMaxEventsMenuItem();
+    bool IsVisible();
+};
+
+class RecombineRateMenuItem : public SetMenuItemId
+{
+  public:
+    RecombineRateMenuItem(std::string myKey, UIInterface & myui);
+    ~RecombineRateMenuItem();
+    bool IsVisible();
+};
+
+class RecombinationMenu : public NewMenu
+{
+  public:
+    RecombinationMenu(UIInterface & myui);
+    virtual ~RecombinationMenu();
+};
+
+class RecombinationMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    RecombinationMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~RecombinationMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new RecombinationMenu(ui));};
+};
+
+#endif  // RECMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/regiongammamenus.cpp b/src/lamarcmenus/regiongammamenus.cpp
new file mode 100644
index 0000000..cff467d
--- /dev/null
+++ b/src/lamarcmenus/regiongammamenus.cpp
@@ -0,0 +1,52 @@
+// $Id: regiongammamenus.cpp,v 1.3 2010/03/17 17:25:59 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "constants.h"
+#include "constraintmenus.h"
+#include "lamarc_strings.h"
+#include "newmenuitems.h"
+#include "forcesmenus.h"
+#include "regiongammamenus.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "profilemenus.h"
+
+RegionGammaShapeParameterMenuItem::RegionGammaShapeParameterMenuItem(string myKey, UIInterface & myui)
+    : SetMenuItemId(myKey,myui,uistr::regionGammaShape, UIId(force_REGION_GAMMA, uiconst::GLOBAL_ID))
+{
+}
+
+RegionGammaShapeParameterMenuItem::~RegionGammaShapeParameterMenuItem()
+{
+}
+
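+// Visible only when the region-gamma force is enabled.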
+bool RegionGammaShapeParameterMenuItem::IsVisible()
+{
+    return ui.doGetBool(uistr::regionGamma);
+}
+
+RegionGammaMenu::RegionGammaMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::regionGammaTitle,lamarcmenu::regionGammaInfo)
+{
+    AddMenuItem(new ToggleMenuItemNoId("X",ui,uistr::regionGamma));
+    UIId id(force_REGION_GAMMA);
+    AddMenuItem(new SubMenuConstraintsForOneForce("C",ui,id));
+    AddMenuItem(new SubMenuProfileForOneForce("P",ui,id));
+    AddMenuItem(new RegionGammaShapeParameterMenuItem("S",ui));
+}
+
+RegionGammaMenu::~RegionGammaMenu ()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/regiongammamenus.h b/src/lamarcmenus/regiongammamenus.h
new file mode 100644
index 0000000..bf2e4eb
--- /dev/null
+++ b/src/lamarcmenus/regiongammamenus.h
@@ -0,0 +1,47 @@
+// $Id: regiongammamenus.h,v 1.4 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef REGIONGAMMAMENUS_H
+#define REGIONGAMMAMENUS_H
+
+#include <string>
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+
+class UIInterface;
+
+class RegionGammaShapeParameterMenuItem : public SetMenuItemId
+{
+  public:
+    RegionGammaShapeParameterMenuItem(std::string myKey, UIInterface & myui);
+    ~RegionGammaShapeParameterMenuItem();
+    bool IsVisible();
+};
+
+class RegionGammaMenu : public NewMenu
+{
+  public:
+    RegionGammaMenu(UIInterface & myui);
+    virtual ~RegionGammaMenu();
+};
+
+class RegionGammaMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    RegionGammaMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~RegionGammaMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new RegionGammaMenu(ui));};
+};
+
+#endif  // REGIONGAMMAMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/traitmodelmenu.cpp b/src/lamarcmenus/traitmodelmenu.cpp
new file mode 100644
index 0000000..ed2b182
--- /dev/null
+++ b/src/lamarcmenus/traitmodelmenu.cpp
@@ -0,0 +1,287 @@
+// $Id: traitmodelmenu.cpp,v 1.11 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "lamarc_strings.h"
+#include "overviewmenus.h"
+#include "traitmodelmenu.h"
+#include "ui_interface.h"
+#include "ui_vars.h"
+
+//------------------------------------------------------------------------------------
+
+TraitModelItem::TraitModelItem(string key, UIInterface& ui)
+    : SubMenuItem(key, ui, new TraitModelsMenuCreator(ui))
+{
+}
+
+TraitModelItem::~TraitModelItem()
+{
+}
+
+bool TraitModelItem::IsVisible()
+{
+    return (ui.GetCurrentVars().traitmodels.GetNumMovableLoci() > 0);
+}
+
+string TraitModelItem::GetVariableText()
+{
+    //If the trait menu is on at all, it's enabled.
+    return "Enabled";
+}
+
+//------------------------------------------------------------------------------------
+
+TraitModelsMenu::TraitModelsMenu (UIInterface & myui )
+    : NewMenu (myui,lamarcmenu::traitsTitle,lamarcmenu::traitsInfo)
+{
+    AddMenuItem(new SubMenuItemsTraitModels(ui));
+}
+
+TraitModelsMenu::~TraitModelsMenu ()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+SubMenuItemsTraitModels::SubMenuItemsTraitModels(UIInterface & myui)
+    : MenuDisplayGroupBaseImplementation(myui,menustr::emptyString)
+{
+}
+
+SubMenuItemsTraitModels::~SubMenuItemsTraitModels()
+{
+}
+
+vector<UIId> SubMenuItemsTraitModels::GetVisibleIds()
+{
+    return ui.doGetUIIdVec1d(uistr::validMovingLoci,UIId());
+}
+
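+// Menu keys are 1-based positions in the list of movable trait loci; convert the input back to an id before dispatching.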
+MenuInteraction_ptr SubMenuItemsTraitModels::GetHandler(std::string input)
+{
+    vector<UIId> ids = ui.doGetUIIdVec1d(uistr::validMovingLoci, UIId());
+    long idno = ProduceLongOrBarf(input);
+    idno--;
+    assert(idno < static_cast<long>(ids.size()));
+    return MakeOneHandler(ids[idno]);
+}
+
+MenuInteraction_ptr SubMenuItemsTraitModels::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new SingleTraitModelMenu(ui, id));
+}
+
+string SubMenuItemsTraitModels::GetText(UIId id)
+{
+    return lamarcmenu::traitModel1 + ui.doGetString(uistr::regionName, id);
+}
+
+string SubMenuItemsTraitModels::GetVariableText(UIId id)
+{
+    return ui.doGetString(uistr::traitModelName, id);
+}
+
+string SubMenuItemsTraitModels::GetKey(UIId id)
+{
+    vector<UIId> ids = ui.doGetUIIdVec1d(uistr::validMovingLoci, UIId());
+    for (unsigned long idno=0; idno<ids.size(); idno++)
+    {
+        if (ids[idno] == id) return ToString(idno+1);
+    }
+    assert(false);
+    return ToString(0);
+}
+
+//------------------------------------------------------------------------------------
+
+SingleTraitModelMenu::SingleTraitModelMenu(UIInterface& ui, UIId id)
+    : NewMenu(ui, menustr::emptyString, lamarcmenu::singleTraitModelInfo),
+      m_id(id)
+{
+    AddMenuItem(new DisplayOnlyMenuItem(uistr::traitModelName, ui, m_id));
+    AddMenuItem(new MenuDisplayTraitAnalysisType(ui, id));
+    AddMenuItem(new ToggleMenuItemId("F", ui, uistr::traitAnalysisFloat, m_id));
+    AddMenuItem(new ToggleMenuItemId("J", ui, uistr::traitAnalysisJump, m_id));
+
+#if 0
+    //LS DEBUG MAPPING: change after implementation
+    AddMenuItem(new ToggleMenuItemId("U", ui, uistr::traitAnalysisData, m_id));
+    AddMenuItem(new ToggleMenuItemId("P", ui, uistr::traitAnalysisPartition, m_id));
+#endif
+
+    AddMenuItem(new MenuDisplayTraitRange(ui, id));
+    AddMenuItem(new AddRangeToTraitModel("A", ui, m_id));
+    AddMenuItem(new RemoveRangeFromTraitModel("D", ui, m_id));
+
+#if 0
+    AddMenuItem(new TraitModelRangeToPointMenu("S", ui, m_id));
+#endif
+}
+
+SingleTraitModelMenu::~SingleTraitModelMenu()
+{
+}
+
+string SingleTraitModelMenu::Title()
+{
+    return lamarcmenu::traitModel1 + ui.doGetString(uistr::locusName, m_id);
+}
+
+//------------------------------------------------------------------------------------
+
+MenuDisplayTraitAnalysisType::MenuDisplayTraitAnalysisType(UIInterface& ui, UIId id)
+    : MenuDisplayLine(),
+      m_regId(id, ui.GetCurrentVars()),
+      m_ui(ui)
+{
+}
+
+MenuDisplayTraitAnalysisType::~MenuDisplayTraitAnalysisType()
+{
+}
+
+string MenuDisplayTraitAnalysisType::GetKey()
+{
+    return "";
+}
+
+string MenuDisplayTraitAnalysisType::GetText()
+{
+    return lamarcmenu::traitAnalysisType;
+}
+
+string MenuDisplayTraitAnalysisType::GetVariableText()
+{
+    return ToString(m_ui.GetCurrentVars().traitmodels.GetAnalysisType(m_regId));
+}
+
+//------------------------------------------------------------------------------------
+
+MenuDisplayTraitRange::MenuDisplayTraitRange(UIInterface& ui, UIId id)
+    : MenuDisplayLine(),
+      m_regId(id, ui.GetCurrentVars()),
+      m_ui(ui)
+{
+}
+
+MenuDisplayTraitRange::~MenuDisplayTraitRange()
+{
+}
+
+string MenuDisplayTraitRange::GetKey()
+{
+    return "";
+}
+
+string MenuDisplayTraitRange::GetText()
+{
+    return lamarcmenu::traitRange;
+}
+
+string MenuDisplayTraitRange::GetVariableText()
+{
+    return ToString(m_ui.GetCurrentVars().traitmodels.GetRange(m_regId));
+}
+
+//------------------------------------------------------------------------------------
+
+AddRangeToTraitModel::AddRangeToTraitModel(string key, UIInterface& ui, UIId id)
+    : SetMenuItemId(key, ui, uistr::addRangeForTraitModel, id)
+{
+}
+
+AddRangeToTraitModel::~AddRangeToTraitModel()
+{
+}
+
+string AddRangeToTraitModel::GetVariableText()
+{
+    return "";
+}
+
+MenuInteraction_ptr AddRangeToTraitModel::GetHandler(std::string input)
+{
+    assert(Handles(input));
+    return MenuInteraction_ptr(new SetAddRangeDialog(ui,menuKey,GetId()));
+}
+
+//------------------------------------------------------------------------------------
+
+SetAddRangeDialog::SetAddRangeDialog(UIInterface& ui, string key, UIId id)
+    : SetDialog(ui, key, id)
+{
+}
+
+SetAddRangeDialog::~SetAddRangeDialog()
+{
+}
+
+string SetAddRangeDialog::inLoopOutputString()
+{
+    return lamarcmenu::addRangeDialog;
+}
+
+//------------------------------------------------------------------------------------
+
+RemoveRangeFromTraitModel::RemoveRangeFromTraitModel(string key, UIInterface& ui, UIId id)
+    : SetMenuItemId(key, ui, uistr::removeRangeForTraitModel, id)
+{
+}
+
+RemoveRangeFromTraitModel::~RemoveRangeFromTraitModel()
+{
+}
+
+string RemoveRangeFromTraitModel::GetVariableText()
+{
+    return "";
+}
+
+MenuInteraction_ptr RemoveRangeFromTraitModel::GetHandler(std::string input)
+{
+    assert(Handles(input));
+    return MenuInteraction_ptr(new SetRemoveRangeDialog(ui,menuKey,GetId()));
+}
+
+//------------------------------------------------------------------------------------
+
+TraitModelRangeToPointMenu::TraitModelRangeToPointMenu(string key, UIInterface& ui, UIId id)
+    : SetMenuItemId(key, ui, uistr::traitModelRangeToPoint, id)
+{
+}
+
+TraitModelRangeToPointMenu::~TraitModelRangeToPointMenu()
+{
+}
+
+string TraitModelRangeToPointMenu::GetVariableText()
+{
+    return "";
+}
+
+//------------------------------------------------------------------------------------
+
+SetRemoveRangeDialog::SetRemoveRangeDialog(UIInterface& ui, string key, UIId id)
+    : SetDialog(ui, key, id)
+{
+}
+
+SetRemoveRangeDialog::~SetRemoveRangeDialog()
+{
+}
+
+string SetRemoveRangeDialog::inLoopOutputString()
+{
+    return lamarcmenu::removeRangeDialog;
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/traitmodelmenu.h b/src/lamarcmenus/traitmodelmenu.h
new file mode 100644
index 0000000..9592b45
--- /dev/null
+++ b/src/lamarcmenus/traitmodelmenu.h
@@ -0,0 +1,145 @@
+// $Id: traitmodelmenu.h,v 1.5 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef TRAITMODELMENU_H
+#define TRAITMODELMENU_H
+
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "ui_regid.h"
+
+class TraitModelItem : public SubMenuItem
+{
+  public:
+    TraitModelItem(string key, UIInterface& ui);
+    ~TraitModelItem();
+    virtual bool IsVisible();
+    virtual std::string GetVariableText();
+};
+
+class TraitModelsMenu : public NewMenu
+{
+  public:
+    TraitModelsMenu(UIInterface & myui);
+    ~TraitModelsMenu();
+};
+
+class TraitModelsMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    TraitModelsMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~TraitModelsMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new TraitModelsMenu(ui));};
+};
+
+class SubMenuItemsTraitModels : public MenuDisplayGroupBaseImplementation
+{
+  public:
+    SubMenuItemsTraitModels(UIInterface & myui);
+    virtual ~SubMenuItemsTraitModels();
+    virtual vector<UIId> GetVisibleIds();
+    virtual MenuInteraction_ptr GetHandler(std::string input);
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+    virtual string GetText(UIId id);
+    virtual string GetKey(UIId id);
+    virtual string GetVariableText(UIId id);
+};
+
+class SingleTraitModelMenu : public NewMenu
+{
+  private:
+    UIId m_id;
+  public:
+    SingleTraitModelMenu(UIInterface& ui, UIId id);
+    virtual ~SingleTraitModelMenu();
+    string Title();
+};
+
+class MenuDisplayTraitAnalysisType : public MenuDisplayLine
+{
+  private:
+    MenuDisplayTraitAnalysisType();
+    UIRegId m_regId;
+    UIInterface & m_ui;
+  protected:
+    UIId& GetId();
+  public:
+    MenuDisplayTraitAnalysisType(UIInterface& ui, UIId id);
+    virtual ~MenuDisplayTraitAnalysisType();
+    virtual std::string GetKey();
+    virtual std::string GetText();
+    virtual std::string GetVariableText();
+    virtual bool Handles(std::string) {return false;};
+};
+
+class MenuDisplayTraitRange : public MenuDisplayLine
+{
+  private:
+    MenuDisplayTraitRange();
+    UIRegId m_regId;
+    UIInterface & m_ui;
+  protected:
+    UIId& GetId();
+  public:
+    MenuDisplayTraitRange(UIInterface& ui, UIId id);
+    virtual ~MenuDisplayTraitRange();
+    virtual std::string GetKey();
+    virtual std::string GetText();
+    virtual std::string GetVariableText();
+    virtual bool Handles(std::string) {return false;};
+};
+
+class AddRangeToTraitModel : public SetMenuItemId
+{
+  public:
+    AddRangeToTraitModel(string key, UIInterface& ui, UIId id);
+    virtual ~AddRangeToTraitModel();
+    virtual string GetVariableText();
+    virtual MenuInteraction_ptr GetHandler(std::string key);
+};
+
+class SetAddRangeDialog  : public SetDialog
+{
+  public:
+    SetAddRangeDialog(UIInterface& ui, string key, UIId id);
+    virtual ~SetAddRangeDialog();
+    virtual string inLoopOutputString();
+};
+
+class RemoveRangeFromTraitModel : public SetMenuItemId
+{
+  public:
+    RemoveRangeFromTraitModel(string key, UIInterface& ui, UIId id);
+    virtual ~RemoveRangeFromTraitModel();
+    virtual string GetVariableText();
+    virtual MenuInteraction_ptr GetHandler(std::string key);
+};
+
+class TraitModelRangeToPointMenu : public SetMenuItemId
+{
+  public:
+    TraitModelRangeToPointMenu(string key, UIInterface& ui, UIId id);
+    virtual ~TraitModelRangeToPointMenu();
+    virtual string GetVariableText();
+};
+
+class SetRemoveRangeDialog  : public SetDialog
+{
+  public:
+    SetRemoveRangeDialog(UIInterface& ui, string key, UIId id);
+    virtual ~SetRemoveRangeDialog();
+    virtual string inLoopOutputString();
+};
+
+#endif  // TRAITMODELMENU_H
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/treesummenus.cpp b/src/lamarcmenus/treesummenus.cpp
new file mode 100644
index 0000000..fe6ef5c
--- /dev/null
+++ b/src/lamarcmenus/treesummenus.cpp
@@ -0,0 +1,92 @@
+// $Id: treesummenus.cpp,v 1.13 2010/03/17 17:25:59 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <fstream>
+#include <string>
+#include "menu_strings.h"
+#include "newmenuitems.h"
+#include "togglemenuitem.h"
+#include "treesummenus.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+
+using std::string;
+
+SetMenuItemTreeSumInFileName::SetMenuItemTreeSumInFileName(string myKey, UIInterface & myui)
+    : SetMenuItemNoId(myKey,myui,uistr::treeSumInFileName)
+{
+}
+
+SetMenuItemTreeSumInFileName::~SetMenuItemTreeSumInFileName()
+{
+}
+
+SetMenuItemTreeSumOutFileName::SetMenuItemTreeSumOutFileName(string myKey, UIInterface & myui)
+    : SetMenuItemNoId(myKey,myui,uistr::treeSumOutFileName)
+{
+}
+
+SetMenuItemTreeSumOutFileName::~SetMenuItemTreeSumOutFileName()
+{
+}
+
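+// The file-name entries only appear when the corresponding tree-summary file is enabled.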
+bool SetMenuItemTreeSumInFileName::IsVisible()
+{
+    return ui.doGetBool(uistr::treeSumInFileEnabled);
+}
+
+bool SetMenuItemTreeSumOutFileName::IsVisible()
+{
+    return ui.doGetBool(uistr::treeSumOutFileEnabled);
+}
+
+TreeSumInMenu::TreeSumInMenu(UIInterface & myui)
+    : NewMenu(myui,uistr::treeSumInFileEnabled,menustr::emptyString)
+{
+    AddMenuItem(new ToggleMenuItemNoId("X",ui,uistr::treeSumInFileEnabled));
+    AddMenuItem(new SetMenuItemTreeSumInFileName("N",ui));
+}
+
+TreeSumInMenu::~TreeSumInMenu()
+{
+}
+
+TreeSumOutMenu::TreeSumOutMenu(UIInterface & myui)
+    : NewMenu(myui,uistr::treeSumOutFileEnabled,menustr::emptyString)
+{
+    AddMenuItem(new ToggleMenuItemNoId("X",ui,uistr::treeSumOutFileEnabled));
+    AddMenuItem(new SetMenuItemTreeSumOutFileName("N",ui));
+}
+
+TreeSumOutMenu::~TreeSumOutMenu()
+{
+}
+
+TreeSumInSubMenuItem::TreeSumInSubMenuItem(string myKey, UIInterface & myui)
+    : SubMenuItem(myKey, myui, new TreeSumInMenuCreator(myui))
+{
+}
+
+TreeSumOutSubMenuItem::TreeSumOutSubMenuItem(string myKey, UIInterface & myui)
+    : SubMenuItem(myKey, myui, new TreeSumOutMenuCreator(myui))
+{
+}
+
+string TreeSumInSubMenuItem::GetVariableText()
+{
+    return ui.doGetPrintString(uistr::treeSumInFileEnabled,GetId());
+}
+
+string TreeSumOutSubMenuItem::GetVariableText()
+{
+    return ui.doGetPrintString(uistr::treeSumOutFileEnabled,GetId());
+}
+
+//____________________________________________________________________________________
diff --git a/src/lamarcmenus/treesummenus.h b/src/lamarcmenus/treesummenus.h
new file mode 100644
index 0000000..fe24628
--- /dev/null
+++ b/src/lamarcmenus/treesummenus.h
@@ -0,0 +1,87 @@
+// $Id: treesummenus.h,v 1.13 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef TREESUMMENUS_H
+#define TREESUMMENUS_H
+
+#include <string>
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "togglemenuitem.h"
+
+class UIInterface;
+
+class SetMenuItemTreeSumInFileName : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemTreeSumInFileName(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemTreeSumInFileName();
+    virtual bool IsVisible();
+};
+
+class SetMenuItemTreeSumOutFileName : public SetMenuItemNoId
+{
+  public:
+    SetMenuItemTreeSumOutFileName(std::string myKey, UIInterface & myui);
+    virtual ~SetMenuItemTreeSumOutFileName();
+    virtual bool IsVisible();
+};
+
+class TreeSumInMenu : public NewMenu
+{
+  public:
+    TreeSumInMenu(UIInterface & myui);
+    ~TreeSumInMenu();
+};
+
+class TreeSumOutMenu : public NewMenu
+{
+  public:
+    TreeSumOutMenu(UIInterface & myui);
+    ~TreeSumOutMenu();
+};
+
+class TreeSumInMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    TreeSumInMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~TreeSumInMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new TreeSumInMenu(ui));};
+};
+
+class TreeSumOutMenuCreator : public NewMenuCreator
+{
+  protected:
+    UIInterface & ui;
+  public:
+    TreeSumOutMenuCreator(UIInterface & myui) : ui(myui) {};
+    virtual ~TreeSumOutMenuCreator() {};
+    NewMenu_ptr Create() { return NewMenu_ptr(new TreeSumOutMenu(ui));};
+};
+
+class TreeSumInSubMenuItem : public SubMenuItem
+{
+  public:
+    TreeSumInSubMenuItem(std::string myKey, UIInterface & myUI);
+    virtual std::string GetVariableText();
+};
+
+class TreeSumOutSubMenuItem : public SubMenuItem
+{
+  public:
+    TreeSumOutSubMenuItem(std::string myKey, UIInterface & myUI);
+    virtual std::string GetVariableText();
+};
+
+#endif  // TREESUMMENUS_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/dialog.cpp b/src/menu/dialog.cpp
new file mode 100644
index 0000000..d45b28f
--- /dev/null
+++ b/src/menu/dialog.cpp
@@ -0,0 +1,22 @@
+// $Id: dialog.cpp,v 1.23 2010/03/17 17:25:59 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "menuinteraction.h"
+#include "dialog.h"
+
+Dialog::Dialog() : MenuInteraction()
+{
+}
+
+Dialog::~Dialog()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/menu/dialog.h b/src/menu/dialog.h
new file mode 100644
index 0000000..b6716c0
--- /dev/null
+++ b/src/menu/dialog.h
@@ -0,0 +1,25 @@
+// $Id: dialog.h,v 1.17 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef DIALOG_H
+#define DIALOG_H
+
+#include "menuinteraction.h"
+
+class Dialog : public MenuInteraction
+{
+  public:
+    Dialog();
+    virtual ~Dialog();
+};
+
+#endif // DIALOG_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/dialognoinput.cpp b/src/menu/dialognoinput.cpp
new file mode 100644
index 0000000..80f3567
--- /dev/null
+++ b/src/menu/dialognoinput.cpp
@@ -0,0 +1,78 @@
+// $Id: dialognoinput.cpp,v 1.14 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+
+#include "dialog.h"
+#include "dialognoinput.h"
+#include "display.h"
+#include "menudefs.h"
+#include "menu_strings.h"
+#include "stringx.h"
+
+using std::string;
+
+DialogNoInput::DialogNoInput() : Dialog()
+{
+}
+
+DialogNoInput::~DialogNoInput()
+{
+}
+
+void DialogNoInput::performAction()
+{
+    // default is to do nothing at all
+}
+
+menu_return_type DialogNoInput::InvokeMe(Display & display)
+{
+    display.DisplayDialogOutput(outputString());
+    performAction();
+    return menu_REDISPLAY;
+}
+
+//------------------------------------------------------------------------------------
+
+DialogAcknowledge::DialogAcknowledge() : DialogNoInput()
+{
+}
+
+DialogAcknowledge::~DialogAcknowledge()
+{
+}
+
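+// Shows the message, then blocks until the user enters something to acknowledge it.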
+menu_return_type DialogAcknowledge::InvokeMe(Display & display)
+{
+    display.DisplayDialogOutput(outputString());
+    display.DisplayDialogOutput(menustr::acknowledge);
+    display.GetUserInput();
+    return menu_REDISPLAY;
+}
+
+//------------------------------------------------------------------------------------
+
+std::string DialogChideInconsistencies::outputString()
+{
+    std::string allInconsistencies = ToString(inconsistencies);
+    std::string wholeMessage = menustr::inconsistencies + allInconsistencies;
+    return wholeMessage;
+}
+
+DialogChideInconsistencies::DialogChideInconsistencies(StringVec1d & myInconsistencies)
+    : inconsistencies(myInconsistencies)
+{
+}
+
+DialogChideInconsistencies::~DialogChideInconsistencies()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/menu/dialognoinput.h b/src/menu/dialognoinput.h
new file mode 100644
index 0000000..91a4ba7
--- /dev/null
+++ b/src/menu/dialognoinput.h
@@ -0,0 +1,53 @@
+// $Id: dialognoinput.h,v 1.13 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef DIALOGNOINPUT_H
+#define DIALOGNOINPUT_H
+
+#include <string>
+
+#include "dialog.h"
+#include "menudefs.h"
+#include "vectorx.h"
+
+class Display;
+
+class DialogNoInput : public Dialog
+{
+  protected:
+    virtual std::string outputString() = 0;
+    virtual void performAction();
+  public:
+    DialogNoInput();
+    virtual ~DialogNoInput();
+    virtual menu_return_type InvokeMe(Display&);
+};
+
+class DialogAcknowledge : public DialogNoInput
+{
+  public:
+    DialogAcknowledge();
+    virtual ~DialogAcknowledge();
+    virtual menu_return_type InvokeMe(Display&);
+};
+
+class DialogChideInconsistencies : public DialogAcknowledge
+{
+  protected:
+    StringVec1d & inconsistencies;
+    virtual std::string outputString();
+  public:
+    DialogChideInconsistencies(StringVec1d & inconsistencies);
+    virtual ~DialogChideInconsistencies();
+};
+
+#endif // DIALOGNOINPUT_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/dialogrepeat.cpp b/src/menu/dialogrepeat.cpp
new file mode 100644
index 0000000..2501921
--- /dev/null
+++ b/src/menu/dialogrepeat.cpp
@@ -0,0 +1,51 @@
+// $Id: dialogrepeat.cpp,v 1.12 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+
+#include "dialog.h"
+#include "dialogrepeat.h"
+#include "display.h"
+#include "menudefs.h"
+#include "ui_constants.h"
+
+using std::string;
+
+DialogRepeat::DialogRepeat() : Dialog()
+{
+}
+
+DialogRepeat::~DialogRepeat()
+{
+}
+
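+// Re-prompts up to maxTries() times; if every attempt is rejected, reports failure and calls doFailure().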
+menu_return_type DialogRepeat::InvokeMe(Display& display)
+{
+    display.DisplayDialogOutput(beforeLoopOutputString());
+    for(int i=0;i<maxTries();i++)
+    {
+        display.DisplayDialogOutput(inLoopOutputString());
+        std::string result = display.GetUserInput();
+        if (handleInput(result))
+        {
+            display.DisplayDialogOutput(afterLoopSuccessOutputString());
+            return menu_REDISPLAY;
+        }
+        else
+        {
+            display.DisplayDialogOutput(inLoopFailureOutputString());
+        }
+    }
+    display.DisplayDialogOutput(afterLoopFailureOutputString());
+    doFailure();
+    return menu_REDISPLAY;
+}
+
+//____________________________________________________________________________________
diff --git a/src/menu/dialogrepeat.h b/src/menu/dialogrepeat.h
new file mode 100644
index 0000000..e7b6bdb
--- /dev/null
+++ b/src/menu/dialogrepeat.h
@@ -0,0 +1,38 @@
+// $Id: dialogrepeat.h,v 1.11 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef DIALOGREPEAT_H
+#define DIALOGREPEAT_H
+
+#include <string>
+#include "dialog.h"
+
+class Display;
+
+class DialogRepeat : public Dialog
+{
+  protected:
+    virtual long maxTries() = 0;
+    virtual std::string beforeLoopOutputString() = 0;
+    virtual std::string inLoopOutputString() = 0;
+    virtual std::string inLoopFailureOutputString() = 0;
+    virtual std::string afterLoopSuccessOutputString() = 0;
+    virtual std::string afterLoopFailureOutputString() = 0;
+    virtual bool handleInput(std::string input) = 0;
+    virtual void doFailure() = 0;
+  public:
+    DialogRepeat();
+    virtual ~DialogRepeat();
+    virtual menu_return_type InvokeMe(Display&);
+};
+
+#endif // DIALOGREPEAT_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/display.cpp b/src/menu/display.cpp
new file mode 100644
index 0000000..e26a279
--- /dev/null
+++ b/src/menu/display.cpp
@@ -0,0 +1,456 @@
+// $Id: display.cpp,v 1.40 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// base class for all displays
+// + derived classes (these may need to split away from this file)
+//
+// Peter Beerli (March 22 2001)
+
+#include <string>
+#include <iostream>
+#include "constraintmenus.h"
+#include "dialognoinput.h"
+#include "display.h"
+#include "errhandling.h"
+#include "menu_strings.h"
+#include "menuitem.h"
+#include "newmenu.h"
+#include "stringx.h"
+#include "twodtable.h"
+
+using namespace std;
+
+const size_t SCREENWIDTH = 75;
+const long   MENUPARAMETERWIDTH=24;
+const long   MENUTEXTWIDTH=47;
+const long   MENUKEYWIDTH=3;
+
+// Scrolling display ----------------------------------
+ScrollingDisplay::ScrollingDisplay() {};
+ScrollingDisplay::~ScrollingDisplay() {};
+
+void ScrollingDisplay::Title(const string & title, const string & info)
+{
+    string line(SCREENWIDTH,'-');
+
+    StringVec1d titleUnits = MakeDisplayableUnits(title,SCREENWIDTH);
+    StringVec1d infoUnits = MakeDisplayableUnits(info,SCREENWIDTH-5);
+
+    cout << endl << line << endl;
+    for(StringVec1d::iterator i=titleUnits.begin(); i != titleUnits.end(); i++)
+    {
+        cout << *i << endl;
+    }
+    for(StringVec1d::iterator j=infoUnits.begin(); j != infoUnits.end(); j++)
+    {
+        cout << "    " << *j  << endl;
+    }
+    //cout << endl;
+}
+
+void ScrollingDisplay::Warn(StringVec1d warnings)
+{
+    for(unsigned long warnNum= 0; warnNum < warnings.size(); warnNum++)
+    {
+        cout << endl;
+        StringVec1d onewarning;
+        onewarning.push_back(warnings[warnNum]);
+        onewarning = Linewrap(onewarning, SCREENWIDTH);
+        for(StringVec1d::iterator i=onewarning.begin(); i != onewarning.end(); i++)
+        {
+            cout << *i << endl;
+        }
+    }
+}
+
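+// Main menu loop: redraw, dispatch the user's choice, and refuse to run or go up while the menu still reports inconsistencies.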
+menu_return_type ScrollingDisplay::DisplayNewMenu(NewMenu & menu)
+{
+    menu_return_type mrt = menu_REDISPLAY;
+    while(mrt == menu_REDISPLAY)
+    {
+        Title(menu.Title(),menu.Info());
+        ShowMenuDisplayQuantaVec(menu.MenuItems());
+        Warn(menu.Warnings());
+        string input = GetUserInput();
+        if(menu.Handles(input))
+        {
+            MenuInteraction_ptr mi = menu.GetHandler(input);
+            try
+            {
+                mrt = mi->InvokeMe(*this);
+                // delete mi;  handled by smart pointer
+            }
+            catch (const data_error& e)
+            {
+                menu.AddWarning(e.what());
+                return menu_REDISPLAY;
+            }
+        }
+        // verify that this part of the menu is consistent
+        // before fully exiting
+        if((mrt == menu_RUN) || (mrt == menu_GO_UP))
+        {
+            //
+            StringVec1d inconsistents = menu.GetInconsistencies();
+            if(!inconsistents.empty())
+            {
+                ChideOnInconsistencies(inconsistents);
+                mrt = menu_REDISPLAY;
+            }
+        }
+    }
+    switch(mrt)
+    {
+        case menu_RUN:
+            return menu_RUN;
+            break;
+        case menu_QUIT:
+            return menu_QUIT;
+            break;
+        default:
+            return menu_REDISPLAY;
+    }
+
+    return menu_REDISPLAY;
+}
+
+void ScrollingDisplay::ChideOnInconsistencies(StringVec1d inconsistents)
+{
+    DialogChideInconsistencies dci(inconsistents);
+    dci.InvokeMe(*this);
+}
+
+menu_return_type ScrollingDisplay::DisplayRaw(StringVec1d strings)
+{
+    StringVec1d::iterator i;
+    for(i=strings.begin();i!= strings.end();i++)
+    {
+        string s = *i;
+        DisplayDialogOutput(s);
+    }
+    return menu_REDISPLAY;
+}
+
+void ScrollingDisplay::ShowMenuDisplayQuantaVec(MenuDisplayQuantaVec lines)
+{
+    MenuDisplayQuantaVec :: iterator menuline;
+    for(menuline=lines.begin(); menuline != lines.end(); menuline++)
+    {
+        MenuDisplayQuanta * item = *menuline;
+        item->DisplayItemOn(*this);//Probably a call-back to ShowMenuDisplayLine
+    }
+}
+
+void ScrollingDisplay::ShowMenuDisplayGroup(MenuDisplayGroup & group)
+{
+    vector<UIId> visibleIds = group.GetVisibleIds();
+    vector<UIId>::iterator ids;
+    DisplayOneLine(group.GetGroupDescription());
+    if (visibleIds.begin() == visibleIds.end())
+    {
+        DisplayOneLine(group.GetEmptyDescription());
+    }
+    for(ids=visibleIds.begin(); ids != visibleIds.end(); ids++)
+    {
+        UIId id = *ids;
+        string menuKey = group.GetKey(id);
+        string storedMenuText = group.GetText(id);
+        string generatedMenuText = group.GetVariableText(id);
+        ShowOneMenuLine(menuKey,storedMenuText,generatedMenuText);
+        if (group.HasMultiLineItems())
+        {
+            ShowExtraMenuLines(group.GetExtraText(id),group.GetExtraVariableText(id));
+        }
+    }
+}
+
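+// Like ShowMenuDisplayGroup, but walks a two-dimensional id structure, printing a sub-heading for each row.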
+void ScrollingDisplay::ShowMenuDisplay2dGroup(ToggleMenuItem2dGroup & groups)
+{
+    UIIdVec2d visibleIds = groups.GetVisibleIds();
+    long listnum = 0;
+    DisplayOneLine(groups.GetGroupDescription());
+    if (visibleIds.begin() == visibleIds.end())
+    {
+        DisplayOneLine(groups.GetEmptyDescription());
+    }
+    for(UIIdVec2d::iterator idlists=visibleIds.begin();
+        idlists != visibleIds.end(); idlists++, listnum++)
+    {
+        UIIdVec1d idlist = *idlists;
+        DisplayOneLine(groups.GetGroupDescription(listnum));
+        if (idlist.begin() == idlist.end())
+        {
+            DisplayOneLine(groups.GetEmptyDescription(listnum));
+        }
+
+        for (UIIdVec1d::iterator ids = idlist.begin();
+             ids != idlist.end(); ids++)
+        {
+            UIId id = *ids;
+            string menuKey = groups.GetKey(id);
+            string storedMenuText = groups.GetText(id);
+            string generatedMenuText = groups.GetVariableText(id);
+            ShowOneMenuLine(menuKey,storedMenuText,generatedMenuText);
+            if (groups.HasMultiLineItems())
+            {
+                ShowExtraMenuLines(groups.GetExtraText(id),groups.GetExtraVariableText(id));
+            }
+        }
+    }
+}
+
+void ScrollingDisplay::ShowMenuDisplayLine(MenuDisplayLine & line)
+{
+    if(line.IsVisible())
+    {
+        string menuKey = line.GetKey();
+        string storedMenuText = line.GetText();
+        string generatedMenuText = line.GetVariableText();
+        ShowOneMenuLine(menuKey,storedMenuText,generatedMenuText);
+        if (line.HasMultiLineItems())
+        {
+            ShowExtraMenuLines(line.GetExtraText(),line.GetExtraVariableText());
+        }
+    }
+}
+
+vector<string> ScrollingDisplay::BreakAtCarriageReturns(string toBreak)
+{
+    vector<string> strings;
+    string::size_type prevStringPos;
+    string::size_type stringPos;
+    string stringFragment;
+
+    prevStringPos = 0;
+    // loop through the toBreak string, finding each "\n" character and
+    // stuffing each substring into the return vector
+    while( (stringPos = toBreak.find("\n",prevStringPos)) != string::npos)
+    {
+        stringFragment.assign(toBreak,prevStringPos,stringPos-prevStringPos);
+        prevStringPos = stringPos+1;
+        strings.push_back(stringFragment);
+    }
+    // remaining string has no carriage return
+    stringFragment.assign(toBreak,prevStringPos,string::npos);
+    if(!stringFragment.empty())
+    {
+        strings.push_back(stringFragment);
+    }
+    return strings;
+}
+
+// break "toShow" string into substrings no longer than "width"
+// units wide
+vector<string> ScrollingDisplay::MakeDisplayableUnits(string toShow,long width)
+{
+    string::size_type tabpos = toShow.find("\t");
+    while (tabpos != string::npos)
+    {
+        toShow.replace(tabpos,1,"    ");
+        tabpos = toShow.find("\t");
+    }
+    vector<string> returnStrings;
+    vector<string> withoutCarriageReturns = BreakAtCarriageReturns(toShow);
+    vector<string>::iterator nextStringIter;
+    for(nextStringIter = withoutCarriageReturns.begin();
+        nextStringIter != withoutCarriageReturns.end();
+        nextStringIter++)
+    {
+        long currentwidth = width;
+        string remaining = *nextStringIter;
+        //Find any indenting
+        unsigned long indentlength = 0;
+        string::size_type indent = remaining.find(" ");
+        while (indent == 0)
+        {
+            currentwidth--;
+            indentlength++;
+            remaining.erase(0,1);
+            indent = remaining.find(" ");
+        }
+        while((long)(remaining.length()) > currentwidth)
+        {
+            unsigned long breakHere
+                = remaining.find_last_of(menustr::space,currentwidth);
+            string thisLine = remaining.substr(0,breakHere);
+            thisLine = string(indentlength, ' ') + thisLine;
+            returnStrings.push_back(thisLine);
+            remaining.erase(0,breakHere);
+        }
+        remaining = string(indentlength, ' ') + remaining;
+        returnStrings.push_back(remaining);
+    }
+
+    return returnStrings;
+}
+
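+// Formats one menu line as key / description / value columns, emitting continuation lines when either column wraps.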
+void ScrollingDisplay::ShowOneMenuLine(
+    string menuKey, string storedMenuText, string generatedMenuText)
+{
+    vector<string> middleParts = MakeDisplayableUnits(storedMenuText,MENUTEXTWIDTH);
+    vector<string> rightParts = MakeDisplayableUnits(generatedMenuText,MENUPARAMETERWIDTH);
+    vector<string>::iterator middleStrings;
+    vector<string>::iterator rightStrings;
+    bool firstTime = true;
+    for(middleStrings = middleParts.begin(), rightStrings = rightParts.begin();
+        middleStrings != middleParts.end();
+        middleStrings++)
+    {
+        string key;
+        if(firstTime)
+        {
+            key = MakeJustified(menuKey, MENUKEYWIDTH);
+            firstTime = false;
+        }
+        else
+        {
+            key = MakeJustified(menustr::emptyString, MENUKEYWIDTH);
+        }
+
+        if(rightStrings != rightParts.end())
+        {
+            DisplayDialogOutput(key
+                                + "  "
+                                + MakeJustified(*middleStrings, -MENUTEXTWIDTH)
+                                + MakeJustified(*rightStrings, MENUPARAMETERWIDTH));
+            rightStrings++;
+        }
+        else
+        {
+            DisplayDialogOutput(key
+                                + "  "
+                                + MakeJustified(*middleStrings, -MENUTEXTWIDTH)
+                                + MakeJustified(menustr::emptyString, MENUPARAMETERWIDTH));
+        }
+
+    }
+    for( ; rightStrings != rightParts.end(); rightStrings++)
+    {
+        DisplayDialogOutput(MakeJustified(menustr::emptyString, MENUKEYWIDTH)
+                            + "  "
+                            + MakeJustified(menustr::emptyString, -MENUTEXTWIDTH)
+                            + MakeJustified(*rightStrings, MENUPARAMETERWIDTH));
+    }
+}
+
+void ScrollingDisplay::ShowExtraMenuLines( std::vector<string> storedMenuText, std::vector<string> generatedMenuText)
+{
+    assert(storedMenuText.size() == generatedMenuText.size());
+    std::vector<string>::iterator si;
+    std::vector<string>::iterator gi;
+    for(si=storedMenuText.begin(),gi=generatedMenuText.begin();
+        si != storedMenuText.end() && gi != generatedMenuText.end();
+        si++,gi++)
+    {
+        string s_string = *si;
+        string g_string = *gi;
+        // JIMFIX -- if you really want an indent, you'll need to refactor ShowOneMenuLine
+        // so that this call can indent the s_string argument
+        ShowOneMenuLine(menustr::emptyString,s_string,g_string);
+    }
+}
+
+void ScrollingDisplay::ShowMenuDisplayTable(MenuDisplayTable & t)
+{
+    if(t.IsVisible())
+    {
+        cout << t.CreateDisplayString();
+    }
+}
+
+void ScrollingDisplay::Prompt()
+{
+    cout << endl << ">>> ";
+    cout.flush();
+}
+
+string ScrollingDisplay::GetUserInput()
+{
+    Prompt();
+    string sline;
+    MyCinGetline(sline);
+    return sline;
+}
+
+void ScrollingDisplay::DisplayOneLine(string output)
+{
+    if((output == "") || (output == "\n"))
+    {
+        return;
+    }
+    cout << output << endl;
+}
+
+// displays the string "output" one line at a time
+// intended to later be extended to allow adjustment of
+// display width and indenting if desired
+void ScrollingDisplay::DisplayDialogOutput(string output)
+{
+    string::size_type prevStringPos;
+    string::size_type stringPos;
+    string stringFragment;
+
+    prevStringPos = 0;
+    // loop through the output string, finding each "\n" character and
+    // printing one line at a time
+    while( (stringPos = output.find("\n",prevStringPos)) != string::npos)
+    {
+        stringFragment.assign(output,prevStringPos,stringPos-prevStringPos);
+        prevStringPos = stringPos+1;
+        DisplayOneLine(stringFragment);
+    }
+    // remaining string doesn't have a "\n", so print it anyway
+    stringFragment.assign(output,prevStringPos,string::npos);
+    if(!stringFragment.empty())
+    {
+        DisplayOneLine(stringFragment);
+    }
+}
+
+// No display ----------------------------------
+NoDisplay::NoDisplay() {};
+NoDisplay::~NoDisplay() {};
+
+menu_return_type NoDisplay::DisplayNewMenu(NewMenu & menu)
+{
+    return menu_REDISPLAY;
+}
+
+menu_return_type NoDisplay::DisplayRaw(StringVec1d strings)
+{
+    return menu_REDISPLAY;
+}
+
+void NoDisplay::ShowMenuDisplayGroup(MenuDisplayGroup & m)
+{
+}
+
+void NoDisplay::ShowMenuDisplay2dGroup(ToggleMenuItem2dGroup & m)
+{
+}
+
+void NoDisplay::ShowMenuDisplayLine(MenuDisplayLine & m)
+{
+}
+
+void NoDisplay::ShowMenuDisplayTable(MenuDisplayTable & t)
+{
+}
+
+string NoDisplay::GetUserInput()
+{
+    throw impossible_error("Should never get input without display");
+}
+
+void NoDisplay::DisplayDialogOutput(string output)
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/menu/display.h b/src/menu/display.h
new file mode 100644
index 0000000..d6c0337
--- /dev/null
+++ b/src/menu/display.h
@@ -0,0 +1,94 @@
+// $Id: display.h,v 1.23 2012/02/29 00:29:58 ewalkup Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef DISPLAY_H
+#define DISPLAY_H
+
+#include <string>
+#include <vector>
+
+#include "menudefs.h" // for menu_return_type
+#include "vectorx.h"
+
+class Dialog;
+class NewMenu;
+class MenuDisplayQuantaVec;
+class MenuDisplayGroup;
+class ToggleMenuItem2dGroup;
+class MenuDisplayLine;
+class MenuDisplayTable;
+
+class Display
+{
+  public:
+    Display() {};
+    virtual ~Display() {};
+    virtual menu_return_type DisplayNewMenu(NewMenu&) = 0;
+    virtual menu_return_type DisplayRaw(StringVec1d) = 0;
+    //
+    virtual void ShowMenuDisplayGroup(MenuDisplayGroup&) = 0;
+    virtual void ShowMenuDisplay2dGroup(ToggleMenuItem2dGroup&) = 0;
+    virtual void ShowMenuDisplayLine(MenuDisplayLine&) = 0;
+    virtual void ShowMenuDisplayTable(MenuDisplayTable&) = 0;
+    //
+    virtual void DisplayDialogOutput(std::string) = 0;
+    virtual std::string GetUserInput() = 0;
+};
+
+class ScrollingDisplay : public Display
+{
+  protected:
+    void ShowMenuDisplayQuantaVec(MenuDisplayQuantaVec);
+    void Prompt();
+    void Title(const std::string & title, const std::string & info);
+    void DisplayOneLine(std::string);
+    void ShowOneMenuLine(std::string,std::string,std::string);
+    void ShowExtraMenuLines(std::vector<string> storedMenuText, std::vector<string> generatedMenuText);
+    std::vector<std::string> BreakAtCarriageReturns(std::string);
+    std::vector<std::string> MakeDisplayableUnits(std::string, long int width);
+    void ChideOnInconsistencies(StringVec1d inconsistencies);
+  public:
+    ScrollingDisplay();
+    ~ScrollingDisplay();
+    //
+    virtual menu_return_type DisplayNewMenu(NewMenu&);
+    virtual menu_return_type DisplayRaw(StringVec1d);
+    //
+    virtual void ShowMenuDisplayGroup(MenuDisplayGroup&);
+    virtual void ShowMenuDisplay2dGroup(ToggleMenuItem2dGroup&);
+    virtual void ShowMenuDisplayLine(MenuDisplayLine&);
+    virtual void ShowMenuDisplayTable(MenuDisplayTable&);
+    //
+    virtual void DisplayDialogOutput(std::string);
+    virtual std::string GetUserInput();
+    //
+    void Warn(std::vector<std::string> warnings);
+};
+
+class NoDisplay : public Display
+{
+  public:
+    NoDisplay() ;
+    ~NoDisplay();
+    virtual menu_return_type DisplayNewMenu(NewMenu&);
+    virtual menu_return_type DisplayRaw(StringVec1d);
+    //
+    virtual void ShowMenuDisplayGroup(MenuDisplayGroup&);
+    virtual void ShowMenuDisplay2dGroup(ToggleMenuItem2dGroup&);
+    virtual void ShowMenuDisplayLine(MenuDisplayLine&);
+    virtual void ShowMenuDisplayTable(MenuDisplayTable&);
+    //
+    virtual void DisplayDialogOutput(std::string);
+    virtual std::string GetUserInput();
+};
+
+#endif  // DISPLAY_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/matrixitem.cpp b/src/menu/matrixitem.cpp
new file mode 100644
index 0000000..170f9a1
--- /dev/null
+++ b/src/menu/matrixitem.cpp
@@ -0,0 +1,148 @@
+// $Id: matrixitem.cpp,v 1.13 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "matrixitem.h"
+#include "menuinteraction.h"
+#include "newmenuitems.h"
+#include "setmenuitem.h"
+#include "ui_id.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+#include "vectorx.h"
+
+UIId LowerLevelMenuItemGroup::translateLocalToGlobal(UIId localId)
+{
+    return UIId(localId.GetForceType(), intoId.GetIndex1() * ui.doGetLong(legalIdsMenuKey) + localId.GetIndex1());
+}
+
+UIId LowerLevelMenuItemGroup::translateGlobalToLocal(UIId globalId)
+{
+    return UIId(globalId.GetForceType(), globalId.GetIndex1() - (intoId.GetIndex1() * ui.doGetLong(legalIdsMenuKey)));
+}
+
+LowerLevelMenuItemGroup::LowerLevelMenuItemGroup(
+    UIInterface & myui,
+    string myMenuKey,
+    string myLegalIdsMenuKey,
+    string myVisibilityGuard,
+    UIId myId)
+    : SetMenuItemGroup(myui,myMenuKey),
+      legalIdsMenuKey(myLegalIdsMenuKey),
+      visibilityGuard(myVisibilityGuard),
+      intoId(myId)
+{
+}
+
+LowerLevelMenuItemGroup::~LowerLevelMenuItemGroup()
+{
+}
+
+string LowerLevelMenuItemGroup::GetKey(UIId globalId)
+{
+    return SetMenuItemGroup::GetKey(translateGlobalToLocal(globalId));
+}
+
+UIIdVec1d LowerLevelMenuItemGroup::GetVisibleIds()
+{
+    UIIdVec1d visibles;
+    std::set<long> grouplist;
+    if(ui.doGetBool(visibilityGuard))
+    {
+        long partitionCount = ui.doGetLong(legalIdsMenuKey);
+        for(long fromId= 0; fromId < partitionCount; fromId++)
+        {
+            if(fromId != intoId.GetIndex1())
+            {
+                UIId thisId(intoId.GetForceType(), fromId);
+                thisId = translateLocalToGlobal(thisId);
+                long gindex = ui.GetCurrentVars().forces.ParamInGroup(thisId.GetForceType(), thisId.GetIndex1());
+                if ((gindex == FLAGLONG) || (grouplist.find(gindex) == grouplist.end()))
+                {
+                    visibles.push_back(thisId);
+                    grouplist.insert(gindex);
+                }
+            }
+        }
+    }
+    return visibles;
+}
+
+//------------------------------------------------------------------------------------
+
+LowerLevelMenu::LowerLevelMenu(
+    UIInterface & myui,
+    string myMenuKey,
+    string lowerLevelMenuKey,
+    string myLegalIdsMenuKey,
+    string myVisibilityGuard,
+    UIId myId)
+    : NewMenu(myui,myui.doGetDescription(myMenuKey,myId))
+{
+    AddMenuItem(new LowerLevelMenuItemGroup(
+                    myui,lowerLevelMenuKey,myLegalIdsMenuKey,myVisibilityGuard,myId));
+}
+
+LowerLevelMenu::~LowerLevelMenu()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+MenuInteraction_ptr MatrixSetMenuItem::MakeOneHandler(UIId id)
+{
+    UIId newId(forceId.GetForceType(), id.GetIndex1());
+    return MenuInteraction_ptr(new LowerLevelMenu
+                               (ui,menuKey,lowerLevelMenuKey,legalIdsMenuKey,
+                                visibilityGuard,newId));
+}
+
+MatrixSetMenuItem::MatrixSetMenuItem(
+    UIInterface & myui,
+    string myMenuKey,
+    string myLowerLevelMenuKey,
+    string myLegalIdsMenuKey,
+    string myVisibilityGuard,
+    force_type ftype)
+    : MenuDisplayGroupBaseImplementation(myui,myMenuKey),
+      lowerLevelMenuKey(myLowerLevelMenuKey),
+      legalIdsMenuKey(myLegalIdsMenuKey),
+      visibilityGuard(myVisibilityGuard),
+      forceId(ftype)
+{
+}
+
+#if 0
+UIId MatrixSetMenuItem::GetIdFromKey(string key)
+{
+    return UIId(forceId.GetForceType(), keyToIndex(key));
+}
+#endif
+
+MatrixSetMenuItem::~MatrixSetMenuItem()
+{
+}
+
+UIIdVec1d MatrixSetMenuItem::GetVisibleIds()
+{
+    UIIdVec1d visibles;
+    if(ui.doGetBool(visibilityGuard))
+    {
+        long partitionCount = ui.doGetLong(legalIdsMenuKey);
+        for(long i= 0; i < partitionCount; i++)
+        {
+            visibles.push_back(UIId(forceId.GetForceType(),i));
+        }
+    }
+    return visibles;
+}
+
+//____________________________________________________________________________________
diff --git a/src/menu/matrixitem.h b/src/menu/matrixitem.h
new file mode 100644
index 0000000..83d60d4
--- /dev/null
+++ b/src/menu/matrixitem.h
@@ -0,0 +1,75 @@
+// $Id: matrixitem.h,v 1.11 2011/03/07 06:08:50 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef MATRIXITEM_H
+#define MATRIXITEM_H
+
+#include <string>
+#include "menuinteraction.h"
+#include "newmenu.h"
+#include "setmenuitem.h"
+#include "ui_constants.h"
+#include "ui_interface.h"
+#include "vectorx.h"
+
+class LowerLevelMenuItemGroup : public SetMenuItemGroup
+{
+  protected:
+    std::string legalIdsMenuKey;
+    std::string visibilityGuard;
+    UIId intoId;
+    virtual UIId translateLocalToGlobal(UIId localId);
+    virtual UIId translateGlobalToLocal(UIId globalId);
+  public:
+    LowerLevelMenuItemGroup(UIInterface & myui,
+                            std::string myMenuKey,
+                            std::string myLegalIdsMenuKey,
+                            std::string myVisibilityGuard,
+                            UIId myId);
+    virtual ~LowerLevelMenuItemGroup();
+    virtual std::string GetKey(UIId id);
+    virtual UIIdVec1d GetVisibleIds();
+};
+
+class LowerLevelMenu : public NewMenu
+{
+  public:
+    LowerLevelMenu(UIInterface & myui,
+                   std::string myMenuKey,
+                   std::string lowerLevelMenuKey,
+                   std::string myLegalIdsMenuKey,
+                   std::string myVisibilityGuard,
+                   UIId myId);
+    virtual ~LowerLevelMenu();
+};
+
+class MatrixSetMenuItem : public MenuDisplayGroupBaseImplementation
+{
+  protected:
+    std::string lowerLevelMenuKey;
+    std::string legalIdsMenuKey;
+    std::string visibilityGuard;
+    UIId        forceId;
+    // returns sub-menu
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+  public:
+    MatrixSetMenuItem(UIInterface & myui,
+                      std::string myMenuKey,
+                      std::string myLowerLevelMenuKey,
+                      std::string myLegalIdsMenuKey,
+                      std::string myVisibilityGuard,
+                      force_type ftype);
+    virtual ~MatrixSetMenuItem();
+    UIIdVec1d GetVisibleIds();
+};
+
+#endif  // MATRIXITEM_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/menu_strings.cpp b/src/menu/menu_strings.cpp
new file mode 100644
index 0000000..b4ebc85
--- /dev/null
+++ b/src/menu/menu_strings.cpp
@@ -0,0 +1,36 @@
+// $Id: menu_strings.cpp,v 1.14 2010/03/02 23:12:29 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <string>
+#include "menu_strings.h"
+
+using std::string;
+
+const string key::dot   = ".";
+const string key::Q     = "Q";
+const string key::R     = "R";
+
+const string menustr::acknowledge      = "\nType <Return> to continue\n";
+const string menustr::bottomLine       = "<Return> = Go Up | . = Run | Q = Quit";
+const string menustr::bottomLineAtTop  = ". = Run | Q = Quit";
+const string menustr::carriageReturn   = "\n";
+const string menustr::divider          = "----------";
+const string menustr::emptyString      = "";
+const string menustr::inconsistencies  = "\n\nYou must fix inconsistencies with the following menu\nitems before exiting the current menu:\n\t";
+const string menustr::space            = " ";
+const string menustr::initial          = "Initial";
+const string menustr::final            = "Final";
+const string menustr::chains           = "Number of chains";
+const string menustr::discard          = "Number of genealogies to discard";
+const string menustr::interval         = "Sampling interval";
+const string menustr::samples          = "Number of samples";
+
+//____________________________________________________________________________________
diff --git a/src/menu/menu_strings.h b/src/menu/menu_strings.h
new file mode 100644
index 0000000..91089a8
--- /dev/null
+++ b/src/menu/menu_strings.h
@@ -0,0 +1,46 @@
+// $Id: menu_strings.h,v 1.16 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef MENU_STRINGS_H
+#define MENU_STRINGS_H
+
+#include <string>
+
+class key
+{
+  public:
+    static const std::string dot;
+    static const std::string R;
+    static const std::string Q;
+};
+
+class menustr
+{
+  public:
+    static const std::string acknowledge;
+    static const std::string bottomLine;
+    static const std::string bottomLineAtTop;
+    static const std::string divider;
+    static const std::string carriageReturn;
+    static const std::string emptyString;
+    static const std::string inconsistencies;
+    static const std::string space;
+    static const std::string initial;
+    static const std::string final;
+    static const std::string chains;
+    static const std::string discard;
+    static const std::string interval;
+    static const std::string samples;
+};
+
+#endif // MENU_STRINGS_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/menudefs.h b/src/menu/menudefs.h
new file mode 100644
index 0000000..b2b5b2d
--- /dev/null
+++ b/src/menu/menudefs.h
@@ -0,0 +1,18 @@
+// $Id: menudefs.h,v 1.12 2010/03/17 17:25:59 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef MENUDEFS_H
+#define MENUDEFS_H
+
+enum menu_return_type { menu_GO_UP, menu_REDISPLAY, menu_QUIT, menu_RUN};
+
+#endif // MENUDEFS_H
+
+//____________________________________________________________________________________
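The enum above is the whole protocol between a menu and its caller: redisplay the current menu, pop back to the parent, start the run, or quit. A minimal sketch of a driver loop built on it (ShowMenuOnce is a hypothetical stand-in; the real loop lives in the lamarcmenus code):

    #include <iostream>

    enum menu_return_type { menu_GO_UP, menu_REDISPLAY, menu_QUIT, menu_RUN };

    // Stand-in for "display the menu, read a key, dispatch it"; always quits here.
    menu_return_type ShowMenuOnce() { return menu_QUIT; }

    int main()
    {
        bool done = false;
        while (!done)
        {
            switch (ShowMenuOnce())
            {
                case menu_REDISPLAY: break;                               // show the same menu again
                case menu_GO_UP:     done = true; break;                  // return to the parent menu
                case menu_RUN:       std::cout << "start the analysis\n"; done = true; break;
                case menu_QUIT:      std::cout << "exit without running\n"; done = true; break;
            }
        }
        return 0;
    }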
diff --git a/src/menu/menuerror.h b/src/menu/menuerror.h
new file mode 100644
index 0000000..dc3147c
--- /dev/null
+++ b/src/menu/menuerror.h
@@ -0,0 +1,29 @@
+// $Id: menuerror.h,v 1.7 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef MENUERROR_H
+#define MENUERROR_H
+
+#include <stdexcept>
+#include <string>
+
+class DialogProtocolError : public std::exception
+{
+  private:
+    std::string _what;
+  public:
+    DialogProtocolError(const std::string& wh): _what (wh) { };
+    virtual ~DialogProtocolError() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+#endif  // MENUERROR_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/menuinteraction.cpp b/src/menu/menuinteraction.cpp
new file mode 100644
index 0000000..0c1d374
--- /dev/null
+++ b/src/menu/menuinteraction.cpp
@@ -0,0 +1,22 @@
+// $Id: menuinteraction.cpp,v 1.8 2010/03/17 17:25:59 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "menuinteraction.h"
+#include "ui_constants.h"
+
+MenuInteraction::MenuInteraction()
+{
+}
+
+MenuInteraction::~MenuInteraction()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/menu/menuinteraction.h b/src/menu/menuinteraction.h
new file mode 100644
index 0000000..d8c0688
--- /dev/null
+++ b/src/menu/menuinteraction.h
@@ -0,0 +1,28 @@
+// $Id: menuinteraction.h,v 1.9 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef MENUINTERACTION_H
+#define MENUINTERACTION_H
+
+#include "menudefs.h" // for menu_return_type
+
+class Display;
+
+class MenuInteraction
+{
+  public:
+    MenuInteraction();
+    virtual ~MenuInteraction();
+    virtual menu_return_type InvokeMe(Display& display) = 0;
+};
+
+#endif // MENUINTERACTION_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/menuitem.cpp b/src/menu/menuitem.cpp
new file mode 100644
index 0000000..7cd4e88
--- /dev/null
+++ b/src/menu/menuitem.cpp
@@ -0,0 +1,142 @@
+// $Id: menuitem.cpp,v 1.25 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// menuitem.cpp
+// functions to control the menuitem (a single line in a menu)
+//
+// Peter Beerli
+
+#include <vector>
+
+#include "display.h"
+#include "errhandling.h"
+#include "menuinteraction.h"
+#include "menuitem.h"
+#include "menutypedefs.h"
+#include "stringx.h"
+#include "ui_constants.h"
+
+void
+MenuDisplayQuantaVec::NukeContents()
+{
+    MenuDisplayQuantaVec::iterator iter;
+    for(iter = begin(); iter != end(); iter++)
+    {
+        delete(*iter);
+    }
+}
+
+MenuInteraction_ptr
+MenuDisplayQuantaWithHandler::GetHandler(string key)
+{
+    return MenuInteraction_ptr(NULL);
+}
+
+void MenuDisplayLine::DisplayItemOn(Display & display)
+{
+    display.ShowMenuDisplayLine(*this);
+}
+
+UIId& MenuDisplayLine::GetId()
+{
+    return NO_ID();
+}
+
+bool MenuDisplayLine::IsVisible()
+{
+    return true;
+}
+
+bool MenuDisplayLine::IsConsistent()
+{
+    return true;
+}
+
+void MenuDisplayGroup::DisplayItemOn(Display & display)
+{
+    display.ShowMenuDisplayGroup(*this);
+}
+
+std::vector<UIId> MenuDisplayGroup::GetInconsistentIds()
+{
+    std::vector<UIId> badIds;   // empty vector => no bad ids!
+    return badIds;
+}
+
+void MenuDisplayTable::DisplayItemOn(Display & display)
+{
+    display.ShowMenuDisplayTable(*this);
+}
+
+DisplayOnlyGroupWrapper::DisplayOnlyGroupWrapper(MenuDisplayGroup * group)
+    : m_group(group)
+{
+}
+
+DisplayOnlyGroupWrapper::~DisplayOnlyGroupWrapper()
+{
+    delete m_group;
+}
+
+vector<UIId>
+DisplayOnlyGroupWrapper::GetVisibleIds()
+{
+    return m_group->GetVisibleIds();
+}
+
+vector<UIId>
+DisplayOnlyGroupWrapper::GetInconsistentIds()
+{
+    vector<UIId> emptyVector;
+    return emptyVector;
+}
+
+string
+DisplayOnlyGroupWrapper::GetKey(UIId id)
+{
+    return menustr::emptyString;
+}
+
+string
+DisplayOnlyGroupWrapper::GetText(UIId id)
+{
+    return m_group->GetText(id);
+}
+
+string
+DisplayOnlyGroupWrapper::GetVariableText(UIId id)
+{
+    return m_group->GetVariableText(id);
+}
+
+KeyedMenuItem::KeyedMenuItem(const string& myKey)
+    : MenuDisplayLine(), key(myKey)
+{
+}
+
+KeyedMenuItem::~KeyedMenuItem()
+{
+}
+
+string KeyedMenuItem::GetKey()
+{
+    return key;
+}
+
+bool KeyedMenuItem::Handles(std::string inputKey)
+{
+    if(IsVisible())
+    {
+        return CaselessStrCmp(GetKey(),inputKey);
+    }
+    return false;
+}
+
+//____________________________________________________________________________________
diff --git a/src/menu/menuitem.h b/src/menu/menuitem.h
new file mode 100644
index 0000000..991584e
--- /dev/null
+++ b/src/menu/menuitem.h
@@ -0,0 +1,163 @@
+// $Id: menuitem.h,v 1.27 2012/02/29 00:29:58 ewalkup Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// menuitem.h
+// a menu item is a single line in the menu, consisting of a (key, text, handler object) triple
+//
+// Peter Beerli
+
+#ifndef MENUITEM_H
+#define MENUITEM_H
+
+#include <string>
+#include <vector>
+#include "menu_strings.h"
+#include "menutypedefs.h"
+#include "ui_id.h"
+
+class Display;
+class MenuInteraction;
+
+// a self-contained unit of display/action invocation in the menu.
+// Examples include
+//      * a table of values to display
+//      * a single menu line that performs an action when its key is entered
+//      * a set of similar menu lines that perform the same type of
+//          action on different instances of a similar object
+class MenuDisplayQuanta
+{
+  public:
+    MenuDisplayQuanta() {};
+    virtual ~MenuDisplayQuanta() {};
+    virtual void DisplayItemOn(Display & display) = 0;
+};
+
+// this ordered set of MenuDisplayQuanta forms the basic
+// display unit of a Menu
+////////typedef vector<MenuDisplayQuanta*> MenuDisplayQuantaVec;
+class MenuDisplayQuantaVec : public std::vector<MenuDisplayQuanta*>
+{
+  public:
+    MenuDisplayQuantaVec() {};
+    virtual ~MenuDisplayQuantaVec() {};
+    virtual void NukeContents();
+};
+
+class MenuDisplayQuantaWithHandler : public MenuDisplayQuanta
+{
+  public:
+    MenuDisplayQuantaWithHandler() {};
+    virtual ~MenuDisplayQuantaWithHandler() {};
+    //
+    virtual bool Handles(std::string inputKey) = 0;
+    virtual MenuInteraction_ptr GetHandler(std::string);
+    virtual bool HasMultiLineItems() = 0;
+};
+
+// like group, but only ever has a single line
+class MenuDisplayLine : public MenuDisplayQuantaWithHandler
+{
+  protected:
+    UIId& GetId();
+  public:
+    MenuDisplayLine() {};
+    virtual ~MenuDisplayLine() {};
+    // single alphanumeric to invoke this line
+    virtual std::string GetKey() = 0;
+    // description of this item
+    virtual std::string GetText() = 0;
+    // current value of this item if any
+    virtual std::string GetVariableText() = 0;
+    virtual bool IsVisible();
+    // has this item been set to a legal value?
+    virtual bool IsConsistent();
+    // not virtual because I'm assuming all inheritors should
+    // use the canned routine given in class Display
+    void DisplayItemOn(Display & display);
+
+    // stuff for multi-line items
+    virtual bool HasMultiLineItems() { return false; };
+    virtual std::vector<std::string> GetExtraText() {return std::vector<std::string>();};
+    virtual std::vector<std::string> GetExtraVariableText() {return std::vector<std::string>();};
+};
+
+// a set of similar menu lines that perform the same type of
+// action on different instances of a similar object
+class MenuDisplayGroup : public MenuDisplayQuantaWithHandler
+{
+  public:
+    MenuDisplayGroup() {};
+    virtual ~MenuDisplayGroup() {};
+    //
+    virtual std::vector<UIId> GetVisibleIds() = 0;
+    virtual std::vector<UIId> GetInconsistentIds();
+    // single alphanumeric to invoke this line
+    virtual std::string GetKey(UIId id) = 0;
+    // description of this item
+    virtual std::string GetText(UIId id) = 0;
+    // current value of this item if any
+    virtual std::string GetVariableText(UIId id) = 0;
+    // not virtual because I'm assuming all inheritors should
+    // use the canned routine given in class Display
+    void DisplayItemOn(Display & display);
+    virtual std::string GetGroupDescription() {return "\n";};
+    virtual std::string GetEmptyDescription() {return "\n";};
+
+    // stuff for multi-line items
+    virtual bool HasMultiLineItems() {return false;};
+    virtual std::vector<std::string> GetExtraText(UIId id) {return std::vector<std::string>();};
+    virtual std::vector<std::string> GetExtraVariableText(UIId id) {return std::vector<std::string>();};
+};
+
+// a multi-line unit displayed in a menu; it has no associated
+// actions that can be invoked from it
+class MenuDisplayTable : public MenuDisplayQuanta
+{
+  public:
+    MenuDisplayTable() {};
+    virtual ~MenuDisplayTable() {};
+    virtual bool IsVisible() = 0;
+    virtual std::string CreateDisplayString() = 0;
+    // not virtual because I'm assuming all inheritors should
+    // use the canned routine given in class Display
+    void DisplayItemOn(Display & display);
+};
+
+class DisplayOnlyGroupWrapper : public MenuDisplayGroup
+{
+  protected:
+    MenuDisplayGroup * m_group;
+  public:
+    DisplayOnlyGroupWrapper(MenuDisplayGroup * group);
+    virtual ~DisplayOnlyGroupWrapper();
+    //
+    virtual std::vector<UIId> GetVisibleIds();
+    virtual std::vector<UIId> GetInconsistentIds();
+    virtual std::string GetKey(UIId id);
+    virtual std::string GetText(UIId id);
+    virtual std::string GetVariableText(UIId id);
+    // void DisplayItemOn(Display & display);
+    virtual bool Handles(std::string inputKey) { return false;};
+};
+
+class KeyedMenuItem : public MenuDisplayLine
+{
+  protected:
+    const std::string key;
+  public:
+    KeyedMenuItem(const std::string& myKey);
+    virtual ~KeyedMenuItem();
+    virtual std::string GetKey();
+    virtual bool Handles(std::string inputKey);
+};
+
+#endif  // MENUITEM_H
+
+//____________________________________________________________________________________
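One structural note on this header: DisplayItemOn is a small double dispatch. Each display quantum hands itself to the Display, and the Display supplies the rendering for that concrete kind of quantum (ShowMenuDisplayLine, ShowMenuDisplayTable, and so on). The following toy program, using illustrative names rather than the LAMARC classes, shows the shape of that interaction:

    #include <iostream>

    class ToyLine;    // forward declarations so ToyDisplay can name them
    class ToyTable;

    // Counterpart of class Display: one Show* method per concrete quantum type.
    class ToyDisplay
    {
      public:
        virtual ~ToyDisplay() {}
        virtual void ShowLine(ToyLine&)   = 0;
        virtual void ShowTable(ToyTable&) = 0;
    };

    // Counterpart of MenuDisplayQuanta.
    class ToyQuantum
    {
      public:
        virtual ~ToyQuantum() {}
        virtual void DisplayItemOn(ToyDisplay& d) = 0;
    };

    class ToyLine : public ToyQuantum
    {
      public:
        void DisplayItemOn(ToyDisplay& d) { d.ShowLine(*this); }    // dispatch on the line type
    };

    class ToyTable : public ToyQuantum
    {
      public:
        void DisplayItemOn(ToyDisplay& d) { d.ShowTable(*this); }   // dispatch on the table type
    };

    class ToyScrollingDisplay : public ToyDisplay
    {
      public:
        void ShowLine(ToyLine&)   { std::cout << "render a single menu line\n"; }
        void ShowTable(ToyTable&) { std::cout << "render a multi-line table\n"; }
    };

    int main()
    {
        ToyScrollingDisplay display;
        ToyLine line;
        ToyTable table;
        ToyQuantum* items[2] = { &line, &table };
        for (int i = 0; i < 2; ++i)
        {
            items[i]->DisplayItemOn(display);   // each item picks the right Show* overload
        }
        return 0;
    }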
diff --git a/src/menu/menutypedefs.h b/src/menu/menutypedefs.h
new file mode 100644
index 0000000..0e99249
--- /dev/null
+++ b/src/menu/menutypedefs.h
@@ -0,0 +1,29 @@
+// $Id: menutypedefs.h,v 1.4 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef MENUTYPEDEFS_H
+#define MENUTYPEDEFS_H
+
+#include <memory>
+
+class NewMenu;
+//typedef boost::shared_ptr<NewMenu> NewMenu_ptr;
+
+typedef std::auto_ptr<NewMenu> NewMenu_ptr;
+
+class MenuInteraction;
+
+//typedef boost::shared_ptr<MenuInteraction> MenuInteraction_ptr;
+
+typedef std::auto_ptr<MenuInteraction> MenuInteraction_ptr;
+
+#endif // MENUTYPEDEFS_H
+
+//____________________________________________________________________________________
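Note that std::auto_ptr, used for both typedefs above, was deprecated in C++11 and removed in C++17. If these typedefs were ever modernized (an assumption, not a change made in this release), the natural replacement would be std::unique_ptr, with std::move added at the handful of call sites that currently rely on auto_ptr's copy-transfer semantics:

    #include <memory>

    class NewMenu;
    class MenuInteraction;

    // Hypothetical C++11 equivalents of the auto_ptr typedefs above.
    typedef std::unique_ptr<NewMenu>         NewMenu_ptr;
    typedef std::unique_ptr<MenuInteraction> MenuInteraction_ptr;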
diff --git a/src/menu/newmenu.cpp b/src/menu/newmenu.cpp
new file mode 100644
index 0000000..2825f4d
--- /dev/null
+++ b/src/menu/newmenu.cpp
@@ -0,0 +1,211 @@
+// $Id: newmenu.cpp,v 1.30 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <string>
+#include <vector>
+
+#include "menuinteraction.h"
+#include "display.h"
+#include "newmenu.h"
+#include "errhandling.h"
+#include "menudefs.h"
+#include "menuitem.h"
+#include "menu_strings.h"
+#include "newmenuitems.h"
+#include "ui_interface.h"
+#include "constants.h"
+#include "stringx.h"
+
+//------------------------------------------------------------------------------------
+
+void NewMenu::fillStandardContent(bool atTop)
+{
+    beforeContent.push_back(new BlankMenuItem());
+    //
+    afterContent.push_back(new DividerMenuItem());
+    if(atTop)
+    {
+        afterContent.push_back(new BottomLineMenuItemAtTop(ui));
+    }
+    else
+    {
+        afterContent.push_back(new BottomLineMenuItem(ui));
+    }
+    afterContent.push_back(new UndoMenuItem(ui));
+    afterContent.push_back(new RedoMenuItem(ui));
+    afterContent.push_back(new FileDumpMenuItem(ui));
+}
+
+NewMenu::NewMenu(UIInterface & myui, const std::string myTitle, bool atTop)
+    : MenuInteraction(), ui(myui), title(myTitle), info (menustr::emptyString)
+{
+    fillStandardContent(atTop);
+}
+
+NewMenu::NewMenu(UIInterface & myui, const std::string myTitle, const std::string myInfo, bool atTop)
+    : MenuInteraction(), ui(myui), title(myTitle), info (myInfo)
+{
+    fillStandardContent(atTop);
+}
+
+NewMenu::~NewMenu()
+{
+    afterContent.NukeContents();
+    afterContent.clear();
+    beforeContent.NukeContents();
+    beforeContent.clear();
+    myContent.NukeContents();
+    myContent.clear();
+}
+
+void NewMenu::AddMenuItem(MenuDisplayQuanta * menuItem)
+{
+    //Check to see if we already use this key.
+#ifndef NDEBUG
+    MenuDisplayLine * newmenuline = dynamic_cast<MenuDisplayLine*>(menuItem);
+    if (newmenuline != NULL)
+    {
+        string newkey = newmenuline->GetKey();
+        if (newkey != menustr::emptyString)
+        {
+            if (Handles(newkey))
+            {
+                string msg = "The menu key for \"" + newmenuline->GetText() +
+                    "\" (" + newkey + ") is already used by another menu item.";
+                throw implementation_error(msg);
+            }
+        }
+    }
+#endif
+    //We're safe--add it.
+    myContent.push_back(menuItem);
+}
+
+// returns true if this menu knows how to respond to the given input
+bool NewMenu::Handles(std::string input)
+{
+    MenuDisplayQuantaVec allContent = MenuItems();
+    MenuDisplayQuantaVec::iterator i;
+    for(i=allContent.begin(); i!=allContent.end(); i++)
+    {
+        MenuDisplayQuanta * base = *i;
+        MenuDisplayQuantaWithHandler * item
+            = dynamic_cast<MenuDisplayQuantaWithHandler*>(base);
+        if(item != NULL)
+        {
+            if(item->Handles(input))
+            {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+MenuInteraction_ptr NewMenu::GetHandler(std::string input)
+{
+    MenuDisplayQuantaVec allContent = MenuItems();
+    MenuDisplayQuantaVec::iterator i;
+    for(i=allContent.begin(); i!=allContent.end(); i++)
+    {
+        MenuDisplayQuanta * base = *i;
+        MenuDisplayQuantaWithHandler * item
+            = dynamic_cast<MenuDisplayQuantaWithHandler*>(base);
+        if(item != NULL)
+        {
+            if(item->Handles(input))
+            {
+                MenuInteraction_ptr mi = item->GetHandler(input);
+                return mi;
+            }
+        }
+    }
+    assert(false);
+    return MenuInteraction_ptr(NULL);
+}
+
+menu_return_type NewMenu::InvokeMe(Display& display)
+{
+    return display.DisplayNewMenu(*this);
+}
+
+std::string NewMenu::Title()
+{
+    return title;
+}
+
+StringVec1d NewMenu::Warnings()
+{
+    return ui.GetAndClearWarnings();
+}
+
+void NewMenu::AddWarning(string warning)
+{
+    ui.AddWarning(warning);
+}
+
+std::string NewMenu::Info()
+{
+    return info;
+}
+
+MenuDisplayQuantaVec NewMenu::MenuItems()
+{
+    MenuDisplayQuantaVec allContent;
+    allContent.insert(allContent.end(),beforeContent.begin(),beforeContent.end());
+    allContent.insert(allContent.end(),myContent.begin(),myContent.end());
+    allContent.insert(allContent.end(),afterContent.begin(),afterContent.end());
+    return allContent;
+}
+
+StringVec1d NewMenu::GetInconsistencies()
+{
+    StringVec1d inconsistents;
+    MenuDisplayQuantaVec :: iterator i;
+    for(i = myContent.begin(); i != myContent.end(); i++)
+    {
+        MenuDisplayQuanta * base = *i;
+        MenuDisplayLine * line = dynamic_cast<MenuDisplayLine*>(base);
+        if(line != NULL)
+        {
+            if(line->IsVisible())
+            {
+                if(!line->IsConsistent())
+                {
+                    inconsistents.push_back(line->GetKey());
+                }
+            }
+        }
+        MenuDisplayGroup * group = dynamic_cast<MenuDisplayGroup*>(base);
+        if(group != NULL)
+        {
+            std::vector<UIId> inconsistentIds = group->GetInconsistentIds();
+            std::vector<UIId> :: iterator i;
+            for(i = inconsistentIds.begin(); i != inconsistentIds.end(); i++)
+            {
+                inconsistents.push_back(group->GetKey(*i));
+            }
+        }
+    }
+    return inconsistents;
+}
+
+//------------------------------------------------------------------------------------
+
+NewMenuCreator::NewMenuCreator()
+{
+}
+
+NewMenuCreator::~NewMenuCreator()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/menu/newmenu.h b/src/menu/newmenu.h
new file mode 100644
index 0000000..2ae9528
--- /dev/null
+++ b/src/menu/newmenu.h
@@ -0,0 +1,62 @@
+// $Id: newmenu.h,v 1.23 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef NEWMENU_H
+#define NEWMENU_H
+
+#include <string>
+
+#include "menudefs.h"
+#include "menuinteraction.h"
+#include "menuitem.h"
+#include "menutypedefs.h"
+#include "vectorx.h"
+
+class Display;
+class UIInterface;
+
+class NewMenu : public MenuInteraction
+{
+  protected:
+    UIInterface & ui;
+    const std::string title;
+    const std::string info;
+    MenuDisplayQuantaVec afterContent;
+    MenuDisplayQuantaVec beforeContent;
+    MenuDisplayQuantaVec myContent;
+    virtual void AddMenuItem(MenuDisplayQuanta*);
+    void fillStandardContent(bool atTop);
+  public:
+    NewMenu(UIInterface & myui, const std::string myTitle, bool atTop=false);
+    NewMenu(UIInterface & myui, const std::string myTitle, const std::string myInfo, bool atTop=false);
+    virtual ~NewMenu();
+    virtual bool Handles(std::string input);
+    virtual MenuInteraction_ptr GetHandler(std::string input);
+    virtual menu_return_type InvokeMe(Display&);
+    virtual std::string Title();
+    virtual std::vector<std::string> Warnings();
+    virtual void AddWarning(std::string warning);
+    virtual std::string Info();
+    virtual MenuDisplayQuantaVec MenuItems();
+    virtual StringVec1d GetInconsistencies();
+};
+
+class NewMenuCreator
+{
+  public:
+    NewMenuCreator();
+    virtual ~NewMenuCreator();
+    //        virtual NewMenu * Create() = 0;
+    virtual NewMenu_ptr Create() = 0;
+};
+
+#endif // NEWMENU_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/newmenuitems.cpp b/src/menu/newmenuitems.cpp
new file mode 100644
index 0000000..c1dc023
--- /dev/null
+++ b/src/menu/newmenuitems.cpp
@@ -0,0 +1,294 @@
+// $Id: newmenuitems.cpp,v 1.36 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// newmenuitems.cpp
+// concrete menu items and their handlers (each a single line in a menu)
+//
+// Peter Beerli
+
+#include <string>
+#include "display.h"
+#include "lamarc.h"         // EWFIX.P3 HACK HACK HACK -- for FinishRegistry()
+                            // which we shouldn't even be contemplating using
+#include "menudefs.h"
+#include "newmenuitems.h"
+#include "outputfile.h"
+#include "registry.h"       // EWFIX.P3 HACK HACK HACK -- for FileDumpHandler
+#include "ui_vars.h"        // EWFIX.P3 GIFTFROMLUCIAN Because of FinishRegistry()
+#include "stringx.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+
+SameLevelMenuItem::SameLevelMenuItem(std::string myKey, UIInterface & myUi, std::string myMenuKey)
+    : KeyedMenuItem(myKey), ui(myUi) , menuKey(myMenuKey)
+{
+}
+
+SameLevelMenuItem::~SameLevelMenuItem()
+{
+}
+
+std::string SameLevelMenuItem::GetVariableText()
+{
+    return menustr::emptyString;
+}
+
+menu_return_type DoNothingHandler::InvokeMe(Display& display)
+{
+    return menu_REDISPLAY;
+}
+
+FileDumpMenuItem::FileDumpMenuItem(UIInterface & myUi)
+    : SameLevelMenuItem(">",myUi,"")
+{
+}
+
+FileDumpMenuItem::~FileDumpMenuItem()
+{
+}
+
+MenuInteraction_ptr FileDumpMenuItem::GetHandler(string inputKey)
+{
+    return MenuInteraction_ptr(new FileDumpHandler(ui));
+}
+
+std::string FileDumpMenuItem::GetText()
+{
+    return std::string("Generate Lamarc input file for these settings");
+}
+
+std::string
+FileDumpHandler::beforeLoopOutputString()
+{
+    return "";
+}
+
+std::string
+FileDumpHandler::inLoopOutputString()
+{
+    return "Enter a filename for a LAMARC input file.  Default is \""
+        +  ui.doGetString(uistr::xmlOutFileName)
+        +"\".\n";
+}
+
+std::string
+FileDumpHandler::inLoopFailureOutputString()
+{
+    return "\nUnable to write to this file.\n";
+}
+
+std::string
+FileDumpHandler::afterLoopSuccessOutputString()
+{
+    return string("");
+}
+
+std::string
+FileDumpHandler::afterLoopFailureOutputString()
+{
+    return "Giving up after "+ToString(maxTries())
+        + " unsuccessful attempts.\n";
+}
+
+bool
+FileDumpHandler::handleInput(std::string input)
+{
+    std::string fileToWrite = ui.doGetString(uistr::xmlOutFileName);
+    if (input.size() != 0)
+    {
+        fileToWrite = input;
+    }
+    std::ifstream testin(fileToWrite.c_str(), std::ios::in);
+    std::ofstream testout(fileToWrite.c_str(), std::ios::out);
+    if(testout)
+    {
+        if(testin)
+        {
+            string msg = "Warning:  overwrote an older version of \""
+                + fileToWrite
+                + "\".";
+            ui.AddWarning(msg);
+        }
+        else
+        {
+            string msg = "Successfully wrote to \"" + fileToWrite + "\".";
+            ui.AddWarning(msg);
+        }
+        FinishRegistry(ui);
+        XMLOutfile xmlout(fileToWrite);
+        xmlout.Display();
+        // FinishRegistry moves any loci in the datapack to a new location.
+        //  This is lethal if you want to do more with it, so we must revert
+        //  it.  If we split the datapack into phase 1/phase 2, this would go
+        //  away.  --LS NOTE.
+        ui.GetCurrentVars().datapackplus.RevertAllMovingLoci();
+        return true;
+    }
+    else
+    {
+        return false;
+    }
+}
+
+////
+
+UndoMenuItem::UndoMenuItem(UIInterface & myUi)
+    : SameLevelMenuItem("-",myUi,"")
+{
+}
+
+UndoMenuItem::~UndoMenuItem()
+{
+}
+
+std::string UndoMenuItem::GetText()
+{
+    return ui.GetUndoDescription();
+}
+
+bool UndoMenuItem::IsVisible()
+{
+    bool undoable = ui.CanUndo();
+    return undoable;
+}
+
+MenuInteraction_ptr UndoMenuItem::GetHandler(string inputKey)
+{
+    return MenuInteraction_ptr(new UndoHandler(ui));
+}
+
+menu_return_type UndoHandler::InvokeMe(Display& display)
+{
+    ui.Undo();
+    return menu_REDISPLAY;
+}
+
+RedoMenuItem::RedoMenuItem(UIInterface & myUi)
+    : SameLevelMenuItem("+",myUi,"")
+{
+}
+
+RedoMenuItem::~RedoMenuItem()
+{
+}
+
+std::string RedoMenuItem::GetText()
+{
+    return ui.GetRedoDescription();
+}
+
+bool RedoMenuItem::IsVisible()
+{
+    bool redoable =  ui.CanRedo();
+    return redoable;
+}
+
+MenuInteraction_ptr RedoMenuItem::GetHandler(string inputKey)
+{
+    return MenuInteraction_ptr(new RedoHandler(ui));
+}
+
+menu_return_type RedoHandler::InvokeMe(Display& display)
+{
+    ui.Redo();
+    return menu_REDISPLAY;
+}
+
+BottomLineMenuItem::BottomLineMenuItem(UIInterface& ui)
+    : m_ui(ui)
+{
+}
+
+BottomLineMenuItem::~BottomLineMenuItem()
+{
+}
+
+bool BottomLineMenuItem::Handles(string inputKey)
+{
+    if(CaselessStrCmp(inputKey, menustr::emptyString))
+    { return true;};
+    if(CaselessStrCmp(inputKey, key::dot))
+    { return true;};
+    if(CaselessStrCmp(inputKey, key::Q))
+    { return true;};
+    return false;
+}
+
+MenuInteraction_ptr BottomLineMenuItem::GetHandler(string inputKey)
+{
+    if(CaselessStrCmp(inputKey, menustr::emptyString))
+    { return MenuInteraction_ptr(new GoUpHandler()); };
+    if(CaselessStrCmp(inputKey, key::dot))
+    { return MenuInteraction_ptr(new RunHandler(m_ui)); };
+    if(CaselessStrCmp(inputKey, key::Q))
+    { return MenuInteraction_ptr(new QuitHandler()); };
+    return MenuInteraction_ptr(new DoNothingHandler());
+}
+
+BottomLineMenuItemAtTop::BottomLineMenuItemAtTop(UIInterface& ui)
+    : BottomLineMenuItem(ui)
+{
+}
+
+BottomLineMenuItemAtTop::~BottomLineMenuItemAtTop()
+{
+}
+
+menu_return_type RunHandler::InvokeMe(Display & display)
+{
+    if (m_ui.IsReadyToRun())
+    {
+        return menu_RUN;
+    }
+    else
+    {
+        return DialogAcknowledge::InvokeMe(display);
+    }
+}
+
+string RunHandler::outputString()
+{
+    return m_ui.WhatIsWrong();
+}
+
+SubMenuItem::SubMenuItem(std::string myKey,UIInterface & myui, NewMenuCreator * myCreator)
+    : KeyedMenuItem(myKey), ui(myui), creator(myCreator)
+{
+}
+
+SubMenuItem::~SubMenuItem()
+{
+    delete creator;
+}
+
+string SubMenuItem::GetText()
+{
+    // EWFIX.P5 REFACTOR there was a memory leak here, which was fixed
+    // by having Create() return a shared pointer, but surely
+    // we can do better
+    return creator->Create()->Title();
+}
+
+string SubMenuItem::GetVariableText()
+{
+    return menustr::emptyString;
+}
+
+MenuInteraction_ptr SubMenuItem::GetHandler(std::string inputKey)
+{
+    // EWFIX.P5 REFACTOR there was a memory leak here, which was fixed
+    // by having Create() return a shared pointer, but surely
+    // we can do better
+    NewMenu_ptr nm(creator->Create());
+    NewMenu * np = nm.release();
+    return MenuInteraction_ptr(np);
+}
+
+//____________________________________________________________________________________
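FileDumpHandler::handleInput above checks whether the target file already exists by opening an ifstream before the ofstream (which truncates the file), and warns about the overwrite if so. The same idea as a standalone sketch, with an illustrative filename and without the LAMARC registry machinery:

    #include <fstream>
    #include <iostream>
    #include <string>

    // Returns true on success; warns if an existing file was overwritten.
    bool WriteWithOverwriteWarning(const std::string& filename, const std::string& contents)
    {
        std::ifstream testin(filename.c_str(), std::ios::in);
        bool existed = testin.is_open();          // open succeeded => the file was already there
        testin.close();

        std::ofstream testout(filename.c_str(), std::ios::out);
        if (!testout)
        {
            return false;                         // cannot write to this file at all
        }
        if (existed)
        {
            std::cerr << "Warning: overwrote an older version of \"" << filename << "\".\n";
        }
        testout << contents;
        return true;
    }

    int main()
    {
        if (!WriteWithOverwriteWarning("example-output.xml", "<lamarc/>\n"))
        {
            std::cerr << "Unable to write to this file.\n";
        }
        return 0;
    }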
diff --git a/src/menu/newmenuitems.h b/src/menu/newmenuitems.h
new file mode 100644
index 0000000..7cef045
--- /dev/null
+++ b/src/menu/newmenuitems.h
@@ -0,0 +1,228 @@
+// $Id: newmenuitems.h,v 1.32 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef NEWMENUITEM_H
+#define NEWMENUITEM_H
+
+#include <string>
+#include "dialognoinput.h"
+#include "dialogrepeat.h"
+#include "menudefs.h"
+#include "menuitem.h"
+#include "menuinteraction.h"
+#include "menu_strings.h"
+#include "newmenu.h"
+#include "ui_constants.h"
+
+class UIInterface;
+
+// puts text into the menu but cannot be entered
+class OutputOnlyMenuItem : public MenuDisplayLine
+{
+  protected:
+    std::string displayString;
+  public:
+    OutputOnlyMenuItem(std::string toDisplay) {displayString = toDisplay;};
+    virtual ~OutputOnlyMenuItem() {};
+    virtual bool Handles(std::string) { return false;};
+    virtual MenuInteraction_ptr GetHandler(std::string) {return MenuInteraction_ptr(NULL);};
+    virtual std::string GetKey() { return menustr::emptyString;};
+    virtual std::string GetText() {return displayString;};
+    virtual std::string GetVariableText() {return "";};
+    virtual bool IsVisible() {return true;};
+};
+
+// outputs a blank line
+class BlankMenuItem : public OutputOnlyMenuItem
+{
+  public:
+    BlankMenuItem() : OutputOnlyMenuItem(menustr::space) {};
+    virtual ~BlankMenuItem() {};
+};
+
+// outputs a dashed line
+class DividerMenuItem : public OutputOnlyMenuItem
+{
+  public:
+    DividerMenuItem() : OutputOnlyMenuItem(menustr::divider) {};
+    virtual ~DividerMenuItem() {};
+};
+
+// for menu items that result in the same menu being re-displayed
+class SameLevelMenuItem : public KeyedMenuItem
+{
+  protected:
+    UIInterface & ui;
+    std::string menuKey;
+  public:
+    SameLevelMenuItem(std::string myKey, UIInterface & myUi, std::string myMenuKey);
+    virtual ~SameLevelMenuItem();
+    // ---- MenuItem methods
+    virtual std::string GetVariableText();
+};
+
+// ideally, this item would go in the lamarcmenus directory
+// and we would have a "LamarcMenu" class (inheriting from "NewMenu")
+// into which we'd put this. Unfortunately, that is stymied by
+// the fact that we have class "LowerLevelMenu" inheriting from
+// NewMenu as well.
+class FileDumpMenuItem : public SameLevelMenuItem
+{
+  public:
+    FileDumpMenuItem(UIInterface & myUi);
+    virtual ~FileDumpMenuItem();
+    // ---- MenuItem methods
+    virtual MenuInteraction_ptr GetHandler(std::string);
+    virtual std::string GetText();
+
+};
+
+class FileDumpHandler : public DialogRepeat
+{
+  protected:
+    UIInterface & ui;
+    long maxTries() {return 3;};
+    std::string beforeLoopOutputString();
+    std::string inLoopOutputString();
+    std::string inLoopFailureOutputString();
+    std::string afterLoopSuccessOutputString();
+    std::string afterLoopFailureOutputString();
+    bool        handleInput(std::string inputString);
+    void        doFailure() {};
+  public:
+    FileDumpHandler(UIInterface & myui) : ui(myui) {};
+    virtual ~FileDumpHandler() {};
+};
+
+class UndoMenuItem : public SameLevelMenuItem
+{
+  public:
+    UndoMenuItem(UIInterface & myUi);
+    virtual ~UndoMenuItem();
+    // ---- MenuItem methods
+    virtual std::string GetText();
+    virtual bool IsVisible();
+    virtual MenuInteraction_ptr GetHandler(std::string);
+
+};
+
+class UndoHandler : public MenuInteraction
+{
+  protected:
+    UIInterface & ui;
+  public:
+    UndoHandler(UIInterface & myui) : ui(myui) {};
+    virtual ~UndoHandler() {};
+    virtual menu_return_type InvokeMe(Display & display);
+};
+
+class RedoMenuItem : public SameLevelMenuItem
+{
+  public:
+    RedoMenuItem(UIInterface & myUi);
+    virtual ~RedoMenuItem();
+    // ---- MenuItem methods
+    virtual std::string GetText();
+    virtual bool IsVisible();
+    virtual MenuInteraction_ptr GetHandler(std::string);
+
+};
+
+class RedoHandler : public MenuInteraction
+{
+  protected:
+    UIInterface & ui;
+  public:
+    RedoHandler(UIInterface & myui) : ui(myui) {};
+    virtual ~RedoHandler() {};
+    virtual menu_return_type InvokeMe(Display & display);
+};
+
+class BottomLineMenuItem : public MenuDisplayLine
+{
+  protected:
+    UIInterface& m_ui;
+  public:
+    BottomLineMenuItem(UIInterface& ui);
+    virtual ~BottomLineMenuItem();
+    virtual std::string GetKey() { return menustr::emptyString;};
+
+#if 0
+    virtual long GetKeyid() { return -1;};
+#endif
+
+    virtual std::string GetText() { return menustr::bottomLine;};
+    virtual bool Handles(std::string inputKey);
+    virtual std::string GetVariableText() { return menustr::emptyString;};
+    virtual MenuInteraction_ptr GetHandler(std::string inputKey);
+};
+
+class BottomLineMenuItemAtTop : public BottomLineMenuItem
+{
+  public:
+    BottomLineMenuItemAtTop(UIInterface& ui);
+    virtual ~BottomLineMenuItemAtTop();
+    virtual std::string GetText() { return menustr::bottomLineAtTop;};
+};
+
+class DoNothingHandler : public MenuInteraction
+{
+  public:
+    DoNothingHandler() {};
+    virtual ~DoNothingHandler() {};
+    virtual menu_return_type InvokeMe(Display & display);
+};
+
+class RunHandler : public DialogAcknowledge
+{
+  protected:
+    UIInterface& m_ui;
+    virtual std::string outputString();
+  public:
+    RunHandler(UIInterface& ui): m_ui(ui) {};
+    virtual ~RunHandler() {};
+    virtual menu_return_type InvokeMe(Display & display);
+};
+
+class QuitHandler : public MenuInteraction
+{
+  public:
+    QuitHandler() {};
+    virtual ~QuitHandler() {};
+    virtual menu_return_type InvokeMe(Display & display)
+    { return menu_QUIT;};
+};
+
+class GoUpHandler : public MenuInteraction
+{
+  public:
+    GoUpHandler() {};
+    virtual ~GoUpHandler() {};
+    virtual menu_return_type InvokeMe(Display & display)
+    { return menu_GO_UP;};
+};
+
+class SubMenuItem : public KeyedMenuItem
+{
+  protected:
+    UIInterface & ui;
+    NewMenuCreator * creator;
+  public:
+    SubMenuItem(std::string myKey,UIInterface & myui, NewMenuCreator * myCreator);
+    virtual ~SubMenuItem();
+    // ---- MenuItem methods
+    virtual std::string GetText();
+    virtual std::string GetVariableText();
+    virtual MenuInteraction_ptr GetHandler(std::string inputKey) ;
+};
+
+#endif  // NEWMENUITEM_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/setmenuitem.cpp b/src/menu/setmenuitem.cpp
new file mode 100644
index 0000000..8db06e7
--- /dev/null
+++ b/src/menu/setmenuitem.cpp
@@ -0,0 +1,199 @@
+// $Id: setmenuitem.cpp,v 1.27 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <string>
+
+#include "setmenuitem.h"
+#include "ui_strings.h"
+#include "constants.h"
+#include "stringx.h"
+#include "ui_constants.h"
+#include "newmenuitems.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+string SetDialog::inLoopOutputString()
+{
+    if (getMin() == getMax())
+    {
+        assert (getMin() == ToString(FLAGDOUBLE));
+        return "Enter " + getText() + "\n";
+    }
+    else
+    {
+        return "Enter " + getText() + " (between " + getMin() + " and "
+            + getMax() + ").\n";
+    }
+}
+
+SetMenuItem::SetMenuItem(std::string myKey, UIInterface & myui, std::string mk )
+    : KeyedMenuItem(myKey), ui(myui) , menuKey(mk)
+{
+}
+
+SetMenuItem::~SetMenuItem()
+{
+}
+
+string SetMenuItem::GetText()
+{
+    return ui.doGetDescription(menuKey,GetId());
+}
+
+std::string SetMenuItem::GetVariableText()
+{
+    return ui.doGetPrintString(menuKey,GetId());
+}
+
+MenuInteraction_ptr SetMenuItem::GetHandler(std::string input)
+{
+    assert(Handles(input));
+    return MenuInteraction_ptr(new SetDialog(ui,menuKey,GetId()));
+}
+
+bool SetMenuItem::IsVisible()
+{
+    return true;
+}
+
+bool SetMenuItem::IsConsistent()
+{
+    return ui.doGetConsistency(menuKey,GetId());
+}
+
+//------------------------------------------------------------------------------------
+
+SetMenuItemNoId::SetMenuItemNoId(std::string myKey, UIInterface & myui, std::string myMenuKey)
+    : SetMenuItem(myKey,myui,myMenuKey)
+{
+}
+
+SetMenuItemNoId::~SetMenuItemNoId()
+{
+}
+
+UIId SetMenuItemNoId::GetId()
+{
+    return NO_ID();
+}
+
+//------------------------------------------------------------------------------------
+
+SetMenuItemId::SetMenuItemId(std::string myKey, UIInterface & myui, std::string myMenuKey, UIId myId)
+    : SetMenuItem(myKey,myui,myMenuKey) , id(myId)
+{
+}
+
+SetMenuItemId::~SetMenuItemId()
+{
+}
+
+UIId SetMenuItemId::GetId()
+{
+    return id;
+}
+
+///// MenuDisplayGroupBaseImplementation
+
+MenuDisplayGroupBaseImplementation::MenuDisplayGroupBaseImplementation(UIInterface & myui, string myMenuKey)
+    : ui(myui) , menuKey(myMenuKey)
+{
+}
+
+MenuDisplayGroupBaseImplementation::~MenuDisplayGroupBaseImplementation()
+{
+}
+
+string MenuDisplayGroupBaseImplementation::GetKey(UIId id)
+{
+    return indexToKey(id.GetIndex1()); // EWFIX.P3 ERRORCHECKING : what if GetIndex1 not unique?
+}
+
+string MenuDisplayGroupBaseImplementation::GetText(UIId id)
+{
+    return ui.doGetDescription(menuKey,id);
+}
+
+string MenuDisplayGroupBaseImplementation::GetVariableText(UIId id)
+{
+    return ui.doGetPrintString(menuKey,id);
+}
+
+bool MenuDisplayGroupBaseImplementation::Handles(string input)
+{
+    // somewhat wasteful, but avoids error checking that would be
+    // unpleasant to code
+    vector<UIId> visibles = GetVisibleIds();
+    vector<UIId>::iterator i;
+    for(i = visibles.begin(); i != visibles.end(); i++)
+    {
+        UIId& visibleId = *i;
+        if(CaselessStrCmp(GetKey(visibleId),input))
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+MenuInteraction_ptr MenuDisplayGroupBaseImplementation::GetHandler(string input)
+{
+    // somewhat wasteful, but avoids error checking that would be
+    // unpleasant to code
+    vector<UIId> visibles = GetVisibleIds();
+    vector<UIId>::iterator i;
+    for(i = visibles.begin(); i != visibles.end(); i++)
+    {
+        UIId& visibleId = *i;
+        if(CaselessStrCmp(GetKey(visibleId),input))
+        {
+            return MakeOneHandler(visibleId);
+        }
+    }
+    assert(false); //A group with nothing in it?
+    return MenuInteraction_ptr(new DoNothingHandler());
+}
+
+///// SetMenuItemGroup
+
+SetMenuItemGroup::SetMenuItemGroup(UIInterface & myui, string myMenuKey)
+    : MenuDisplayGroupBaseImplementation(myui,myMenuKey)
+{
+}
+
+SetMenuItemGroup::~SetMenuItemGroup()
+{
+}
+
+MenuInteraction_ptr SetMenuItemGroup::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new SetDialog(ui,menuKey,id));
+}
+
+///// ToggleMenuItemGroup
+
+ToggleMenuItemGroup::ToggleMenuItemGroup(UIInterface & myui, string myMenuKey)
+    : MenuDisplayGroupBaseImplementation(myui,myMenuKey)
+{
+}
+
+ToggleMenuItemGroup::~ToggleMenuItemGroup()
+{
+}
+
+MenuInteraction_ptr ToggleMenuItemGroup::MakeOneHandler(UIId id)
+{
+    return MenuInteraction_ptr(new NewToggleHandler(ui,menuKey,id));
+}
+
+//____________________________________________________________________________________
diff --git a/src/menu/setmenuitem.h b/src/menu/setmenuitem.h
new file mode 100644
index 0000000..31ea702
--- /dev/null
+++ b/src/menu/setmenuitem.h
@@ -0,0 +1,217 @@
+// $Id: setmenuitem.h,v 1.30 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// setmenuitem.h
+// menu items that set a value; each is a line in the menu consisting of a
+// (key, text, handler object) triple
+//
+// Peter Beerli
+
+#ifndef SETMENUITEM_H
+#define SETMENUITEM_H
+
+#include <string>
+#include "dialogrepeat.h"
+#include "errhandling.h"
+#include "menuitem.h"
+#include "menuinteraction.h"
+#include "stringx.h"
+#include "ui_interface.h"
+#include "ui_constants.h"
+#include "vectorx.h"
+#include "menutypedefs.h"
+
+class SetDialog : public DialogRepeat
+{
+  private:
+    SetDialog(); //undefined
+  protected:
+    UIInterface & ui;
+    std::string menuKey;
+    const UIId id;
+    //
+    bool haveError;
+    std::string currError;
+    //
+    std::string getText()
+    { return ui.doGetDescription(menuKey,id);};
+    virtual void stuffValIntoUI(std::string val)
+    { ui.doSet(menuKey,val,id);};
+  public:
+    SetDialog(UIInterface & myui, std::string myMenuKey, UIId myId)
+        : ui(myui) , menuKey(myMenuKey), id(myId) {};
+    virtual ~SetDialog() {};
+    virtual long maxTries()
+    { return 3;};
+    virtual std::string beforeLoopOutputString()
+    { return "";};
+    virtual std::string inLoopOutputString();
+    virtual std::string inLoopFailureOutputString()
+    {
+        if (haveError)
+        {
+            return currError;
+        }
+        else
+        {
+            implementation_error e("Unknown menu error");
+            throw e;
+        }
+    };
+    virtual std::string afterLoopSuccessOutputString()
+    { return "";};
+    virtual std::string afterLoopFailureOutputString()
+    { return string("Unable to accept your input\n")+
+            "Leaving \""+getText()+"\" unchanged\n";};
+    virtual bool handleInput(std::string input)
+    {   haveError = false;
+        try
+        {
+            stuffValIntoUI(input);
+        }
+        catch (data_error& e)
+        {
+            currError = e.what();
+            haveError = true;
+            return false;
+        }
+        return true;
+    };
+    virtual void doFailure()
+    {};
+
+  private:
+    std::string getMin() { return ui.doGetMin(menuKey,id);};
+    std::string getMax() { return ui.doGetMax(menuKey,id);};
+};
+
+class SetMenuItem : public KeyedMenuItem
+{
+  private:
+    SetMenuItem(); //undefined
+  protected:
+    UIInterface & ui;
+    std::string menuKey;
+    virtual UIId GetId() = 0;
+  public:
+    SetMenuItem(std::string myKey, UIInterface & myui, std::string mk );
+    SetMenuItem(std::string myKey, UIInterface & myui, std::string mk, UIId myId);
+    virtual ~SetMenuItem();
+    // ---- MenuItem methods
+    virtual std::string GetText();
+    virtual std::string GetVariableText();
+    virtual MenuInteraction_ptr GetHandler(std::string input);
+    virtual bool IsVisible();
+    virtual bool IsConsistent();
+};
+
+class SetMenuItemNoId : public SetMenuItem
+{
+  private:
+    SetMenuItemNoId(); //undefined
+  protected:
+    virtual UIId GetId();
+  public:
+    SetMenuItemNoId(std::string myKey, UIInterface & myui, std::string myMenuKey);
+    virtual ~SetMenuItemNoId();
+};
+
+class SetMenuItemId : public SetMenuItem
+{
+  private:
+    SetMenuItemId(); //undefined
+  protected:
+    UIId id;
+    virtual UIId GetId();
+  public:
+    SetMenuItemId(std::string myKey, UIInterface & myui, std::string myMenuKey, UIId myId);
+    virtual ~SetMenuItemId();
+};
+
+class NewToggleHandler : public MenuInteraction
+{
+  private:
+    NewToggleHandler(); //undefined
+  protected:
+    UIInterface & ui;
+    std::string menuKey;
+    UIId id;
+  public:
+    NewToggleHandler(UIInterface & myui, std::string myMenuKey, UIId myId)
+        : ui(myui),menuKey(myMenuKey),id(myId) {};
+    ~NewToggleHandler() {};
+    virtual menu_return_type InvokeMe(Display & display)
+    { ui.doToggle(menuKey,id); return menu_REDISPLAY;};
+};
+
+class ToggleMenuItemNoId : public SetMenuItemNoId
+{
+  private:
+    ToggleMenuItemNoId(); //undefined
+  public:
+    ToggleMenuItemNoId(std::string myKey, UIInterface & myui, std::string myMenuKey )
+        : SetMenuItemNoId(myKey,myui,myMenuKey) {};
+    ~ToggleMenuItemNoId() {};
+    virtual MenuInteraction_ptr GetHandler(std::string input)
+    { return MenuInteraction_ptr(new NewToggleHandler(ui,menuKey,GetId()));};
+};
+
+class ToggleMenuItemId : public SetMenuItemId
+{
+  public:
+    ToggleMenuItemId(std::string myKey, UIInterface & myui, std::string myMenuKey, UIId myId)
+        : SetMenuItemId(myKey,myui,myMenuKey,myId) {};
+    ~ToggleMenuItemId() {};
+    virtual MenuInteraction_ptr GetHandler(std::string input)
+    { return MenuInteraction_ptr(new NewToggleHandler(ui,menuKey,GetId()));};
+};
+
+class MenuDisplayGroupBaseImplementation : public MenuDisplayGroup
+{
+  private:
+    MenuDisplayGroupBaseImplementation(); //undefined
+  protected:
+    UIInterface & ui;
+    std::string menuKey;
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id) = 0;
+  public:
+    MenuDisplayGroupBaseImplementation(UIInterface & myui, std::string myMenuKey);
+    virtual ~MenuDisplayGroupBaseImplementation();
+    virtual std::string GetKey(UIId id);
+    virtual std::string GetText(UIId id);
+    virtual std::string GetVariableText(UIId id);
+    virtual bool Handles(std::string input);
+    virtual MenuInteraction_ptr GetHandler(std::string input);
+};
+
+class SetMenuItemGroup : public MenuDisplayGroupBaseImplementation
+{
+  private:
+    SetMenuItemGroup(); //undefined
+  protected:
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+  public:
+    SetMenuItemGroup(UIInterface & myui, std::string myMenuKey);
+    virtual ~SetMenuItemGroup();
+};
+
+class ToggleMenuItemGroup : public MenuDisplayGroupBaseImplementation
+{
+  private:
+    ToggleMenuItemGroup(); //undefined
+  protected:
+    virtual MenuInteraction_ptr MakeOneHandler(UIId id);
+  public:
+    ToggleMenuItemGroup(UIInterface & myui, std::string myMenuKey);
+    virtual ~ToggleMenuItemGroup();
+};
+
+#endif  // SETMENUITEM_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/togglemenuitem.cpp b/src/menu/togglemenuitem.cpp
new file mode 100644
index 0000000..3465796
--- /dev/null
+++ b/src/menu/togglemenuitem.cpp
@@ -0,0 +1,19 @@
+// $Id: togglemenuitem.cpp,v 1.13 2010/03/17 17:25:59 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "constants.h"
+#include "togglemenuitem.h"
+#include "stringx.h"
+#include "ui_interface.h"
+
+using std::string;
+
+//____________________________________________________________________________________
diff --git a/src/menu/togglemenuitem.h b/src/menu/togglemenuitem.h
new file mode 100644
index 0000000..6318374
--- /dev/null
+++ b/src/menu/togglemenuitem.h
@@ -0,0 +1,24 @@
+// $Id: togglemenuitem.h,v 1.16 2010/03/02 23:12:29 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef TOGGLEMENUITEM_H
+#define TOGGLEMENUITEM_H
+
+#include <string>
+#include "constants.h" // for proftype
+#include "menuinteraction.h"
+#include "newmenuitems.h"
+
+class UIInterface;
+class Display;
+
+#endif  // TOGGLEMENUITEM_H
+
+//____________________________________________________________________________________
diff --git a/src/menu/twodtable.cpp b/src/menu/twodtable.cpp
new file mode 100644
index 0000000..68d5de1
--- /dev/null
+++ b/src/menu/twodtable.cpp
@@ -0,0 +1,173 @@
+// $Id: twodtable.cpp,v 1.7 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2003  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include <vector>
+#include "display.h"
+#include "menu_strings.h"
+#include "twodtable.h"
+#include "ui_interface.h"
+#include "stringx.h"
+
+using std::string;
+using std::vector;
+
+static const long MAXDISPLAYWIDTH = 78;
+
+TwoDTable::TwoDTable(UIInterface & myui)
+    : ui(myui)
+{
+}
+
+TwoDTable::~TwoDTable()
+{
+}
+
+bool TwoDTable::IsVisible()
+{
+    return true;
+}
+
+string TwoDTable::RowHeader(UIInterface & ui)
+{
+    return menustr::emptyString;
+}
+
+string TwoDTable::CreateDisplayString()
+{
+    return CreateDisplayString(ui);
+}
+
+string TwoDTable::CreateDisplayString(UIInterface & ui)
+{
+    if(!IsVisible()) return menustr::emptyString;
+
+    long rowMax = RowCount(ui);
+    long colMax = ColCount(ui);
+    long colIndex, rowIndex;
+    StringVec2d outv;
+    LongVec1d widths;
+
+    // create column header row
+    vector<string> colHeader;
+    colHeader.push_back(RowHeader(ui));
+    for(colIndex = 0; colIndex < colMax; colIndex++)
+    {
+        colHeader.push_back(ColLabel(ui,colIndex));
+    }
+    bool someActualHeaders = false;
+    for (unsigned long c=0; c<colHeader.size(); c++)
+    {
+        if (colHeader[c] != menustr::emptyString)
+        {
+            someActualHeaders = true;
+        }
+    }
+    if (someActualHeaders)
+    {
+        outv.push_back(colHeader);
+    }
+
+    // create data rows
+    for(rowIndex = 0; rowIndex < rowMax; rowIndex++)
+    {
+        vector<string> row;
+        row.push_back(RowLabel(ui,rowIndex));
+        for(colIndex = 0; colIndex < colMax; colIndex++)
+        {
+            row.push_back(Cell(ui,rowIndex,colIndex));
+        }
+        outv.push_back(row);
+    }
+
+    // calculate data widths
+    for(unsigned long i=0;i<outv[0].size();i++)
+    {
+        widths.push_back(0);
+    }
+    // calculate appropriate column widths
+    for(unsigned long r=0;r<outv.size();r++)
+    {
+        for(unsigned long c=0;c<outv[r].size();c++)
+        {
+            long curr_width = outv[r][c].length();
+            if(curr_width > widths[c])
+            {
+                widths[c] = curr_width;
+            }
+        }
+    }
+    // Re-calculate column widths, given that we can only be 78 characters wide.
+    long total_width = 0;
+    for (unsigned long i=0; i<widths.size(); i++)
+    {
+        total_width += widths[i];
+        total_width++; //for the space between columns
+    }
+    while (total_width > MAXDISPLAYWIDTH)
+    {
+        long difference = total_width - MAXDISPLAYWIDTH;
+        unsigned long maxwidth=0;
+        for (unsigned long i=1; i < widths.size(); i++)
+        {
+            if (widths[i] > widths[maxwidth])
+            {
+                maxwidth = i;
+            }
+        }
+        long reduce = std::min(5L, difference);
+        reduce = std::min(reduce, (widths[maxwidth]-3));
+        if (reduce == widths[maxwidth]-3)
+        {
+            reduce = 1;
+        }
+        widths[maxwidth] -= reduce;
+        total_width -= reduce;
+    }
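+    // Worked example (illustrative only, not from the original source): with
+    // three column widths {20, 40, 30}, total_width starts at 20+40+30+3 = 93.
+    // Each pass trims the widest column by up to 5, so column 1 shrinks
+    // 40 -> 35 -> 30 -> 25 while total_width drops 93 -> 88 -> 83 -> 78,
+    // which just fits MAXDISPLAYWIDTH.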
+
+    // create string
+    string outputString = "";
+    outputString += Title(ui);
+    outputString += "\n";
+    for(unsigned long r=0;r<outv.size();r++)
+    {
+        StringVec2d wrappedLines;
+        unsigned long numlines = 1;
+        for(unsigned long c=0;c<outv[r].size();c++)
+        {
+            StringVec1d wrappedCell;
+            wrappedCell = Linewrap(outv[r][c], (widths[c]), 2);
+            wrappedLines.push_back(wrappedCell);
+            if (wrappedCell.size() > numlines)
+            {
+                numlines = wrappedCell.size();
+            }
+        }
+        for (unsigned long r2=0; r2<numlines; r2++)
+        {
+            for (unsigned long c=0; c<wrappedLines.size(); c++)
+            {
+                outputString += " ";
+                if (wrappedLines[c].size() > r2 )
+                {
+                    outputString += MakeJustified(wrappedLines[c][r2],-(widths[c]));
+                }
+                else
+                {
+                    outputString += MakeJustified("",-(widths[c]));
+                }
+            }
+            outputString += "\n";
+        }
+    }
+    return outputString;
+}
+
+//____________________________________________________________________________________
diff --git a/src/menu/twodtable.h b/src/menu/twodtable.h
new file mode 100644
index 0000000..269613c
--- /dev/null
+++ b/src/menu/twodtable.h
@@ -0,0 +1,42 @@
+// $Id: twodtable.h,v 1.6 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef TWODTABLE_H
+#define TWODTABLE_H
+
+#include <string>
+#include "menuitem.h"
+
+class Display;
+class UIInterface;
+
+class TwoDTable : public MenuDisplayTable
+{
+  protected:
+    UIInterface & ui;
+    virtual long ColCount(UIInterface & ui) = 0;
+    virtual long RowCount(UIInterface & ui) = 0;
+    virtual std::string Title(UIInterface & ui) = 0;
+    virtual std::string RowHeader(UIInterface & ui);
+    virtual std::string ColLabel(UIInterface & ui, long index) = 0;
+    virtual std::string RowLabel(UIInterface & ui, long index) = 0;
+    virtual std::string Cell(UIInterface & ui, long rowIndex, long colIndex) = 0;
+    std::string CreateDisplayString(UIInterface & myui);
+
+  public:
+    TwoDTable(UIInterface & myui);
+    virtual ~TwoDTable();
+    virtual bool IsVisible();
+    std::string CreateDisplayString();
+};
+
+#endif  // TWODTABLE_H
+
+//____________________________________________________________________________________
diff --git a/src/postlike/analyzer.cpp b/src/postlike/analyzer.cpp
new file mode 100644
index 0000000..e75dc09
--- /dev/null
+++ b/src/postlike/analyzer.cpp
@@ -0,0 +1,67 @@
+// $Id: analyzer.cpp,v 1.33 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// analyzer.cpp
+//
+// the analyzer generates
+// - profiles  [the code is in profile.cpp]
+// - likelihood surfaces
+// - likelihood ratio tests
+//
+// Peter Beerli November 2000
+
+#include <iostream>
+
+#include "analyzer.h"
+#include "forcesummary.h"
+#include "mathx.h"                      // for probchi
+#include "registry.h"
+#include "runreport.h"
+#include "stringx.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+// Instantiate the Analyzer
+// using the "last" postlike object, the Analyzer assumes that the
+// Prob(G|parameter_0) are already computed
+// [in likelihood.h:....PostLike::Setup()]
+
+Analyzer::Analyzer (ForceSummary &fs, const ParamVector& params, Maximizer * thismaximizer):
+    m_maximizer(thismaximizer),
+    m_forcesummary (fs)
+{
+    m_newparams = CreateVec1d(params.size(), static_cast<double>(0.0));
+}
+
+//------------------------------------------------------------------------------------
+
+Analyzer::~Analyzer ()
+{
+    // intentionally blank
+}
+
+void Analyzer::SetMLEs(DoubleVec1d newMLEs)
+{
+    m_MLEparams = newMLEs;
+    m_MLElparams = m_MLEparams;
+    LogVec0 (m_MLEparams, m_MLElparams);
+}
+
+//------------------------------------------------------------------------------------
+
+void Analyzer::PrintMLEs()
+{
+    cout << "Current analyzer MLE's:" << endl;
+    for (unsigned long int i = 0; i < m_MLEparams.size(); ++i)
+        cout << m_MLEparams[i] << endl;
+}
+
+//____________________________________________________________________________________
diff --git a/src/postlike/analyzer.h b/src/postlike/analyzer.h
new file mode 100644
index 0000000..d6c6bc5
--- /dev/null
+++ b/src/postlike/analyzer.h
@@ -0,0 +1,118 @@
+// $Id: analyzer.h,v 1.33 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// analyzer.h
+//
+// the analyzer generates
+// - profiles  [code will be in profile.cpp]
+//
+// Peter Beerli November 2000
+//
+
+#ifndef ANALYZER_H
+#define ANALYZER_H
+
+#include <fstream>
+#include <vector>
+
+#include "likelihood.h"
+#include "maximizer.h"
+#include "parameter.h"
+#include "plotstat.h"
+#include "vectorx.h"
+
+typedef std::map<double, double> DoublesMap;
+typedef std::map<double, double>::iterator DoublesMapiter;
+typedef std::map<double, double>::reverse_iterator DoublesMapReviter;
+
+typedef std::map<double, DoubleVec1d> DoubleToVecMap;
+typedef std::map<double, DoubleVec1d>::iterator DoubleToVecMapiter;
+
+typedef std::map<double, string> DoubleToStringMap;
+typedef std::map<double, string>::iterator DoubleToStringiter;
+
+class ForceSummary;
+
+long int difference(DoubleVec1d param1, DoubleVec1d param2);
+
+class Analyzer
+{
+  public:
+    // Instantiate the Analyzer
+    // using the "last" postlike object or "last" maximizer object,
+    // the Analyzer assumes that the
+    // Prob(G|parameter_0) are already computed
+    // [in likelihood.h:....PostLike::Setup()]
+    Analyzer (ForceSummary &fs, const ParamVector &params,
+              Maximizer * thismaximizer);
+    ~Analyzer ();
+
+    void SetMLEs(DoubleVec1d newMLEs);
+    //Debug function:
+    void PrintMLEs();
+
+    // Calculate the profiles and put them into class variable profiles.
+    void CalcProfiles (const DoubleVec1d MLEs, double likelihood, long int region);
+
+  private:
+    DoubleVec1d  m_MLEparams;     // holds MLE parameters
+    DoubleVec1d  m_MLElparams;    // holds the log(m_MLEparam)
+    Maximizer *m_maximizer;       // holds the maximizer [for profiles]
+    PostLike *m_postlikelihood;   // pointer to the PostLike [for plots]
+    double m_likelihood;          // holds the L of the last maximization
+    const ForceSummary &m_forcesummary;
+    vector < double > m_modifiers;    // for calculation of percentiles or fixed
+
+    // Temporary variables as an optimization
+    DoubleVec1d m_newparams;
+
+    // Calculates a single Profile table
+    void CalcProfile (ParamVector::iterator guide, long int pnum, long int region);
+    void CalcProfileFixed (ParamVector::iterator guide, long int pnum, long int region);
+    void CalcProfilePercentile (ParamVector::iterator guide, long int pnum, long int region);
+    void DoHalfTheProfile(bool high, DoubleVec1d& targetLikes,
+                          DoublesMap& percsForLikes, long int pnum, long int region,
+                          vector<ProfileLineStruct>& localprofiles,
+                          ParamVector::iterator guide);
+
+    bool AddMoreExtremeValue(DoublesMap& foundLikesForVals,
+                             DoubleToVecMap& foundVecsForVals,
+                             DoubleToStringMap& messagesForVals,
+                             bool high, long int pnum);
+    // returns fairly high and low values for specific forces
+    DoublesMapiter GetLastHigher(DoublesMap& foundLikesForVals,
+                                 double targetLike, bool high);
+    DoublesMapiter GetFirstLower(DoublesMap& foundLikesForVals,
+                                 double targetLike, bool high);
+    bool ExpandSearch(DoublesMap& foundLikesForVals,
+                      DoubleToVecMap& foundVecsForVals,
+                      DoublesMapiter& lowValAndLike,
+                      DoublesMapiter& highValAndLike,
+                      DoubleToStringMap& messagesForVals,
+                      long int pnum, double targetlike);
+    double LowParameter(long int pnum, double currentLow);
+    double HighParameter(long int pnum, double currentHigh);
+    DoublesMapiter ClosestFoundFor(DoublesMap& foundLikesForVals,
+                                   double targetLike);
+    double GetNewValFromBracket(DoublesMapiter& highValAndLike,
+                                DoublesMapiter& lowValAndLike,
+                                double targetLike);
+    void AddBlankProfileForModifier(double modifier,
+                                    vector<ProfileLineStruct>& localprofiles);
+    void   CheckForMultipleMaxima(DoublesMap& foundLikesForVals, bool high);
+    void   PrintDoublesMap(DoublesMap& printme);
+    void   PrintDoublesIter(DoublesMapiter& printme);
+    void   PrintDoubleToStringMap(DoubleToStringMap& printme);
+    void   PrintDoubleToStringIter(DoubleToStringiter& printme);
+};
+
+#endif // ANALYZER_H
+
+//____________________________________________________________________________________
diff --git a/src/postlike/derivatives.cpp b/src/postlike/derivatives.cpp
new file mode 100644
index 0000000..ba1bb7f
--- /dev/null
+++ b/src/postlike/derivatives.cpp
@@ -0,0 +1,405 @@
+// $Id: derivatives.cpp,v 1.55 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// derivatives of parameter likelihood
+// call sequence is in maximizer.cpp
+
+#include <cassert>
+#include <iostream>                     // debugging
+
+#include "collector.h"
+#include "likelihood.h"
+#include "mathx.h"
+#include "plforces.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+#ifndef NDEBUG
+//#define TEST
+#endif // NDEBUG
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+// Default implementation of a function to compute and return the following:
+//
+// sum_over_all_trees( (Prob(G|P)/Prob(G|Po)) * (d/dp)log(Prob(G|P)) )
+// == sum_over_unique_trees(nG*(Prob(G|P)/Prob(G|Po))*(d/dp)log(Prob(G|P)))
+//
+// where d/dp   means partial derivative with respect to parameter p,
+//       P      is the full vector of current parameter values ("param"),
+//       Po     is the starting point ("driving values") vector ("param0"),
+//       G      is one of the genealogies over which we're summing,
+//  and  nG     is the number of identical copies of genealogy G.
+//
+// Input parameters:  param, ln_ProbGP_over_ProbGPo, pGenealogies, whichparam.
+//
+// This function can be used with or without Geyer replicates.  If Geyer replicates
+// are being used, then input parameter ln_ProbGP_over_ProbGPo must have been
+// computed using an effective "lnProbGPo" == "lnProb(G|param0),"
+// where "lnProb(G|param0)" = log(sum_over_replicates((NtrR/wR)*Prob(G|param0R))),
+//       NtrR    is the total number of (non-unique) trees in replicate R,
+//       wR      is the Geyer weight assigned to replicate R,
+//  and  param0R is the starting point ("driving value") for replicate R.
+//
+// This function returns the "sum_over_..." quantity spelled out at the top
+// of this set of comments.
+//
+double PostLike::DCalc_sumG_BasicNumerator(const vector<double>& param, const vector<double>& ln_ProbGP_over_ProbGPo,
+                                           const vector<TreeSummary*> *pGenealogies, const long& whichparam)
+{
+    double DlnPoint(0.0), DlnWait(0.0), termG(0.0), result(0.0);
+    vector<PLForces*>::const_iterator theForce;
+    long whichforce = FindForce(whichparam);
+    theForce = m_forces.begin() + whichforce;
+
+    for (unsigned long G = 0; G < pGenealogies->size(); G++)
+    {
+        DlnPoint = (*theForce)->DlnPoint(param, (*pGenealogies)[G], whichparam);
+        DlnWait = (*theForce)->DlnWait(param, (*pGenealogies)[G], whichparam);
+        termG = SafeProductWithExp(DlnPoint + DlnWait, ln_ProbGP_over_ProbGPo[G]);
+        result += ((*pGenealogies)[G])->GetNCopies() * termG;
+
+        // These should never be not-a-number.
+        assert(!systemSpecificIsnan(ln_ProbGP_over_ProbGPo[G]));
+        assert(!systemSpecificIsnan(DlnPoint));
+        assert(!systemSpecificIsnan(DlnWait));
+        //assert(!systemSpecificIsnan(termG)); // termG is OK if the others are OK
+    }
+
+    return result;
+}
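+
+// Reading note (an assumption: SafeProductWithExp() is defined elsewhere,
+// presumably in mathx.h, and is taken here to compute x * exp(lnw) in an
+// under/overflow-safe way).  Under that reading, each per-tree term above is
+//     nG * (DlnPoint + DlnWait) * Prob(G|P)/Prob(G|Po),
+// i.e. the derivative of log(Prob(G|P)) weighted by the importance ratio
+// relative to the driving values.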
+
+//------------------------------------------------------------------------------------
+// Single-region derivatives
+//
+bool SinglePostLike::DCalculate(const vector<double>& param, vector<double>& gradient)
+{
+    assert(param.size() == gradient.size());
+    double denominator = m_totalNumTrees * exp(m_last_lnL);
+    // "denominator" is equal to
+    // sum_over_all_trees(Prob(G|P)/Prob(G|Po)).
+
+    if (0.0 == denominator)
+    {
+#ifdef TEST
+        cerr << "SinglePostLike::DCalculate -- zero likelihood.  "
+             << "Returning...." << endl;
+#endif // TEST
+        return false; // otherwise we'd divide by zero
+    }
+
+    vector<double>::const_iterator p; // param
+    vector<ParamStatus>::const_iterator guideIt;
+    vector<double>::iterator g; // gradient
+    long whichparam = 0;
+    for (p = param.begin(), g = gradient.begin(),
+             guideIt = m_working_pstatusguides.begin();
+         p != param.end(); ++p, ++g, ++whichparam, ++guideIt)
+    {
+        ParamStatus mystatus = *guideIt;
+        if (!mystatus.Varies())
+        {
+            //guide is 0 if the gradient does not need to be calculated
+            (*g) = 0.0;
+        }
+        else
+        {
+            (*g) = PostLike::DCalc_sumG_BasicNumerator(param, m_ln_ProbGP_over_ProbGPo,
+                                                       &m_data->treeSummaries, whichparam);
+            (*g) /= denominator; // defined at the top of this method
+        }
+        // 2004/03/19 erynes -- For every parameter "p" except linear
+        // parameters like growth, we maximize with respect to log(p)
+        // instead of with respect to p.
+        // Not only does this make the math easier, it also keeps our
+        // positive-definite parameters (e.g., theta) positive.
+        // The "external" effect of maximizing with respect to log(p)
+        // is visible only in Maximizer::SetParam(), where the step is applied
+        // to the log parameters instead of the linear parameters,
+        // and right here, where we multiply the gradient by the parameter.
+        // This latter step comes from the chain rule:
+        // if f(x1, x2, x3, ..., xN) = g(y1, y2, y3, ..., xN),
+        // where y1 is a function of x1 alone, y2 = y2(x2), etc.
+        // (note g can contain both y's and x's),
+        // then if we use "d" to stand for "partial derivative," we have:
+        //
+        // g = f
+        // dg/dy2 = df/dy2  = df/dx2 * dx2/dy2,
+        // and similarly for y1, y3, etc.
+        //
+        // For yN = log(xN), we have xN = exp(yN),
+        // and dxN/dyN = d(exp(yN))/dyN = exp(yN) = xN.
+        // Substituting xN = dxN/dyN in the equation above, we have
+        // dg/dy2 = df/dx2 * x2,
+        // and similarly for y1, y3, and all log parameters generally.
+        if (!isLinearParam(whichparam))
+            (*g) *= (*p);
+    }
+
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+// Multi-replicate, single-region derivatives
+//
+bool ReplicatePostLike::DCalculate(const vector<double>& param, vector<double>& gradient)
+{
+    assert(param.size() == gradient.size());
+    double denominator = exp(m_last_lnL); // reduce num. of calls to exp()
+
+    if (0.0 == denominator)
+    {
+#ifdef TEST
+        cerr << "ReplicatePostLike::DCalculate -- zero likelihood.  "
+             << "Returning...." << endl;
+#endif // TEST
+        return false; // otherwise we'd divide by zero
+    }
+
+    vector<double>::const_iterator p; // param
+    vector<ParamStatus>::const_iterator guideIt;
+    vector<double>::iterator g; // gradient
+    long whichparam = 0;
+
+    for (p = param.begin(), g = gradient.begin(),
+             guideIt = m_working_pstatusguides.begin();
+         p != param.end(); ++p, ++g, ++whichparam, ++guideIt)
+    {
+        ParamStatus mystatus = *guideIt;
+        (*g) = 0.0;
+        // non-varying parameters are not computed
+        if (mystatus.Varies())
+        {
+            for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+            {
+                (*g) += PostLike::DCalc_sumG_BasicNumerator(param, m_ln_ProbGP_over_ProbGPo[rep],
+                                                            &(m_data[rep]->treeSummaries), whichparam);
+            }
+            (*g) /= denominator; // defined at the top of this method
+            if (!isLinearParam(whichparam))
+                (*g) *= (*p); // see comment for log params in SinglePostLike::DCalculate()
+        }
+    }
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+// Multi-region, multi-replicate derivatives
+//
+bool RegionPostLike::DCalculate(const vector<double>& param, vector<double>& gradient)
+{
+    assert(param.size() == gradient.size());
+
+    DoubleVec2d scaledparams = CreateVec2d(m_nRegions,param.size(),0.0);
+    for(unsigned long region = 0; region < m_nRegions; ++region)
+    {
+        transform(param.begin(),param.end(),
+                  m_paramscalars[region].begin(),scaledparams[region].begin(),
+                  multiplies<double>());
+    }
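+    // Note (an assumption based on the member name only): m_paramscalars[region]
+    // appears to hold per-region scaling factors, so scaledparams[region] is
+    // the global parameter vector rescaled element-wise for that region
+    // before the per-region derivative terms are accumulated below.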
+    assert(scaledparams.size() == m_data.size());
+
+    vector<ParamStatus>::const_iterator guideIt;
+    vector<double>::iterator g;
+    long whichparam = 0;
+    double numeratorForThisRegion(0.0);
+
+    for (g = gradient.begin(),
+             guideIt = m_working_pstatusguides.begin();
+         g != gradient.end(); ++g, ++whichparam, ++guideIt)
+    {
+        ParamStatus mystatus = *guideIt;
+        (*g) = 0.0;
+        //guide is 0 if the gradient does not need to be calculated
+        if (mystatus.Varies())
+        {
+            for (unsigned long reg = 0; reg < m_lnProbGPo.size(); reg++)
+            {
+                numeratorForThisRegion = 0.0;
+                for (unsigned long rep = 0; rep < m_lnProbGPo[reg].size(); rep++)
+                    numeratorForThisRegion +=
+                        PostLike::DCalc_sumG_BasicNumerator(scaledparams[reg],
+                                                            m_ln_ProbGP_over_ProbGPo[reg][rep],
+                                                            &(m_data[reg][rep]->treeSummaries),
+                                                            whichparam);
+                if (!isLinearParam(whichparam))
+                    numeratorForThisRegion *= scaledparams[reg][whichparam];
+                // see comment for log params in SinglePostLike::DCalculate()
+
+                if (0.0 == m_sumG_ProbGP_over_ProbGPo[reg])
+                {
+#ifdef TEST
+                    cerr << "RegionPostLike::DCalculate -- zero likelihood for region "
+                         << reg << ".  Returning...." << endl;
+#endif // TEST
+                    return false; // otherwise we'd divide by zero
+                }
+                (*g) += numeratorForThisRegion / m_sumG_ProbGP_over_ProbGPo[reg];
+            }
+        }
+    }
+
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+// Multi-region, multi-replicate derivatives,
+// with the background mutation rates gamma-distributed over regions.
+//
+bool GammaRegionPostLike::DCalculate(const vector<double>& param, vector<double>& gradient)
+{
+    if (param.size() != gradient.size())
+    {
+        assert(false);
+        string msg = "GammaRegionPostLike::DCalculate(), received two ";
+        msg += "vectors of unequal size:  param.size() = ";
+        msg += ToString(param.size()) + " and gradient.size() = ";
+        msg += ToString(gradient.size()) + ".";
+        throw implementation_error(msg);
+    }
+
+    DoubleVec2d scaledparams = CreateVec2d(m_nRegions,
+                                           m_paramscalars[0].size(),0.0);
+    for(unsigned long region = 0; region < m_nRegions; ++region)
+    {
+        transform(param.begin(), param.end(), m_paramscalars[region].begin(),
+                  scaledparams[region].begin(), multiplies<double>());
+    }
+    assert(scaledparams.size() == m_data.size());
+
+    vector<ParamStatus>::const_iterator guideIt;
+    vector<double>::iterator g;
+    bool retval(true);
+    double DlnPoint(0.0), DlnWait(0.0), termRegRepG(0.0), numeratorForThisRegion(0.0);
+    vector<PLForces*>::const_iterator theForce;
+    unsigned long whichparam(0), whichforce(0), the_alpha_param(param.size() - 1);
+    double alpha = param[the_alpha_param], SqrtAlpha = sqrt(alpha);
+
+    for (g = gradient.begin(),
+             guideIt = m_working_pstatusguides.begin();
+         g != gradient.end(); ++g, ++whichparam, ++guideIt)
+    {
+        ParamStatus mystatus = *guideIt;
+        (*g) = 0.0;
+        //guide is 0 if the gradient does not need to be calculated
+        if (!mystatus.Varies()) continue;
+
+        if (whichparam != the_alpha_param)
+        {
+            whichforce = FindForce(whichparam);
+            theForce = m_forces.begin() + whichforce;
+            for (unsigned long reg = 0; reg < m_lnProbGPo.size(); reg++)
+            {
+                numeratorForThisRegion = 0.0;
+                for (unsigned long rep = 0; rep < m_lnProbGPo[reg].size(); rep++)
+                {
+                    const vector<TreeSummary*> *pGenealogies =
+                        &(m_data[reg][rep]->treeSummaries);
+                    for (unsigned long G = 0; G < pGenealogies->size(); G++)
+                    {
+                        DlnPoint = (*theForce)->DlnPoint(scaledparams[reg],
+                                                         (*pGenealogies)[G], whichparam);
+                        DlnWait = (*theForce)->DlnWait(scaledparams[reg],
+                                                       (*pGenealogies)[G], whichparam);
+                        termRegRepG = m_K_alpha_minus_nevents_plus_1[reg][rep][G] /
+                            m_K_alpha_minus_nevents[reg][rep][G];
+                        termRegRepG *= sqrt(alpha/(-m_lnWait[reg][rep][G]));
+                        termRegRepG += (alpha - m_nevents_G[reg][rep][G]) /
+                            m_lnWait[reg][rep][G]; // note:  not -m_lnWait
+                        termRegRepG *= DlnWait;
+                        termRegRepG += DlnPoint;
+                        termRegRepG *= m_C[reg][rep][G];
+
+                        numeratorForThisRegion += termRegRepG
+                            * ((*pGenealogies)[G])->GetNCopies();
+                    }
+                }
+                if (0.0 == m_sumG_C[reg])
+                {
+#ifdef TEST
+                    cerr << "GammaRegionPostLike::DCalculate(), sum over reps and trees of region "
+                         << reg << " is " << m_sumG_C[reg] << "; this must be a positive number.  "
+                         << "Returning false and a gradient component of DBL_BIG....";
+#endif // TEST
+                    (*g) = DBL_BIG;
+                    retval = false;
+                    continue;
+                }
+
+                if (isLinearParam(whichparam))
+                {
+                    // Growth is the lone linear parameter at present.
+                    // But at present, growth and gamma can't mix.
+                    // If the user asked for both growth and gamma,
+                    // this error was almost surely handled earlier,
+                    // but we need to throw here, just to be safe.
+                    throw implementation_error("Unable to co-estimate growth and alpha.");
+                }
+                numeratorForThisRegion *= scaledparams[reg][whichparam];
+                // see comment for log params in SinglePostLike::DCalculate()
+                (*g) += numeratorForThisRegion / m_sumG_C[reg];
+            }
+        }
+
+        else // partial derivative with respect to alpha
+        {
+            for (unsigned long reg = 0; reg < m_lnProbGPo.size(); reg++)
+            {
+                numeratorForThisRegion = 0.0;
+                for (unsigned long rep = 0; rep < m_lnProbGPo[reg].size(); rep++)
+                {
+                    const vector<TreeSummary*> *pGenealogies =
+                        &(m_data[reg][rep]->treeSummaries);
+
+                    for (unsigned long G = 0; G < pGenealogies->size(); G++)
+                    {
+                        double SqrtNegLogWait = sqrt(-m_lnWait[reg][rep][G]);
+                        termRegRepG = m_K_alpha_minus_nevents_plus_1[reg][rep][G];
+                        termRegRepG *= -SqrtNegLogWait / SqrtAlpha;
+                        termRegRepG += DvBesselK(alpha - m_nevents_G[reg][rep][G],
+                                                 2.0*SqrtNegLogWait*SqrtAlpha,
+                                                 m_K_alpha_minus_nevents[reg][rep][G]);
+                        termRegRepG /= m_K_alpha_minus_nevents[reg][rep][G];
+                        termRegRepG += 1.0 + log(SqrtNegLogWait*SqrtAlpha);
+                        termRegRepG *= m_C[reg][rep][G];
+
+                        numeratorForThisRegion += termRegRepG
+                            * ((*pGenealogies)[G])->GetNCopies();
+                    }
+                }
+                if (0.0 == m_sumG_C[reg])
+                {
+#ifdef TEST
+                    cerr << "GammaRegionPostLike::DCalculate(), sum over reps and trees of region "
+                         << reg << " is " << m_sumG_C[reg] << "; this must be a positive number.  "
+                         << "Returning false and a gradient component of DBL_BIG....";
+#endif // TEST
+                    (*g) = DBL_BIG;
+                    retval = false;
+                    continue;
+                }
+
+                (*g) += numeratorForThisRegion / m_sumG_C[reg]; // psi() added later
+            }
+
+            (*g) -= m_nRegions * psi(alpha);
+            (*g) *= alpha; // Alpha is treated as a log parameter to ensure it's kept
+            // positive.  See comment in SinglePostLike::DCalculate().
+        }
+    }
+
+    return retval; // true unless m_sumG_C[reg] is zero
+}
+
+//____________________________________________________________________________________
diff --git a/src/postlike/likelihood.cpp b/src/postlike/likelihood.cpp
new file mode 100644
index 0000000..b5f7082
--- /dev/null
+++ b/src/postlike/likelihood.cpp
@@ -0,0 +1,1454 @@
+// $Id: likelihood.cpp,v 1.110 2013/10/25 17:00:53 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// parameter likelihood calculation
+//
+// <Single | Replicate | Region | GammaRegion>PostLike::Calculate()
+// returns the total log-likelihood for a set of genealogies
+// given a parameter vector P (this is written log(L(P)/L(Po))
+// when there is one unique vector of driving values "Po").
+//
+//------------------------------------------------------------------------------------
+//
+// Calling sequence and the like:
+//
+// SinglePostLike gets passed into a Maximizer constructor
+//          the data-pointer is set, but there is no data yet.
+// after a single chain Maximizer::Calculate() uses SinglePostLike::Calculate()
+// if Replication: creation of ReplicatePostLike
+// if more than 1 Region creation of RegionPostLike
+// at the end of program RegionPostLike dies
+//
+//   GradientGuideSetup(thisToDolist); assigns zeroes and ones to a
+//           vector that allows the gradient calculator to skip
+//           over parameters that should not be maximized.
+//
+
+#include <cassert>
+#include <iostream>                     // debugging
+
+#include "vectorx.h"
+#include "likelihood.h"
+#include "forceparam.h"
+#include "force.h"
+#include "definitions.h"
+#include "treesum.h"
+#include "collector.h"
+#include "mathx.h"
+#include "runreport.h"                  // for FillGeyerWeights() reporting
+#include "forcesummary.h"               // for ctor and FillForces()
+
+#ifndef NDEBUG
+//#define TEST
+#endif // NDEBUG
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+const double POSTERIOR_EPSILON = 0.00001;
+
+//------------------------------------------------------------------------------------
+
+PostLike::PostLike(const ForceSummary& fs, const long thisNregion, const long thisNreplicate, const long thisNParam)
+{
+    m_growthstart = 0L;
+    m_growthend = 0L;
+    m_nForces = fs.GetNForces();
+    m_nRegions = thisNregion;
+    m_nReplicate = thisNreplicate;
+    m_nParam = thisNParam;
+    m_pGammaRegionPostLike = NULL;
+    m_s_is_here = FLAGLONG;
+    m_forcestags = fs.GetForceTags();
+    FillForces (fs);
+}
+
+//------------------------------------------------------------------------------------
+
+PostLike::~PostLike (void)
+{
+    // deliberately blank--we don't own those pointers
+}
+
+//------------------------------------------------------------------------------------
+
+bool PostLike::isLinearParam(long whichparam)
+{
+    if (m_growthstart != m_growthend
+        && (whichparam >= m_growthstart && whichparam < m_growthend))
+        return true;
+    else if (whichparam == m_s_is_here)
+        return true;
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+void PostLike::FillForces(const ForceSummary& fs)
+{
+    const ForceVec& theseforces(fs.GetAllForces());
+    ForceVec::const_iterator fit;
+    vector < PLForces * >::iterator last;
+    bool hasgrowth(false), hasLogisticSelection(false);
+    bool hassticklogselect(false);
+    long growthsize = 0L;
+    long thetastart = 0L;
+    long thetaend = 0L;
+    long diseasestart = 0L;
+    long diseaseend = 0L;
+    long recstart = 0L;
+    PLForces *forceptr = NULL;
+    PLForces *coalptr = NULL;
+    PLForces *growptr = NULL;
+    PLForces *sticklogselptr = NULL;
+    long start = 0L, end = 0L;
+    long counter = 0;
+    for (fit = theseforces.begin(); fit != theseforces.end(); ++fit)
+    {
+        if ((*fit)->GetTag() == force_LOGSELECTSTICK)
+        {
+            forceptr = new StickSelectPL(fs);
+        }
+        else
+        {
+            forceptr = (*fit)->GetPLForceFunction();
+        }
+
+        if (!forceptr)
+        {
+            if (force_REGION_GAMMA == (*fit)->GetTag() ||
+                force_DIVERGENCE == (*fit)->GetTag())
+                continue; // by design, there's no PL force associated with this force
+            string msg = "PostLike::FillForces(), unable to retrieve a PLForces pointer ";
+            msg += "for force " + ToString((*fit)->GetTag()) + ".";
+            throw implementation_error(msg);
+        }
+        m_forces.push_back(forceptr);
+        last = m_forces.end() - 1;
+        start = end;
+        end += (*fit)->GetNParams();
+        (*last)->SetStart(start);
+        (*last)->SetEnd(end);
+
+        // fill of indicator for faster derivatives calculation
+        LongVec1d temptype(end - start, counter);
+        copy (temptype.begin (), temptype.end (), back_inserter(m_parameter_types));
+        counter++;
+
+        if((*fit)->GetTag()==force_GROW || (*fit)->GetTag()==force_EXPGROWSTICK)
+        {
+            m_growthstart = start;
+            m_growthend = end;
+            growthsize  = end - start;
+            hasgrowth = true;
+            growptr = forceptr;
+        }
+        else if((*fit)->GetTag()==force_COAL)
+        {
+            thetastart = start;
+            thetaend = end;
+            coalptr = forceptr;
+        }
+        else if (force_LOGISTICSELECTION == (*fit)->GetTag())
+        {
+            hasLogisticSelection = true;
+            m_s_is_here = start;
+        }
+        else if ((*fit)->GetTag() == force_DISEASE)
+        {
+            diseasestart = start;
+            diseaseend = end;
+        }
+        else if ((*fit)->GetTag() == force_LOGSELECTSTICK)
+        {
+            hassticklogselect = true;
+            hasLogisticSelection = true; // JDEBUG--we probably won't want
+            // to set this long term
+            m_s_is_here = start;
+            sticklogselptr = forceptr;
+        }
+        else if ((*fit)->GetTag() == force_REC)
+        {
+            recstart = start;
+        }
+
+        if (m_minvalues.size() < m_nParam) // then initialization is in progress
+        {
+            double minvalue((*fit)->GetMaximizerMinVal()), maxvalue((*fit)->GetMaximizerMaxVal());
+            long nparams((*fit)->GetNParams());
+            for (long i = 0; i < nparams; i++)
+            {
+                m_minvalues.push_back(minvalue);
+                m_maxvalues.push_back(maxvalue);
+            }
+        }
+    }
+
+    if(hasgrowth)
+    {
+        if (hasLogisticSelection)
+        {
+            string msg = "PostLike::FillForces(), detected the presence of both ";
+            msg += "the growth force and the logistic selection force.  For at least ";
+            msg += "the time being, these can\'t both be \"on\" at the same time.";
+            throw implementation_error(msg);
+        }
+        CoalesceGrowPL *temp = (dynamic_cast<CoalesceGrowPL *>(coalptr));
+        temp->SetGrowthStart(m_growthstart);
+        GrowPL *temp2 = (dynamic_cast<GrowPL *>(growptr));
+        temp2->SetThetaStart(thetastart);
+    }
+
+    bool hasLPForces((fs.GetNLocalPartitionForces() != 0));
+    bool hasRec(fs.CheckForce(force_REC));
+    if(hasLPForces && hasRec)
+    {
+        CoalescePL *temp = dynamic_cast<CoalescePL *>(coalptr);
+        if (hasgrowth)
+        {
+            CoalesceGrowPL *gtemp = dynamic_cast<CoalesceGrowPL *>(temp);
+            // JCHECK2--will want to change this....
+            gtemp->SetTimeManager(fs.CreateTimeManager());
+        }
+        temp->SetLocalPartForces(fs);
+    }
+
+    if(hassticklogselect)
+    {
+        StickSelectPL *temp = dynamic_cast<StickSelectPL *>
+            (sticklogselptr);
+        temp->SetThetastart(thetastart);
+        temp->SetThetaend(thetaend);
+        temp->SetSelCoeffIndex(m_s_is_here);
+        // JDEBUG--When we allow for more than two states, this will need to
+        // change...(at the interface level and usage, here)
+        temp->SetToSmallAIndex(diseasestart + 2);
+        temp->SetToBigAIndex(diseasestart + 1);
+        // the existence or non-existence of the localpartforces
+        // vector will be used to flag to the plforces that there is
+        // or is not recombination
+        if (hasRec)
+        {
+            temp->SetLocalPartForces(fs);
+            temp->SetRecRateIndex(recstart);
+        }
+        // now purge all the other plforceobjects but ours.....
+        m_forces.clear();
+        m_forces.push_back(sticklogselptr);
+        // JCHECK
+        // we also need to sqrunge m_parameter_types so that
+        // all parameters now map to our new singular plforce object
+        m_parameter_types.assign(m_parameter_types.size(),0L);
+    }
+
+}
+
+//------------------------------------------------------------------------------------
+// Copies force parameters into a single DoubleVec1d param0,
+// for speed's sake.  Requires output parameter param0
+// to have a size of m_nParam when it's sent to this method.
+// Called only by SetParam0().
+
+void PostLike::Linearize(const ForceParameters * pForceParameters, DoubleVec1d& param0)
+{
+    assert(param0.size() == m_nParam);
+    vector <force_type>::const_iterator fit;
+    DoubleVec1d::iterator pit = param0.begin();
+
+    for (fit = m_forcestags.begin(); fit != m_forcestags.end(); ++fit)
+    {
+        if (*fit == force_COAL)
+        {
+            //We want the regional values even if we're doing the multiregion
+            // estimate, because each region's trees will still have been
+            // created using the regional-scaled values, not the global.
+            const DoubleVec1d& thetas = pForceParameters->GetRegionalThetas();
+            copy (thetas.begin(), thetas.end(), pit);
+            pit += thetas.size();
+        }
+        else if (IsMigrationLike(*fit))
+        {
+            const DoubleVec1d& migrations = pForceParameters->GetMigRates();
+            copy (migrations.begin(), migrations.end(), pit);
+            pit += migrations.size();
+        }
+        else if (*fit == force_DISEASE)
+        {
+            const DoubleVec1d& transitions = pForceParameters->GetDiseaseRates();
+            copy (transitions.begin(), transitions.end(), pit);
+            pit += transitions.size();
+        }
+        else if (*fit == force_REC)
+        {
+            double recRate = pForceParameters->GetRecRates().front();
+            *pit = recRate;
+            pit++;
+        }
+        else if (*fit == force_GROW)
+        {
+            const DoubleVec1d& growths = pForceParameters->GetGrowthRates();
+            copy (growths.begin(), growths.end(), pit);
+            pit += growths.size();
+        }
+        else if (*fit == force_LOGISTICSELECTION)
+        {
+            const DoubleVec1d& logisticSelCoeff = pForceParameters->GetLogisticSelectionCoefficient();
+            if (1 != logisticSelCoeff.size())
+            {
+                string msg = "PostLike::Linearize() received a ";
+                msg += ToString(logisticSelCoeff.size());
+                msg += "-element vector for the logistic selection coefficient; ";
+                msg += "this must be a one-element vector.";
+                throw implementation_error(msg);
+            }
+            *pit++ = logisticSelCoeff[0];
+        }
+    }
+} // void PostLike::Linearize
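+
+// Illustrative layout (hypothetical force set, not taken from this file): with
+// a coalescence force contributing two thetas, a migration force contributing
+// two rates, and a growth force contributing two rates, Linearize() would
+// leave param0 as
+//     [ theta_1, theta_2, mig_12, mig_21, growth_1, growth_2 ]
+// i.e. one flat vector with each force's parameters laid out contiguously in
+// m_forcestags order.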
+
+//------------------------------------------------------------------------------------
+
+// Default implementation of a function to compute and return the following:
+//
+// sum_over_all_trees( Prob(G|param)/Prob(G|param0) )
+// == sum_over_unique_trees( nG * Prob(G|param)/Prob(G|param0) )
+//
+// where param0 is the starting point ("driving values") in parameter space,
+//       param  is the current parameter vector,
+//       G      is one of the genealogies over which we're summing,
+//  and  nG     is the number of identical copies of genealogy G.
+//
+// Input parameters:  param, lparam (log values of param), lnProbGPo,
+//                   pGenealogies.
+// Output parameters:  ln_ProbGP_over_ProbGPo, sum_ProbGP_over_ProbGPo.
+//
+// This function fills ln_ProbGP_over_ProbGPo with log(Prob(G|P)/Prob(G|Po)),
+// and sets sumG_ProbGP_over_ProbGPo to
+// sum_over_unique_genealogies(nG * Prob(G|P)/Prob(G|Po)).
+//
+// This function can also be used with Geyer replicates, with input parameter
+// lnProb(G|param0) replaced by "lnProb(G|param0),"
+// where "lnProb(G|param0)" = log(sum_over_replicates((NtrR/wR)*Prob(G|param0R))),
+//       NtrR    is the total number of (non-unique) trees in replicate R,
+//       wR      is the Geyer weight assigned to replicate R,
+//  and  param0R is the starting point ("driving value") for replicate R.
+//
+// This function returns false if Prob(G|param) is 0 for all G
+// (underflow can cause this); otherwise it returns true.
+// "False" means "param" (the parameter values P) is utterly incapable
+// of producing the set of genealogy trees "pGenealogies."
+// When false is returned, -DBL_BIG is returned in lnL.
+
+bool PostLike::Calc_sumG_ProbGP_over_ProbGPo(const DoubleVec1d& param,
+                                             const DoubleVec1d& lparam,
+                                             const DoubleVec1d& ln_ProbGPo,
+                                             DoubleVec1d& ln_ProbGP_over_ProbGPo,
+                                             double& sumG_ProbGP_over_ProbGPo,
+                                             const vector<TreeSummary*> *pGenealogies)
+{
+    vector<double>::const_iterator i;
+    vector<double>::iterator j;
+    vector<TreeSummary*>::const_iterator currentGenealogy;
+    long ncopies = 0;
+
+    sumG_ProbGP_over_ProbGPo = 0.0;
+
+    for (currentGenealogy = pGenealogies->begin(),
+             i = ln_ProbGPo.begin(),
+             j = ln_ProbGP_over_ProbGPo.begin();
+         currentGenealogy != pGenealogies->end();
+         ++i, ++currentGenealogy, ++j)
+    {
+        // Store log( Prob(G|P)/"Prob(G|Po)" ) in vector ln_ProbGP_over_ProbGPo.
+        // Vector ln_ProbGPo already contains "log(Prob(G|Po)),"
+        // which may include Geyer weights and replicate-dependent
+        // driving values as described above.
+        *j = Calc_lnProbGP(param, lparam, (*currentGenealogy)) - *i;
+
+        // Our tree-generating procedure can generate identical trees,
+        // both in shape and branch length (e.g., we find and store a good tree,
+        // then because it's good, each proposed change to it gets rejected
+        // for several proposed rearrangements, causing us to sample and store it
+        // again after these rounds of rejections).
+        // "ncopies" is the number of identical copies of the given tree.
+        ncopies = (*currentGenealogy)->GetNCopies();
+        sumG_ProbGP_over_ProbGPo += SafeProductWithExp(1.0*ncopies, (*j));
+    }
+
+    if (sumG_ProbGP_over_ProbGPo > 0.0)
+        return true;
+
+#ifdef TEST
+    cerr << "PostLike::Calculate_sumG_ProbGP_over_ProbGPo() -- zero "
+         << "likelihood, returning \"false\"..." << endl;
+#endif // TEST
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+// Calculates and returns log(PointProb(G|param) * WaitProb(G|param))
+// for parameter vector "param" whose logarithms are "lparam,"
+// for the genealogy pointed to by pThisGenealogy.
+// NOTE:  This basic calculation is used in both
+// likelihood and Bayesian analyses.
+// NOTE:  If the caller is calculating lnL for a GammaRegionPostLike
+// object, then this otherwise-generic function stores two numbers
+// in the GammaRegionPostLike object for speed improvements in the latter.
+
+double PostLike::Calc_lnProbGP(const DoubleVec1d& param, const DoubleVec1d& lparam, const TreeSummary *pThisGenealogy)
+{
+    vector<PLForces*>::const_iterator i;
+    double total_lnPointProb(0.0), total_lnWaitProb(0.0),
+        current_lnPointProb(0.0), current_lnWaitProb(0.0);
+
+    for (i = m_forces.begin(); i != m_forces.end (); ++i)
+    {
+        current_lnPointProb = (*i)->lnPoint(param, lparam, pThisGenealogy);
+        current_lnWaitProb  = (*i)->lnWait(param, pThisGenealogy);
+        total_lnPointProb += current_lnPointProb;
+        total_lnWaitProb  += current_lnWaitProb;
+    }
+
+    if (m_pGammaRegionPostLike)
+    {
+        m_pGammaRegionPostLike->Store_Current_lnPoint(total_lnPointProb);
+        m_pGammaRegionPostLike->Store_Current_lnWait(total_lnWaitProb);
+    }
+
+    return total_lnPointProb + total_lnWaitProb;
+}
+
+//------------------------------------------------------------------------------------
+
+double PostLike::Calc_lnProbGS(const DoubleVec1d& param, const DoubleVec1d& lparam, const TreeSummary *pThisGenealogy)
+{
+    // we cannot use GetStickSelectPL since that is, correctly, const
+    // and for some reason the calculators are non-const....
+    // So, we end up duplicating the code from GetStickSelectPL here.
+
+    // JDEBUG: this will probably go away when we refactor PLForces
+    // as modular equation components.
+    if (m_forces.size() != 1)
+    {
+        assert(0);
+        string msg = "PostLike::Calc_lnProbGS() has been called when";
+        msg += "there exist zero or many PLForces.";
+        throw implementation_error(msg);
+    }
+
+    StickSelectPL* stickpl = dynamic_cast<StickSelectPL*>(m_forces[0]);
+    if (stickpl == NULL)
+    {
+        assert(0);
+        string msg = "PostLike::Calc_lnProbGS() has been called when";
+        msg += "there is no StickSelectPLForce.";
+        throw implementation_error(msg);
+    }
+
+    return stickpl->lnPTreeStick(param,lparam,pThisGenealogy);
+}
+
+//------------------------------------------------------------------------------------
+// Evil kludge for stair rearranger to access the Mean and
+// Variance calculators used by the StickSelectPLforce.
+//
+// It may throw an implementation error.
+
+const StickSelectPL& PostLike::GetStickSelectPL() const
+{
+    // JDEBUG: this will probably go away when we refactor PLForces
+    // as modular equation components.
+    if (m_forces.size() != 1)
+    {
+        assert(0);
+        string msg = "PostLike::GetStickSelectPL() has been called when";
+        msg += "there exist zero or many PLForces.";
+        throw implementation_error(msg);
+    }
+
+    const StickSelectPL* stickpl = dynamic_cast<const StickSelectPL*>
+        (m_forces[0]);
+    if (stickpl == NULL)
+    {
+        assert(0);
+        string msg = "PostLike::GetStickSelectPL() has been called when";
+        msg += "there is no StickSelectPLForce.";
+        throw implementation_error(msg);
+    }
+
+    return *stickpl;
+
+}
+
+//------------------------------------------------------------------------------------
+// Single-region, single-replicate likelihood.
+
+SinglePostLike::SinglePostLike(const ForceSummary &thisforces,
+                               const long thisNregion,
+                               const long thisNreplicate,
+                               const long thisNParam):
+    PostLike(thisforces, thisNregion, thisNreplicate, thisNParam),
+    m_param0(thisNParam), m_lparam0(thisNParam),
+    m_last_lnL(0.0), m_totalNumTrees(0.0)
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+SinglePostLike::~SinglePostLike()
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+// Called only by Setup().
+
+void SinglePostLike::SetParam0()
+{
+    // fills m_param0 by reference
+    Linearize(&(m_data->forceParameters), m_param0);
+    LogVec0(m_param0, m_lparam0);
+    for(unsigned long i=0; i < m_param0.size(); i++)
+    {
+        if(isLinearParam(i))
+            m_lparam0[i] = m_param0[i];
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Calculates log(Prob(G|param0)) for the given
+// genealogies G, and stores these in m_lnProbGPo.
+// Also stores the total number of (non-unique) trees.
+
+void SinglePostLike::Setup(TreeCollector * pTreeCollector)
+{
+    m_data = pTreeCollector;
+    m_totalNumTrees = static_cast<double>(m_data->TreeCount());
+    vector<TreeSummary*> genealogies = pTreeCollector->treeSummaries;
+    if (m_lnProbGPo.size() != genealogies.size())
+    {
+        m_lnProbGPo.resize(genealogies.size(), 0.0);
+        m_ln_ProbGP_over_ProbGPo.resize(genealogies.size(), 0.0);
+    }
+    SetParam0();
+    for (unsigned long G = 0; G < genealogies.size(); G++)
+        m_lnProbGPo[G] = Calc_lnProbGP(m_param0, m_lparam0, genealogies[G]);
+}
+
+//------------------------------------------------------------------------------------
+// Updates m_ln_ProbGP_over_ProbGPo = log(Prob(G|P)/Prob(G|Po)) and m_last_lnL.
+// Returns lnL = log((1/N)*sum_over_genealogies(Prob(G|P)/Prob(G|Po))),
+// for the underlying trees G and driving values Po.
+// (G and Po are retrieved from private member variables.)
+//
+// If Prob(G|P) is 0 for all G (underflow can cause this),
+// we return false and lnL = -DBL_BIG.  Otherwise we return true.
+
+bool SinglePostLike::Calculate(const DoubleVec1d& param,
+                               const DoubleVec1d& lparam,
+                               double& lnL)
+{
+    double sumG_ProbGP_over_ProbGPo(0.0);
+    if (!PostLike::Calc_sumG_ProbGP_over_ProbGPo(param, lparam,
+                                                 m_lnProbGPo,
+                                                 m_ln_ProbGP_over_ProbGPo,
+                                                 sumG_ProbGP_over_ProbGPo,
+                                                 &(m_data->treeSummaries)))
+    {
+        m_last_lnL = lnL = -DBL_BIG;
+        return false;
+    }
+
+    // sumG_ProbGP_over_ProbGPo is guaranteed to be positive (nonzero).
+    m_last_lnL = lnL = log(sumG_ProbGP_over_ProbGPo/m_totalNumTrees);
+    return true;
+}
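+
+// Sanity check (illustrative): evaluated at the driving values themselves
+// (param == param0), every entry of m_ln_ProbGP_over_ProbGPo is 0, so
+// sumG_ProbGP_over_ProbGPo equals m_totalNumTrees and lnL = log(N/N) = 0.
+// The value returned here is therefore a log-likelihood relative to Po, as
+// described in the header comment of this file.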
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d SinglePostLike::GetMeanParam0()
+{
+    return m_param0;
+    // The "mean driving values" only require a computation
+    // in the multi-replicate or multi-region cases.
+}
+
+//------------------------------------------------------------------------------------
+// Single-region, multi-replicate likelihood.
+
+ReplicatePostLike::ReplicatePostLike(const ForceSummary &thisforces,
+                                     const long thisNregion,
+                                     const long thisNreplicate,
+                                     const long thisNParam):
+    PostLike(thisforces, thisNregion, thisNreplicate,
+             thisNParam), m_last_lnL(0.0)
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+// FillGeyerWeights() is a helper function to Setup() that deals with
+// the Thompson-Geyer weighting for combining trees created under differing
+// driving values.  GeyerLike() and GeyerStart() are helpers to
+// FillGeyerWeights().
+//
+// FillGeyerWeights() overwrites the stored P(G|Po) values (m_lnProbGPo) and
+// should be called after CreateProbG0(), just after the initial g0 values
+// are calculated but before they are used.
+//
+// Once FillGeyerWeights() has been called, Calculate() (and the derivative
+// calculations that hang off its results, etc.) may be called normally.
+//
+// GeyerLike() is the core likelihood function used in FillGeyerWeights().
+//
+// GeyerStart() is used to initialize the weights within FillGeyerWeights().
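+//
+// A rough sketch of the iteration performed below (the details live in
+// GeyerStart(), GeyerLike(), and TransformProbG0With()):
+//
+//   w[rep] = single-replicate lnL of replicate rep            (GeyerStart)
+//   repeat:
+//     for each rep:  w[rep] = GeyerLike(theta0[rep], oldlike) under the
+//                             currently weighted P(G|Po)
+//     reweight P(G|Po) with the new w                         (TransformProbG0With)
+//     newlike[rep] = Calculate(theta0[rep])
+//   until every |newlike[rep] - oldlike[rep]| < POSTERIOR_EPSILON,
+//   or defaults::geyeriters iterations have been used.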
+
+bool ReplicatePostLike::FillGeyerWeights()
+{
+    m_logGeyerWeights = DoubleVec1d(m_param0.size(), 0.0);
+    RunReport& runreport(registry.GetRunReport());
+
+    // compute single-replicate likelihoods as starting values
+    // for the iteration
+    unsigned long rep, nreps(m_param0.size());
+    if (!GeyerStart(m_logGeyerWeights))
+    {
+        string msg = "FillGeyerWeights: starting value failure\n";
+        runreport.ReportDebug(msg);
+    }
+    // We normalize to the largest value to keep the computation within
+    // machine range; while this may shift the resulting curve(s), it should
+    // not affect their mean or spread.
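+    // (ScaleLargestToZero() presumably subtracts the largest log weight from
+    // every entry, so the largest becomes 0 and the rest are <= 0.)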
+    ScaleLargestToZero(m_logGeyerWeights);
+    TransformProbG0With(m_logGeyerWeights);
+    DoubleVec1d oldlike(nreps);
+    for(rep = 0; rep < nreps; ++rep)
+    {
+        Calculate(m_param0[rep],m_lparam0[rep],oldlike[rep]);
+    }
+    bool alldone;
+    long cnt(0);
+    do {
+        ++cnt;
+        alldone = true;
+        for(rep = 0; rep < nreps; ++rep)
+        {
+            if (!GeyerLike(m_param0[rep],m_lparam0[rep],m_logGeyerWeights[rep],oldlike))
+            {
+                string msg = "FillGeyerWeights:  overflow in iteration " +
+                    ToString(cnt) + " for replicate " + ToString(rep) + "\n";
+                runreport.ReportDebug(msg);
+            }
+        }
+        TransformProbG0With(m_logGeyerWeights);
+        DoubleVec1d newlike(nreps);
+        for(rep = 0; rep < nreps; ++rep)
+        {
+            Calculate(m_param0[rep],m_lparam0[rep],newlike[rep]);
+            if (fabs(newlike[rep]-oldlike[rep]) > POSTERIOR_EPSILON)
+                alldone = false;
+        }
+        oldlike = newlike;
+    } while (!alldone && cnt < defaults::geyeriters);
+
+    if (!alldone)
+    {
+        string msg = "Internal error: defaults::geyeriters needs to be";
+        msg += " increased.\n";
+        runreport.ReportUrgent(msg);
+    }
+
+    // We normalize to the largest value to keep the computation within
+    // machine range; while this may shift the resulting curve(s), it should
+    // not affect their mean or spread.
+    ScaleLargestToZero(m_logGeyerWeights);
+
+    // now calculate the weighted P(G|theta0)
+    TransformProbG0With(m_logGeyerWeights);
+
+    return true;
+
+}
+
+//------------------------------------------------------------------------------------
+
+void ReplicatePostLike::TransformProbG0With(const DoubleVec1d& weights)
+{
+    unsigned long rep, nreps(m_param0.size());
+    DoubleVec1d nuniquetrees(nreps,0.0), ntrees(nreps,0.0);
+    for(rep = 0; rep < nreps; ++rep)
+    {
+        nuniquetrees[rep] = m_data[rep]->treeSummaries.size();
+        ntrees[rep] = static_cast<double>(m_data[rep]->TreeCount());
+    }
+
+    for(rep = 0; rep < nreps; ++rep)
+    {
+        unsigned long tree;
+        for(tree = 0; tree < nuniquetrees[rep]; ++tree)
+        {
+            unsigned long rep2;
+            m_lnProbGPo[rep][tree] = 0.0;
+            for(rep2 = 0; rep2 < nreps; ++rep2)
+            {
+                m_lnProbGPo[rep][tree] += SafeProductWithExp(ntrees[rep2],
+                                                             Calc_lnProbGP(m_param0[rep2],m_lparam0[rep2],
+                                                                           m_data[rep]->treeSummaries[tree])
+                                                             - weights[rep2]);
+            }
+            m_lnProbGPo[rep][tree] = log(m_lnProbGPo[rep][tree]);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+// GeyerLike() computes, for the evaluation parameters theta_eval,
+//
+//   newlike = log( sum_over_reps_and_trees( ncopies(G) * P(G|theta_eval) / denom(G) ) ) - log(totaltrees)
+//
+// where denom(G) = sum_over_reps( ptrees[rep] * P(G|theta0[rep]) / exp(oldlike[rep]) )
+//   and ptrees[rep] = ntrees[rep] / sum_over_reps(ntrees[rep]).
+
+bool ReplicatePostLike::GeyerLike(const DoubleVec1d& param,
+                                  const DoubleVec1d& lparam, double& newlike, const DoubleVec1d& oldlike)
+{
+    unsigned long rep, nreps(m_param0.size()), tree;
+    vector<unsigned long> nuniquetrees(nreps,0);
+    DoubleVec1d ptrees(nreps,0.0);
+    for(rep = 0; rep < nreps; ++rep)
+    {
+        nuniquetrees[rep] = m_data[rep]->treeSummaries.size();
+        ptrees[rep] = static_cast<double>(m_data[rep]->TreeCount());
+    }
+
+    // do some precalculations for speed
+    double totaltrees(accumulate(ptrees.begin(), ptrees.end(), 0.0));
+    // type 'double' needed for use in std::bind2nd
+    transform(ptrees.begin(), ptrees.end(), ptrees.begin(), bind2nd(divides<double>(), totaltrees));
+
+    newlike = 0.0;
+    for(rep = 0; rep < nreps; ++rep)
+    {
+        for(tree = 0; tree < nuniquetrees[rep]; ++tree)
+        {
+            TreeSummary* tr(m_data[rep]->treeSummaries[tree]);
+            long ncopies(tr->GetNCopies());
+
+            double denom(0.0);
+            unsigned long trrep;
+            for(trrep = 0; trrep < nreps; ++trrep)
+            {
+                denom += SafeProductWithExp(ptrees[trrep], m_pg0[rep][tree][trrep] - oldlike[trrep]);
+            }
+            if (denom)
+                denom = log(denom);
+            else
+            {
+                newlike = DBL_BIG;
+                return false;
+            }
+
+            newlike += SafeProductWithExp(ncopies, Calc_lnProbGP(param,lparam,tr) - denom);
+        }
+    }
+
+    if (!newlike)
+    {
+        newlike = -DBL_BIG;
+        return false;
+    }
+    else
+        newlike = log(newlike) - log(totaltrees);
+
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+bool ReplicatePostLike::GeyerStart (DoubleVec1d& lnL)
+{
+    unsigned long rep, nreps(m_param0.size());
+    for(rep = 0; rep < nreps; ++rep)
+    {
+        unsigned long nuniquetrees = m_data[rep]->treeSummaries.size();
+        // double needed here for some earlier versions of cmath
+        double totaltrees = static_cast<double>(m_data[rep]->TreeCount());
+        lnL[rep] = 0.0;
+
+        unsigned long tree;
+        for(tree = 0; tree < nuniquetrees; ++tree)
+        {
+            TreeSummary* tr(m_data[rep]->treeSummaries[tree]);
+            lnL[rep] += SafeProductWithExp(tr->GetNCopies(),
+                                           m_lnProbGPo[rep][tree]);
+            // m_pg0[rep][tree][rep]);
+        }
+
+        if (!lnL[rep])
+        {
+            lnL[rep] = -DBL_BIG;
+            return false;
+        }
+        else
+            lnL[rep] = log(lnL[rep]) - log(totaltrees);
+
+    }
+    return true;
+
+}
+
+//------------------------------------------------------------------------------------
+// Called only by Setup().
+
+void ReplicatePostLike::SetParam0 ()
+{
+    m_param0.clear();
+    m_lparam0.clear();
+    for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+    {
+        DoubleVec1d param0(m_nParam), lparam0(m_nParam);
+        Linearize(&(m_data[rep]->forceParameters), param0);
+        LogVec0(param0, lparam0);
+        m_param0.push_back(param0);
+        m_lparam0.push_back(lparam0);
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Fills m_lnProbGPo by calling PostLike::Calc_lnProbGP() with replicate/tree data.
+
+void ReplicatePostLike::CreateProbG0()
+{
+    m_pg0.resize(m_nReplicate);
+    for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+    {
+        m_pg0[rep].resize(m_data[rep]->treeSummaries.size());
+        for (unsigned long G = 0; G < m_data[rep]->treeSummaries.size(); G++)
+        {
+            m_lnProbGPo[rep][G] = Calc_lnProbGP(m_param0[rep], m_lparam0[rep],
+                                                m_data[rep]->treeSummaries[G]);
+
+            // now do the likelihood under each other replicate
+            m_pg0[rep][G].resize(m_nReplicate);
+            for(unsigned long trrep = 0; trrep < m_nReplicate; ++trrep)
+            {
+                m_pg0[rep][G][trrep] = ((trrep == rep) ? m_lnProbGPo[rep][G] :
+                                        Calc_lnProbGP(m_param0[trrep],m_lparam0[trrep],
+                                                      m_data[rep]->treeSummaries[G]));
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+bool ReplicatePostLike::Setup(const vector<TreeCollector*>& treedata)
+{
+    m_data = treedata;
+    SetParam0();
+
+    m_lnProbGPo.clear();
+    m_ln_ProbGP_over_ProbGPo.clear();
+
+    for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+    {
+        DoubleVec1d numtrees_zeroes(m_data[rep]->treeSummaries.size(), 0.0);
+        m_lnProbGPo.push_back(numtrees_zeroes);
+        m_ln_ProbGP_over_ProbGPo.push_back(numtrees_zeroes);
+    }
+
+    CreateProbG0();
+    return FillGeyerWeights();
+}
+
+//------------------------------------------------------------------------------------
+
+bool ReplicatePostLike::Calculate(const DoubleVec1d& param, const DoubleVec1d& lparam, double& lnL)
+{
+    double sum_over_replicates(0.0), sumForThisReplicate(0.0);
+
+    for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+    {
+        if (!PostLike::Calc_sumG_ProbGP_over_ProbGPo(param, lparam,
+                                                     m_lnProbGPo[rep],
+                                                     m_ln_ProbGP_over_ProbGPo[rep],
+                                                     sumForThisReplicate,
+                                                     &(m_data[rep]->treeSummaries)))
+        {
+            lnL = -DBL_BIG;
+            m_last_lnL = lnL;
+            return false;
+        }
+        sum_over_replicates += sumForThisReplicate;
+    }
+
+    if (sum_over_replicates > 0.0)
+    {
+        m_last_lnL = lnL = log(sum_over_replicates);
+        return true;
+    }
+
+    m_last_lnL = lnL = -DBL_BIG;
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ReplicatePostLike::GetMeanParam0 ()
+{
+    long size = m_param0.size();
+    DoubleVec1d meanparam0(m_param0.begin()->size());
+    DoubleVec2d::iterator p0;
+    DoubleVec1d::iterator i;
+    for (p0 = m_param0.begin(); p0 != m_param0.end(); ++p0)
+    {
+        transform (meanparam0.begin(), meanparam0.end(),
+                   (*p0).begin(), meanparam0.begin(), plus<double>());
+    }
+    for (i = meanparam0.begin(); i != meanparam0.end(); ++i)
+    {
+        (*i) /= size;
+    }
+    return meanparam0;
+}
+
+//------------------------------------------------------------------------------------
+// Multiple-region, multiple-replicate likelihood.
+
+RegionPostLike::RegionPostLike(const ForceSummary &thisforces, const long thisNregion, const long thisNreplicate,
+                               const long thisNParam, DoubleVec2d paramscalars) :
+    PostLike(thisforces, thisNregion, thisNreplicate, thisNParam),
+    m_sumG_ProbGP_over_ProbGPo(thisNregion, 0.0), m_paramscalars(paramscalars),
+    m_totalNumTrees(thisNregion, 0.0), m_last_lnL(0.0)
+{
+    m_lparamscalars = CreateVec2d(thisNregion, thisNParam, 1.0);
+
+    for(long reg = 0; reg < thisNregion; ++reg)
+        LogVec0(m_paramscalars[reg], m_lparamscalars[reg]);
+}
+
+//------------------------------------------------------------------------------------
+// Called only by Setup().
+
+void RegionPostLike::SetParam0()
+{
+    m_param0.clear();
+    m_lparam0.clear();
+
+    for (unsigned long reg = 0; reg < m_nRegions; reg++)
+    {
+        DoubleVec1d param0(m_nParam), lparam0(m_nParam);
+        DoubleVec2d tmpParam0;
+        DoubleVec2d tmpLParam0;
+
+        for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+        {
+            // param0 filled by reference
+            Linearize(&(m_data[reg][rep]->forceParameters), param0);
+            transform(param0.begin(), param0.end(), m_paramscalars[reg].begin(),
+                      param0.begin(), multiplies<double>());
+            LogVec0(param0, lparam0);
+            for (unsigned long i = 0; i < m_nParam; i++)
+                if (isLinearParam(i))
+                    lparam0[i] = param0[i];
+            tmpParam0.push_back(param0);
+            tmpLParam0.push_back(lparam0);
+        }
+        m_param0.push_back(tmpParam0);
+        m_lparam0.push_back(tmpLParam0);
+        tmpParam0.clear();
+        tmpLParam0.clear();
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void RegionPostLike::Setup(const vector<vector<TreeCollector*> >& treedata, const DoubleVec2d& logGeyerWeights)
+{
+    m_data = treedata;
+    SetParam0();
+
+    if (!(m_lnProbGPo.empty() && m_ln_ProbGP_over_ProbGPo.empty() &&
+          m_pg0.empty()))
+    {
+        assert(0);
+        string msg = "RegionPostLike::Setup() has been called more than once.";
+        throw implementation_error(msg);
+    }
+    m_pg0.resize(m_nRegions);
+
+    for (unsigned long reg = 0; reg < m_nRegions; reg++)
+    {
+        m_pg0[reg].resize(m_nReplicate);
+        DoubleVec2d numreps_zerovectors;
+        for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+        {
+            DoubleVec1d numtrees_zeroes(m_data[reg][rep]->treeSummaries.size(), 0.0);
+            numreps_zerovectors.push_back(numtrees_zeroes);
+            m_pg0[reg][rep].resize(m_data[reg][rep]->treeSummaries.size());
+        }
+        m_lnProbGPo.push_back(numreps_zerovectors);
+        m_ln_ProbGP_over_ProbGPo.push_back(numreps_zerovectors);
+    }
+
+    // Note:  m_sumG_ProbGP_over_ProbGPo was created in the constructor.
+
+    double totalNumTreesThisRegion(0.0);
+
+    for (unsigned long reg = 0; reg < m_nRegions; reg++)
+    {
+        totalNumTreesThisRegion = 0.0;
+        for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+        {
+            totalNumTreesThisRegion +=
+                static_cast<double>(m_data[reg][rep]->TreeCount());
+            for (unsigned long G = 0;
+                 G < m_data[reg][rep]->treeSummaries.size(); G++)
+            {
+                m_lnProbGPo[reg][rep][G] =
+                    Calc_lnProbGP(m_param0[reg][rep],
+                                  m_lparam0[reg][rep],
+                                  m_data[reg][rep]->treeSummaries[G]);
+
+                // now do the likelihood under each other replicate
+                unsigned long trrep;
+                m_pg0[reg][rep][G].resize(m_nReplicate);
+                for(trrep = 0; trrep < m_nReplicate; ++trrep)
+                {
+                    m_pg0[reg][rep][G][trrep] =
+                        ((trrep == rep) ? m_lnProbGPo[reg][rep][G] :
+                         Calc_lnProbGP(m_param0[reg][trrep],m_lparam0[reg][trrep],
+                                       m_data[reg][rep]->treeSummaries[G]));
+                }
+            }
+        }
+        m_totalNumTrees[reg] = totalNumTreesThisRegion;
+    }
+
+    for(unsigned long reg = 0; reg < m_nRegions; ++reg)
+    {
+        TransformProbG0With(logGeyerWeights[reg], reg);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+bool RegionPostLike::Calculate(const DoubleVec1d& param, const DoubleVec1d& lparam, double& lnL)
+{
+    bool usingGeyer = (m_nReplicate > 1 ? true : false);
+    double total_lnL(0.0), sum_forThisReplicate(0.0);
+
+    for (unsigned long reg = 0; reg < m_nRegions; reg++)
+    {
+        // Scale the parameter vector, e.g., to account for inherent
+        // differences between nuclear DNA and Y-chromosome DNA and mtDNA.
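+        // (For instance, under equal sex ratios mtDNA has roughly one quarter
+        // of the effective population size of autosomal nuclear DNA, so its
+        // Theta scalar would be about 0.25.)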
+
+        DoubleVec1d scaledparam(param.size()), scaledlparam(lparam.size());
+        transform(param.begin(), param.end(), m_paramscalars[reg].begin(),
+                  scaledparam.begin(), multiplies<double>());
+        transform(lparam.begin(),lparam.end(), m_lparamscalars[reg].begin(),
+                  scaledlparam.begin(), plus<double>());
+
+        m_sumG_ProbGP_over_ProbGPo[reg] = 0.0;
+        for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+        {
+            if (!PostLike::Calc_sumG_ProbGP_over_ProbGPo(scaledparam,
+                                                         scaledlparam,
+                                                         m_lnProbGPo[reg][rep],
+                                                         m_ln_ProbGP_over_ProbGPo[reg][rep],
+                                                         sum_forThisReplicate,
+                                                         &(m_data[reg][rep]->treeSummaries)))
+            {
+                m_last_lnL = lnL = -DBL_BIG;
+                return false;
+            }
+            m_sumG_ProbGP_over_ProbGPo[reg] += sum_forThisReplicate;
+        }
+        if (!usingGeyer)
+            total_lnL += log(m_sumG_ProbGP_over_ProbGPo[reg]/m_totalNumTrees[reg]);
+        else
+            total_lnL += log(m_sumG_ProbGP_over_ProbGPo[reg]);
+    }
+
+    m_last_lnL = lnL = total_lnL;
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d RegionPostLike::GetMeanParam0 ()
+{
+    long size = 0;
+    DoubleVec1d meanparam0(m_param0.begin()->begin()->size());
+
+    DoubleVec3d::iterator p00;
+    DoubleVec2d::iterator p01;
+    DoubleVec1d::iterator i;
+
+    for (p00 = m_param0.begin(); p00 != m_param0.end(); ++p00)
+    {
+        size += p00->size();
+        for (p01 = p00->begin(); p01 != p00->end(); ++p01)
+            transform(meanparam0.begin(), meanparam0.end(),
+                      p01->begin(), meanparam0.begin(), plus<double>());
+    }
+
+    for (i = meanparam0.begin(); i != meanparam0.end(); ++i)
+    {
+        (*i) /= size;
+    }
+
+    return meanparam0;
+}
+
+//------------------------------------------------------------------------------------
+
+void RegionPostLike::TransformProbG0With(const DoubleVec1d& weights, long region)
+{
+    unsigned long rep, nreps(m_param0[region].size());
+    if (nreps == 1) return;
+    DoubleVec1d nuniquetrees(nreps,0.0), ntrees(nreps,0.0);
+    for(rep = 0; rep < nreps; ++rep)
+    {
+        nuniquetrees[rep] = m_data[region][rep]->treeSummaries.size();
+        ntrees[rep] = static_cast<double>(m_data[region][rep]->TreeCount());
+    }
+
+    for(rep = 0; rep < nreps; ++rep)
+    {
+        unsigned long tree;
+        for(tree = 0; tree < nuniquetrees[rep]; ++tree)
+        {
+            unsigned long rep2;
+            m_lnProbGPo[region][rep][tree] = 0.0;
+            for(rep2 = 0; rep2 < nreps; ++rep2)
+            {
+                m_lnProbGPo[region][rep][tree] +=
+                    SafeProductWithExp(ntrees[rep2],
+                                       Calc_lnProbGP(m_param0[region][rep2],
+                                                     m_lparam0[region][rep2],
+                                                     m_data[region][rep]->treeSummaries[tree]
+                                           ) - weights[rep2]);
+            }
+            m_lnProbGPo[region][rep][tree] = SafeLog(m_lnProbGPo[region][rep][tree]);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Multiple-region, multiple-replicate likelihood,
+// with the background mutation rates gamma-distributed over regions.
+
+GammaRegionPostLike::GammaRegionPostLike(const ForceSummary& thisforces,
+                                         const long thisNregion, const long thisNreplicate,
+                                         const long thisNParam, DoubleVec2d paramscalars) :
+    RegionPostLike(thisforces, thisNregion, thisNreplicate, thisNParam,
+                   paramscalars), m_sumG_C(thisNregion, 0.0)
+{
+    // Note: The vector m_sumG_C is used in place of m_sumG_ProbGP_over_ProbGPo.
+
+    const RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+    if (m_growthstart != m_growthend && NULL != pRegionGammaInfo)
+    {
+        // Growth and alpha are being estimated.
+        string msg = "Attempted to use a gamma distribution to allow the ";
+        msg += "background mutation rate to vary among genomic regions, ";
+        msg += "while estimating population growth/shrinkage rates.  ";
+        msg += "Currently, the gamma distribution cannot be used ";
+        msg += "if growth rates are being inferred.";
+        throw implementation_error(msg);
+    }
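+    // Append a neutral scalar for the alpha slot, so that the element-wise
+    // scaling applied in Calculate() (multiply by 1.0, or add 0.0 in log
+    // space) leaves that slot unchanged.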
+    for (unsigned long reg = 0; reg < m_paramscalars.size(); reg++)
+    {
+        m_paramscalars[reg].push_back(1.0);
+        m_lparamscalars[reg].push_back(0.0);
+    }
+    m_pGammaRegionPostLike = this; // for use by the base class, for speed
+}
+
+//------------------------------------------------------------------------------------
+// Store log(PointProb(G|P)) for each tree G of each replicate of each region.
+// We store these during calculation of the log-likelihood,
+// so we can re-use them in the gradient calculations.
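+// The iterators advance odometer-style: the innermost (genealogy) iterator
+// moves fastest, then the replicate iterator, then the region iterator.  The
+// resulting storage order matches the (region, replicate, tree) loop nesting
+// used in Setup() and Calculate().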
+
+void GammaRegionPostLike::Store_Current_lnPoint(const double& value)
+{
+    *m_lnPoint_genealogy_it = value;
+    m_lnPoint_genealogy_it++;
+
+    if (m_lnPoint_genealogy_it == m_lnPoint_rep_it->end())
+    {
+        m_lnPoint_rep_it++;
+        if (m_lnPoint_rep_it == m_lnPoint_reg_it->end())
+        {
+            m_lnPoint_reg_it++;
+            if (m_lnPoint_reg_it == m_lnPoint.end())
+            {
+                m_lnPoint_reg_it = m_lnPoint.begin();
+            }
+            m_lnPoint_rep_it = m_lnPoint_reg_it->begin();
+        }
+        m_lnPoint_genealogy_it = m_lnPoint_rep_it->begin();
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Store log(WaitProb(G|P)) for each tree G of each replicate of each region.
+// We store these during calculation of the log-likelihood,
+// so we can re-use them in the gradient calculations.
+
+void GammaRegionPostLike::Store_Current_lnWait(const double& value)
+{
+    *m_lnWait_genealogy_it = value;
+    m_lnWait_genealogy_it++;
+
+    if (m_lnWait_genealogy_it == m_lnWait_rep_it->end())
+    {
+        m_lnWait_rep_it++;
+        if (m_lnWait_rep_it == m_lnWait_reg_it->end())
+        {
+            m_lnWait_reg_it++;
+            if (m_lnWait_reg_it == m_lnWait.end())
+            {
+                m_lnWait_reg_it = m_lnWait.begin();
+            }
+            m_lnWait_rep_it = m_lnWait_reg_it->begin();
+        }
+        m_lnWait_genealogy_it = m_lnWait_rep_it->begin();
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void GammaRegionPostLike::Setup(const vector<vector<TreeCollector*> >& treedata, const DoubleVec2d& logGeyerWeights)
+{
+    if (!(m_lnProbGPo.empty() && m_C.empty() && m_lnPoint.empty() && m_lnWait.empty() &&
+          m_K_alpha_minus_nevents.empty() && m_K_alpha_minus_nevents_plus_1.empty() &&
+          m_nevents_G.empty() && m_pg0.empty()))
+    {
+        string msg = "Internal logic error:  GammaRegionPostLike::Setup() ";
+        msg += "has been called more than once.";
+        throw implementation_error(msg);
+    }
+
+    m_data = treedata;
+    SetParam0();
+
+    m_pg0.resize(m_nRegions);
+
+    for (unsigned long reg = 0; reg < m_nRegions; reg++)
+    {
+        m_pg0[reg].resize(m_nReplicate);
+        DoubleVec2d numreps_zerovectors;
+        for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+        {
+            DoubleVec1d numUniqueTrees_zeroes(m_data[reg][rep]->treeSummaries.size(), 0.0);
+            numreps_zerovectors.push_back(numUniqueTrees_zeroes);
+            m_pg0[reg][rep].resize(m_data[reg][rep]->treeSummaries.size());
+        }
+        m_lnProbGPo.push_back(numreps_zerovectors);
+        m_C.push_back(numreps_zerovectors);
+        m_lnPoint.push_back(numreps_zerovectors);
+        m_lnWait.push_back(numreps_zerovectors);
+        m_K_alpha_minus_nevents.push_back(numreps_zerovectors);
+        m_K_alpha_minus_nevents_plus_1.push_back(numreps_zerovectors);
+        m_nevents_G.push_back(numreps_zerovectors);
+    }
+
+    // Note:  m_sumG_C (dimension: regions) was created in the constructor.
+
+    // Set the lnPoint and lnWait iterators, in preparation for the
+    // calls to Calc_lnProbGP() in the next block.
+    m_lnPoint_reg_it = m_lnPoint.begin();
+    m_lnPoint_rep_it = m_lnPoint_reg_it->begin();
+    m_lnPoint_genealogy_it = m_lnPoint_rep_it->begin();
+    m_lnWait_reg_it = m_lnWait.begin();
+    m_lnWait_rep_it = m_lnWait_reg_it->begin();
+    m_lnWait_genealogy_it = m_lnWait_rep_it->begin();
+
+    double totalNumTreesThisRegion(0.0);
+
+    for (unsigned long reg = 0; reg < m_nRegions; reg++)
+    {
+        totalNumTreesThisRegion = 0.0;
+        for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+        {
+            totalNumTreesThisRegion +=
+                static_cast<double>(m_data[reg][rep]->TreeCount());
+            for (unsigned long G = 0;
+                 G < m_data[reg][rep]->treeSummaries.size(); G++)
+            {
+                m_lnProbGPo[reg][rep][G] = Calc_lnProbGP(m_param0[reg][rep],
+                                                         m_lparam0[reg][rep],
+                                                         m_data[reg][rep]->treeSummaries[G]);
+
+                // Store the total number of events from tree G.
+                Summary const *pSummary = m_data[reg][rep]->treeSummaries[G]->GetCoalSummary();
+                if (pSummary)
+                {
+                    // Add the total number of coalescences to m_events_G.
+                    const vector<double>& total_ncoal = pSummary->GetShortPoint();
+                    m_nevents_G[reg][rep][G] =
+                        accumulate(total_ncoal.begin(), total_ncoal.end(), 0.0);
+                }
+                else
+                {
+                    // No coalescence force found?!?
+                    string msg = "Internal error:  GammaRegionPostLike::Setup() ";
+                    msg += "could not find a CoalSummary object.";
+                    throw implementation_error(msg);
+                }
+                pSummary = m_data[reg][rep]->treeSummaries[G]->GetMigSummary();
+                if (pSummary)
+                {
+                    // Add the total number of immigrations to m_events_G.
+                    const vector<double>& total_nmig = pSummary->GetShortPoint();
+                    m_nevents_G[reg][rep][G] +=
+                        accumulate(total_nmig.begin(), total_nmig.end(), 0.0);
+                }
+                pSummary = m_data[reg][rep]->treeSummaries[G]->GetRecSummary();
+                if (pSummary)
+                {
+                    // Add the total number of recombinations to m_events_G.
+                    const vector<double>& total_nrec = pSummary->GetShortPoint();
+                    m_nevents_G[reg][rep][G] +=
+                        accumulate(total_nrec.begin(), total_nrec.end(), 0.0);
+                }
+
+                // BUGBUG WARNING:  If an event type other than coalescence,
+                // migration, or recombination is ever added to lamarc,
+                // it must be factored into the total number of events above.
+                // (Growth is not an event type; it gets applied to coalescent events.)
+
+                // now do the likelihood under each other replicate
+                m_pg0[reg][rep][G].resize(m_nReplicate);
+                for (unsigned long trrep = 0; trrep < m_nReplicate; ++trrep)
+                {
+                    m_pg0[reg][rep][G][trrep] =
+                        ((trrep == rep) ? m_lnProbGPo[reg][rep][G] :
+                         Calc_lnProbGP(m_param0[reg][trrep],m_lparam0[reg][trrep],
+                                       m_data[reg][rep]->treeSummaries[G]));
+                }
+            }
+        }
+        m_totalNumTrees[reg] = totalNumTreesThisRegion;
+    }
+    for(unsigned long reg = 0; reg < m_nRegions; ++reg)
+    {
+        TransformProbG0With(logGeyerWeights[reg], reg);
+    }
+}
+
+//------------------------------------------------------------------------------------
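+// A sketch of where the Bessel-function term in Calculate() comes from,
+// assuming the genealogy likelihood factors over the region's relative
+// mutation rate r as
+//
+//   Prob(G|r*P) = PointProb(G|P) * r^(-n) * exp( lnWait(G|P) / r ),
+//
+// where n is the total number of events in G.  Averaging over
+// r ~ Gamma(shape alpha, rate alpha), which has mean 1, and using
+//
+//   integral_0^inf x^(v-1) * exp(-a*x - b/x) dx = 2 * (b/a)^(v/2) * K_v(2*sqrt(a*b))
+//
+// with v = alpha - n, a = alpha, and b = -lnWait gives
+//
+//   Prob(G|P,alpha) = PointProb(G|P) * (2/Gamma(alpha)) * alpha^((alpha+n)/2)
+//                     * (-lnWait)^((alpha-n)/2) * K_(alpha-n)( 2*sqrt(-lnWait*alpha) ).
+//
+// The per-tree part, divided by the driving-value probability Prob(G|Po),
+// is assembled in log form as C below; the constant 2/Gamma(alpha) is added
+// once per region after the region loop.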
+
+bool GammaRegionPostLike::Calculate(const DoubleVec1d& param, const DoubleVec1d& lparam, double& lnL)
+{
+    // Set the lnPoint and lnWait iterators.
+    m_lnPoint_reg_it = m_lnPoint.begin();
+    m_lnPoint_rep_it = m_lnPoint_reg_it->begin();
+    m_lnPoint_genealogy_it = m_lnPoint_rep_it->begin();
+    m_lnWait_reg_it = m_lnWait.begin();
+    m_lnWait_rep_it = m_lnWait_reg_it->begin();
+    m_lnWait_genealogy_it = m_lnWait_rep_it->begin();
+
+    bool usingGeyer = (m_nReplicate > 1 ? true : false);
+    double alpha = param[param.size() - 1];
+    if (alpha <= 0.0)
+    {
+        string msg = "GammaRegionPostLike::Calculate():  Must have alpha > 0.  ";
+        msg += "Encountered alpha = " + ToString(alpha) + ".";
+        throw implementation_error(msg);
+    }
+    double logalpha = lparam[lparam.size() - 1]; // avoid redundant calls to log()
+    double total_lnL(0.0);
+    double C(0.0); // used to calculate lnL
+
+    for (unsigned long reg = 0; reg < m_nRegions; reg++)
+    {
+        // Scale the parameter vector, e.g., to account for inherent
+        // differences between nuclear DNA and Y-chromosome DNA and mtDNA.
+        // Note:  The scaled parameter vector does NOT contain alpha, by design.
+        DoubleVec1d scaledparam(m_paramscalars[reg].size()),
+            scaledlparam(m_lparamscalars[reg].size());
+        transform(param.begin(), param.end(), m_paramscalars[reg].begin(),
+                  scaledparam.begin(), multiplies<double>());
+        transform(lparam.begin(), lparam.end(), m_lparamscalars[reg].begin(),
+                  scaledlparam.begin(), plus<double>());
+        m_sumG_C[reg] = 0.0;
+
+        for (unsigned long rep = 0; rep < m_nReplicate; rep++)
+        {
+            for (unsigned long G = 0; // "G" means "genealogy tree"
+                 G < m_data[reg][rep]->treeSummaries.size(); G++)
+            {
+                // Calc_lnProbGP() updates m_lnPoint and m_lnWait.
+                // We use these separately; we don't need
+                // the return value of log(Prob(G|P)) here.
+                PostLike::Calc_lnProbGP(scaledparam, scaledlparam,
+                                        m_data[reg][rep]->treeSummaries[G]);
+                // Compute log(C), then exp() it, to avoid under/overflow.
+                C = m_lnPoint[reg][rep][G];
+                C -= m_lnProbGPo[reg][rep][G]; // may include Geyer weights
+                C += 0.5 * (alpha + m_nevents_G[reg][rep][G]) * logalpha;
+                C += 0.5 * (alpha - m_nevents_G[reg][rep][G])
+                    * log(-m_lnWait[reg][rep][G]); // -m_lnWait > 0
+                // Calculate K(alpha-nevents, 2*sqrt(-m_lnWait*alpha))
+                // and K(alpha-nevents+1, 2*sqrt(-m_lnWait*alpha)).
+                // Store these for use by the gradient calculations.
+                m_K_alpha_minus_nevents[reg][rep][G] =
+                    BesselK(alpha - m_nevents_G[reg][rep][G],
+                            2.0*sqrt(-m_lnWait[reg][rep][G]*alpha),
+                            m_K_alpha_minus_nevents_plus_1[reg][rep][G]);
+                // BesselK(v,z) >= 0 for all v,z.  Approaches 0 as z gets large.
+                if (m_K_alpha_minus_nevents[reg][rep][G] > 0.0)
+                {
+                    C += log(m_K_alpha_minus_nevents[reg][rep][G]);
+                    m_C[reg][rep][G] = SafeProductWithExp(1.0, C);
+                }
+                else
+                    m_C[reg][rep][G] = 0.0; // because m_C is proportional to BesselK(v,z)
+                m_sumG_C[reg] += m_C[reg][rep][G] * m_data[reg][rep]->treeSummaries[G]->GetNCopies();
+            }
+        }
+        if (0.0 == m_sumG_C[reg])
+        {
+#ifdef TEST
+            cerr << "GammaRegionPostLike::Calculate():  sum over reps and trees "
+                 << "of region " << reg << " is zero; this must be a positive "
+                 << "number.  Returning false and lnL = -DBL_BIG....";
+#endif // TEST
+            m_last_lnL = lnL = -DBL_BIG;
+            return false;
+        }
+        if (!usingGeyer)
+            total_lnL += log(m_sumG_C[reg]/m_totalNumTrees[reg]);
+        else
+            total_lnL += log(m_sumG_C[reg]);
+    }
+
+    total_lnL += (LOG2 - log_gamma(alpha))*m_nRegions;
+    m_last_lnL = lnL = total_lnL;
+    return true;
+}
+
+//____________________________________________________________________________________
diff --git a/src/postlike/likelihood.h b/src/postlike/likelihood.h
new file mode 100644
index 0000000..816464d
--- /dev/null
+++ b/src/postlike/likelihood.h
@@ -0,0 +1,315 @@
+// $Id: likelihood.h,v 1.59 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// likelihood class --------------------------------------------------
+//
+// PostLike
+//     --> SinglePostLike      [single region, single replicate]
+//     --> ReplicatePostLike   [single region, multiple replicate]
+//     --> RegionPostLike      [multiple region, multiple replicate]
+//     --> GammaRegionPostLike [mult. reg., mult. rep., with background
+//                              mutation rates gamma-distributed among regions]
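+//
+// Roughly, the subclass matches the analysis configuration (a sketch; the
+// actual choice is made by the code that constructs the PostLike):
+//   1 region,  1 replicate                       -> SinglePostLike
+//   1 region,  >1 replicates                     -> ReplicatePostLike
+//   >1 regions                                   -> RegionPostLike
+//   >1 regions, gamma-distributed mutation rates -> GammaRegionPostLike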
+
+#ifndef LIKELIHOOD_H
+#define LIKELIHOOD_H
+
+#include <functional>
+#include <vector>
+#include "vectorx.h"
+#include "defaults.h"
+#include "paramstat.h"
+
+// This include is now necessary to handle the evil kludge for stick
+// rearrangement; otherwise the commented-out forward declaration below
+// would suffice.
+#include "plforces.h"
+// class PLForces
+
+class ForceParameters;
+class Maximizer;
+class TreeSummary;
+class TreeCollector;
+class GammaRegionPostLike;
+class ForceSummary;
+
+class PostLike
+{
+  protected:
+    unsigned long m_nRegions;
+    unsigned long m_nReplicate;
+    unsigned long m_nParam;
+    unsigned long m_nForces;
+    GammaRegionPostLike *m_pGammaRegionPostLike;
+    vector <force_type> m_forcestags;
+    vector <ParamStatus> m_default_pstatusguides; // set by Maximizer, used in derivatives
+    vector <ParamStatus> m_working_pstatusguides; // ditto
+
+    // PLforces objects
+    // non-owning pointers (objects are owned by Force class)
+    vector < PLForces * >m_forces;
+
+    LongVec1d m_parameter_types; //specifies to which force a parameter belongs
+    // is a simple offset list from forces.begin()
+    // parameter_type is filled in FillForces()
+
+    long m_growthstart;
+    long m_growthend;
+    long m_s_is_here;
+
+    vector<double> m_minvalues;
+    vector<double> m_maxvalues;
+
+    long FindForce(long whichparam) { return m_parameter_types[whichparam]; };
+
+    bool Calc_sumG_ProbGP_over_ProbGPo(const DoubleVec1d& param,
+                                       const DoubleVec1d& lparam,
+                                       const DoubleVec1d& ln_ProbGPo,
+                                       DoubleVec1d& ln_ProbGP_over_ProbGPo,
+                                       double& sumG_ProbGP_over_ProbGPo,
+                                       const vector<TreeSummary*> *data);
+
+    double DCalc_sumG_BasicNumerator(const vector<double>& param,
+                                     const vector<double>& ln_ProbGP_over_ProbGPo,
+                                     const vector<TreeSummary*> *pGenealogies,
+                                     const long& whichparam);
+
+    // Changes the many-vector parameter list into a 1-D vector.
+    void Linearize(const ForceParameters * pForceParameters,
+                   DoubleVec1d & param0);
+
+    virtual likelihoodtype GetTag() = 0; // enum
+
+  public:
+    PostLike(const ForceSummary &thisforces,
+             const long thisNregion,
+             const long thisNreplicate, const long thisNParam);
+    virtual ~PostLike (void);
+
+    // Sets up data connection
+    virtual long GetNparam(void)
+    {
+        return m_nParam;
+    };
+    virtual DoubleVec1d GetMeanParam0() = 0;
+    virtual void SetParam0() = 0;
+    bool isLinearParam(long whichparam);
+    bool isLogisticSelection(long whichparam) { return 0 != m_s_is_here && whichparam == m_s_is_here; };
+
+    friend class Maximizer; // for speed, maximizer writes to postlike's gradient guide
+
+    // Interface for calculating the log-likelihood.
+    virtual bool Calculate(const DoubleVec1d& param,
+                           const DoubleVec1d& lparam,
+                           double& lnL) = 0;
+
+    // Calculates the log of the probability of a genealogy given the parameters,
+    // i.e., log(Prob(G|P)).
+    // This method is public for use by Bayesian estimation.
+    double Calc_lnProbGP(const DoubleVec1d& param,
+                         const DoubleVec1d& lparam,
+                         const TreeSummary *treedata);
+
+    // calculates derivatives
+    virtual bool DCalculate(const DoubleVec1d& param,
+                            DoubleVec1d& gradient) = 0;
+
+    // calculates the log of the probability of a genealogy given a
+    // stick.
+    // This method is public for use by StairArranger.
+    double Calc_lnProbGS(const DoubleVec1d& param,
+                         const DoubleVec1d& lparam,
+                         const TreeSummary* treedata);
+
+    const DoubleVec1d& GetMinValues() const { return m_minvalues; };
+    const DoubleVec1d& GetMaxValues() const { return m_maxvalues; };
+
+    // evil kludge to give stair rearrangement access to the Mean and
+    // Variance
+    const StickSelectPL& GetStickSelectPL() const;
+
+  private:
+    // fills the forces classes into the forces vector
+    void FillForces (const ForceSummary&thisforces);
+    PostLike();
+};
+
+//------------------------------------------------------------------------------------
+
+class SinglePostLike : public PostLike
+{
+  private:
+    TreeCollector *m_data;  // main hook into the treesummary data
+    DoubleVec1d m_param0;
+    DoubleVec1d m_lparam0;
+    DoubleVec1d m_lnProbGPo;      //this remains constant through maximization
+    DoubleVec1d m_ln_ProbGP_over_ProbGPo; //this does NOT! It is constant for a
+    // specific parameter.
+    // "G" stands for "genealogy."
+    double m_last_lnL;
+    double m_totalNumTrees;
+    SinglePostLike();
+
+  protected:
+    virtual likelihoodtype GetTag() { return ltype_ssingle; }; // enum
+
+  public:
+    SinglePostLike(const ForceSummary &thisforces,
+                   const long thisNregion,
+                   const long thisNreplicate, const long thisNParam);
+    ~SinglePostLike();
+    void SetParam0();
+    DoubleVec1d GetParam0() { return m_param0; };
+    virtual void Setup(TreeCollector * treedata);
+    bool Calculate(const DoubleVec1d& param,
+                   const DoubleVec1d& lparam,
+                   double& lnL);
+    bool DCalculate(const DoubleVec1d& param,
+                    DoubleVec1d& gradient);
+    DoubleVec1d GetMeanParam0 ();
+};
+
+//------------------------------------------------------------------------------------
+
+class ReplicatePostLike : public PostLike
+{
+  private:
+    DoubleVec2d m_param0;
+    DoubleVec2d m_lparam0;
+    vector<TreeCollector*> m_data;
+    DoubleVec2d m_lnProbGPo;
+    DoubleVec2d m_ln_ProbGP_over_ProbGPo;
+    double m_last_lnL;
+    DoubleVec3d m_pg0; // rep X tree X rep, filled in by Setup(),
+    // used by FillGeyerWeights
+    DoubleVec1d m_logGeyerWeights;
+
+    bool FillGeyerWeights();
+    bool GeyerLike(const DoubleVec1d& param, const DoubleVec1d& lparam,
+                   double &newlike, const DoubleVec1d& oldlike);
+    bool GeyerStart(DoubleVec1d &lnL);
+    void TransformProbG0With(const DoubleVec1d& weights);
+    ReplicatePostLike();
+
+  protected:
+    virtual likelihoodtype GetTag() { return ltype_replicate; }; // enum
+
+  public:
+    ReplicatePostLike(const ForceSummary &thisforces,
+                      const long thisNregion,
+                      const long thisNreplicate, const long thisNParam);
+    //~ReplicatePostLike() {};
+    void SetParam0();
+    void CreateProbG0();
+    virtual bool Setup(const vector<TreeCollector*>& treedata);
+    bool Calculate(const DoubleVec1d& param,
+                   const DoubleVec1d& lparam,
+                   double& lnL);
+    bool DCalculate(const DoubleVec1d& param,
+                    DoubleVec1d& gradient);
+    DoubleVec1d GetMeanParam0();
+    DoubleVec1d GetGeyerWeights() {return m_logGeyerWeights;};
+};
+
+//------------------------------------------------------------------------------------
+
+class RegionPostLike : public PostLike
+{
+  protected:
+    DoubleVec3d m_param0; // the "driving values" for each rep and reg
+    DoubleVec3d m_lparam0; // the natural logarithms of the above
+    vector<vector<TreeCollector *> > m_data; // the trees for each rep and reg
+    DoubleVec3d m_lnProbGPo; // Dimensions: regions x reps x numtrees.  May include Geyer.
+    DoubleVec3d m_ln_ProbGP_over_ProbGPo;  // Same dimensions as Prob(G|Po).
+    DoubleVec1d m_sumG_ProbGP_over_ProbGPo; // One value per region.  May include Geyer.
+    DoubleVec2d m_paramscalars; // Scale the eff. pop. size when we have nuc DNA and mtDNA, etc.
+    DoubleVec2d m_lparamscalars; // Natural logarithms of the above scaling factors.
+    DoubleVec1d m_totalNumTrees; // One value per region.
+    double m_last_lnL; // Store the total lnL for later use.
+    DoubleVec4d m_pg0; // reg X rep X tree X rep, filled in by Setup
+    // used by FillGeyerWeights
+
+    void TransformProbG0With(const DoubleVec1d& weights, long region);
+
+    virtual likelihoodtype GetTag() { return ltype_region; }; // enum
+
+  public:
+    RegionPostLike(const ForceSummary &thisforces,
+                   const long thisNregion,
+                   const long thisNreplicate, const long thisNParam,
+                   DoubleVec2d paramscalars);
+    //~RegionPostLike() {};
+    void SetParam0();
+    virtual void Setup(const vector<vector<TreeCollector*> >& treedata,
+                       const DoubleVec2d& logGeyerWeights);
+    bool Calculate(const DoubleVec1d& param,
+                   const DoubleVec1d& lparam,
+                   double& lnL);
+    bool DCalculate(const DoubleVec1d& param,
+                    DoubleVec1d& gradient);
+    DoubleVec1d GetMeanParam0();
+
+  private:
+    RegionPostLike();
+};
+
+//------------------------------------------------------------------------------------
+
+class GammaRegionPostLike : public RegionPostLike
+{
+  protected:
+    // Note:  All of the following variables of type DoubleVec3d
+    // have the dimensions of regions x reps x numtrees.
+    DoubleVec3d m_nevents_G; // The total number of events in tree G.
+    DoubleVec3d m_lnPoint; // log(PointProb(G|P)).
+    DoubleVec3d m_lnWait;  // log(WaitProb(G|P)).
+    DoubleVec3d m_C;      // NOTE:  This takes the place of m_ln_ProbGP_over_ProbGPo.
+    DoubleVec1d m_sumG_C; // NOTE:  This takes the place of m_sumG_ProbGP_over_ProbGPo.
+
+    DoubleVec3d m_K_alpha_minus_nevents;
+    // K(alpha - nevents, 2.0*sqrt(-alpha*m_lnWait)),
+    // where K(v,x) is the modified Bessel fn. of the 2nd kind
+    // of order v evaluated at x.
+    DoubleVec3d m_K_alpha_minus_nevents_plus_1;
+    // K(alpha - nevents + 1, 2.0*sqrt(-alpha*m_lnWait)).
+
+    virtual likelihoodtype GetTag() { return ltype_gammaregion; }; // enum
+
+    // Iterators over m_lnPoint and m_lnWait.
+    vector<vector<vector<double> > >::iterator m_lnPoint_reg_it;
+    vector<vector<double> >::iterator m_lnPoint_rep_it;
+    vector<double>::iterator m_lnPoint_genealogy_it;
+    vector<vector<vector<double> > >::iterator m_lnWait_reg_it;
+    vector<vector<double> >::iterator m_lnWait_rep_it;
+    vector<double>::iterator m_lnWait_genealogy_it;
+
+  public:
+    GammaRegionPostLike(const ForceSummary &theseforces,
+                        const long thisNregion,
+                        const long thisNreplicate, const long thisNParam,
+                        DoubleVec2d paramscalars);
+    //~GammaRegionPostLike() {};
+
+    void Store_Current_lnPoint(const double& value);
+    void Store_Current_lnWait(const double& value);
+    virtual void Setup(const vector<vector<TreeCollector*> >& treedata,
+                       const DoubleVec2d& logGeyerWeights);
+    bool Calculate(const DoubleVec1d& param,
+                   const DoubleVec1d& lparam,
+                   double& lnL);
+    bool DCalculate(const DoubleVec1d& param,
+                    DoubleVec1d& gradient);
+
+  private:
+    GammaRegionPostLike();
+};
+
+#endif // LIKELIHOOD_H
+
+//____________________________________________________________________________________
diff --git a/src/postlike/maximizer.cpp b/src/postlike/maximizer.cpp
new file mode 100644
index 0000000..5c4555e
--- /dev/null
+++ b/src/postlike/maximizer.cpp
@@ -0,0 +1,2143 @@
+// $Id: maximizer.cpp,v 1.119 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// Broyden-Fletcher-Goldfarb-Shanno maximizer implementation
+//
+// started 2000 Peter Beerli
+// Major changes devised and implemented in 2004 by Eric Rynes.
+// A customized method of steepest ascents, which probes artificially large
+// steps if necessary, has been added for conditional use.
+//
+
+#include <cassert>
+#include <cmath>
+#include <iostream>
+
+#include "vectorx.h"
+#include "maximizer.h"
+#include "maximizer_strings.h"
+#include "likelihood.h"
+#include "mathx.h"                      // for system-specific isnan support
+#include "parameter.h"
+#include "registry.h"
+#include "runreport.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+#ifndef NDEBUG
+#define TEST //This will trigger Eric's debug statements, below.
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+const unsigned long int NTRIALS = 1000UL;
+//const double LAMBDA_EPSILON = 10.0 * DBL_EPSILON;
+//const double LAMBDA_EPSILON = 1.0e-50; // BUGBUG erynes--needs further investigation!
+const double LAMBDA_EPSILON = DBL_EPSILON;  // EWFIX -- the above is way too small
+const unsigned long int MAX_NUM_LONG_PUSHES = 8UL;
+const double NORM_EPSILON = 0.0001;
+const double lnL_EPSILON = 0.005;
+
+#ifdef TEST
+long int nMatrixResets = 0;
+long int nLambdaSmall = 0;
+long int n_dtg_zero = 0;
+unsigned long int nFunc(0UL), nDFunc(0UL); // nIter is now used in all cases
+#endif // TEST
+unsigned long int nIter(0UL); // declared here for use by steepest ascents _and_ BFGS
+
+//------------------------------------------------------------------------------------
+// maximizer class -------------------------------------------------
+//
+// maximizer initialization
+
+Maximizer::Maximizer (long int thisNparam)
+{
+    Initialize(thisNparam);
+}
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::Initialize(long int thisNparam)
+{
+    m_pPostLike = NULL;
+    m_nparam = (unsigned long int)thisNparam;
+    double zero = 0.0;
+    m_param = CreateVec1d (m_nparam, zero);       // parameters to maximize
+    m_lparam = CreateVec1d (m_nparam, zero);      // parameters to maximize
+    m_oldlparam = CreateVec1d (m_nparam, zero);   // old log parameters
+    m_gradient = CreateVec1d (m_nparam, zero);    // first derivatives
+    m_oldgradient = CreateVec1d (m_nparam, zero); // old first derivatives
+    m_paramdelta = CreateVec1d (m_nparam, zero);  // parameter difference
+    m_gradientdelta = CreateVec1d (m_nparam, zero);// first derivative difference
+    m_direction = CreateVec1d (m_nparam, zero);   // direction to jump uphill
+    m_second = CreateVec2d (m_nparam, m_nparam, zero);// approx second derivative
+
+    m_newparam = CreateVec1d (m_nparam, zero);        // temporary storage
+    m_newlparam = CreateVec1d (m_nparam, zero);       // for speeding up
+    m_temp = CreateVec1d (m_nparam, zero);
+    m_dd = CreateVec2d (m_nparam, m_nparam, zero);
+
+    m_isLinearParam = CreateVec1d(m_nparam, 0UL);
+    // m_dataHasGrowth = false;
+    m_dataHasLinearParam = false;
+    m_dataHasGamma = false;
+    m_constraintsVectorHasSlotForAlpha = false;
+    m_maxAllowedValueForAlpha = 0.0;
+
+    m_lastnormg = 0.0;
+
+    m_constrained = false;
+    // Keep m_constraints empty.  Fill it only using SetConstraints.
+    if (!m_constraints.empty())
+    {
+        for (long int i = m_constraints.size() - 1; i > 0; i--)
+            m_constraints[i].clear();
+        m_constraints.clear();
+    }
+    m_constraintratio.clear(); // also fill only using SetConstraints.
+}
+
+//------------------------------------------------------------------------------------
+
+Maximizer::~Maximizer ()
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::SetLikelihood (PostLike * thispostlike)
+{
+    m_pPostLike = thispostlike;
+    m_minparamvalues = m_pPostLike->GetMinValues();
+    m_maxparamvalues = m_pPostLike->GetMaxValues();
+    m_minlparamvalues.clear();
+    m_maxlparamvalues.clear();
+
+    // Reset our variables.
+    fill(m_isLinearParam.begin(), m_isLinearParam.end(), 0UL);
+    m_dataHasLinearParam = false;
+
+    // Record which parameters are to be treated linearly.
+    for (unsigned long int i = 0; i < m_minparamvalues.size(); i++)
+    {
+        if (m_pPostLike->isLinearParam(i))
+        {
+            m_isLinearParam[i] = 1UL;
+            m_dataHasLinearParam = true;
+            m_minlparamvalues.push_back(m_minparamvalues[i]);
+            m_maxlparamvalues.push_back(m_maxparamvalues[i]);
+        }
+        else
+        {
+            m_minlparamvalues.push_back(log(m_minparamvalues[i]));
+            m_maxlparamvalues.push_back(log(m_maxparamvalues[i]));
+        }
+    }
+
+    m_dataHasGamma = (dynamic_cast<GammaRegionPostLike*>(m_pPostLike)) ? true : false;
+    if (m_dataHasGamma)
+    {
+        const RegionGammaInfo *pRGI = registry.GetRegionGammaInfo();
+        if (!pRGI)
+        {
+            string msg = "Maximizer::SetLikelihood(), received a GammaRegionPostLike ";
+            msg += "object, but failed to find a RegionGammaInfo object in the registry.";
+            throw implementation_error(msg);
+        }
+        m_maxAllowedValueForAlpha = pRGI->GetMaxValue();
+        if (m_maxparamvalues.size() != m_nparam)
+        {
+            m_maxparamvalues.push_back(m_maxAllowedValueForAlpha);
+            m_minparamvalues.push_back(pRGI->GetMinValue());
+            if (m_pPostLike->isLinearParam(m_nparam-1))
+            {
+                m_maxlparamvalues.push_back(m_maxparamvalues[m_maxparamvalues.size()-1]);
+                m_minlparamvalues.push_back(m_minparamvalues[m_minparamvalues.size()-1]);
+            }
+            else
+            {
+                m_maxlparamvalues.push_back(log(m_maxparamvalues[m_maxparamvalues.size()-1]));
+                m_minlparamvalues.push_back(log(m_minparamvalues[m_minparamvalues.size()-1]));
+            }
+        }
+    }
+
+    if (m_minparamvalues.size() != m_nparam)
+    {
+        string msg = "Maximizer::SetLikelihood(), vector m_minparamvalues ";
+        msg += "contains values for " + ToString(m_minparamvalues.size());
+        msg += " parameters; it should contain values for " + ToString(m_nparam);
+        msg += " parameters, which is the value found in m_nparam.";
+        throw implementation_error(msg);
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Find the optimum using the Broyden-Fletcher-Goldfarb-Shanno mechanism
+// (or customized steepest ascent if a linearly treated parameter or the
+// regional gamma model is present).
+// Returns log(likelihood) and replaces param with the resulting parameters.
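+//
+// For reference, a sketch of the quasi-Newton scheme CalculateBroyden() is
+// named after (the implementation below may differ in its details):
+//
+//   direction = H * gradient                    (H: approximate inverse Hessian)
+//   lparam'   = lparam + lambda * direction     (lambda found by line search)
+//   s = lparam' - lparam,   y = gradient' - gradient
+//   H <- H + (1 + y.Hy/(y.s)) * (s s^T)/(y.s) - (s (Hy)^T + (Hy) s^T)/(y.s)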
+
+bool Maximizer::Calculate(vector<double>& thisparam, double& lnL, string& message)
+{
+    message = "";
+
+    copy(thisparam.begin(), thisparam.end(), m_param.begin());
+    LogVec0(m_param, m_lparam); // store log(params), 0 for neg. params
+    bool calc_succeeded = true;
+
+    nIter = 0UL;
+#ifdef TEST
+    nMatrixResets = 0;
+    nLambdaSmall = 0;
+    n_dtg_zero = 0;
+    nFunc = 0UL;
+    nDFunc = 0UL;
+#endif // TEST
+
+    double loglikelihood = 0.0;
+
+    //if (m_dataHasGrowth || m_dataHasGamma) {
+    if (m_dataHasLinearParam || m_dataHasGamma)
+    {
+        //loglikelihood = CalculateByParts();  //An LS debug test thingy.
+        try
+        {
+            calc_succeeded = CalculateSteepest(loglikelihood, message);
+        }
+        catch (const insufficient_variability_over_regions_error& e)
+        {
+            lnL = -DBL_BIG;
+            copy(m_param.begin(), m_param.end(), thisparam.begin());
+            message = maxstr::MAX_HIGH_ALPHA_0;
+            return false;
+        }
+
+    }
+    else
+    {
+        calc_succeeded = CalculateBroyden(NTRIALS, loglikelihood, message);
+    }
+
+    if (!calc_succeeded)
+    {
+        lnL = loglikelihood;
+        return false;
+    }
+
+    copy(m_param.begin(), m_param.end(), thisparam.begin());
+    lnL = loglikelihood;
+    return true;
+
+} // Calculate
+
+//------------------------------------------------------------------------------------
+// SetParam sets vector m_newlparam = m_lparam + lambda*direction.
+// "direction" is m_direction for BFGS, m_gradient for steepest ascents.
+// For parameters such as growth, which are treated linearly,
+// applying this step to m_newlparam is identical to applying it to m_newparam.
+// erynes Sept. 2004:  Put the calculation code here in the maximizer,
+// removed it from the plforces and likelihood classes.
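+//
+// For example, a logarithmically treated parameter currently at 0.01 that
+// receives a step of lambda*direction = 0.1 in log space moves to
+// 0.01 * exp(0.1), roughly 0.01105, whereas a linearly treated parameter
+// would simply move by 0.1.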
+
+double Maximizer::SetParam(const double& lambda, const DoubleVec1d& direction)
+{
+    unsigned long int i;
+    double biggestChange = 0.0;
+
+    for (i = 0; i < m_nparam; i++)
+    {
+        if (m_isLinearParam[i])
+        {
+            m_newparam[i] = m_param[i] + lambda * direction[i];
+
+            double change = m_newparam[i] - m_param[i];
+            change = fabs(change);
+            if(change > biggestChange) biggestChange = change;
+
+            AdjustExtremeLinearParam(i);
+            m_newlparam[i] = m_newparam[i];
+        }
+        else if (m_param[i] > 0.0) // param is treated logarithmically
+        {
+            m_newlparam[i] = m_lparam[i] + lambda * direction[i];
+
+            double change = m_newlparam[i] - m_lparam[i];
+            change = fabs(change);
+            if(change > biggestChange) biggestChange = change;
+
+            AdjustExtremeLogParam(i);
+            m_newparam[i] = exp(m_newlparam[i]);
+        }
+    }
+    return biggestChange;
+
+} // Maximizer::SetParam
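+
+// In equation form, the step applied by SetParam() is
+//     linear parameter:        p_new     = p     + lambda * direction
+//     log-treated parameter:   ln(p_new) = ln(p) + lambda * direction,
+// i.e. p_new = p * exp(lambda * direction), so a step in log space is a
+// multiplicative step in the original parameter space.  In both cases the
+// result is then clipped to the allowed range by AdjustExtremeLinearParam()
+// or AdjustExtremeLogParam().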
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::CoutNewParams() const // used for debugging
+{
+    unsigned long int i;
+    for (i = 0; i < m_nparam; i++)
+    {
+        cout << setw(10) << m_newparam[i] << " ";
+    }
+    cout << ":: ";
+    for (i = 0; i < m_nparam; i++)
+    {
+        if (m_isLinearParam[i])
+        {
+            cout << setw(10) << "--" << " ";
+        }
+        else
+        {
+            cout << setw(10) << m_newlparam[i] << " ";
+        }
+    }
+    cout << endl;
+}
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::CoutCurParams() const // used for debugging
+{
+    unsigned long int i;
+    for (i = 0; i < m_nparam; i++)
+    {
+        cout << setw(10) << m_param[i] << " ";
+    }
+    cout << ":: ";
+    for (i = 0; i < m_nparam; i++)
+    {
+        if (m_isLinearParam[i])
+        {
+            cout << setw(10) << "--" << " ";
+        }
+        else
+        {
+            cout << setw(10) << m_lparam[i] << " ";
+        }
+    }
+    cout << endl;
+}
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::CoutByLinOrNoMult(double mult,const DoubleVec1d& d) const // used for debugging
+{
+    unsigned long int i;
+    for (i = 0; i < d.size(); i++)
+    {
+        if (!m_isLinearParam[i])
+        {
+            cout << setw(10) << "--" << " ";
+        }
+        else
+        {
+            cout << setw(10) << mult * d[i] << " ";
+        }
+    }
+    cout << ":: ";
+    for (i = 0; i < d.size(); i++)
+    {
+        if (m_isLinearParam[i])
+        {
+            cout << setw(10) << "--" << " ";
+        }
+        else
+        {
+            cout << setw(10) << mult * d[i] << " ";
+        }
+    }
+    cout << endl;
+}
+
+//------------------------------------------------------------------------------------
+// A one-dimensional version of SetParam(), for adding a step to only one
+// component of the parameter vector.  Added by erynes for use with
+// CalculateSteepest().  Modified by erynes early Sept. '04 to allow for
+// constrained parameters ("whichparam" will often be a one-element vector).
+
+bool Maximizer::SetParam1d(const double & steplength,
+                           const vector<unsigned long int> * pWhichparam)
+{
+    vector<unsigned long int>::const_iterator it;
+
+    if (m_isLinearParam[(*pWhichparam)[0]])
+    {
+        for (it = pWhichparam->begin(); it != pWhichparam->end(); it++)
+        {
+            m_newparam[*it]  = m_param[*it] + steplength;
+            if (m_newparam[*it] < m_minparamvalues[*it])
+            {
+#ifdef TEST
+                cerr << "Parameter \"out of bounds\"--setting param " << *it
+                     << " (and any parameters constrained to equal this) to "
+                     << m_minparamvalues[*it] << " instead of "
+                     << m_newparam[*it] << "." << endl;
+#endif // TEST
+                for (it = pWhichparam->begin();
+                     it != pWhichparam->end(); it++)
+                {
+                    m_newparam[*it] = m_minparamvalues[*it];
+                    m_newlparam[*it] = m_minlparamvalues[*it];
+                }
+                return false;
+            }
+            else if (m_newparam[*it] > m_maxparamvalues[*it])
+            {
+#ifdef TEST
+                cerr << "Parameter \"out of bounds\"--setting param " << *it
+                     << " (and any parameters constrained to equal this) to "
+                     << m_maxparamvalues[*it] << " instead of "
+                     << m_newparam[*it] << "." << endl;
+#endif // TEST
+                for (it = pWhichparam->begin();
+                     it != pWhichparam->end(); it++)
+                {
+                    m_newparam[*it] = m_maxparamvalues[*it];
+                    m_newlparam[*it] = m_maxlparamvalues[*it];
+                }
+                return false;
+            }
+            m_newlparam[*it] = m_newparam[*it];
+        }
+        return true;
+    }
+
+    // Else the parameter(s) is/are treated logarithmically.
+    // No worries; we're guaranteed not to get linear/log mixing within this function.
+
+    for (it = pWhichparam->begin(); it != pWhichparam->end(); it++)
+    {
+        m_newlparam[*it] = m_lparam[*it] + steplength;
+        if (m_newlparam[*it] < m_minlparamvalues[*it])
+        {
+#ifdef TEST
+            cerr << "Parameter \"out of bounds\"--setting param " << *it
+                 << " (and any parameters constrained to equal this) to "
+                 << m_minparamvalues[*it] << " instead of "
+                 << exp(m_newlparam[*it]) << "." << endl;
+#endif // TEST
+            for (it = pWhichparam->begin();
+                 it != pWhichparam->end(); it++)
+            {
+                m_newparam[*it] = m_minparamvalues[*it];
+                m_newlparam[*it] = m_minlparamvalues[*it];
+            }
+            return false;
+        }
+        else if (m_newlparam[*it] > m_maxlparamvalues[*it])
+        {
+#ifdef TEST
+            cerr << "Parameter \"out of bounds\"--setting param " << *it
+                 << " (and any parameters constrained to equal this) to "
+                 << m_maxparamvalues[*it] << " instead of "
+                 << exp(m_newlparam[*it]) << "." << endl;
+#endif // TEST
+            for (it = pWhichparam->begin();
+                 it != pWhichparam->end(); it++)
+            {
+                m_newparam[*it] = m_maxparamvalues[*it];
+                m_newlparam[*it] = m_maxlparamvalues[*it];
+            }
+            return false;
+        }
+        m_newparam[*it] = exp(m_newlparam[*it]);
+    }
+    return true;
+
+} // Maximizer::SetParam1d
+
+//------------------------------------------------------------------------------------
+// calculates the norm of the first derivative (or any other vector):
+// sqrt(sum(x^2))
+
+double Maximizer::Norm(const DoubleVec1d& g)
+{
+    return sqrt(inner_product(g.begin(), g.end(), g.begin(), 0.0));
+} // Maximizer::Norm
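+
+// Worked example:  for g = (3.0, 4.0), Norm(g) = sqrt(3*3 + 4*4) = 5.0.
+// Both CalculateSteepest() and CalculateBroyden() treat
+// Norm(m_gradient) <= NORM_EPSILON as "flat enough" to declare convergence.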
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+double Maximizer::GetLastNorm()
+{
+    return m_lastnormg;
+}
+
+//------------------------------------------------------------------------------------
+// Calculates the direction for the Broyden-Fletcher-Goldfarb-Shanno algorithm:
+// direction = second  .  gradient
+// erynes note:  This direction points to the maximum.
+// Textbooks applying BFGS to minimization set direction = -1*second . gradient.
+
+void Maximizer::CalcDirection ()
+{
+    DoubleVec1d::iterator dir;
+    DoubleVec2d::iterator h;
+    for (dir = m_direction.begin (), h = m_second.begin ();
+         dir != m_direction.end (); dir++, h++)
+        (*dir) =
+            inner_product ((*h).begin (), (*h).end (), m_gradient.begin (), 0.0);
+}
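+
+// Written out by components, the loop above computes the matrix-vector product
+//     m_direction[i] = sum_j m_second[i][j] * m_gradient[j],
+// i.e. direction = (approximate inverse Hessian) . gradient.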
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::ExplainParamChange(long index, double oldVal, double newVal, string kind)
+{
+#ifdef TEST
+    cerr << "Parameter \"out of bounds\"--setting param " << index
+         << " to " << newVal << " instead of "
+         << oldVal << "." << endl;
+#endif // TEST
+}
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::AdjustExtremeLinearParam(long index)
+{
+
+    if (m_newparam[index] < m_minparamvalues[index])
+    {
+        ExplainParamChange(index,m_newparam[index],m_minparamvalues[index],"min");
+        m_newparam[index] = m_minparamvalues[index];
+    }
+    else if (m_newparam[index] > m_maxparamvalues[index])
+    {
+        ExplainParamChange(index,m_newparam[index],m_maxparamvalues[index],"max");
+        m_newparam[index] = m_maxparamvalues[index];
+    }
+
+}
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::AdjustExtremeLogParam(long index)
+{
+
+    if (m_newlparam[index] < m_minlparamvalues[index])
+    {
+        ExplainParamChange(index,exp(m_newlparam[index]),m_minparamvalues[index],"min");
+        m_newlparam[index] = m_minlparamvalues[index];
+    }
+    else if (m_newlparam[index] > m_maxlparamvalues[index])
+    {
+        ExplainParamChange(index,exp(m_newlparam[index]),m_maxparamvalues[index],"max");
+        m_newlparam[index] = m_maxlparamvalues[index];
+    }
+
+}
+
+//------------------------------------------------------------------------------------
+// CalcSecond updates the approximate inverse second-derivative matrix.
+// erynes note:  This is the BFGS algorithm; it must produce symmetric,
+// positive-definite matrices.  It is guaranteed to do so, unless
+// finite-precision effects (roundoff errors) become significant.  Symmetric,
+// real-valued matrices are positive definite if and only if they have strictly
+// positive eigenvalues.  (A positive-definite matrix can contain negative
+// elements.)  The matrix produced here is identical to the one you'd produce if
+// you were looking for a minimum, rather than a maximum.  This is fine--
+// actually, this is great.  See comments for Calculate() and CalcDirection()
+// if you are curious about the question of maximizing vs. minimizing.
+
+void Maximizer::CalcSecond ()
+{
+    long int i, j, k;
+    long int n = m_paramdelta.size ();
+    double t, dtg;
+    DoubleVec2d::iterator h;
+    DoubleVec1d::iterator tt;
+
+    // m_paramdelta^T . m_gradientdelta
+    dtg = inner_product (m_paramdelta.begin (), m_paramdelta.end (),
+                         m_gradientdelta.begin (), 0.0);
+    if (dtg != 0)
+    {
+        dtg = 1.0 / dtg;
+    }
+    else
+    {
+#ifdef TEST
+        n_dtg_zero++;
+#endif // TEST
+        ResetSecond();
+        return;
+    }
+
+    // m_temp = m_gradientdelta^T . m_second
+    for (tt = m_temp.begin (), h = m_second.begin ();
+         tt != m_temp.end (); tt++, h++)
+        (*tt) = inner_product (m_gradientdelta.begin (), m_gradientdelta.end (),
+                               (*h).begin (), 0.0);
+    // t = m_temp . m_gradientdelta
+    t = inner_product (m_temp.begin (), m_temp.end (),
+                       m_gradientdelta.begin (), 0.0);
+    // t = (1 + (m_gradientdelta^T . m_second . m_gradientdelta) * dtg) * dtg
+    t = (1.0 + t * dtg) * dtg;
+    // m_paramdelta . m_gradientdelta^T . m_second
+    for (i = 0; i < n; i++)
+        for (j = 0; j < n; j++)
+        {
+            m_dd[i][j] = 0.0;
+            for (k = 0; k < n; k++)
+                m_dd[i][j] += m_paramdelta[i] * m_gradientdelta[k] * m_second[k][j];
+        }
+
+    for (i = 0; i < n; i++)
+        for (j = 0; j < n; j++)
+            m_second[i][j] += m_paramdelta[i] * m_paramdelta[j] * t
+                - (m_dd[i][j] + m_temp[i] * m_paramdelta[j]) * dtg;
+
+}
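+
+// For reference, writing d = m_paramdelta, y = m_gradientdelta, H = m_second,
+// and d'y for the inner product computed into dtg, the update above is the
+// standard BFGS inverse-Hessian formula
+//
+//     H_new = H + (1 + (y'H y)/(d'y)) * (d d')/(d'y) - (d y'H + H y d')/(d'y),
+//
+// where y'H is the row vector held in m_temp and d y'H is the matrix held in m_dd.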
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::ResetSecond ()
+{
+    unsigned long int i;
+    unsigned long int j;
+    unsigned long int nsecond = m_second.size (); // m_second is square
+    for (i = 0; i < nsecond; ++i)
+    {
+        m_second[i][i] = 1.0;
+        for (j = 0; j < i; ++j)
+        {
+            m_second[i][j] = 0.0;
+            m_second[j][i] = 0.0;
+        }
+    }
+#ifdef TEST
+    nMatrixResets++;
+#endif // TEST
+}
+
+//------------------------------------------------------------------------------------
+// Helper function for determining whether we have failed to bracket a maximum.
+// A local maximum (or minimum) of a differentiable (hence continuous) function
+// is bounded by an increasing slope on one side and a decreasing slope on the
+// other side.
+// This function receives 2 slopes and returns true if they have the same sign.
+// IMPORTANT NOTE #1.  The "new" slope is treated differently from the "old"
+// slope.  If the new slope is sufficiently flat, false is returned.
+// Parameter "tolerance" defines flatness.
+// IMPORTANT NOTE #2.  "tolerance" is assumed to be positive.  We avoid calling
+// fabs() for speed's sake.
+
+inline bool SlopesHaveSameSign(const double& oldgrad, const double& newgrad,
+                               const double& tolerance)
+{
+    if (newgrad > tolerance)
+        return oldgrad > 0.0;
+    if (newgrad < -tolerance)
+        return oldgrad < 0.0;
+    return false; // reached convergence on this component:
+    // -tolerance <= newgrad <= +tolerance
+}
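+
+// Example, with a tolerance of 1e-9:
+//     SlopesHaveSameSign( 2.0,  0.5,   1e-9)  returns true   (maximum still ahead)
+//     SlopesHaveSameSign( 2.0, -0.5,   1e-9)  returns false  (maximum bracketed)
+//     SlopesHaveSameSign( 2.0,  1e-12, 1e-9)  returns false  (new slope is "flat")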
+
+//------------------------------------------------------------------------------------
+// helper function--a simply-named wrapper for std::transform
+// calculates differences between two arrays
+// result = one - two
+
+void CalcDelta (const DoubleVec1d & one,
+                const DoubleVec1d & two, DoubleVec1d & result)
+{
+    transform (one.begin (), one.end (), two.begin (),
+               result.begin (), minus < double >());
+}
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::SetMLEs(DoubleVec1d newMLEs)
+{
+    m_param = newMLEs;
+    m_lparam = newMLEs;
+    LogVec0 (m_param, m_lparam);
+    for(unsigned long int i = 0; i < m_nparam; i++)
+    {
+        if (m_isLinearParam[i])
+        {
+            m_lparam[i] = m_param[i]; // possibly unnecessary...
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+bool Maximizer::CalculateSteepest(double& lnL, string& message)
+// Find the maximum lnL using a customized version of the method of steepest
+// ascents.  Use when growth is present; otherwise use CalculateBroyden().
+// See comments within the function body for details on the customization.
+//
+// Implemented by erynes in early 2004.
+//
+// Returns false (failure) and lnL = -DBL_BIG if:
+// 1. lnL cannot be calculated for the initial point Po
+// 2. A derivative points toward a higher lnL, but none is found there
+// 3. Along a coordinate axis, we bound the location of a peak, but fail
+//    to find it within this region (this is related to (2))
+// 4. We move many orders of magnitude along a coordinate axis
+//    and observe no change in the sign of the slope
+//
+// Otherwise returns true (success) and a valid lnL value.
+// Failure generally means there is something wrong with the trees;
+// this can be pinpointed by nimbly stepping through the plforces code.
+//
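+// In outline, the code below proceeds in two phases:
+//   1. Classic n-dimensional steepest ascent:  step along the full gradient
+//      with a halving line search (lambda = 1, 1/2, 1/4, ...), repeating while
+//      each accepted step both raises lnL and shrinks |gradient|.
+//   2. One-component-at-a-time search:  for each non-fixed parameter (or group
+//      of parameters constrained to be equal), bracket and locate the maximum
+//      along that axis with BracketTheMaximumAndFindIt(), accept the result,
+//      and recompute the full gradient, until |gradient| <= NORM_EPSILON or
+//      the iteration limit is reached.
+//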
+{
+    unsigned long int i;
+    double newlike(0.0), lambda(1.0), oldlike(0.0);
+
+    if (!m_pPostLike->Calculate(m_param, m_lparam, oldlike))
+    {
+#ifdef TEST
+        nFunc++;
+        cerr << "Initial parameter vector received by CalculateSteepest() "
+             << "is infinitely unlikely to reproduce the genealogies;"
+             << endl << "params = (" << m_param[0];
+        for (unsigned long int k = 1; k < m_nparam; k++)
+            cerr << ", " << m_param[k];
+        cerr << ").  Returning lnL = " << -DBL_BIG << "...." << endl;
+#endif // TEST
+        lnL = -DBL_BIG;
+        return false; // m_param is infinitely unlikely to reproduce
+        // the genealogies currently under consideration
+    }
+
+    // The first PostLike::Calculate call will always return oldlike == 0, because
+    // log(Prob(G|Po)/Prob(G|Po)) = log(1) = 0.0  (Well, this is true for SinglePostLike.
+    // It's not true when calculating likelihoods over multiple regions.)
+    // Each call to DCalculate must be preceded by a call to Calculate with the same
+    // parameter list, because Calculate sets up an intermediate variable needed by
+    // DCalculate (see comment in likelihood.cpp).
+    DCalculate(m_param, m_gradient); // result into m_gradient
+#ifdef TEST
+    nDFunc++;
+#endif // TEST
+
+    double normg = Norm(m_gradient), old_normg = DBL_BIG;
+    bool calc_succeeded = true;
+
+#ifdef TEST
+    cerr << "Begin; gradient = (" << m_gradient[0];
+    for (i = 1; i < m_gradient.size(); ++i)
+        cerr << ", " << m_gradient[i];
+    cerr << ")" << endl << "params = (" << m_param[0];
+    for (i = 1; i < m_param.size(); ++i)
+        cerr << ", " << m_param[i];
+    cerr << ")" << endl << "normg = " << normg << ", and lnL = " << oldlike << endl;
+#endif
+
+    // Apply the standard, classic method of n-dimensional steepest ascents
+    // while our steps take us to points that are successively higher and flatter.
+    // Quit when we get less flat, or if our step size has gotten tiny.
+    // This approach seems to efficiently get us onto the ridge that usually
+    // appears in the likelihood surfaces of growing populations.
+    // Once we're atop the ridge, a customized algorithm works better for us.
+
+    while (normg > NORM_EPSILON && normg < old_normg
+           && old_normg - normg > 0.1*NORM_EPSILON)
+    {
+        // Iterate over the first few ascent directions.
+        lambda = 2.0; // reset; will be reduced to 1 before lambda gets used
+        double thisChange(DBL_MAX);
+        do {
+            // "Line search" -- find a higher lnL along this ascent direction
+            lambda /= 2.0;    // set the step length
+            thisChange = SetParam(lambda, m_gradient); // probe this step in this direction
+            calc_succeeded = m_pPostLike->Calculate (m_newparam, m_newlparam,
+                                                     newlike);
+#ifdef TEST
+            nFunc++;
+#endif // TEST
+        } while ((!calc_succeeded || newlike <= oldlike ||
+                  systemSpecificIsnan(newlike)) && lambda > LAMBDA_EPSILON
+                 && thisChange > LAMBDA_EPSILON
+            );
+
+        if (lambda <= LAMBDA_EPSILON ||  thisChange <= LAMBDA_EPSILON)
+        {
+            // Something is wrong.  We started with a point, perhaps the initial
+            // point Po that was passed to Calculate(), and successfully
+            // calculated lnL there.  We calculated the gradient there; if there
+            // was a problem with it, the problem might be manifesting itself
+            // here.  Otherwise, the gradient pointed us toward a higher lnL, and
+            // we probed steplengths along this direction of smaller and smaller
+            // lengths until we wound up basically back at the starting point.
+            // There was no higher lnL to be found, contrary to what the gradient
+            // claimed.
+            unsigned long int k;
+            string msg = maxstr::MAX_BAD_LNL_0;
+            msg += Pretty(oldlike) + maxstr::MAX_BAD_LNL_1;
+            msg += Pretty(m_param[0]);
+            for (k = 1; k < m_nparam; k++)
+                msg += ", " + Pretty(m_param[k]);
+            if (!calc_succeeded)
+            {
+                // Not only was there no higher lnL to be found--we were unable to
+                // calculate lnL at a point in this guaranteed ascent direction,
+                // extremely close to the point where we _did_ calculate lnL!
+                msg += maxstr::MAX_BAD_LNL_2A1;
+                msg += Pretty(lambda) + maxstr::MAX_BAD_LNL_2A2;
+                message = msg;
+                lnL = -DBL_BIG;
+                return false;
+            }
+            msg += maxstr::MAX_BAD_LNL_2B;
+            message = msg;
+#ifdef TEST
+            cerr << "Maximizer:  Initial loop, lambda = " << lambda
+                 << ", giving up with normg = " << normg << ", lnL = " << oldlike
+                 << ", after " << nIter << " iterations." << endl
+                 << "            gradient = (" << m_gradient[0];
+            for (i = 1; i < m_nparam; i++)
+                cerr << ", " << m_gradient[i];
+            cerr << ")" << endl;
+#endif
+            lnL = -DBL_BIG;
+            return false;
+        }
+
+        // accept the step that we just tested
+        m_param = m_newparam;
+        m_lparam = m_newlparam;
+        DCalculate(m_param, m_gradient);
+        // The call to PostLike::Calculate, which needs to precede every call to
+        // DCalculate, was performed at beginning of the while loop, above.
+        oldlike = newlike;
+        old_normg = normg;
+        normg = Norm(m_gradient);
+#ifdef TEST
+        nDFunc++;
+#endif
+        nIter++;
+    } // end of standard method-of-steepest-ascents loop
+
+    if (normg <= NORM_EPSILON)
+    {
+        // Found the maximum, according to our convergence criterion.
+#ifdef TEST
+        cerr << "Finished before entering custom code; gradient = ("
+             << m_gradient[0];
+        for (i = 1; i < m_gradient.size(); ++i)
+            cerr << ", " << m_gradient[i];
+        cerr << ")" << endl << "params = (" << m_param[0];
+        for (i = 1; i < m_param.size(); ++i)
+            cerr << ", " << m_param[i];
+        cerr << ")" << endl << "|g| = " << normg << ", and lnL = " << oldlike
+             << ", after " << nIter << " iterations, " << nFunc
+             << " lnL function evaluations, and " << nDFunc
+             << " derivative evaluations." << endl << endl;
+#endif
+        m_lastnormg = normg;
+        lnL = oldlike;
+        return true;
+    }
+
+#ifdef TEST
+    cerr << "Beginning one-at-a-time, custom code.  So far we have:" << endl;
+    cerr << nIter << " iterations, |g| = " << normg << ", lnL = " << oldlike
+         << ", nFunc = " << nFunc << ", nDFunc = " << nDFunc << endl
+         << "     gradient = (" << m_gradient[0];
+    for (i = 1; i < m_nparam; i++)
+        cerr << ", " << m_gradient[i];
+    cerr << ")," << endl << "     params = (" << m_param[0];
+    for (i = 1; i < m_nparam; i++)
+        cerr << ", " << m_param[i];
+    cerr << ")." << endl;
+#endif
+
+    // Custom algorithm.  Should AVOID this when growth is absent.
+    // Probe parameters one at a time; allow for artificially large steps because
+    // growth tends to yield likelihood surfaces that are very long, narrow ridges
+    // that are very gradually increasing (and non-quadratically so) on top.
+
+    const long int UNINITIALIZED = -99L;
+    const long int FIXED = -2L;
+    const long int UNCONSTRAINED = -1L;
+    const unsigned long int DONE = 1UL;
+    // Explanation of the following vectors:
+    // origGradGuide is a copy of the gradient guide with the values it held
+    //   upon entry into this function, possibly set by the profiler;
+    //   it does not change during the lifetime of this function call.
+    // tempGradGuide changes with each iteration; it suppresses all but one
+    //   gradient component calculation by PostLike.
+    // compositeGradGuide is an "informed" composite of origGradGuide and the
+    //   constraints; it doesn't change during the lifetime of this function call.
+    // currentUnconstrainedParam and whichparam serve merely to simplify the code,
+    //   so that the logic for unconstrained and constrained parameters can be
+    //   written once in a uniform manner.
+    vector<ParamStatus> origGradGuide(m_pPostLike->m_working_pstatusguides);
+    assert(origGradGuide.size() == m_nparam);
+    vector<ParamStatus> tempGradGuide(m_nparam, ParamStatus(pstat_constant));
+    vector<long int> compositeGradGuide(m_nparam, UNINITIALIZED);
+    vector<unsigned long int> currentUnconstrainedParam(1, 0UL);
+    vector<unsigned long int>  *pWhichparam = &currentUnconstrainedParam;
+    double numNonfixedParams(0.0); // determines convergence criterion
+
+    // Set up the compositeGradGuide (see comment at its declaration).  We use it
+    // to efficiently reset the gradient guide over and over while we calculate
+    // derivatives for one independent parameter (or set of constrained
+    // parameters) at a time.
+
+    for (unsigned long int j = 0; j < m_nparam; j++)
+    {
+        if (UNINITIALIZED != compositeGradGuide[j])
+            continue; // we've already updated this element
+        if (!origGradGuide[j].Varies())
+            compositeGradGuide[j] = FIXED;
+        else if (!origGradGuide[j].Grouped())
+        {
+            numNonfixedParams += 1.0;
+            compositeGradGuide[j] = UNCONSTRAINED;
+        }
+        else // constrained
+        {
+            bool found_j = false;
+            unsigned long int k;
+            for (k = 0; k < m_constraints.size(); k++)
+            {
+                // Loop over the constraint vectors to find which parameters
+                // are constrained to equal one another--
+                // e.g., a vector in position 0 containing 4 and 6,
+                // followed by a vector in position 1 containing 5 and 9,
+                // means that parameter 4 is constrained to equal parameter 6
+                // and parameter 5 is constrained to equal parameter 9.
+                // (m_constraints[0] holds 4, 6; m_constraints[1] holds 5, 9.)
+                // In this case, if "F" means fixed and "U" means "unconstrained,"
+                // then our compositeGradGuide vector might end up looking
+                // like this:  U, U, U, F, 0, 1, 0, F, U, 1, U, F.
+                // (This could be a 3-population run in which M12 = M21,
+                // M13 = M31, and Mii = 0 for i = 1, 2, 3.)
+                // Each of the constraint vectors has been sorted
+                // in ascending order.  Also, no parameter index appears twice.
+                if (j == m_constraints[k][0])
+                {
+                    compositeGradGuide[j] = k;
+                    for (unsigned long int m = 1; m < m_constraints[k].size(); m++)
+                        compositeGradGuide[m_constraints[k][m]] = k;
+                    found_j = true;
+                    break; // found it, look no further; retain "k"
+                }
+            }
+            if (!found_j)
+            {
+                const ParamVector paramvec(true);  // read-only copy
+                string paramname = paramvec[j].GetName();
+                string msg = "Error:  Thee maximizer received a gradient guide ";
+                msg += "indicating that " + paramname + " should be ";
+                msg += "constrained, but this parameter was not found in the ";
+                msg += "maximizer\'s lookup table of constraints.  This is ";
+                msg += "a bug in LAMARC you shouldn't be able to reach; sorry.";
+                throw implementation_error(msg);
+            }
+            else
+                numNonfixedParams += 1.0 * m_constraints[k].size();
+        }
+    } // end of loop to set up compositeGradGuide
+
+    double gradient_i(0.0); // ith component of the gradient
+    vector<unsigned long int> grad_calculated(m_nparam, 0UL); // "to-do list"
+    const double GRAD_COMPONENT_EPSILON = NORM_EPSILON /
+        sqrt(numNonfixedParams); // convergence criterion for each component
+    // Note: numNonfixedParams > 0, else we would have calculated a gradient
+    // vector that's identically 0 at the top of this function, and returned.
+
+#ifdef TEST
+    cerr << "GRAD_COMPONENT_EPSILON = " << GRAD_COMPONENT_EPSILON << "." << endl;
+#endif // TEST
+
+    i = 0;
+
+    long int previousComponentSearched = FLAGLONG;
+    long int numConsecutiveTimesThisComponentHasBeenSearched = 0;
+    const long int maxNumConsecutiveComponentSearchesAllowed = 4;
+
+    while (normg > NORM_EPSILON && nIter < 300)
+    {
+        if (m_constrained && 0 == i)
+            fill(grad_calculated.begin(), grad_calculated.end(), 0UL);
+        // new cycle, so reset the "checklist" of what's been done
+
+        if (FIXED == compositeGradGuide[i]                      // fixed; continue
+            || fabs(m_gradient[i]) <= GRAD_COMPONENT_EPSILON    //  flat; continue
+            || grad_calculated[i] == DONE) // already done in this cycle; continue
+        {
+            i++;
+            if (i == m_nparam)
+                i = 0; // loop back to the beginning
+            continue;
+        }
+
+        if (UNCONSTRAINED == compositeGradGuide[i])
+        {
+            currentUnconstrainedParam[0] = i;
+            tempGradGuide[i] = ParamStatus(pstat_unconstrained);
+            pWhichparam = &currentUnconstrainedParam;
+            // For example, if i == 2, and we use "0" to denote pstat_constant
+            // and "U" to denote pstat_unconstrained, then tempGradGuide
+            // will now look like this:  0 0 U 0 0 0 0 0 ...
+            // and whichparam will look like this:  2.
+        }
+
+        else // constrained--we already determined that "i" is not fixed
+        {
+            vector<unsigned long int>::const_iterator it;
+            for (it = m_constraints[compositeGradGuide[i]].begin();
+                 it != m_constraints[compositeGradGuide[i]].end(); it++)
+            {
+                tempGradGuide[*it] = ParamStatus(pstat_identical);
+                grad_calculated[*it] = DONE; // mark each one as "done"
+            }
+            pWhichparam = &(m_constraints[compositeGradGuide[i]]);
+            // For example, if i == 4, and parameters 4 and 6 are constrained
+            // to be equal, then if we use "0" to denote pstat_constant
+            // and "C" to denote pstat_constrained, then tempGradGuide
+            // will now look like this:  0 0 0 0 C 0 C 0 0 0 ...
+            // and whichparam will look like this:  4 6.
+        }
+
+        // Are we "stuck"?
+        if (static_cast<long int>(i) != previousComponentSearched)
+            numConsecutiveTimesThisComponentHasBeenSearched = 0;
+        else
+            if (numConsecutiveTimesThisComponentHasBeenSearched > maxNumConsecutiveComponentSearchesAllowed)
+                break; // maximizer warning will ensue
+
+        vector<unsigned long int>::const_iterator it;
+        // Temporarily restrict gradient calculations to this one component.
+        SetGradGuide(tempGradGuide);
+
+        // Find the local maximum along this direction.
+        if (!BracketTheMaximumAndFindIt(pWhichparam, GRAD_COMPONENT_EPSILON,
+                                        oldlike, newlike, message))
+        {
+            // Failure.  Error message already set.
+            if (string::npos != message.find(maxstr::MAX_NO_UPPER_BOUND_0) && m_nparam - 1 == i)
+            {
+                if (m_dataHasGamma && m_newparam[i] >= m_maxAllowedValueForAlpha)
+                {
+                    SetGradGuide(origGradGuide); // restore to original values
+                    throw insufficient_variability_over_regions_error(m_newparam[i],
+                                                                      m_maxAllowedValueForAlpha);
+                }
+            }
+
+            SetGradGuide(origGradGuide); // restore to its original values
+            lnL = newlike; // -DBL_BIG, or something else for "stairway to heaven" surface
+            return lnL > -DBL_BIG ? true : false; // per lpsmith request
+        }
+
+        // Update our variables.
+        for (it = pWhichparam->begin(); it != pWhichparam->end(); it++)
+        {
+            m_param[*it] = m_newparam[*it];
+            m_lparam[*it] = m_newlparam[*it];
+        }
+        oldlike = newlike;
+
+        // Calculate the full gradient to see whether we've converged upon a flat
+        // peak.  Recall that adjusting one component can affect another gradient
+        // component.  Since we've just calculated the ith gradient component, we
+        // avoid needlessly recalculating it here.  "i" could be a single,
+        // unconstrained parameter, or a set of parameters constrained to
+        // equal each other.
+        SetGradGuide(origGradGuide); // restore to get full gradient
+        gradient_i = m_gradient[i]; // store what we've just calculated
+        // Suppress recalculation of "i" by DCalculate.
+        for (it = pWhichparam->begin(); it != pWhichparam->end(); it++)
+            m_pPostLike->m_working_pstatusguides[*it] = ParamStatus(pstat_constant);
+        DCalculate(m_param, m_gradient); // full gradient except "i"
+        for (it = pWhichparam->begin(); it != pWhichparam->end(); it++)
+        {
+            m_gradient[*it] = gradient_i; // restore component "i"
+            // to get the full gradient
+            // While we're at it, reset the one-at-a-time gradient guide
+            // to "all constant" in preparation for the next iteration.
+            // That is, undo the last operation we made onto tempGradGuide.
+            tempGradGuide[*it] = ParamStatus(pstat_constant);
+        }
+        old_normg = normg;
+        normg = Norm(m_gradient);
+
+        // Increment i to the next component.
+        previousComponentSearched = i; // store this, to determine if we're "stuck"
+        numConsecutiveTimesThisComponentHasBeenSearched++;
+        i++;
+        if (i == m_nparam)
+        {
+            if (m_dataHasGamma && m_param[i-1] > m_maxAllowedValueForAlpha)
+            {
+                SetGradGuide(origGradGuide); // restore to original values
+                throw insufficient_variability_over_regions_error(m_param[i-1],
+                                                                  m_maxAllowedValueForAlpha);
+            }
+            i = 0; // loop back to the beginning of the vector
+        }
+
+        nIter++;
+#ifdef TEST
+        cerr << "Iter " << nIter << ", |g| = " << normg << ", lnL = " << oldlike
+             << ", nFunc = " << nFunc << ", nDFunc = " << nDFunc << endl
+             << "     gradient = (" << m_gradient[0];
+        for (unsigned long int k = 1; k < m_nparam; k++)
+            cerr << ", " << m_gradient[k];
+        cerr << ")," << endl << "     params = (" << m_param[0];
+        for (unsigned long int k = 1; k < m_nparam; k++)
+            cerr << ", " << m_param[k];
+        cerr << ")." << endl;
+#endif
+
+    }
+
+#ifdef TEST
+    cerr << "Finished; |g| = " << normg << " and lnL = " << oldlike
+         << ", after " << nIter << " iterations, " << nFunc
+         << " lnL function evaluations, and " << nDFunc
+         << " derivative evaluations." << endl << endl;
+#endif
+
+    if (normg > NORM_EPSILON)
+    {
+        string msg = maxstr::MAX_NO_CONVERGENCE_0
+            + maxstr::MAX_NO_CONVERGENCE_1 + ToString(nIter)
+            + maxstr::MAX_NO_CONVERGENCE_2
+            + maxstr::MAX_NO_CONVERGENCE_3  + Pretty(normg)
+            + maxstr::MAX_NO_CONVERGENCE_4;
+        message = msg;
+    }
+
+    SetGradGuide(origGradGuide); // restore to original values
+    m_lastnormg = normg;
+    lnL = oldlike;
+    return true;
+} // CalculateSteepest
+
+//------------------------------------------------------------------------------------
+// Attempt to bracket the maximum for one component alone, then search the
+// bracketed region for this maximum.  "whichparam" is either a one-element
+// vector holding the parameter number of an unconstrained parameter, or a
+// vector holding the parameter numbers of a set of parameters that are
+// constrained to equal one another.  Note that in the latter case,
+// m_gradient[2] == m_gradient[1] == m_gradient[0]
+// and likewise for vectors m_newparam and m_param.
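+//
+// Sketch of the strategy used below:
+//   1. Take the full step (lambda = 1) along this component of the gradient.
+//   2. If lnL rose and the slope kept its sign, keep pushing outward in large
+//      steps (lambda *= 10 for linear parameters; multiply the parameter by 10
+//      or 0.1 for log-treated ones) until the slope flips, lnL drops, lnL
+//      becomes undefined, or a parameter bound / MAX_NUM_LONG_PUSHES is hit.
+//   3. The maximum now lies between the last two points probed; repeatedly step
+//      halfway across the bracket from the better endpoint until the slope
+//      there is within "epsilon" of flat or the bracket is negligibly small.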
+
+bool Maximizer::BracketTheMaximumAndFindIt(const vector<unsigned long int> * pWhichparam,
+                                           const double epsilon, double oldlike,
+                                           double & newlike, string & message)
+{
+    double lambda(1.0), orig_gradient_i(0.0);
+    vector<unsigned long int>::const_iterator it;
+    bool calc_succeeded(true);
+    const unsigned long int i = (*pWhichparam)[0]; // This is either the index
+    // of an unconstrained parameter,
+    // or the index of the first of a
+    // set of parameters constrained
+    // to equal one another.
+
+    SetParam1d(lambda * m_gradient[i], pWhichparam);
+    orig_gradient_i = m_gradient[i]; // "ith" component of gradient
+    calc_succeeded = m_pPostLike->Calculate(m_newparam, m_newlparam, newlike);
+#ifdef TEST
+    nFunc++;
+#endif // TEST
+    if (calc_succeeded)
+    {
+        if (newlike >= oldlike)
+        {
+            // We took the full Newton step (lambda == 1) along the direction of
+            // increase, and we found a higher lnL.  Does the slope here point to
+            // a yet-higher lnL farther along this direction, or have we now
+            // bracketed the maximum along the direction of this component?
+            DCalculate(m_newparam, m_gradient); // grad guide avoids
+            // unnecessary calculations
+#ifdef TEST
+            nDFunc++;
+#endif
+            if (SlopesHaveSameSign(orig_gradient_i, m_gradient[i], epsilon))
+            {
+                // m_gradient[i] did not change sign.  Hence a maximum lies ahead
+                // in this direction.  Probe artificially large steps of
+                // lambda = 10, 100, etc., until we bracket the maximum.
+                bool slopes_have_same_sign = true;
+                unsigned long int numLongPushes = 0;
+
+                while (numLongPushes < MAX_NUM_LONG_PUSHES &&
+                       calc_succeeded &&
+                       newlike >= oldlike &&
+                       slopes_have_same_sign)
+                {
+                    // Accept this step, since it has a higher lnL,
+                    // and a yet-higher lnL lies ahead of it.
+                    for (it = pWhichparam->begin(); it != pWhichparam->end(); it++)
+                    {
+                        m_param[*it] = m_newparam[*it];
+                        m_lparam[*it] = m_newlparam[*it];
+                    }
+                    oldlike = newlike;
+                    // Do NOT update orig_gradient_i.  Need consistency.
+
+                    // Take a new step along this direction.
+                    if (m_isLinearParam[i])
+                    {
+                        lambda *= 10.0;
+                        if (!SetParam1d(lambda*orig_gradient_i, pWhichparam))
+                            numLongPushes = MAX_NUM_LONG_PUSHES; // parameter boundary reached
+                    }
+                    else // Multiply the param by 10 or 0.1.
+                        if (!SetParam1d(LOG10*sign(orig_gradient_i), pWhichparam))
+                            numLongPushes = MAX_NUM_LONG_PUSHES; // parameter boundary reached
+
+                    numLongPushes++; // count the number of these steps
+
+                    calc_succeeded = m_pPostLike->Calculate(m_newparam, m_newlparam, newlike);
+#ifdef TEST
+                    nFunc++;
+#endif // TEST
+                    if (calc_succeeded)
+                    {
+                        if (newlike >= oldlike)
+                        {
+                            DCalculate(m_newparam, m_gradient); // one-dimensional
+#ifdef TEST
+                            nDFunc++;
+#endif
+                            slopes_have_same_sign =
+                                SlopesHaveSameSign(orig_gradient_i, m_gradient[i],
+                                                   epsilon);
+                        }
+                        // else we bracketed the maximum by finding a point
+                        // with a worse lnL
+                    }
+                    // else we bracketed the maximum by finding a point where
+                    // the surface is undefined (a flat "plane" at -infinity)
+                }
+
+                // Did we bracket the maximum,
+                // or give up due to iterating suspiciously many times?
+                if (numLongPushes >= MAX_NUM_LONG_PUSHES
+                    && calc_succeeded && newlike >= oldlike
+                    && slopes_have_same_sign)
+                {
+                    const ParamVector paramvec(true);  // read-only copy
+                    string paramname = paramvec[i].GetName();
+                    string msg = maxstr::MAX_NO_UPPER_BOUND_0
+                        + paramname + maxstr::MAX_NO_UPPER_BOUND_1
+                        + Pretty(m_newparam[i]) + maxstr::MAX_NO_UPPER_BOUND_2;
+                    message = msg;
+                    //newlike = +DBL_BIG; // erynes changed 17-Mar-06 per lpsmith request
+                    return false;
+                }
+                // Else we bracketed the maximum, after making "numLongPushes"
+                // long pushes along the current direction.
+            }
+            // Else if lambda == 1, we bracketed the maximum by taking a step of
+            // lambda == 1 along this direction and detecting a sign-flip in the
+            // slope there, or we happened to land on the exact maximum with this
+            // step.  Otherwise, we bracketed the maximum by reaching a point
+            // where (1) lnL is undefined, (2) lnL has been observed to decrease,
+            // or (3) the slope has flipped sign.  In cases (1) and (2), we don't
+            // know the slope at this new point, and we don't need to; for all 3
+            // numbered cases we know we need to reverse direction to find the
+            // maximum.  m_param and oldlike correspond to the "original" edge of
+            // the bracket, and m_newparam and newlike correspond to the "current"
+            // edge of the bracket.
+        }
+        // Else we bracketed the maximum, because taking a step of lambda = 1
+        // along this direction led us to a lower lnL.  We don't know the slope
+        // at this point, and we don't need to; we know we need to reverse
+        // direction to find the maximum.
+    }
+    // Else we bracketed the maximum, because taking a step of lambda = 1
+    // along this direction led us to a region in which the parameters are
+    // infinitely unlikely to reproduce any of the genealogies.
+
+    // At this point, we've converged for this component (unlikely), or we've
+    // bracketed a local maximum for the component under consideration.  Now we'll
+    // find the local maximum within this bracketed region.
+
+    // Note to debuggers/code-reviewers:
+    // Because we allow for the existence of multiple local extrema within this
+    // bracketed region, there are cases in which variable m_gradient[i] may
+    // happen to be out-of-sync and hence meaningless at this point in program
+    // execution.  What's certain is that orig_gradient_i and oldlike correspond
+    // to the "far" point of the bracket (the point that's "behind" us).  The
+    // point upon which we're sitting corresponds to either
+    // (!calc_succeeded || newlike < oldlike), or
+    // (calc_succeeded && newlike >= oldlike && !slopes_have_same_sign).
+    // In all cases, our next step will be "backwards" toward the point to which
+    // orig_gradient_i corresponds.  And once we reach a point for which
+    // newlike >= oldlike, the gradient will be valid, and it will point us
+    // toward a maximum.
+    //
+    // Note that in the case of calc_succeeded == false, we can theoretically
+    // expect either of the following cases for the lnL curve:
+    //
+    //
+    //     __   |XXXXXXXXXXXXX                |XXXXXXXXXXXXX
+    //    /  \  |XXXXXXXXXXXXX      or       /|XXXXXXXXXXXXX
+    //   /    \ |X undefined X              / |X undefined X
+    //  /      \|XXXXXXXXXXXXX             /  |XXXXXXXXXXXXX
+    //
+    //
+    // In the first case, the curve has a standard peak.  In the second case,
+    // the "peak" is the highest point outside the undefined region.
+
+    // Now we search the bracket for the maximum.  We examine the midpoint of the
+    // bracket, reduce the bracket length by 1/2, examine the midpoint of that,
+    // etc., until we find the maximum.  (Note:  If our bracket values are, say,
+    // 0 and 100, and the maximum is at 98, it would take 6 iterations to reach
+    // 98.4375.  Just an example.)
+
+    // m_gradient holds the gradient at the better point, which
+    // could be m_newparam (newlike >= oldlike) or m_param (newlike < oldlike).
+
+    // In the code below, we occasionally swap m_param and m_newparam,
+    // so that m_newparam (at least momentarily) always holds the point
+    // with the higher log-likelihood.
+
+    double bracket_length = fabs(m_newparam[i] - m_param[i]);
+
+#ifdef TEST
+    cerr << "     i = " << i << ", initial bracket length = "
+         << bracket_length << ", from p = " << min(m_param[i], m_newparam[i])
+         << " to " << max(m_param[i], m_newparam[i]) << endl;
+#endif // TEST
+
+    // This variable is explained in the comment paragraph below.
+    const double WIGGLE_CRITERION(5.0e-06);
+
+    // The convergence criterion for the bracket search is a bit tricky.
+    // The main idea is:  "While we haven't found a flat slope, keep going."
+    // This is the main loop condition, the comparison between the gradient
+    // and epsilon.  Because we always choose the point with the higher lnL,
+    // and from there step only to a point with a yet-higher lnL, we never
+    // risk interpreting a local minimum as a local maximum (if it's even
+    // possible for a local minimum to occur).  Also, note that this enables
+    // us to perform the minimum possible number of derivative evaluations
+    // while we search the bracket for the maximum.
+    // We employ a hard cut-off to our search, to ensure we never go into an
+    // infinite loop if the surface is exceptionally problematic.
+
+    // We also attempt to allow for the rare phenomenon of "wiggles," i.e.,
+    // anomalies with some trees that, collectively, yield a likelihood surface
+    // that has a reasonably well-defined maximum, but has a jagged contour
+    // at the microscopic level.  (Imagine tracing the fuzz on the surface of a
+    // peach).  We can often(?) return a reasonably accurate MLE in this case,
+    // even though the surface is very jittery in the lower decimal places.
+    // The "wiggle-handling code" precedes the bracket search, because most
+    // surfaces don't have wiggles, and hence incorporating this criterion into
+    // the bracket-search loop condition would almost always terminate the loop
+    // before reaching optimal precision on the MLEs.  The placement of this
+    // wiggle-handling code means that, yes, some full bracket searches are
+    // performed, but eventually we reach a state where the bracket we
+    // calculate is merely the tiny distance between two nearby wiggles,
+    // and this is where the wiggle-handling code kicks in and claims
+    // that we've converged to the MLE.
+
+    if (bracket_length/fabs(m_param[i]) < WIGGLE_CRITERION &&
+        fabs((newlike - oldlike)/oldlike) < WIGGLE_CRITERION)
+        // Don't bother searching this tiny bracket.  Force an immediate return.
+        m_gradient[i] = 0.0;
+
+    // Search the bracket, halving its size with each search.
+
+    while (fabs(m_gradient[i]) > epsilon &&
+           bracket_length > fabs(m_newparam[i])*1.0e-10)
+    {
+        if (fabs(m_newparam[i])*1.0e-10 < 100.0/DBL_BIG)
+            break; // hard stop condition
+
+        if (newlike < oldlike)
+        {
+            // Redefine m_newparam, etc., to refer to the point with the higher lnL.
+            m_param.swap(m_newparam);
+            m_lparam.swap(m_newlparam);
+            swap(oldlike, newlike);
+        }
+
+        // Take a step, of length half of the bracket size.
+        // Apply it to multiple parameters if any are constrained to be equal.
+
+        // Pre-compute two quantities, for speed.
+        double step = 0.5 * sign(m_gradient[i]) * bracket_length;
+        double lognewparam = log(m_newparam[i] + step); // it's safe to log() within the bracket
+
+        oldlike = newlike; // Store the lnL before we take a step.
+
+        for (it = pWhichparam->begin(); it != pWhichparam->end(); it++)
+        {
+            // Store the better point before we take a step away from it.
+            m_param[*it] = m_newparam[*it];
+            m_lparam[*it] = m_newlparam[*it];
+
+            m_newparam[*it] += step;
+            if (m_isLinearParam[*it]) // they're all log or all linear
+                m_newlparam[*it] = m_newparam[*it];
+            else
+                m_newlparam[*it] = lognewparam;
+        }
+
+        // Update the bracket size.
+        bracket_length = fabs(m_newparam[i] - m_param[i]);
+
+        calc_succeeded = m_pPostLike->Calculate(m_newparam, m_newlparam, newlike);
+#ifdef TEST
+        nFunc++;
+#endif // TEST
+
+        if (newlike >= oldlike)
+        {
+            DCalculate(m_newparam, m_gradient); // one-dimensional
+#ifdef TEST
+            nDFunc++;
+#endif
+        }
+    }
+
+    // This should never be true at this point in the code.
+    if (!calc_succeeded)
+    {
+        const ParamVector paramvec(true);  // read-only copy
+        string paramname = paramvec[i].GetName();
+        string msg = maxstr::MAX_UNDEFINED_BORDER_0 + paramname
+            + maxstr::MAX_UNDEFINED_BORDER_1 + Pretty(m_param[i])
+            + maxstr::MAX_UNDEFINED_BORDER_2;
+        message = msg;
+#ifdef TEST
+        cerr << "The parameter vector where lnL = -DBL_BIG is p = (" << m_newparam[0];
+        unsigned long int k;
+        for (k = 1; k < m_nparam; k++)
+            cerr << ", " << m_newparam[k];
+        cerr << ")." << endl;
+#endif // TEST
+        //      assert(0); // for debugging
+        newlike = -DBL_BIG; // for consistency
+        return false;
+    }
+
+    // Print a debug message if our bracket search failed egregiously.
+    if (fabs(m_gradient[i]) > 100.0 * epsilon)
+    {
+        const ParamVector paramvec(true);  // read-only copy
+        string paramname = paramvec[i].GetName();
+        string msg = maxstr::MAX_FAILED_BRACKET_0 + paramname
+            + maxstr::MAX_FAILED_BRACKET_1
+            + (newlike > oldlike ? Pretty(m_newparam[i]) : Pretty(m_param[i]))
+            + maxstr::MAX_FAILED_BRACKET_2 + Pretty(m_gradient[i]) + "."
+            + maxstr::MAX_FAILED_BRACKET_3;
+        message = msg;
+#ifdef TEST
+        cerr << "The full parameter vector is p = ("
+             << (newlike > oldlike ? m_newparam[0] : m_param[0]);
+        unsigned long int k;
+        for (k = 1; k < m_nparam; k++)
+            cerr << ", " << (newlike > oldlike ? m_newparam[k] : m_param[k]);
+        cerr << "), where lnL = "
+             << (newlike > oldlike ? newlike : oldlike) << "." << endl;
+#endif // TEST
+    }
+
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+// Try to find the maximum by the method of Broyden/Fletcher/Goldfarb/Shanno.
+// Does not work well in the presence of growth; try CalculateSteepest instead.
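+//
+// Each iteration below runs a halving line search (lambda = 1, 1/2, 1/4, ...)
+// along m_direction, accepts the first step that raises lnL, and then refreshes
+// the BFGS state:  CalcSecond() updates the inverse-Hessian approximation from
+// the parameter and gradient deltas, and CalcDirection() sets the next direction
+// to m_second . m_gradient.  If the line search stalls, or |gradient| has barely
+// changed over the last 20 iterations, the approximation is reset to the
+// identity and the next step is taken straight up the gradient.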
+
+bool Maximizer::CalculateBroyden(unsigned long int maxtrials, double& lnL, string& message)
+{
+    // The oldlike will always start at 0, but we must still call Calculate()
+    // because it precomputes an essential term used by DCalculate().
+    double oldlike = 0.0;
+    if (!m_pPostLike->Calculate(m_param, m_lparam, oldlike))
+    {
+        lnL = -DBL_BIG;
+        return false; // unrecoverable; gradient can't be calculated from m_param.
+    }
+    DCalculate(m_param, m_gradient); // result into m_gradient
+
+    double lambda(1.0), newlike(0.0);
+    m_oldlparam = m_lparam;
+    m_direction = m_gradient; // Point to maximum.
+    m_oldgradient = m_gradient;
+    double normg = Norm(m_gradient); // length of the gradient vector
+    double normg20 = 0; // length the gradient vector had in the 20th,
+    // 40th, 60th, etc. iteration (used for convergence)
+
+    ResetSecond(); // initialize the approx. inverse second derivative matrix
+#ifdef TEST
+    nMatrixResets--; // discount this first "reset"
+#endif // TEST
+    bool calc_succeeded = true;
+    unsigned long int count = 0; // number of iterations
+
+    while (normg > NORM_EPSILON && count < maxtrials)
+    {
+        double thisChange(DBL_MAX);
+        lambda = 2.0; // reset; will be reduced to 1 before lambda gets used
+        do {
+            // "Line search" -- find a higher lnL along this direction
+            lambda /= 2.0;    // set the step length
+            thisChange = SetParam(lambda, m_direction); // probe this step in this direction
+            calc_succeeded = m_pPostLike->Calculate (m_newparam, m_newlparam,
+                                                     newlike);
+        } while ((!calc_succeeded || newlike <= oldlike ||
+                  systemSpecificIsnan(newlike)) && lambda > LAMBDA_EPSILON
+                 && thisChange > LAMBDA_EPSILON
+            );
+
+        // the eventloop call used to be here when we were supporting
+        // Mac OS 9
+        // eventloop();
+
+        // Track each 20th normg. If we're creeping along with the tiniest of steps,
+        // reset the approx. inverse Hessian and continue along this direction.
+        if (count % 20 == 0)
+        {
+            if (fabs (normg - normg20) < NORM_EPSILON)
+            {
+                lambda = 0.0; // creeping; force a matrix reset below
+            }
+            normg20 = normg;
+        }
+
+        if (lambda <= LAMBDA_EPSILON || thisChange <= LAMBDA_EPSILON)
+        {
+            if (m_direction == m_gradient && lambda != 0.0)
+            {
+                // This is bad--it means there's a nonzero gradient (guaranteed ascent
+                // direction) at a certain point, but if we take an infinitesimally tiny
+                // step along this direction, we don't find a greater likelihood.
+                unsigned long int k;
+                string msg = maxstr::MAX_CLIFF_EDGE_0 + Pretty(oldlike)
+                    + maxstr::MAX_CLIFF_EDGE_1 + Pretty(m_param[0]);
+                for (k = 1; k < m_nparam; k++)
+                    msg += ", " + Pretty(m_param[k]);
+                msg += maxstr::MAX_CLIFF_EDGE_2;
+                message = msg;
+#ifdef TEST
+                cerr << "Maximizer:  direction = gradient = (" << m_gradient[0];
+                for (k = 1; k < m_nparam; k++)
+                    cerr << ", " << m_gradient[k];
+                cerr << "), |g| = " << Norm(m_gradient) << endl;
+#endif
+                lnL = -DBL_BIG;
+                return false;
+            }
+
+            // we're creeping along with the tiniest of steps,
+            // either in "line search" or in normg (lambda purposely set to 0 above)
+#ifdef TEST
+            nLambdaSmall++;
+#endif // TEST
+            ResetSecond();
+            m_direction = m_gradient; // guaranteed ascent direction
+            count++;
+            continue;
+        }
+
+        // found a point that's closer to the maximum
+
+        m_param = m_newparam;
+        m_lparam = m_newlparam;
+        oldlike = newlike;
+        DCalculate (m_param, m_gradient);
+        normg = Norm(m_gradient);
+        CalcDelta(m_lparam, m_oldlparam, m_paramdelta);
+        CalcDelta(m_gradient, m_oldgradient, m_gradientdelta);
+        CalcSecond();
+        CalcDirection();
+        copy (m_gradient.begin(), m_gradient.end(), m_oldgradient.begin());
+        copy (m_lparam.begin(), m_lparam.end(), m_oldlparam.begin());
+        count++;
+    }
+
+    m_lastnormg = normg;
+    lnL = oldlike;
+
+    if (normg > NORM_EPSILON)
+    {
+        string msg = maxstr::MAX_NO_CONVERGENCE_0;
+        if (count > 0)
+        {
+            msg += maxstr::MAX_NO_CONVERGENCE_1;
+            msg += ToString(count) + maxstr::MAX_NO_CONVERGENCE_2;
+        }
+        msg += maxstr::MAX_NO_CONVERGENCE_3 + Pretty(normg)
+            + maxstr::MAX_NO_CONVERGENCE_4;
+        message = msg;
+    }
+
+#ifdef TEST
+    cerr << "Finished, after " << count << " iterations of BFGS and "
+         << nMatrixResets << " matrix resets." << endl
+         << "|grad| = " << normg << ", grad = (" << m_gradient[0];
+    unsigned long int i;
+    for (i = 1; i < m_gradient.size(); i++)
+        cerr << ", " << m_gradient[i];
+    cerr << ")" << endl << "direction = (" << m_direction[0];
+    for (i = 1; i < m_direction.size(); i++)
+        cerr << ", " << m_direction[i];
+    cerr << ")" << endl << "params = (" << m_param[0];
+    for (i = 1; i < m_param.size(); i++)
+        cerr << ", " << m_param[i];
+    cerr << ")" << endl;
+    if (count > 21)
+        cerr << "normg20 = " << normg20 << " and ";
+    cerr << "lambda = " << lambda << "; lnL = " << lnL << endl;
+    if (nMatrixResets > 2)
+        cerr << endl << nLambdaSmall << " matrix resets were due to halving back too far"
+             << ", and " << n_dtg_zero << " resets were due to dtg == 0 (rounding error)"
+             << " in BFGS.";
+    cerr << endl << endl;
+#endif // TEST
+
+    return true;
+} // Maximizer::CalculateBroyden
+
+//------------------------------------------------------------------------------------
+//CalculateByParts is meant to calculate the maximum of the function by
+// varying the gradient guides and using repeated calls to CalculateBroyden.
+// It was created when we saw that profiling could come up with better
+// likelihoods than the so-called 'mles'.
+
+bool Maximizer::CalculateByParts(double& lnL, string& message)
+{
+    cerr << "Initial params:  " << m_param[0];
+    for (unsigned long int i = 1; i < m_param.size(); ++i)
+        cerr << ", " << m_param[i];
+    cerr << endl;
+    //First, just run BFGS, but not as long as we would normally.
+    double newlike = 0.0, oldlike = 0.0;
+    bool calc_succeeded = CalculateBroyden(NTRIALS/10, newlike, message);
+    if (!calc_succeeded)
+        cerr << "Maximizer::CalculateByParts -- infinitely unlikely initial param vector.";
+    else
+    {
+        oldlike = newlike*2; //Just to get a clearly-different likelihood.
+        cerr << "First newlike = " << newlike << endl;
+    }
+    cerr << "Params = (" << m_param[0];
+    for (unsigned long int i = 1; i < m_param.size(); ++i)
+        cerr << ", " << m_param[i];
+    cerr << ")" << endl;
+    if (!calc_succeeded)
+        return false; // unrecoverable; can't calculate gradient
+
+    //Now, go through and hold each parameter constant while maximizing
+    // everything else.
+    //
+    //If this approach doesn't work, we might want to hold each parameter
+    // *type* constant (theta, mig, grow, etc.) instead.  Or even just thetas
+    // and growth, since those seem to be the problem pairs.
+
+    vector<ParamStatus> gradGuide(m_pPostLike->m_working_pstatusguides);
+
+    long int ncount = 0;
+    while (ncount++ < 20)
+    {
+        oldlike = newlike;
+        for (unsigned long int paramnum = 0; paramnum < gradGuide.size(); ++paramnum)
+        {
+            ParamStatus mystatus = gradGuide[paramnum];
+            if (mystatus.Inferred())
+            {
+                ProfileGuideFix(paramnum);
+                // Try varying all but one of the parameters, as an experiment.
+                calc_succeeded = CalculateBroyden(NTRIALS/20, newlike, message);
+                // We're not necessarily interested in each likelihood;
+                // we're interested in where the params move to.
+                if (!calc_succeeded)
+                {
+                    // This is an unrecoverable error; we can no longer
+                    // calculate the gradient, because Prob(G|P) = 0 for all G.
+                    break;
+                }
+                ProfileGuideRestore(paramnum);
+            }
+        }
+
+        if (!calc_succeeded)
+        {
+#ifdef TEST
+            cerr << "Maximizer::CalculateByParts -- moved to an infinitely unlikely param vector."
+                 << endl << "Param vector = (" << m_param[0];
+            for (unsigned long int i = 1; i < m_param.size(); i++)
+                cerr << ", " << m_param[i];
+            cerr << ")" << endl;
+#endif // TEST
+            return false;
+        }
+
+        //Finally, do one more round of BFGS, with nothing fixed.
+        calc_succeeded = CalculateBroyden(NTRIALS/20, newlike, message);
+
+        //Exit if the likelihood has not changed that much.
+        if ( fabs(newlike - oldlike) < fabs(newlike/100) || !calc_succeeded)
+        {
+            cerr << "Exiting CalculateByParts after " << ncount
+                 << " loops with newlike = " << newlike << "." << endl
+                 << "Params = " << m_param[0];
+            for (unsigned long int i = 1; i < m_param.size(); ++i)
+                cerr << ", " << m_param[i];
+            cerr << endl;
+            if (!calc_succeeded)
+            {
+#ifdef TEST
+                cerr << "(Parameter vector is infinitely unlikely "
+                     << "to reproduce the genealogies.)" << endl;
+#endif // TEST
+                return false;
+            }
+            lnL = newlike;
+            return true;
+        }
+        cerr << "Newlike = " << newlike << endl << "Params = " << m_param[0];
+        for (unsigned long int i = 1; i < m_param.size(); ++i)
+            cerr << ", " << m_param[i];
+        cerr << endl;
+    }
+    cerr << "Exiting CalculateByParts after " << ncount
+         << " loops; the maximum allowed." << endl;
+    lnL = newlike;
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+// Maximizer::SetConstraints()
+// Allows the user to constrain some parameters to equal one another.
+// For example, she might have six migration rate matrix elements, and wish to
+// constrain M12 = M21 = A and M13 = M31 = B.  In this case, she would be
+// probing the maximum-likelihood estimates for four migration rates:
+// M23, M32, A, and B (in addition to the populations' three thetas).
+// The maximizer is supposed to have no knowledge of forces, that is, whether
+// the parameter vector it receives contains thetas and recombination rates
+// or migration rates or anything else.  And rightly so.  Hence, here it simply
+// receives a vector of vectors specifying which parameters should be
+// constrained to equal one another.  If we wish to forbid the user from
+// constraining a recombination rate to equal a migration rate, or the like,
+// then that must be done at a higher level in the code (e.g.,
+// in the user interface).
+// At the level of the maximizer, we must forbid one thing:  A parameter
+// that we manipulate via its logarithmic value cannot be constrained to equal
+// a parameter that we vary linearly (so that it can become negative).
+// At present, this means growth parameters can be constrained to equal
+// one another in any combination, but cannot be constrained to equal
+// thetas, disease rates, etc. (this would be nonsensical anyway).
+// This function returns false if the caller attempts such a constraint.
+// It also returns false if the caller sends a parameter index that is
+// greater than the largest parameter index previously known by the maximizer,
+// or if a parameter index appears in multiple places in the input vector.
+// Otherwise, it stores the constraints and returns true.
+// If false is returned, the maximizer's constraint parameters remain unchanged.
+//
+// If an empty vector is received, false is returned.
+//
+// Added by erynes, 23 August 2004.
+
+bool Maximizer::SetConstraints(const vector<vector<unsigned long int> > & constraints)
+{
+    if (constraints.empty())
+        return false;
+
+    vector<vector<unsigned long int> > oldconstraints = m_constraints; // save previous
+    m_constraints.clear(); // clear current so we can use push_back()
+    vector<vector<unsigned long int> >::const_iterator outer_it; // it. over vectors
+    vector<unsigned long int>::const_iterator inner_it; // it. over one vector
+    bool isLinearParam; // whether a vec's first param is treated linearly
+    vector<unsigned long int> values_seen; // All values seen so far.
+    // Shouldn't have duplicates.
+
+    m_constraintratio.clear();
+
+    for (outer_it = constraints.begin(); outer_it != constraints.end();
+         outer_it++)
+    {
+        // First, validate the contents of each vector.
+
+        if ((*outer_it).size() < 2)
+        {
+            // Empty constraint vector, or missing constraints
+            // (constraining a lone parameter to equal itself
+            // is meaningless)
+          ExitFailure:
+            m_constraints.clear();
+            m_constraints = oldconstraints;
+            m_constrained = false;
+            return false;
+        }
+
+        isLinearParam = m_isLinearParam[(*outer_it)[0]];
+
+        for (inner_it = (*outer_it).begin();
+             inner_it != (*outer_it).end();
+             ++inner_it)
+        {
+            if (*inner_it >= m_nparam)
+            {
+                // Attempt to constrain a nonexistent parameter.
+                goto ExitFailure;
+            }
+            if (isLinearParam && !m_isLinearParam[*inner_it])
+            {
+                // Attempt to constrain a logarithmic parameter
+                // to a linear parameter.
+                goto ExitFailure;
+            }
+            if (!isLinearParam && m_isLinearParam[*inner_it])
+            {
+                // Attempt to constrain a linear parameter
+                // to a logarithmic parameter.
+                goto ExitFailure;
+            }
+            if (find(values_seen.begin(), values_seen.end(), *inner_it)
+                != values_seen.end())
+            {
+                // A parameter identifier occurs more than once.
+                goto ExitFailure;
+            }
+            values_seen.push_back(*inner_it);
+        }
+
+        // Determined this vector is valid, so we store it for later use.
+        m_constraints.push_back(*outer_it);
+        // Sort the vector we just stored, to make maximization simpler.
+        sort(m_constraints[m_constraints.size() - 1].begin(),
+             m_constraints[m_constraints.size() - 1].end());
+
+        // handle multiplicative constraint
+        double ratio(0.0);  // assume no multiplicative constraint
+        // this group has multiplicative constraint
+        // JDEBUG we are EVIL kludging this at the moment...
+        if ( find(outer_it->begin(),outer_it->end(),0.0) != outer_it->end() )
+        {
+            ratio = 0.5/0.5;  // (1-pA)/pA, JDEBUG, hard set pA = 0.5;
+            // MDEBUG this needs to be replaced by ratio of the
+            // input Thetas for the selection case; we are not
+            // prepared (in phase I) to do anything more general yet.
+        }
+        m_constraintratio.push_back(ratio);
+    }
+
+    m_constrained = true;
+    return true;
+}
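As a hedged usage sketch of the interface documented above (the numeric indices are hypothetical and depend entirely on how the caller lays out its parameter vector), a caller enforcing M12 = M21 and M13 = M31 might build the constraint groups like this:

    #include <vector>

    // Sketch only: indices 3/5 and 4/6 stand in for wherever M12/M21 and
    // M13/M31 happen to sit in the caller's parameter vector.
    void ConstrainSymmetricMigration(Maximizer& maximizer)
    {
        std::vector<std::vector<unsigned long int> > constraints;

        std::vector<unsigned long int> groupA;      // M12 == M21
        groupA.push_back(3);
        groupA.push_back(5);
        constraints.push_back(groupA);

        std::vector<unsigned long int> groupB;      // M13 == M31
        groupB.push_back(4);
        groupB.push_back(6);
        constraints.push_back(groupB);

        if (!maximizer.SetConstraints(constraints))
        {
            // Rejected: an index was out of range or duplicated, or the group
            // mixed log-scaled and linear parameters; the previously stored
            // constraint vectors are restored.
        }
    }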
+
+//------------------------------------------------------------------------------------
+// Maximizer::DCalculate()
+// In the absence of constraints, simply returns PostLike::DCalculate().
+// In the presence of constraints,
+// it calculates the directional derivative along the direction x = y,
+// or y = z, or x = z and p = d = q, etc., at the n-dimensional point "param."
+// Stores the result in "gradient."
+// The unconstrained components are unaffected; they get the same values
+// they would in the absence of constraints.
+// The constrained components each get the value of the dot product of the
+// unconstrained gradient vector with the unit vector in the direction of the
+// constraint.  Since our constraints are of the form x = y, which makes a
+// 45-degree angle with the coordinate directions, these constraints are
+// equivalent to setting a group of constrained gradient components to
+// the average value of those components.
+// If we ever want to allow constraints of the form x = const * y,
+// that would be possible, but would require turning the averages into
+// weighted averages, with the weights coming from the cosines and sines
+// (or arccosines and arcsines) of the angle defined by the constant.
+//
+// Calls PostLike::DCalculate, and returns its return value.
+//
+// Added by erynes, 23 August 2004.
+
+bool Maximizer::DCalculate(const DoubleVec1d& param,
+                           DoubleVec1d& gradient)
+{
+    bool retval = m_pPostLike->DCalculate(param, gradient);
+    if (!m_constrained || !retval)
+        return retval;
+
+    DoubleVec1d::size_type group;
+    DoubleVec1d::size_type numElements;
+    vector<unsigned long>::iterator inner_it;
+    double avg_grad_component;
+
+    assert(m_constraints.size() == m_constraintratio.size());
+    for (group = 0; group < m_constraints.size(); ++group)
+    {
+        if (!(numElements = m_constraints[group].size()))
+            continue; // this vector is empty
+
+        if (m_constraintratio[group]) // do multiplicative constraint
+        { // we assume that there are exactly two elements here
+          // and that they are theta values
+            assert(numElements == 2);
+            double ratio(m_constraintratio[group]);
+            double& th0(gradient[m_constraints[group][0]]);
+            double& th1(gradient[m_constraints[group][1]]);
+            double mult(1.0/(1.0+ratio*ratio) * (th0+ratio*th1));
+            th0 = mult;
+            th1 = ratio * mult;
+        }
+        else                          // do the average
+        {
+            avg_grad_component = 0.0;   // reset for next average
+
+            for (inner_it = m_constraints[group].begin();
+                 inner_it != m_constraints[group].end();
+                 inner_it++)
+                avg_grad_component += gradient[*inner_it];
+
+            avg_grad_component /= numElements; // numElements > 0 verified above
+
+            for (inner_it = m_constraints[group].begin();
+                 inner_it != m_constraints[group].end();
+                 inner_it++)
+                gradient[*inner_it] = avg_grad_component;
+        }
+    }
+
+    return true;
+}
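A minimal standalone sketch of the equal-value case described above (names here are illustrative, not part of the source): each gradient component in a constrained group is replaced by the group mean, which is proportional to the projection of the gradient onto the constraint direction x = y = ...

    #include <cstddef>
    #include <vector>

    // Average the gradient components belonging to one constraint group
    // (the non-multiplicative case handled in Maximizer::DCalculate()).
    void AverageConstrainedComponents(std::vector<double>& gradient,
                                      const std::vector<unsigned long int>& group)
    {
        if (group.empty()) return;
        double sum = 0.0;
        for (std::size_t i = 0; i < group.size(); ++i)
            sum += gradient[group[i]];
        const double mean = sum / group.size();
        for (std::size_t i = 0; i < group.size(); ++i)
            gradient[group[i]] = mean;
    }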
+
+//------------------------------------------------------------------------------------
+
+inline double sign(const double& x) { return x < 0.0 ? -1.0 : 1.0; };
+
+//------------------------------------------------------------------------------------
+// Set up the gradient guide for the given postlike object.
+// Does NOT store or update anything in the maximizer object.
+
+void Maximizer::GradientGuideSetup(const ParamVector &thisToDolist,
+                                   PostLike* pThisPostLike)
+{
+    // defines behavior in the gradient calculations
+    // Type              Label [this gets used in gradient()]
+    // c = constant   -> 0.
+    // * = maximize   -> 1.
+    ParamVector::const_iterator doListItem;
+    if (!pThisPostLike)
+    {
+        assert(0); // received a NULL postlike pointer
+        return;
+    }
+    pThisPostLike->m_working_pstatusguides.clear();
+    for (doListItem = thisToDolist.begin();
+         doListItem != thisToDolist.end(); ++doListItem)
+    {
+        pThisPostLike->m_default_pstatusguides.push_back(doListItem->GetStatus().Status());
+    }
+    pThisPostLike->m_working_pstatusguides =
+        pThisPostLike->m_default_pstatusguides;
+    //m_validguides might be useful at some point, but not right now.
+}
+
+//------------------------------------------------------------------------------------
+// Instruct DCalculate to treat this component as a constant.
+
+void Maximizer::ProfileGuideFix(long int guide)
+{
+    if (guide < 0 ||
+        guide >= static_cast<long int>(m_pPostLike->m_default_pstatusguides.size()))
+    {
+        string msg = "Internal error:  Maximizer::ProfileGuideFix() ";
+        msg += "received an invalid parameter index (" + ToString(guide);
+        msg += ").  Valid values for this run range from 0 to ";
+        msg += ToString(m_pPostLike->m_default_pstatusguides.size() - 1);
+        msg += ".";
+        throw implementation_error(msg);
+    }
+    unsigned long k(0);
+    vector<unsigned long>::const_iterator it;
+    bool found_guide = false;
+    string msg;
+    ParamStatus mystatus = m_pPostLike->m_default_pstatusguides[guide];
+    if (!mystatus.Inferred())
+    {
+        msg = "Warning:  Attempted to fix parameter " + Pretty(guide) + " in "
+            + "ProfileGuideFix(), but this parameter is of type " + ToString(mystatus.Status());
+        registry.GetRunReport().ReportDebug(msg);
+    }
+    if (mystatus.Grouped())
+    {
+        for (k = 0; k < m_constraints.size(); k++)
+        {
+            if (guide == static_cast<long>(m_constraints[k][0]))
+            {
+                found_guide = true;
+                for (it = m_constraints[k].begin();
+                     it != m_constraints[k].end();
+                     it++)
+                    m_pPostLike->m_working_pstatusguides[*it] = ParamStatus(pstat_constant);
+            }
+        }
+        if (!found_guide)
+        {
+            // "guide" not found as the zeroth element of a constraint vector
+            msg = "Warning!  ProfileGuideFix() was called on parameter "
+                + Pretty(guide) + ", whose default type is pstat_head, but "
+                + "this parameter was not found in the maximizer\'s lookup table, "
+                + "which is indexed by joint parameters.";
+            registry.GetRunReport().ReportDebug(msg);
+            assert(0);
+        }
+    }
+    else
+    {
+        m_pPostLike->m_working_pstatusguides[guide] = ParamStatus(pstat_constant);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::ProfileGuideFixAll()
+{
+    for (unsigned long int pnum = 0;
+         pnum < m_pPostLike->m_working_pstatusguides.size();
+         pnum++)
+    {
+        ParamStatus mystatus = m_pPostLike->m_working_pstatusguides[pnum];
+        if (mystatus.Varies()) m_pPostLike->m_working_pstatusguides[pnum] = ParamStatus(pstat_constant);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::ProfileGuideRestore()
+{
+    m_pPostLike->m_working_pstatusguides = m_pPostLike->m_default_pstatusguides;
+}
+
+//------------------------------------------------------------------------------------
+// Instruct DCalculate to calculate the gradient for this component.
+// Useful for "undoing" a call to ProfileGuideFix().
+// If "guide" refers to a constrained parameter, then the corresponding
+// constrained parameters also get fixed here.
+
+void Maximizer::ProfileGuideRestore(long int guide)
+{
+    if (guide < 0 ||
+        guide >= static_cast<long int>(m_pPostLike->m_default_pstatusguides.size()))
+    {
+        string msg = "Internal error:  Maximizer::ProfileGuideRestore() ";
+        msg += "received an invalid parameter index (" + ToString(guide);
+        msg += ").  Valid values for this run range from 0 to ";
+        msg += ToString(m_pPostLike->m_default_pstatusguides.size() - 1);
+        msg += ".";
+        throw implementation_error(msg);
+    }
+    unsigned long int k(0);
+    vector<unsigned long int>::const_iterator it;
+    bool found_guide = false;
+    string msg;
+    ParamStatus mystatus = m_pPostLike->m_default_pstatusguides[guide];
+    if (mystatus.Inferred() && mystatus.Grouped())
+    {
+        // MFIX need to handle multiplicative?
+        for (k = 0; k < m_constraints.size(); k++)
+        {
+            if (guide == static_cast<long int>(m_constraints[k][0]))
+            {
+                found_guide = true;
+                for (it = m_constraints[k].begin();
+                     it != m_constraints[k].end();
+                     it++)
+                    m_pPostLike->m_working_pstatusguides[*it] =
+                        m_pPostLike->m_default_pstatusguides[guide];
+            }
+        }
+        if (!found_guide)
+        {
+            // "guide" not found as the zeroth element of a constraint vector
+            msg = "Warning!  ProfileGuideRestore() was called on parameter "
+                + Pretty(guide) + ", whose default type is pstat_head, but "
+                + "this parameter was not found in the maximizer\'s lookup table, "
+                + "which is indexed by joint parameters.";
+            registry.GetRunReport().ReportDebug(msg);
+            throw implementation_error(msg);
+        }
+    }
+    else
+        if (mystatus.Inferred())
+        {
+            m_pPostLike->m_working_pstatusguides[guide] =
+                m_pPostLike->m_default_pstatusguides[guide];
+        }
+        else
+        {
+            msg = "Warning!  ProfileGuideRestore() may only be called on parameters ";
+            msg += "of type \"pstatus_unconstrained\" and \"pstatus_head.\"";
+            msg += "Parameter " + Pretty(guide) + " is of neither type, but it was ";
+            msg += "sent to ProfileGuideRestore.";
+            registry.GetRunReport().ReportDebug(msg);
+            throw implementation_error(msg);
+        }
+}
+
+//------------------------------------------------------------------------------------
+
+void Maximizer::AppendConstraintOnAlpha(ParamStatus alphaStatus)
+{
+    if (!m_dataHasGamma)
+    {
+        string msg = "Maximizer::AppendConstraintOnAlpha() was called, but a multi-region ";
+        msg += "estimate with gamma-distributed mutation rates was not being performed ";
+        msg += "when this function was called.";
+        throw implementation_error(msg);
+    }
+    if (m_constraintsVectorHasSlotForAlpha)
+    {
+        m_pPostLike->m_working_pstatusguides.pop_back();
+        m_pPostLike->m_default_pstatusguides.pop_back();
+    }
+    m_pPostLike->m_working_pstatusguides.push_back(alphaStatus);
+    m_pPostLike->m_default_pstatusguides.push_back(alphaStatus);
+    m_constraintsVectorHasSlotForAlpha = true;
+}
+
+//------------------------------------------------------------------------------------
+
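// tolerance() below appears to return half a unit in the last digit of a
// roughly seven-digit representation of `number`, with adjustments for sign
// and for very large or very small magnitudes; presumably it is paired with
// SlopesHaveSameSign() to decide when two slope values are effectively equal
// at that precision.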
+inline double tolerance(double number)
+{
+    if (0.0 == number)
+        return 0.0;
+
+    bool numIsNegative = (number < 0.0);
+    double delta = floor(log10(numIsNegative ? -1.0 * number : number));
+
+    if (numIsNegative)
+        delta += 1.0;
+
+    if (delta <= 6.0)
+    {
+        if (delta >= 0.0 && !(numIsNegative && delta == 0.0))
+            delta = 5.0 * pow(10.0, delta - 7.0);
+        else if (delta >= -4.0)
+            delta = (numIsNegative && delta != -4.0) ? 5.0e-06 : 5.0e-07;
+        else if (delta >= -99.0 && !(numIsNegative && delta == -99.0))
+            delta = 5.0 * pow(10.0, delta - 3.0);
+        else // delta <= -100.0
+            delta = 5.0 * pow(10.0, delta - 2.0);
+    }
+    else if (delta == 7.0)
+        delta = 0.5;
+    else if (delta <= 99.0 || (numIsNegative && delta == 100.0))
+        delta = 5.0 * pow(10.0, delta - 3.0);
+    else // delta >= 100.0
+        delta = 5.0 * pow(10.0, delta - 2.0);
+
+    return delta;
+}
+
+//____________________________________________________________________________________
diff --git a/src/postlike/maximizer.h b/src/postlike/maximizer.h
new file mode 100644
index 0000000..73a1c42
--- /dev/null
+++ b/src/postlike/maximizer.h
@@ -0,0 +1,200 @@
+// $Id: maximizer.h,v 1.36 2012/04/17 19:03:20 ewalkup Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// Maximizer Class ----------------------------------------------
+//
+//   Maximizer for the likelihood function (see PostLike::Calculate())
+//   using the Broyden-Fletcher-Goldfarb-Shanno (BFGS) method.
+//   Other methods (e.g. Newton-Raphson) can easily be incorporated
+//   by adding another Maximizer::Calculate().
+//
+//
+//   Calculates the maximum of the likelihood function and returns
+//   the parameters at the ML as a vector.
+//
+//   Call sequence and existence through run
+//      foreach(locus)
+//      {
+//         likelihood = SinglePostLike(forces)
+//         Maximizer(likelihood)
+//         foreach(replicate) [independent replicates, if any]
+//         {
+//             foreach(single_chains)
+//                Calculate(data) uses data and MLE
+//             foreach(long_chain)
+//                Calculate(data)
+//          }
+//         ~Maximizer()
+//         likelihood = ReplicatePostLike(forces)
+//         Maximizer(likelihood)
+//         ~Maximizer()
+//      }
+//      if(locus>1)
+//      {
+//         likelihood = RegionPostLike(forces)
+//         Maximizer(likelihood)
+//         ~Maximizer()
+//         likelihood = GammaRegionPostLike(forces)
+//         Maximizer(likelihood)
+//         ~Maximizer()
+//       }
+//   STILL TODO
+//     Line() is not yet implemented
+//
+
+#ifndef MAXIMIZER_H
+#define MAXIMIZER_H
+
+#include <algorithm>
+#include <cmath>
+#include <functional>
+#include <iostream>
+#include <numeric>
+#include <vector>
+
+#include "definitions.h"
+#include "vectorx.h"
+#include "likelihood.h"
+#include "runreport.h"
+#include "paramstat.h"
+
+using
+std::vector;
+
+// declarations --------------------------------------------
+
+class
+Maximizer
+{
+  public:
+    Maximizer (long int thisNparam);
+    ~Maximizer ();
+
+    void Initialize(long int thisNparam);
+    void   SetLikelihood (PostLike * thispostlike);
+    bool   Calculate (DoubleVec1d & thisparam, double& lnL, string& message);
+    PostLike *GetPostLike()
+    {
+        return m_pPostLike;             // Analyzer and others need access to this.
+    };
+    double GetLastNorm();                // Debugging function.
+    void   SetMLEs(DoubleVec1d newMLEs); // Need for sumfile reading.
+
+    // Interface for performing constrained maximization--
+    // e.g., maximization under the constraint that forward and backward
+    // migration rates are equal (M12 == M21).
+    bool SetConstraints(const vector<vector<unsigned long int> > & constraints);
+    void ClearConstraints(void) { m_constraints.clear(); };
+    void AppendConstraintOnAlpha(ParamStatus alphaStatus); // Append to the gradient guide.
+
+    // Gradient guide interface, moved here from the PostLike class,
+    // because it's better for the maximizer to control it.
+    // (The gradient guide still is, and will be, used by PostLike.)
+    void GradientGuideSetup (const ParamVector &thisToDolist, PostLike* pPL);
+    void ProfileGuideRestore();
+    void ProfileGuideFix(long int guide);
+    void ProfileGuideFixAll();
+    void ProfileGuideRestore(long int guide);
+    likelihoodtype GetPostlikeTag() { return m_pPostLike->GetTag(); };
+
+  private:
+    Maximizer ()
+    {
+    };
+    double m_lastnormg;
+
+  protected:
+    PostLike *m_pPostLike;        // pointer to PostLike object
+    unsigned long int m_nparam;   // number of parameters (m_param.size())
+    long int m_nloci;             // number of loci
+    long int m_nrep;              // number of replicates
+    DoubleVec1d  m_param;         // parameters
+    DoubleVec1d  m_lparam;        // log parameters
+    DoubleVec1d  m_oldlparam;     // old log parameters
+    DoubleVec1d  m_gradient;      // gradient
+    DoubleVec1d  m_oldgradient;   // old gradient
+    DoubleVec1d  m_paramdelta;    // m_newparam - m_oldparam
+    DoubleVec1d  m_gradientdelta; // m_gradient - m_oldgradient
+    DoubleVec1d  m_direction;     // direction
+    vector<DoubleVec1d> m_second; // approx second derivative
+
+    vector<unsigned long int> m_isLinearParam; // track which parameters get treated
+    // logarithmically and which linearly
+    bool m_dataHasLinearParam;
+    bool m_dataHasGamma;
+    bool m_constraintsVectorHasSlotForAlpha;
+    double m_maxAllowedValueForAlpha;
+
+    // MARY  -- temporary storage, kept as class variables for speed
+    DoubleVec1d m_newparam;
+    DoubleVec1d m_newlparam;
+    DoubleVec1d m_minparamvalues;
+    DoubleVec1d m_minlparamvalues;
+    DoubleVec1d m_maxparamvalues;
+    DoubleVec1d m_maxlparamvalues;
+    DoubleVec1d m_temp;
+    DoubleVec2d m_dd;
+
+    vector<vector<unsigned long int> > m_constraints; // user can constrain params
+    // to equal one another
+    bool m_constrained; // assume this is faster than evaluating m_constraints.empty()
+    DoubleVec1d m_constraintratio; // for multiplicative constraints
+
+    double Norm (const DoubleVec1d&d); // utility function: norm of vector
+    // (i.e., its length)
+
+    bool CalculateSteepest(double&, string& message);
+    bool CalculateByParts(double&, string& message);
+    bool CalculateBroyden(unsigned long int maxtrials, double &, string & message);
+
+    // calculates the new parameter
+    double SetParam(const double &lambda, const DoubleVec1d& direction);
+    bool SetParam1d(const double& step, const vector<unsigned long int> * pWhichparam);
+
+    void CoutNewParams() const; // for debugging
+    void CoutCurParams() const; // for debugging
+    void CoutByLinOrNoMult(double mult, const DoubleVec1d&) const; // for debugging
+
+    // Used by CalculateSteepest().
+    bool BracketTheMaximumAndFindIt(const vector<unsigned long int>
+                                    * pWhichparam, const double epsilon,
+                                    double oldlike, double & newlike,
+                                    string & message);
+
+    // Handles constrained parameters when applicable.
+    bool DCalculate(const DoubleVec1d & param, DoubleVec1d & gradient);
+
+    void ResetSecond ();          //resets the second derivative matrix
+    void CalcSecond ();           // calculate approximate second derivatives
+    void CalcDirection ();        // calculate the direction of change
+
+    void SetGradGuide(const vector<ParamStatus>& thisguide)
+    {
+        std::copy(thisguide.begin(), thisguide.end(),
+                  m_pPostLike->m_working_pstatusguides.begin());
+    };
+
+    void ExplainParamChange(long index, double oldVal, double newVal, std::string kind);
+    void AdjustExtremeLinearParam(long i);
+    void AdjustExtremeLogParam(long i);
+};
+
+// helper functions
+
+// simply returns result = one - two
+void CalcDelta (const DoubleVec1d & one, const DoubleVec1d & two, DoubleVec1d & result);
+
+inline bool SlopesHaveSameSign(const double & oldgrad, const double & newgrad, const double & tolerance);
+inline double sign(const double & x);
+inline double tolerance(double number);
+
+#endif // MAXIMIZER_H
+
+//____________________________________________________________________________________
diff --git a/src/postlike/maximizer_strings.cpp b/src/postlike/maximizer_strings.cpp
new file mode 100644
index 0000000..e96a545
--- /dev/null
+++ b/src/postlike/maximizer_strings.cpp
@@ -0,0 +1,47 @@
+// $Id: maximizer_strings.cpp,v 1.7 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "maximizer_strings.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+// warning and error message strings for the maximizer
+
+const string maxstr::MAX_BAD_ALPHA_0 = "The data seem to be well fit by a mutation rate that is nearly constant among these unlinked regions. A perfectly uniform mutation rate corresponds to a gamma distribution whose shape parameter, alpha, is infinite.  It is being constrained to equal ";
+const string maxstr::MAX_BAD_ALPHA_1 = ", and we're maximizing over regions once more.  If this also doesn't work, it might be worthwhile to re-run your analysis with the gamma distribution of mutation rates over regions turned off.";
+const string maxstr::MAX_BAD_LNL_0  = "Warning!  Calculated a log-likelihood of ";
+const string maxstr::MAX_BAD_LNL_1  = " for the parameter vector p = (";
+const string maxstr::MAX_BAD_LNL_2A1 = "), but could not calculate a log-likelihood at a point ";
+const string maxstr::MAX_BAD_LNL_2A2 = " units away from it.  This implies that your data may be difficult to model, or that there is a problem with lamarc.\n";
+const string maxstr::MAX_BAD_LNL_2B = "), and determined that a greater log-likelihood could be found in a certain direction, but no greater log-likelihood was found in that direction.  This implies that your data may be difficult to model, or that there is a problem with lamarc.\n";
+const string maxstr::MAX_CLIFF_EDGE_0 = "Warning!  Calculated a log-likelihood of ";
+const string maxstr::MAX_CLIFF_EDGE_1 =  " for the parameter vector p = (";
+const string maxstr::MAX_CLIFF_EDGE_2 =  "), and determined that a greater log-likelihood could be found in a certain direction, but no greater log-likelihood was found in that direction.  This implies that your data may be difficult to model, or that there is a problem with lamarc.";
+const string maxstr::MAX_FAILED_BRACKET_0 = "Warning!  During one search for the maximum log-likelihood for parameter \"";
+const string maxstr::MAX_FAILED_BRACKET_1 = "\", we failed to find a well-defined peak.  This might only be worrisome if you receive several such warning messages.  The search concluded at a value of ";
+const string maxstr::MAX_FAILED_BRACKET_2 = " for this parameter.  The slope at this point is ";
+const string maxstr::MAX_FAILED_BRACKET_3 = ".";
+const string maxstr::MAX_HIGH_ALPHA_0 = "The alpha parameter got high enough that further maximization is pointless.";
+const string maxstr::MAX_NO_CONVERGENCE_0 = "Warning:  Convergence to the maximum cannot be guaranteed.  ";
+const string maxstr::MAX_NO_CONVERGENCE_1 = "Maximization terminated after ";
+const string maxstr::MAX_NO_CONVERGENCE_2 = " iterations.  ";
+const string maxstr::MAX_NO_CONVERGENCE_3 = "(|gradient| = ";
+const string maxstr::MAX_NO_CONVERGENCE_4 = ")";
+const string maxstr::MAX_NO_MULTI_MAX = "Unable to find a multi-region maximum-likelihood estimate; tried searching the surface starting from each single region\'s peak, and from the average of these peaks.  The multi-region MLE will be set to the average of the single-region MLE values.\n";
+const string maxstr::MAX_NO_UPPER_BOUND_0 = "Warning!  Encountered a region of the log-likelihood surface in which the log-likelihood increases steadily, seemingly without an upper bound.  This implies that your data is difficult to model.  The problematic parameter is ";
+const string maxstr::MAX_NO_UPPER_BOUND_1 = "; it has been increased or decreased to a value of ";
+const string maxstr::MAX_NO_UPPER_BOUND_2 = ", and the maximum lnL, if one exists, seems to lie beyond this value.\n";
+const string maxstr::MAX_UNDEFINED_BORDER_0 = "Warning!  While searching for the maximum log-likelihood for parameter \"";
+const string maxstr::MAX_UNDEFINED_BORDER_1 = "\", we were able to calculate the log-likelihood at a particular point (";
+const string maxstr::MAX_UNDEFINED_BORDER_2 = "), but we were unable to calculate the log-likelihood at any point beyond this point.  This should never happen.  This implies that at least one of the genealogies that LAMARC produced is infeasible.  This might imply that your data is difficult to model.  If you encounter this error during an early stage of your run (for example, during the second of ten initial Markov chains), and the results in the final stage of your run look consistent  [...]
+
+//____________________________________________________________________________________
diff --git a/src/postlike/maximizer_strings.h b/src/postlike/maximizer_strings.h
new file mode 100644
index 0000000..55a957b
--- /dev/null
+++ b/src/postlike/maximizer_strings.h
@@ -0,0 +1,51 @@
+// $Id: maximizer_strings.h,v 1.4 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2006 Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef MAXSTR_H
+#define MAXSTR_H
+
+#include <string>
+#include "stringx.h"
+
+class maxstr
+{
+  public:
+    static const string MAX_BAD_ALPHA_0;
+    static const string MAX_BAD_ALPHA_1;
+    static const string MAX_BAD_LNL_0;
+    static const string MAX_BAD_LNL_1;
+    static const string MAX_BAD_LNL_2A1;
+    static const string MAX_BAD_LNL_2A2;
+    static const string MAX_BAD_LNL_2B;
+    static const string MAX_CLIFF_EDGE_0;
+    static const string MAX_CLIFF_EDGE_1;
+    static const string MAX_CLIFF_EDGE_2;
+    static const string MAX_FAILED_BRACKET_0;
+    static const string MAX_FAILED_BRACKET_1;
+    static const string MAX_FAILED_BRACKET_2;
+    static const string MAX_FAILED_BRACKET_3;
+    static const string MAX_HIGH_ALPHA_0;
+    static const string MAX_NO_CONVERGENCE_0;
+    static const string MAX_NO_CONVERGENCE_1;
+    static const string MAX_NO_CONVERGENCE_2;
+    static const string MAX_NO_CONVERGENCE_3;
+    static const string MAX_NO_CONVERGENCE_4;
+    static const string MAX_NO_MULTI_MAX;
+    static const string MAX_NO_UPPER_BOUND_0;
+    static const string MAX_NO_UPPER_BOUND_1;
+    static const string MAX_NO_UPPER_BOUND_2;
+    static const string MAX_UNDEFINED_BORDER_0;
+    static const string MAX_UNDEFINED_BORDER_1;
+    static const string MAX_UNDEFINED_BORDER_2;
+};
+
+#endif // MAXSTR_H
+
+//____________________________________________________________________________________
diff --git a/src/postlike/plforces.cpp b/src/postlike/plforces.cpp
new file mode 100644
index 0000000..83e6225
--- /dev/null
+++ b/src/postlike/plforces.cpp
@@ -0,0 +1,3012 @@
+// $Id: plforces.cpp,v 1.76 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// Posterior Likelihood forces classes
+// - CoalescePL
+//   - coalesce with no-growth [using compressed summaries]
+// - CoalesceGrowPL
+//   - coalesce with exponential growth [using non-compressed summaries]
+// - GrowPL
+//   - coalesce with exponential growth [using non-compressed summaries]
+// - MigratePL
+// - RecombinePL
+// - SelectPL [stubs]
+
+//------------------------------------------------------------------------------------
+
+#include <cassert>
+
+#ifdef DMALLOC_FUNC_CHECK
+#include <dmalloc.h>
+#endif
+
+// debug code helpers
+#include <iostream>
+#include <fstream>
+#include <sstream>
+std::ofstream numfile;
+
+#include "mathx.h"
+#include "plforces.h"
+#include "runreport.h"
+#include "summary.h"
+#include "vectorx.h"
+#include "force.h"
+#include "timemanager.h"                // for CoalesceGrowPL::lnPoint()
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+const double LOG_ONEPLUSEPSILON = log(1.0 + numeric_limits<double>::epsilon());
+
+//------------------------------------------------------------------------------------
+// DiseasePL: disease force specific waiting time and
+//            point probabilities plus derivatives
+//------------------------------------------------------------------------------------
+
+double DiseasePL::lnWait (const vector < double >&param, const TreeSummary * treedata)
+{
+    vector < double >::iterator i;
+    vector < double >::const_iterator pstart;
+    vector < double >::const_iterator pend;
+
+    const vector<double>& mWait = treedata->GetDiseaseSummary()->GetShortWait();
+    // precompute the transition rates into each status, Sum[d_ji], storing them
+    // in the member variable m_msum
+    for (i = m_msum.begin (),
+             pstart = param.begin () + m_start,
+             pend = param.begin () + m_start + m_nPop;
+         i != m_msum.end (); ++i, pstart += m_nPop, pend += m_nPop)
+    {
+        (*i) = accumulate (pstart, pend, 0.0);
+    }
+
+    // sum_pop(SM_status *kt)
+    return  -1.0 * inner_product (mWait.begin (),
+                                  mWait.end (), m_msum.begin (), 0.0);
+}
+
+//------------------------------------------------------------------------------------
+
+double DiseasePL::lnPoint (const vector < double >&param, const vector < double >&lparam,
+                           const TreeSummary * treedata)
+{
+    //  Sum_j(Sum_i(disevent[j,i] * log(M[j,i]))
+    const vector<double>& nevents = treedata->GetDiseaseSummary()->GetShortPoint();
+    return  inner_product (nevents.begin (), nevents.end (),
+                           lparam.begin () + m_start, 0.0);
+}
+
+//------------------------------------------------------------------------------------
+
+double DiseasePL::DlnWait (const vector < double >&param, const TreeSummary * treedata, const long int &whichparam)
+{
+    long int which = (whichparam - m_start) / m_nPop;
+    const vector<double>& mWait = treedata->GetDiseaseSummary()->GetShortWait();
+    return -mWait[which];
+}
+
+//------------------------------------------------------------------------------------
+
+double DiseasePL::DlnPoint (const vector < double >&param, const TreeSummary * treedata, const long int &whichparam)
+{
+    long int which = whichparam - m_start;
+    const vector<double>& nmig = treedata->GetDiseaseSummary()->GetShortPoint();
+    return SafeDivide (nmig[which], param[whichparam]);
+}
+
+//------------------------------------------------------------------------------------
+
+double DiseaseLogisticSelectionPL::lnWait(const vector<double>& param, const TreeSummary * treedata)
+{
+    return 0.0; // contained in CoalesceLogisticSelectionPL::lnWait(), for speed
+}
+
+//------------------------------------------------------------------------------------
+// Note:  DlnPoint() is selection-independent, because the point terms
+// separate into a sum of logarithms.  So, the DiseasePL method is used for that.
+
+double DiseaseLogisticSelectionPL::lnPoint(const vector<double>& param, const vector<double>& lparam,
+                                           const TreeSummary * treedata)
+{
+    const Interval *treesum = treedata->GetDiseaseSummary()->GetLongPoint();
+    const Interval *pTreesum;
+
+    double logTheta_A0(lparam[0]), logTheta_a0(lparam[1]), s(param[m_s_is_here]);
+    double log_mu_into_A_from_a(lparam[3]), log_mu_into_a_from_A(lparam[4]);
+
+    if (0 != param[2] || 0 != param[5] || 0 == param[3] || 0 == param[4])
+        throw implementation_error("Error parsing disease rates in DiseaseLogisticSelectionPL::lnPoint()");
+
+    double result = 0.0;
+
+    for(pTreesum = treesum; pTreesum != NULL; pTreesum = pTreesum->m_next)
+    {
+        if (0L == pTreesum->m_oldstatus)
+            result += log_mu_into_A_from_a + logTheta_a0 - logTheta_A0 + s*pTreesum->m_endtime;
+        else if (1L == pTreesum->m_oldstatus)
+            result += log_mu_into_a_from_A + logTheta_A0 - logTheta_a0 - s*pTreesum->m_endtime;
+        else
+        {
+            string msg = "DiseaseLogisticSelectionPL::lnPoint(), received a TreeSummary ";
+            msg += "containing an event with oldstatus = " + ToString(pTreesum->m_oldstatus);
+            msg += ".  \"oldstatus\" must be either 0 or 1, reflecting a coalescence in either ";
+            msg += "the subpopulation with allele A or the subpopulation with allele a.";
+            throw implementation_error(msg);
+        }
+    }
+
+    return result;
+}
+
+//------------------------------------------------------------------------------------
+
+double DiseaseLogisticSelectionPL::DlnWait(const vector<double>&param, const TreeSummary * treedata,
+                                           const long int &whichparam)
+{
+    if (2 != m_nPop)
+    {
+        string msg = "DiseaseLogisticSelectionPL::DlnWait() called with m_nPop = ";
+        msg += ToString(m_nPop);
+        msg += ".  m_nPop must equal 2, reflecting one population with the major ";
+        msg += "allele and one population with the minor allele.";
+        throw implementation_error(msg);
+    }
+
+    if (3 != whichparam && 4 != whichparam)
+        throw implementation_error("DiseaseLogisticSelectionPL::DlnWait(), bad \"whichparam\"");
+
+    const list<Interval>& treesum = treedata->GetDiseaseSummary()->GetLongWait();
+    list<Interval>::const_iterator treesum_it;
+
+    double result(0.0), s(param[m_s_is_here]);
+    double theta_A0(param[m_start]), theta_a0(param[m_start+1]);
+
+    if (theta_a0 <= 0.0 || theta_A0 <= 0.0)
+    {
+        string msg = "DiseaseLogisticSelectionPL::DlnWait(), received an invalid Theta value ";
+        msg += "(theta_A0 = " + ToString(theta_A0) + ", theta_a0 = " + ToString(theta_a0) + ").";
+        throw impossible_error(msg);
+    }
+
+    if (fabs(s) < LOGISTIC_SELECTION_COEFFICIENT_EPSILON)
+        return DlnWaitForTinyLogisticSelectionCoefficient(param, treedata, whichparam);
+
+    if (fabs(s) >= DBL_BIG)
+        return -DBL_BIG; // unavoidable overflow
+
+    double t_e, t_s(0.0), dt, e_toThe_sts(1.0), e_toThe_ste, term(0.0);
+    double term_A(0.0), term_a(0.0);
+    double factor_A(theta_a0/(s*theta_A0)), factor_a(theta_A0/(-s*theta_a0));
+    bool derivativeWithRespectTo_mu_into_A_from_a = (3 == whichparam ? true : false);
+
+    for (treesum_it = treesum.begin(); treesum_it != treesum.end(); treesum_it++)
+    {
+        t_e = treesum_it->m_endtime;
+        dt = t_e - t_s;
+        term_A = term_a = 0.0;
+        const double& k_A = treesum_it->m_xpartlines[0]; // num. lineages, allele A
+        const double& k_a = treesum_it->m_xpartlines[1]; // num. lineages, allele a
+
+        if (s > 0.0)
+        {
+            // Use overflow protection for term_A; if term_a underflows, that's okay.
+            if (s*dt > LOG_ONEPLUSEPSILON)
+            {
+                term = SafeProductWithExp(factor_A,s*dt);
+                if (term >= DBL_BIG && derivativeWithRespectTo_mu_into_A_from_a)
+                    return -DBL_BIG; // unavoidable overflow
+                e_toThe_ste = (term/factor_A)*e_toThe_sts;
+                if (k_A > 0.0 && derivativeWithRespectTo_mu_into_A_from_a)
+                {
+                    term_A = k_A*(term - factor_A)*e_toThe_sts;
+                    if (term_A >= DBL_BIG)
+                        return -DBL_BIG; // unavoidable overflow
+                }
+            }
+            else
+            {
+                if (s*dt >= numeric_limits<double>::epsilon())
+                    e_toThe_ste = (1.0 + s*dt)*e_toThe_sts;
+                else
+                    e_toThe_ste = SafeProductWithExp(1.0,s*t_e);
+                if (k_A > 0.0 && derivativeWithRespectTo_mu_into_A_from_a)
+                {
+                    term_A = k_A*factor_A*s*dt*e_toThe_sts;
+                    if (term_A >= DBL_BIG)
+                        return -DBL_BIG; // unavoidable overflow
+                }
+            }
+            if (k_a > 0.0 && !derivativeWithRespectTo_mu_into_A_from_a)
+                term_a = k_a*factor_a*(1.0/e_toThe_ste - 1.0/e_toThe_sts); // note: factor_a includes minus sign
+        }
+        else // s < 0
+        {
+            // Use overflow protection for term_a; if term_A underflows, that's okay.
+
+            if (-s*dt > LOG_ONEPLUSEPSILON)
+            {
+                term = SafeProductWithExp(factor_a,-s*dt);
+                if (term >= DBL_BIG && !derivativeWithRespectTo_mu_into_A_from_a)
+                    return -DBL_BIG; // unavoidable overflow
+                e_toThe_ste = e_toThe_sts/(term/factor_a);
+                if (k_a > 0.0 && !derivativeWithRespectTo_mu_into_A_from_a)
+                {
+                    term_a = k_a*(term - factor_a)/e_toThe_sts;
+                    if (term_a >= DBL_BIG)
+                        return -DBL_BIG; // unavoidable overflow
+                }
+            }
+            else
+            {
+                if (-s*dt >= numeric_limits<double>::epsilon())
+                    e_toThe_ste = (1.0 + s*dt)*e_toThe_sts;
+                else
+                    e_toThe_ste = SafeProductWithExp(1.0,s*t_e);
+                if (k_a > 0.0 && !derivativeWithRespectTo_mu_into_A_from_a)
+                {
+                    term_a = k_a*factor_a*(-s)*dt/e_toThe_sts;
+                    if (term_a >= DBL_BIG)
+                        return -DBL_BIG; // unavoidable overflow
+                }
+            }
+            if (k_A > 0.0 && derivativeWithRespectTo_mu_into_A_from_a)
+                term_A = k_A*factor_A*(e_toThe_ste - e_toThe_sts);
+        }
+
+        if (derivativeWithRespectTo_mu_into_A_from_a)
+            result -= term_A;
+        else
+            result -= term_a;
+        if (result <= -DBL_BIG)
+            return -DBL_BIG;
+
+        t_s = t_e;
+        e_toThe_sts = e_toThe_ste;
+    }
+
+    return result;
+}
+
+//------------------------------------------------------------------------------------
+
+double DiseaseLogisticSelectionPL::DlnWaitForTinyLogisticSelectionCoefficient(const vector<double>& param,
+                                                                              const TreeSummary *treedata,
+                                                                              const long int &whichparam)
+{
+    const list<Interval>& treesum = treedata->GetDiseaseSummary()->GetLongWait();
+    list<Interval>::const_iterator treesum_it;
+    double result(0.0), s(param[m_s_is_here]);
+    double theta_A0(param[m_start]), theta_a0(param[m_start+1]);
+    double t_e, t_s(0.0);
+    bool derivativeWithRespectTo_mu_into_A_from_a = (3 == whichparam ? true : false); // match the index convention used in DlnWait()
+
+    for (treesum_it = treesum.begin(); treesum_it != treesum.end(); treesum_it++)
+    {
+        t_e = treesum_it->m_endtime;
+        const double& k_A = treesum_it->m_xpartlines[0]; // num. lineages, allele A
+        const double& k_a = treesum_it->m_xpartlines[1]; // num. lineages, allele a
+
+        if (derivativeWithRespectTo_mu_into_A_from_a)
+            result -= (k_A*theta_a0/theta_A0)*((t_e - t_s) + 0.5*s*(t_e*t_e - t_s*t_s));
+        else
+            result -= (k_a*theta_A0/theta_a0)*((t_e - t_s) - 0.5*s*(t_e*t_e - t_s*t_s));
+
+        t_s = t_e;
+    }
+
+    return result;
+}
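In effect, this branch substitutes the second-order expansion

    (e^(s*t_e) - e^(s*t_s)) / s  ~=  (t_e - t_s) + (s/2)*(t_e*t_e - t_s*t_s)

(and the analogous expansion with s negated for the other allele) into the corresponding terms of DlnWait() above, which avoids the 0/0 cancellation when |s| falls below LOGISTIC_SELECTION_COEFFICIENT_EPSILON.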
+
+//------------------------------------------------------------------------------------
+// MigratePL: migration forces specific waiting time and
+//            point probabilities and their derivatives
+
+double MigratePL::lnWait (const vector < double >&param, const TreeSummary * treedata)
+{
+    vector < double >::iterator i;
+    vector < double >::const_iterator pstart;
+    vector < double >::const_iterator pend;
+
+    const vector<double>& mWait = treedata->GetMigSummary()->GetShortWait();
+
+    // precompute the immigration rates Sum[m_ji], storing them in the member variable m_msum
+    for (i = m_msum.begin (),
+             pstart = param.begin () + m_start,
+             pend = param.begin () + m_start + m_nPop;
+         i != m_msum.end (); ++i, pstart += m_nPop, pend += m_nPop)
+    {
+        (*i) = accumulate (pstart, pend, 0.0);
+    }
+
+    // sum_pop(SM_pop *kt)
+    return  -1.0 * inner_product (mWait.begin (), mWait.end (), m_msum.begin (), 0.0);
+}
+
+//------------------------------------------------------------------------------------
+
+double MigratePL::lnPoint (const vector < double >&param, const vector < double >&lparam,
+                           const TreeSummary * treedata)
+{
+    //  Sum_j(Sum_i(migevent[j,i] * log(M[j,i]))
+    const vector<double>& nmig = treedata->GetMigSummary()->GetShortPoint();
+
+    return  inner_product (nmig.begin(), nmig.end(), lparam.begin() + m_start, 0.0);
+}
+
+//------------------------------------------------------------------------------------
+
+double MigratePL::DlnWait (const vector < double >&param, const TreeSummary * treedata, const long int & whichparam)
+{
+    long int which = (whichparam - m_start) / m_nPop;
+    const vector<double>& mWait = treedata->GetMigSummary()->GetShortWait();
+    return -mWait[which];
+}
+
+//------------------------------------------------------------------------------------
+
+double MigratePL::DlnPoint (const vector < double >&param, const TreeSummary * treedata, const long int & whichparam)
+{
+    long int which = whichparam - m_start;
+    const vector<double>& nmig = treedata->GetMigSummary()->GetShortPoint();
+
+    return SafeDivide (nmig[which], param[whichparam]);
+}
+
+//------------------------------------------------------------------------------------
+
+// CoalescePL: coalescence forces specific waiting time and
+//             point probabilities and their derivatives
+
+// CalculateScaledLPCounts returns the log-likelihood of all
+// of the choices of recombination-branch partition assignment
+// made in a genealogy.  It does this by summing the log of (the
+// number of times each partition was chosen times the relative
+// frequency of that partition).
+//
+// JDEBUG--:  This code cannot handle migration.  To do so, it
+// will be necessary to (a) store the partition in which each
+// recombination happened, and (b) make the relative frequency
+// relative to the partition, not the whole.  For example, the
+// term for choice of a disease state in Boston should be
+// Theta(disease & Boston) over Theta(Boston).  Currently it
+// is Theta(disease) over Theta(total).
+
+double CoalescePL::CalculateScaledLPCounts(const DoubleVec1d& params, const LongVec2d& picks) const
+{
+    double total_theta(accumulate(params.begin() + m_start, params.begin() + m_end, 0.0));
+    DoubleVec1d myths(params.begin() + m_start, params.begin() + m_end);
+    double answ(0.0);
+    LongVec2d::size_type lpforce;
+    for(lpforce = 0; lpforce < picks.size(); ++lpforce)
+    {
+        LocalPartitionForce* pforce(dynamic_cast<LocalPartitionForce*>(m_localpartforces[lpforce]));
+        assert(pforce);  // It wasn't a local partition force?
+        DoubleVec1d part_thetas(pforce->SumXPartsToParts(myths));
+        LongVec1d::size_type partition;
+        for(partition = 0; partition < picks[lpforce].size(); ++partition)
+        {
+            answ += log(picks[lpforce][partition] * part_thetas[partition] / total_theta);
+        }
+    }
+    return answ;
+}
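As a worked instance of the loop above (the numbers are purely illustrative): with a single local partition force whose two partitions were chosen 3 and 1 times, part_thetas = {0.01, 0.03}, and total_theta = 0.04, the returned value would be log(3 * 0.01 / 0.04) + log(1 * 0.03 / 0.04).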
+
+//------------------------------------------------------------------------------------
+
+void CoalescePL::SetLocalPartForces(const ForceSummary& fs)
+{
+    m_localpartforces = fs.GetLocalPartitionForces();
+    if (m_localpartforces.empty()) return;
+
+    // set up vector which answers the following question:
+    // for each local partition force
+    //   for any xpartition theta (identified by index),
+    //   what partition (of the given force) is it?
+
+    // This code is copied approximately from PartitionForce::
+    // SumXPartsToParts().
+    ForceVec::const_iterator pforce;
+    const ForceVec& partforces = fs.GetPartitionForces();
+    LongVec1d indicator(partforces.size(), 0L);
+    LongVec1d nparts(registry.GetDataPack().GetAllNPartitions());
+    DoubleVec1d::size_type xpart;
+    long partindex;
+    for (pforce = partforces.begin(), partindex = 0;
+         pforce != partforces.end(); ++pforce, ++partindex)
+    {
+        vector<DoubleVec1d::size_type> indices(m_nPop, 0);
+        for(xpart = 0; xpart < static_cast<DoubleVec1d::size_type>(m_nPop); ++xpart)
+        {
+            indices[xpart] = indicator[partindex];
+            long int part;
+            for (part = nparts.size() - 1; part >= 0; --part)
+            {
+                ++indicator[part];
+                if (indicator[part] < nparts[part]) break;
+                indicator[part] = 0;
+            }
+        }
+        // initialize xparts vectors
+        vector<DoubleVec1d::size_type> emptyvec;
+        vector<vector<DoubleVec1d::size_type> > partvec(nparts[partindex], emptyvec);
+
+        if ((*pforce)->IsLocalPartitionForce())
+        {
+            m_whichlocalpart.push_back(indices);
+            m_whichlocalxparts.push_back(partvec);
+        }
+        m_whichpart.push_back(indices);
+        m_whichxparts.push_back(partvec);
+    }
+
+    // now construct the vector mapping partition to a set of xpartitions.
+    //
+    // we will do this by going through the vector mapping xpartition to
+    // partition we just constructed, letting its contents tell us which
+    // partition to add the parameter (xpartition) to and keeping a
+    // counter to let us know which xpartition to add.
+    long lpindex = 0;
+    for (pforce = partforces.begin(), partindex = 0;
+         pforce != partforces.end(); ++pforce, ++partindex)
+    {
+        for(xpart = 0; xpart < m_whichpart[partindex].size(); ++xpart)
+            m_whichxparts[partindex][m_whichpart[partindex][xpart]].push_back(xpart);
+
+        if ((*pforce)->IsLocalPartitionForce())
+        {
+            for(xpart = 0; xpart < m_whichpart[partindex].size(); ++xpart)
+            {
+                m_whichlocalxparts[lpindex][m_whichpart[partindex][xpart]].
+                    push_back(xpart);
+            }
+            ++lpindex;
+        }
+    }
+} // SetLocalPartForces
+
+//------------------------------------------------------------------------------------
+// COMPRESSED DATA (SHORT) SUMMARIES
+
+double CoalescePL::lnWait (const vector < double >&param, const TreeSummary * treedata)
+{
+    // result = sum_pop(k(k-1)t/theta_pop)
+    const vector<double>& cWait = treedata->GetCoalSummary()->GetShortWait();
+
+    return -1.0 * inner_product (cWait.begin (), cWait.end (),
+                                 param.begin () + m_start,
+                                 0.0, plus < double >(),
+                                 divides < double >());
+}
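+
+//------------------------------------------------------------------------------------
+// For reference only: a minimal explicit-loop sketch of the inner_product call in
+// lnWait() above (not part of the class; the helper name and the assumption that
+// "thetas" holds param[m_start] onward are hypothetical).
+#if 0
+static double coalLnWaitSketch(const std::vector<double>& cWait,
+                               const std::vector<double>& thetas)
+{
+    // lnWait = -sum_pop( k(k-1)t / theta_pop )
+    double sum = 0.0;
+    for (std::vector<double>::size_type pop = 0; pop < cWait.size(); ++pop)
+        sum += cWait[pop] / thetas[pop];
+    return -sum;
+}
+#endif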
+
+//------------------------------------------------------------------------------------
+
+double CoalescePL::lnPoint (const vector < double >&param, const vector < double >&lparam,
+                            const TreeSummary * treedata)
+{
+    //  in general it is Sum_j(coalesceevent[j] * (log(2) - log(theta_j)))
+    //  sumcpoint = Sum_j(coalesceevent[j] * log(2))
+    //   - Sum_j(coalesceevent[j] * log(theta_j))
+    const vector<double>& ncoal = treedata->GetCoalSummary()->GetShortPoint();
+
+    double point = (LOG2 * accumulate (ncoal.begin (), ncoal.end (), 0.0) -
+                    inner_product (ncoal.begin (), ncoal.end (),
+                                   lparam.begin () + m_start, 0.0));
+    const LongVec2d& picks = treedata->GetCoalSummary()->GetShortPicks();
+    if (picks.empty()) return point;
+    else return point + CalculateScaledLPCounts(param, picks);
+}
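+
+//------------------------------------------------------------------------------------
+// For reference only: an explicit-loop sketch of the accumulate/inner_product
+// expression in lnPoint() above, omitting the partner-pick correction (not part of
+// the class; the helper name and the assumption that "lthetas" holds the log-thetas
+// starting at lparam[m_start] are hypothetical).
+#if 0
+static double coalLnPointSketch(const std::vector<double>& ncoal,
+                                const std::vector<double>& lthetas)
+{
+    // lnPoint = Sum_j( ncoal[j] * (log(2) - log(theta_j)) )
+    double point = 0.0;
+    for (std::vector<double>::size_type j = 0; j < ncoal.size(); ++j)
+        point += ncoal[j] * (LOG2 - lthetas[j]);
+    return point;
+}
+#endif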
+
+//------------------------------------------------------------------------------------
+
+double CoalescePL::DlnWait (const vector < double >&param, const TreeSummary * treedata, const long int & whichparam)
+{
+    long int which = whichparam - m_start;
+    const vector<double>& cWait = treedata->GetCoalSummary()->GetShortWait();
+
+    return cWait[which] / (param[whichparam] * param[whichparam]);
+}
+
+//------------------------------------------------------------------------------------
+// erynes 2004/03/11 -- This method presently returns the following:
+//
+//          -ncoal/theta,
+//
+// where theta is the current estimate of parameter theta for the
+// given population ("whichparam"), and ncoal is the total number
+// of coalescent events for this population (i.e., one less than the
+// total number of tips that the population has in this genealogy tree,
+// unless recombinations are present).
+//
+// Note:  CoalescePL::DlnPoint calculates the partial derivative of log(Point)
+// with respect to theta for the given population ("whichparam"),
+// in both the absence and presence of growth.
+// When growth is present, CoalesceGrowPL::DlnWait does this for log(Wait).
+// Partial derivatives with respect to growth are found in GrowPL.
+//
+// jay 2007/03/22 -- added support for disease w/ recombination
+//
+// JDEBUG--:  only in absence of migration
+
+double CoalescePL::DlnPoint (const vector < double >&param, const TreeSummary * treedata, const long int & whichparam)
+{
+    long int which = whichparam - m_start;
+    const vector<double>& ncoal = treedata->GetCoalSummary()->GetShortPoint();
+
+    double answ(-ncoal[which] / param[whichparam]);
+
+    const LongVec2d& picks = treedata->GetCoalSummary()->GetShortPicks();
+    if (picks.empty()) return answ;
+    else  {
+        // count recs in crosspartition corresponding to whichparam
+        const RecSummary* rsum = dynamic_cast<const RecSummary*>(treedata->GetSummary(force_REC));
+        long recs_in_xpart = rsum->GetNRecsByXPart()[whichparam];
+        // divide by whichparam
+        double xpart_theta = param[whichparam];
+        answ += recs_in_xpart / xpart_theta;
+        // from that, subtract number of recs (should be "in that partition")
+        long nrecs = rsum->GetNRecs();
+        // divided by sum of thetas (should be "for that partition")
+        double total_theta(accumulate(param.begin() + m_start, param.begin() + m_end, 0.0));
+        answ -= nrecs/total_theta;
+    }
+
+#if 0 // this was what CVS merged into here.....
+    if (!m_localpartforces.empty())
+    {
+        double total_theta(accumulate(param.begin() + m_start, param.begin() + m_end, 0.0));
+        DoubleVec1d myths(param.begin() + m_start, param.begin() + m_end);
+        long int total_npartitions(0);
+        LongVec2d::size_type lpforce;
+        for(lpforce = 0; lpforce < m_localpartforces.size(); ++lpforce)
+        {
+            LocalPartitionForce* pforce(dynamic_cast<LocalPartitionForce*>(m_localpartforces[lpforce]));
+            assert(pforce);  // It wasn't a local partition force?
+            total_npartitions += pforce->GetNPartitions();
+
+            const vector<DoubleVec1d::size_type>& indices(m_whichxparts[lpforce][m_whichpart[lpforce][which]]);
+            vector<DoubleVec1d::size_type>::const_iterator index;
+            double part_theta(0.0);
+            for(index = indices.begin(); index != indices.end(); ++index)
+            {
+                part_theta += myths[*index];
+            }
+            answ += 1.0 / part_theta;
+
+        }
+        answ -= total_npartitions / total_theta;
+    }
+#endif
+
+    return answ;
+}
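+
+//------------------------------------------------------------------------------------
+// For reference only: the three terms assembled by DlnPoint() above, written as a
+// single expression (an illustrative sketch, not part of the class; the helper name
+// is hypothetical and its arguments are the local variables computed above).
+#if 0
+static double coalDlnPointSketch(double ncoal_which, double xpart_theta,
+                                 double recs_in_xpart, double nrecs, double total_theta)
+{
+    return -ncoal_which / xpart_theta        // coalescent-count term
+        + recs_in_xpart / xpart_theta        // partner-pick numerator term
+        - nrecs / total_theta;               // partner-pick denominator term
+}
+#endif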
+
+//------------------------------------------------------------------------------------
+// CoalesceGrowPL: waiting-time and point probabilities specific to the
+//             coalescence force, and their derivatives
+// LONG DATA SUMMARIES
+// [only in effect when GROWTH force is turned on]
+
+//------------------------------------------------------------------------------------
+// erynes 2004/03/11 -- This method presently returns the following:
+//
+// sum_over_populations(sum_over_each_population's_coalescent_time_intervals(
+//  (k_i*(k_i - 1)/(theta_p * g_p)) * (exp(g_p * t_start)- exp(g_p * t_end))
+//                                                                          )
+//                     )
+//
+// where
+//        k_i is the number of lineages of population p in time interval i
+//        theta_p is the present estimate of parameter theta for population p
+//        g_p is the present estimate of the growth parameter for population p
+//        t_start is the timepoint at the start of time interval i
+//        t_end is the timepoint at the end of time interval i
+//
+// This quantity is equal to log(WaitProb(G|P)), where WaitProb(G|P) is the
+// "waiting" probability of the input genealogy G (parameter "treedata")
+// given the parameters P (parameter "param").
+// Prob(G|P) = PointProb(G|P) * WaitProb(G|P).
+// See CoalesceGrowPL::Point() for the point probability term.
+
+double CoalesceGrowPL::lnWait (const vector < double >&param, const TreeSummary * treedata)
+{
+    const list<Interval>& treesum=treedata->GetCoalSummary()->GetLongWait();
+    list <Interval> :: const_iterator tit;
+    vector < double > :: const_iterator pit = param.begin() + m_start;
+    vector < double > :: const_iterator pend = param.begin() + m_start + m_nPop;
+    vector < double > :: const_iterator git = param.begin() + m_growthstart;
+
+    unsigned long int xpartition = 0L;
+    double growth, theta, gts, gte;
+    double coeff_s, coeff_e, arg1_s, arg1_e;
+    double result = 0.0;
+    double starttime;
+
+    for( ; pit != pend; ++pit, ++git, ++xpartition)
+    {
+        growth = *git;
+        theta = *pit;
+
+        if (fabs(growth) < GROWTH_EPSILON)
+        {
+            result += lnWaitForTinyGrowth(theta, growth, xpartition, treesum);
+            continue;
+        }
+
+        starttime = 0.0;
+        for(tit = treesum.begin(); tit != treesum.end(); ++tit)
+        {
+            gts = growth * starttime;
+            gte = growth * tit->m_endtime;
+
+            coeff_s = coeff_e = arg1_s = arg1_e = 1.0;
+            const double& k = tit->m_xpartlines[xpartition];
+
+            // IMPORTANT NOTE:  The following over/underflow checking
+            // assumes that each starttime and endtime is always >= 0.
+            // Hence, e.g., gts < 0 means growth < 0.
+
+            if (gts > 1.0)
+            {
+                coeff_s = k * (k - 1.0);
+                if (growth > 1.0)
+                    arg1_s /= growth;
+                else
+                    coeff_s /= growth;
+                if (theta > 1.0)
+                    arg1_s /= theta;
+                else
+                    coeff_s /= theta;
+            }
+            else if (gts < -1.0)
+            {
+                arg1_s *= k * (k - 1.0);
+                if (growth < -1.0)
+                    coeff_s /= growth;
+                else
+                    arg1_s /= growth;
+                if (theta > 1.0)
+                    coeff_s /= theta;
+                else
+                    arg1_s /= theta;
+            }
+            else // no over/underflow in exp()
+                coeff_s = k * (k - 1.0)/(theta * growth); // arg1_s already set
+
+            // BUGBUG erynes -- what follows is easy to understand,
+            // but we should revisit this and reuse some of what we
+            // learned about growth and theta above, while considering gts.
+
+            if (gte > 1.0)
+            {
+                coeff_e = k * (k - 1.0);
+                if (growth > 1.0)
+                    arg1_e /= growth;
+                else
+                    coeff_e /= growth;
+                if (theta > 1.0)
+                    arg1_e /= theta;
+                else
+                    coeff_e /= theta;
+            }
+            else if (gte < -1.0)
+            {
+                arg1_e *= k * (k - 1.0);
+                if (growth < -1.0)
+                    coeff_e /= growth;
+                else
+                    arg1_e /= growth;
+                if (theta > 1.0)
+                    coeff_e /= theta;
+                else
+                    arg1_e /= theta;
+            }
+            else // no over/underflow in exp()
+                coeff_e = k * (k - 1.0)/(theta * growth); // arg1_e already set
+
+            double incrementalResult =
+                coeff_s * SafeProductWithExp(arg1_s, gts) -
+                coeff_e * SafeProductWithExp(arg1_e, gte);
+
+            if (incrementalResult > 0.0)
+            {
+                // We have precision problems,
+                // because we expect te > ts always,
+                // and hence gte > gts always.
+                // During debugging, we have encountered precision problems
+                // in which gdb claims gts == gte and arg1_s == arg1_e
+                // but incrementalResult > 0, typically reflecting
+                // a difference near the 15th digit.
+                // (Note that te == ts corresponds to two events
+                // occurring simultaneously--e.g., k = 1 for a really long
+                // time in each of two populations, then one finally
+                // migrates to the other, then they coalesce instantaneously.)
+                // It should be safe to set incrementalResult to zero in cases of
+                // roundoff error.  If incrementalResult is positive,
+                // but _not_ tiny compared with either of the terms
+                // whose difference yielded incrementalResult,
+                // then something is _very_ wrong--probably te < ts,
+                // which should never happen.  We assert in this case;
+                // this will enable us to see the problem, but the end user
+                // will not see the assertion, unless s/he's running in debug mode.
+                if (incrementalResult/fabs(coeff_s * SafeProductWithExp(arg1_s, gts)) > 1e-10)
+                {
+                    string msg = "incrementalResult in plforces.cpp is greater "
+                        "than zero.  coeff_s = "
+                        + ToString(coeff_s) + ", coeff_e = " + ToString(coeff_e)
+                        + ", theta = " + ToString(theta) + ", growth = "
+                        + ToString(growth) + ", and incrementalResult is "
+                        + ToString(incrementalResult);
+                    registry.GetRunReport().ReportDebug(msg);
+                    //assert(0);
+                }
+                incrementalResult = 0.0;
+            }
+
+            starttime = tit->m_endtime;
+            result += incrementalResult;
+        }
+    }
+
+    return result;
+}
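+
+//------------------------------------------------------------------------------------
+// For reference only: the per-interval quantity that the over/underflow guards in
+// lnWait() above protect, written in its naive form with no protection (an
+// illustrative sketch, not part of the class; the helper name is hypothetical).
+#if 0
+static double growWaitTermSketch(double k, double theta, double growth,
+                                 double ts, double te)
+{
+    // (k(k-1)/(theta*g)) * (exp(g*ts) - exp(g*te)); non-positive whenever te >= ts
+    return (k * (k - 1.0) / (theta * growth)) *
+        (exp(growth * ts) - exp(growth * te));
+}
+#endif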
+
+//------------------------------------------------------------------------------------
+// erynes 2004/05/20 -- This method presently returns the following:
+//
+// sum_over_a_population's_coalescent_time_intervals(
+//  (k_i*(k_i - 1)/theta_p) * ( (tis - tie) + (1/2)*g_p*(tis^2 - tie^2) )
+//                                                  )
+//
+// where
+//        k_i is the number of lineages of population p in time interval i
+//        theta_p is the present estimate of parameter theta for population p
+//        g_p is the present estimate of the growth parameter for population p
+//        tis is the timepoint at the start of time interval i
+//        tie is the timepoint at the end of time interval i
+//
+// The population "p" is determined in Wait() by iterating over all
+// populations.  That method calls this method with the appropriate
+// parameters for the population in question.
+//
+// This non-public method is used to handle the case in which fabs(g)
+// is tiny--that is, less than GROWTH_EPSILON.  It is derived from the
+// formula used in Wait by expanding exp(g*t) in a Taylor series
+// about g == 0, multiplying this by the term k(k-1)/(g*theta),
+// and dropping all terms of order g^2 and above.
+// Brief tests with Mathematica suggest that the original formula
+// converges very nicely to this linear formula for normal values
+// of the timepoints, the difference being less than one part in one
+// billion, dropping down to zero difference for g == 0 (using
+// L'Hopital's rule).
+//
+// Please also see the comments for DWait.
+
+double CoalesceGrowPL::lnWaitForTinyGrowth(const double theta, const double growth,
+                                           const unsigned long int xpartition,
+                                           const list<Interval>& treesummary)
+{
+    assert(fabs(growth) < GROWTH_EPSILON); // otherwise use Wait
+
+    list<Interval>::const_iterator tit;
+    double k, te;
+
+    double result = 0.0;
+    // ts starts at 0 and subsequently is the previous cycle's te
+    double ts = 0.0;
+
+    for(tit = treesummary.begin(); tit != treesummary.end(); ++tit)
+    {
+        te = tit->m_endtime;
+        k  = tit->m_xpartlines[xpartition];
+
+        result += (k * (k - 1.0) / theta) *
+            ((ts - te) + 0.5 * growth * (ts * ts - te * te));
+        ts = te;
+    }
+
+    return result;
+}
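+
+//------------------------------------------------------------------------------------
+// For reference only: the exact per-interval form and the Taylor truncation used in
+// lnWaitForTinyGrowth() above, side by side (an illustrative sketch, not part of the
+// class; names are hypothetical; the exact form is numerically unreliable as g -> 0
+// because of the 1/g factor, which is why the truncated form is used instead).
+#if 0
+static void tinyGrowthFormsSketch(double k, double theta, double g,
+                                  double ts, double te,
+                                  double& exactForm, double& taylorForm)
+{
+    // exact:  (k(k-1)/(theta*g)) * (exp(g*ts) - exp(g*te))
+    exactForm = (k * (k - 1.0) / (theta * g)) * (exp(g * ts) - exp(g * te));
+    // exp(g*t) = 1 + g*t + (g*t)^2/2 + ...  =>  dropping terms of order g^2:
+    taylorForm = (k * (k - 1.0) / theta) *
+        ((ts - te) + 0.5 * g * (ts * ts - te * te));
+}
+#endif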
+
+//------------------------------------------------------------------------------------
+// erynes 2004/03/11 -- This method presently returns the following:
+//
+// sum_over_populations(sum_over_each_population's_coalescent_time_intervals(
+//                              log(2) + g_p*t_i_end - log(theta_p)
+//                                                                          )
+//                     )
+//
+// where
+//        theta_p is the present estimate of parameter theta for population p
+//        g_p is the present estimate of the growth parameter for population p
+//        t_i_end is the timepoint at the end of time interval i
+//
+// This quantity is equal to log(PointProb(G|P)), where PointProb(G|P) is the
+// "point" probability of the input genealogy G (parameter "treedata")
+// given the parameters P (parameter "param").
+// Prob(G|P) = PointProb(G|P) * WaitProb(G|P).
+// See CoalesceGrowPL::lnWait() for the wait probability term.
+
+double CoalesceGrowPL::lnPoint (const vector < double >&param, const vector < double >&lparam,
+                                const TreeSummary * treedata)
+
+{
+    const Interval *  treesum = treedata->GetCoalSummary()->GetLongPoint();
+    const Interval * tit;
+    vector < double > :: const_iterator pit = lparam.begin() + m_start;
+    vector < double > :: const_iterator pend = lparam.begin () + m_start + m_nPop;
+    vector < double > :: const_iterator git = param.begin() + m_growthstart;
+
+    long int xpartition = 0L;
+    double result = 0.0;
+
+    for( ; pit != pend; ++pit, ++git, ++xpartition)
+    {
+        for(tit= treesum; tit != NULL; tit = tit->m_next)
+        {
+            if(xpartition==tit->m_oldstatus)
+                result +=  LOG2 + tit->m_endtime * (*git) - (*pit);
+        }
+    }
+
+    // we pull this array only to check for emptiness; it is not otherwise used!
+    // JDEBUG--:  not correct in presence of migration
+    const LongVec2d& picks = treedata->GetCoalSummary()->GetShortPicks();
+    if (!picks.empty())                 // partner-picks must be accommodated
+    {
+        DoubleVec1d timesizes(m_nPop, 0.0);
+        ForceParameters fp(unknown_region);
+        DoubleVec1d myths(param.begin()+m_start,param.begin()+m_end);
+        fp.SetRegionalThetas(myths);
+        DoubleVec1d mygs(param.begin()+m_growthstart,
+                         param.begin()+m_growthstart+m_nPop);
+        fp.SetGrowthRates(mygs);
+        // now for every recombination event....
+        treesum = treedata->GetRecSummary()->GetLongPoint();
+        for (tit = treesum; tit != NULL; tit = tit->m_next)
+        {
+            // compute partner-picks denominator
+            timesizes = m_timesize->XpartThetasAtT(tit->m_endtime, fp);
+            assert(timesizes.size() == static_cast<DoubleVec1d::size_type>(m_nPop));
+            double denominator(accumulate(timesizes.begin(), timesizes.end(), 0.0));
+
+            // compute partner-picks numerator
+            ForceVec::size_type lpforce = 0;
+            // assuming no migration and only one local partition force
+            long whichtheta = m_whichlocalxparts[lpforce][tit->m_partnerpicks[lpforce]][0];
+            result += log(timesizes[whichtheta] / denominator);
+        }
+    }
+
+#if 0 // JDEBUG--JWARNING this is the logic that was in Jim's original commit
+    // compute partner-picks numerator
+    for(lpforce = 0; lpforce < m_localpartforces.size(); ++lpforce)
+    {
+        LocalPartitionForce* pforce(dynamic_cast<LocalPartitionForce*>(m_localpartforces[lpforce]));
+        assert(pforce);  // It wasn't a local partition force?
+        DoubleVec1d part_thetas(pforce->SumXPartsToParts(myths, mygs, tit->m_endtime));
+        DoubleVec1d::size_type partition;
+        for(partition = 0; partition < part_thetas.size(); ++partition)
+        {
+            result += log(part_thetas[partition] / denominator);
+        }
+    }
+#endif
+
+    return result;
+}
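+
+//------------------------------------------------------------------------------------
+// For reference only: the per-coalescence quantity accumulated by lnPoint() above,
+// without the partner-pick correction (an illustrative sketch, not part of the
+// class; the helper name is hypothetical, and it assumes the usual exponential
+// growth parameterization Theta_p(t) = Theta_p(0) * exp(-g_p * t) backward in time).
+#if 0
+static double growLnPointTermSketch(double log_theta_p, double growth_p, double t_end)
+{
+    // log( 2 / Theta_p(t_end) ) = log(2) + g_p * t_end - log(Theta_p(0))
+    return LOG2 + growth_p * t_end - log_theta_p;
+}
+#endif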
+
+//------------------------------------------------------------------------------------
+// erynes 2004/03/11 -- This method currently returns the following:
+//
+// sum_over_coalescent_time_intervals_of_population_whichparam(
+//   -(k_i(k_i - 1)/(g*theta*theta))*(exp(g*t_i_start) - exp(g*t_i_end))
+//                                                            )
+// where
+//   k_i is the number of lineages of this population in time interval i
+//   g   is the current estimate of the growth parameter for this pop.
+//   theta is the current estimate of the theta parameter for this pop.
+//   t_i_start is the timepoint at the beginning of time interval i
+//   t_i_end is the timepoint at the end of time interval i
+//
+// IMPORTANT NOTE:
+// Wait() and Point() return log(WaitProb(G|P)) and log(PointProb(G|P)),
+// and PostLike::DCalculate() currently assumes that DWait() and DPoint()
+// return the derivatives of log(WaitProb(G|P)) and log(PointProb(G|P)),
+// instead of returning the derivatives of WaitProb/PointProb directly.
+// As long as everything is kept consistent, this is all very good
+// for reducing complexity and improving speed.
+//
+// Note:  CoalesceGrowPL::DWait calculates the partial derivative of Wait
+// with respect to theta for the given population ("whichparam").
+// CoalescePL::DPoint does the same for Point.
+// Partial derivatives with respect to growth are found in GrowPL.
+
+double CoalesceGrowPL::DlnWait (const vector < double >&param, const TreeSummary * treedata,
+                                const long int & whichparam)
+{
+    const double growth = param[whichparam + m_growthstart];
+    if(fabs(growth) < GROWTH_EPSILON)
+        return DlnWaitForTinyGrowth(param, treedata, whichparam);
+
+    double result = 0.0;
+    long int which = whichparam - m_start;
+    const list<Interval>& treesum = treedata->GetCoalSummary()->GetLongWait();
+    list <Interval> :: const_iterator tit=treesum.begin();
+    const double theta = param[whichparam];
+    double coeff_s, coeff_e, arg1_s, arg1_e;
+    double starttime = 0.0;
+
+    for( ; tit != treesum.end(); ++tit)
+    {
+        double gts = growth * starttime;
+        double gte = growth * tit->m_endtime;
+
+        const long int & k = tit->m_xpartlines[which];
+
+        coeff_s = coeff_e = arg1_s = arg1_e = 1.0;
+
+        // IMPORTANT NOTE:  The following over/underflow checking
+        // assumes that each starttime and endtime is always >= 0.
+        // Hence, e.g., gts < 0 means growth < 0.
+
+        if (gts > 1.0)
+        {
+            coeff_s = -k * (k - 1.0);
+            if (growth > 1.0)
+                arg1_s /= growth;
+            else
+                coeff_s /= growth;
+            if (theta > 1.0)
+                arg1_s /= theta*theta;
+            else
+                coeff_s /= theta*theta;
+        }
+        else if (gts < -1.0)
+        {
+            arg1_s *= -k * (k - 1.0);
+            if (growth < -1.0)
+                coeff_s /= growth;
+            else
+                arg1_s /= growth;
+            if (theta > 1.0)
+                coeff_s /= theta*theta;
+            else
+                arg1_s /= theta*theta;
+        }
+        else // no over/underflow in exp()
+            coeff_s = -k * (k - 1.0)/(theta * theta * growth); // arg1_s already set
+
+        // BUGBUG erynes -- what follows is easy to understand,
+        // but we should revisit this and reuse some of what we
+        // learned about growth and theta above, while considering gts.
+
+        if (gte > 1.0)
+        {
+            coeff_e = -k * (k - 1.0);
+            if (growth > 1.0)
+                arg1_e /= growth;
+            else
+                coeff_e /= growth;
+            if (theta > 1.0)
+                arg1_e /= theta*theta;
+            else
+                coeff_e /= theta*theta;
+        }
+        else if (gte < -1.0)
+        {
+            arg1_e *= -k * (k - 1.0);
+            if (growth < -1.0)
+                coeff_e /= growth;
+            else
+                arg1_e /= growth;
+            if (theta > 1.0)
+                coeff_e /= theta*theta;
+            else
+                arg1_e /= theta*theta;
+        }
+        else // no over/underflow in exp()
+            coeff_e = -k * (k - 1.0)/(theta * theta * growth); // arg1_e already set
+
+        double incrementalResult =
+            coeff_s * SafeProductWithExp(arg1_s, gts) -
+            coeff_e * SafeProductWithExp(arg1_e, gte);
+
+        if (incrementalResult < 0.0)
+        {
+            // We have precision problems,
+            // because we expect te > ts always,
+            // and hence gte > gts always.
+            // During debugging, we have encountered precision problems
+            // in which gdb claims gts == gte and arg1_s == arg1_e
+            // but incrementalResult < 0, typically reflecting
+            // a difference near the 15th digit.
+            // (Note that te == ts corresponds to two events
+            // occurring simultaneously--e.g., k = 1 for a really long
+            // time in each of two populations, then one finally
+            // migrates to the other, then they coalesce instantaneously.)
+            // It should be safe to set incrementalResult to zero in cases of
+            // roundoff error.  If incrementalResult is negative,
+            // but _not_ tiny compared with either of the terms
+            // whose difference yielded incrementalResult,
+            // then something is _very_ wrong--probably te < ts,
+            // which should never happen.  We assert in this case;
+            // this will enable us to see the problem, but the end user
+            // will not see the assertion, unless s/he's running in debug mode.
+            if (incrementalResult/fabs(coeff_s * SafeProductWithExp(arg1_s, gts))
+                < -1e-10)
+                assert(0);
+            incrementalResult = 0.0;
+        }
+
+        starttime = tit->m_endtime;
+        result += incrementalResult;
+    }
+
+    return result;
+}
+
+//------------------------------------------------------------------------------------
+// erynes 2004/05/20 -- This method currently returns the following:
+//
+// sum_over_coalescent_time_intervals_of_population_whichparam(
+//   -(k_i(k_i - 1)/(theta*theta))*( (t_i_start - t_i_end) +
+//                                   (1/2)*g*(t_i_start^2 - t_i_end^2) )
+//                                                            )
+// where
+//   k_i is the number of lineages of this population in time interval i
+//   g   is the current estimate of the growth parameter for this pop.
+//   theta is the current estimate of the theta parameter for this pop.
+//   t_i_start is the timepoint at the beginning of time interval i
+//   t_i_end is the timepoint at the end of time interval i
+//
+// This non-public method is used to handle the case in which fabs(g)
+// is tiny--that is, less than GROWTH_EPSILON.  It is derived from the
+// formula used in DWait by expanding exp(g*t) in a Taylor series
+// about g == 0, multiplying this by the term -k(k-1)/(g*theta*theta),
+// and dropping all terms of order g^2 and above.
+// Brief tests with Mathematica suggest that the original formula
+// converges very nicely to this linear formula for normal values
+// of the timepoints, the difference being less than one part in one
+// billion, dropping down to zero difference for g == 0 (using
+// L'Hopital's rule).
+//
+// Please also see the comments for DWait.
+
+double CoalesceGrowPL::DlnWaitForTinyGrowth(const vector < double >&param, const TreeSummary * treedata,
+                                            const long int & whichparam)
+{
+    const double growth = param[whichparam + m_growthstart];
+    assert(fabs(growth) < GROWTH_EPSILON); // otherwise use DWait()
+    const double theta = param[whichparam];
+    double k, te;
+    long int which = whichparam - m_start;
+    const list<Interval>& treesum = treedata->GetCoalSummary()->GetLongWait();
+    list<Interval>::const_iterator tit = treesum.begin();
+
+    double result = 0.0;
+    double ts = 0.0;
+
+    for( ; tit != treesum.end(); ++tit)
+    {
+        k  = tit->m_xpartlines[which];
+        te = tit->m_endtime;
+
+        result += -(k * (k - 1.0) / (theta * theta)) *
+            ((ts - te) + 0.5 * growth * (ts * ts - te * te));
+        ts = te;
+    }
+
+    return result;
+}
+
+//------------------------------------------------------------------------------------
+// Mary 2007/08/13 -- added support for disease w/ recombination
+//
+// JDEBUG--:  only in absence of migration
+
+double CoalesceGrowPL::DlnPoint (const vector < double >&param, const TreeSummary * treedata, const long &whichparam)
+{
+    long which = whichparam - m_start;
+    const vector<double>& ncoal = treedata->GetCoalSummary()->GetShortPoint();
+
+    double answ(-ncoal[which] / param[whichparam]);
+
+    const LongVec2d& picks = treedata->GetCoalSummary()->GetShortPicks();
+    if (picks.empty()) return answ;
+    else  {
+        DoubleVec1d timesizes(m_nPop, 0.0);
+        ForceParameters fp(unknown_region);
+        DoubleVec1d myths(param.begin()+m_start,param.begin()+m_end);
+        fp.SetRegionalThetas(myths);
+        DoubleVec1d mygs(param.begin()+m_growthstart, param.begin()+m_growthstart+m_nPop);
+        fp.SetGrowthRates(mygs);
+        // now for every recombination event....
+        const Interval* reclist  = treedata->GetRecSummary()->GetLongPoint();
+        const Interval* tit;
+        for (tit = reclist; tit != NULL; tit = tit->m_next)
+        {
+            // compute partner-picks denominator
+            timesizes = m_timesize->XpartThetasAtT(tit->m_endtime, fp);
+            assert(timesizes.size() == static_cast<DoubleVec1d::size_type>(m_nPop));
+            double denominator(accumulate(timesizes.begin(), timesizes.end(), 0.0));
+
+            // compute partner-picks numerator
+            // ALL assuming no migration and only one local partition force
+            // therefore xpart == part
+            answ -= 1.0/denominator;
+            if (tit->m_partnerpicks[0] == which)
+            {
+                answ += 1.0/timesizes[which];
+            }
+        }
+    }
+
+    return answ;
+} // CoalesceGrowPL::DlnPoint
+
+//------------------------------------------------------------------------------------
+// GrowPL: waiting-time (=0) and point probabilities specific to the
+//              exponential growth force, and their derivatives
+// LONG DATA SUMMARIES
+
+double GrowPL::lnWait (const vector < double >&param, const TreeSummary * treedata)
+{
+    // Growth is a modifier of theta, hence its contribution
+    // is included in CoalesceGrowPL::Wait.
+    return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double GrowPL::lnPoint (const vector < double >&param, const vector < double >&lparam, const TreeSummary * treedata)
+{
+    // Growth is a modifier of theta, hence its contribution
+    // is included in CoalesceGrowPL::Point.
+    return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+// erynes 2004/04/09 -- This method currently returns the following:
+//
+// sum_over_coalescent_time_intervals_of_population_whichparam(
+//   -(k_i(k_i - 1)/(g*g*theta))*(
+//             (1 - g*t_i_start)*exp(g*t_i_start) -
+//             (1 - g*t_i_end)  *exp(g*t_i_end)
+//                               )
+//                                                            )
+// where
+//   k_i is the number of lineages of this population in time interval i
+//   g   is the current estimate of the growth parameter for this pop.
+//   theta is the current estimate of the theta parameter for this pop.
+//   t_i_start is the timepoint at the beginning of time interval i
+//   t_i_end is the timepoint at the end of time interval i
+
+double GrowPL::DlnWait (const vector < double >&param, const TreeSummary * treedata, const long int & whichparam)
+{
+    const double growth = param[whichparam];
+    if (fabs(growth) < GROWTH_EPSILON)
+        return DlnWaitForTinyGrowth(param, treedata, whichparam);
+
+    const long int whicht = m_thetastart + whichparam - m_start;
+    const double theta = param[whicht];
+    assert(theta); // we divide by it below
+    double gte, gts, arg1_s, arg1_e, coeff_s, coeff_e;
+    long int k;
+
+    const list<Interval>& treesum = treedata->GetCoalSummary()->GetLongWait();
+    list <Interval> :: const_iterator tit = treesum.begin();
+
+    double result = 0.0;
+    double starttime = 0.0;
+
+    for( ; tit != treesum.end(); ++tit)
+    {
+        gts = growth * starttime;
+        gte = growth * tit->m_endtime;
+
+        k = tit->m_xpartlines[whicht];
+
+        arg1_s = 1.0 - gts;
+        arg1_e = 1.0 - gte;
+        coeff_s = 1.0;
+        coeff_e = 1.0;
+
+        // IMPORTANT NOTE:  The following over/underflow checking
+        // assumes that each starttime and endtime is always >= 0.
+        // Hence, e.g., gts < 0 means growth < 0.
+
+        if (gts > 1.0)
+        {
+            coeff_s = -k * (k - 1.0);
+            if (growth > 1.0)
+                arg1_s /= growth*growth;
+            else
+                coeff_s /= growth*growth;
+            if (theta > 1.0)
+                arg1_s /= theta;
+            else
+                coeff_s /= theta;
+        }
+        else if (gts < -1.0)
+        {
+            arg1_s *= -k*(k-1.0);
+            if (growth < -1.0)
+                coeff_s /= growth*growth;
+            else
+                arg1_s /= growth*growth;
+            if (theta > 1.0)
+                coeff_s /= theta;
+            else
+                arg1_s /= theta;
+        }
+        else // no over/underflow in exp()
+            coeff_s = -k*(k-1.0)/(theta*growth*growth); // arg1_s already set
+
+        // BUGBUG erynes -- this is easy to follow,
+        // but we should revisit it and possibly reuse
+        // the growth and theta checks that we made for the gts case.
+
+        if (gte > 1.0)
+        {
+            coeff_e = -k*(k-1.0);
+            if (growth > 1.0)
+                arg1_e /= growth*growth;
+            else
+                coeff_e /= growth*growth;
+            if (theta > 1.0)
+                arg1_e /= theta;
+            else
+                coeff_e /= theta;
+        }
+        else if (gte < -1.0)
+        {
+            arg1_e *= -k*(k-1.0);
+            if (growth < -1.0)
+                coeff_e /= growth*growth;
+            else
+                arg1_e /= growth*growth;
+            if (theta > 1.0)
+                coeff_e /= theta;
+            else
+                arg1_e /= theta;
+        }
+        else // no over/underflow in exp()
+            coeff_e = -k*(k-1.0)/(theta*growth*growth); // arg1_e already set
+
+        starttime = tit->m_endtime;
+        result +=  coeff_s * SafeProductWithExp(arg1_s, gts) - coeff_e * SafeProductWithExp(arg1_e, gte);
+    }
+
+    return result;
+}
+
+//------------------------------------------------------------------------------------
+// erynes 2004/05/20 -- This method currently returns the following:
+//
+// sum_over_coalescent_time_intervals_of_population_whichparam(
+//    (k_i(k_i - 1)/(2*theta))*(
+//                  tis^2 * (1 + g*tis)  -  tie^2 * (1 + g*tie)
+//                             )
+//                                                            )
+// where
+//   k_i is the number of lineages of this population in time interval i
+//   g   is the current estimate of the growth parameter for this pop.
+//   theta is the current estimate of the theta parameter for this pop.
+//   tis is the timepoint at the beginning of time interval i
+//   tie is the timepoint at the end of time interval i
+//
+// This non-public method is used to handle the case in which fabs(g)
+// is tiny--that is, less than GROWTH_EPSILON.  It is derived from the
+// formula used in DWait by expanding exp(g*t) in a Taylor series
+// about g == 0, multiplying through in the original formula,
+// and dropping all terms of order g^2 and above.
+// Brief tests with Mathematica suggest that the original formula
+// converges very nicely to this linear formula for normal values
+// of the timepoints, the difference being less than one part in one
+// billion, dropping down to zero difference for g == 0 (using
+// L'Hopital's rule).
+//
+// Please also see the comments for DWait.
+
+double GrowPL::DlnWaitForTinyGrowth(const vector < double >&param, const TreeSummary * treedata,
+                                    const long int & whichparam)
+{
+    const double growth = param[whichparam];
+    assert(fabs(growth) < GROWTH_EPSILON); // otherwise use DWait
+
+    const long int whicht = m_thetastart + whichparam - m_start;
+    const double theta = param[whicht];
+    assert(theta); // we divide by it below
+    double k, te;
+    const list<Interval>& treesum = treedata->GetCoalSummary()->GetLongWait();
+    list <Interval> :: const_iterator tit = treesum.begin();
+
+    double result = 0.0;
+    double ts = 0.0;
+
+    for( ; tit != treesum.end(); ++tit)
+    {
+        te = tit->m_endtime;
+        k = tit->m_xpartlines[whicht];
+
+        result += (k * (k - 1.0) / (2.0 * theta)) *
+            (ts * ts * (1.0 + growth * ts) - te * te * (1.0 + growth * te));
+        ts = te;
+    }
+
+    return result;
+}
+
+//------------------------------------------------------------------------------------
+// This method presently returns:
+//
+// sum_over_time_intervals_of_this_population's_coalescences(endtime)
+//
+// This is the derivative of log(Point) with respect to growth.
+
+double GrowPL::DlnPoint (const vector < double >&param, const TreeSummary * treedata, const long int & whichparam)
+{
+    const Interval * treesum = treedata->GetCoalSummary()->GetLongPoint();
+    const Interval * tit;
+    double result = 0.0 ;
+    // indicator to Theta, we need this to get the correct
+    // time intervals in treesum
+    long int whicht = m_thetastart + whichparam - m_start;
+
+    for(tit = treesum; tit != NULL; tit = tit->m_next)
+    {
+        if(whicht == tit->m_oldstatus)
+            result += tit->m_endtime;
+    }
+
+    return result;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+double DivMigPL::lnWait(const vector<double>& param, const TreeSummary* treedata)
+{
+    const vector<double>& mWait = treedata->GetDivMigSummary()->GetShortWait();
+
+    // sum_pop(SM_pop *kt)
+    return  -1.0 * inner_product (mWait.begin (), mWait.end (), param.begin() + m_start, 0.0);
+
+}
+
+//------------------------------------------------------------------------------------
+
+double DivMigPL::lnPoint(const vector<double>& param, const vector<double>& lparam, const TreeSummary* treedata)
+{
+    //  Sum_j(Sum_i(migevent[j,i] * log(M[j,i]))
+    const vector<double>& nmig = treedata->GetDivMigSummary()->GetShortPoint();
+
+    return  inner_product (nmig.begin(), nmig.end(), lparam.begin() + m_start, 0.0);
+}
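+
+//------------------------------------------------------------------------------------
+// For reference only: explicit-loop equivalents of the two inner_product calls in
+// lnWait() and lnPoint() above (an illustrative sketch, not part of the class; the
+// helper name and the flattened rate-vector layout are hypothetical).
+#if 0
+static double divMigLnLikeSketch(const std::vector<double>& mWait,   // k*t summaries
+                                 const std::vector<double>& nmig,    // event counts
+                                 const std::vector<double>& rates,   // rates, flattened
+                                 const std::vector<double>& lrates)  // log(rates)
+{
+    double lnlike = 0.0;
+    for (std::vector<double>::size_type i = 0; i < rates.size(); ++i)
+        lnlike += -mWait[i] * rates[i]       // waiting-time term
+            + nmig[i] * lrates[i];           // point term
+    return lnlike;
+}
+#endif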
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// RecombinePL: waiting-time and point probabilities specific to the
+//              recombination force, and their derivatives
+
+double RecombinePL::lnWait (const vector < double >&param, const TreeSummary * treedata)
+{
+    const vector<double>& rWait = treedata->GetRecSummary()->GetShortWait();
+    return  -rWait[0] * *(param.begin () + m_start);
+}
+
+//------------------------------------------------------------------------------------
+
+double RecombinePL::lnPoint (const vector < double >&param, const vector < double >&lparam,
+                             const TreeSummary * treedata)
+{
+    //  Sum_j(recevents * log(r))
+    const vector<double>& nrec = treedata->GetRecSummary()->GetShortPoint();
+    return nrec[0] * *(lparam.begin () + m_start);
+}
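+
+//------------------------------------------------------------------------------------
+// For reference only: the full recombination contribution assembled from the two
+// terms above (an illustrative sketch, not part of the class; the helper name is
+// hypothetical; rWait0 and nrec are the single entries of the short summaries).
+#if 0
+static double recLnLikeSketch(double rWait0, double nrec, double recrate)
+{
+    // lnWait + lnPoint = -rWait0 * r + nrec * log(r)
+    return -rWait0 * recrate + nrec * log(recrate);
+}
+#endif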
+
+//------------------------------------------------------------------------------------
+
+double RecombinePL::DlnWait (const vector < double >&param, const TreeSummary * treedata,
+                             const long int & whichparam)
+{
+    const vector<double>& rWait = treedata->GetRecSummary()->GetShortWait();
+    return -rWait[0];
+}
+
+//------------------------------------------------------------------------------------
+
+double RecombinePL::DlnPoint (const vector < double >&param, const TreeSummary * treedata,
+                              const long int & whichparam)
+{
+    const vector<double>& nrecs = treedata->GetRecSummary()->GetShortPoint();
+    if(nrecs[0] == 0)
+    {
+        return 0.0;
+    }
+    else
+    {
+        return SafeDivide(nrecs[0], *(param.begin() + m_start));
+    }
+}
+
+//------------------------------------------------------------------------------------
+// SelectPL: waiting-time and point probabilities specific to the
+//              selection force, and their derivatives
+
+double SelectPL::lnWait (const vector < double >&param, const TreeSummary * treedata)
+{
+    return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double SelectPL::lnPoint (const vector < double >&param, const vector < double >&lparam, const TreeSummary * treedata)
+{
+    return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double SelectPL::DlnWait (const vector < double >&param, const TreeSummary * treedata, const long int & whichparam)
+{
+    return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double SelectPL::DlnPoint (const vector < double >&param, const TreeSummary * treedata, const long int & whichparam)
+{
+    return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+// This function computes the "waiting time" contribution to the log-likelihood.
+// It computes and returns
+//
+// sum_over_all_timeIntervals_i( -kA_i*(kA_i - 1)*dtauA_i/thetaA0 -ka_i*(ka_i - 1)*dtaua_i/thetaa0 ),
+//
+// where "A" is the favored allele, "a" is the disfavored allele, "k" is the number of lineages
+// in population "A" or "a," and dtau is the length of the time interval in the "Kingman reference frame"
+// (i.e., time running as usual and pop. size remaining constant).  The transformations from
+// "magic time" dtau to real time "dt," where dt = te - ts = endtime - starttime, are:
+//
+// dtau_A = (theta_A0*dt + (theta_a0/s)*(exp(s*te) - exp(s*ts)))/(theta_A0 + theta_a0),
+// dtau_a = (theta_a0*dt - (theta_A0/s)*(exp(-s*te) - exp(-s*ts)))/(theta_A0 + theta_a0).
+
+double CoalesceLogisticSelectionPL::lnWait(const vector<double>& param, const TreeSummary *treedata)
+{
+    if (2 != m_nPop)
+    {
+        string msg = "CoalesceLogisticSelectionPL::lnWait() called with m_nPop = ";
+        msg += ToString(m_nPop);
+        msg += ".  m_nPop must equal 2, reflecting one population with the major ";
+        msg += "allele and one population with the minor allele.";
+        throw implementation_error(msg);
+    }
+    double mu_into_A_from_a(param[3]), mu_into_a_from_A(param[4]);
+
+    if (0 != param[2] || 0 != param[5] || 0 == param[3] || 0 == param[4])
+        throw implementation_error("Error parsing disease rates in CoalesceLogisticSelectionPL::lnWait()");
+
+    const list<Interval>& treesum = treedata->GetCoalSummary()->GetLongWait();
+    list<Interval>::const_iterator treesum_it;
+
+    double result(0.0), s(param[m_s_is_here]);
+    double theta_A0(param[m_start]), theta_a0(param[m_start+1]);
+    double totalTheta_0(theta_A0 + theta_a0);
+
+    if (theta_a0 <= 0.0 || theta_A0 <= 0.0)
+    {
+        string msg = "CoalesceLogisticSelectionPL::lnWait(), received an invalid Theta value ";
+        msg += "(theta_A0 = " + ToString(theta_A0) + ", theta_a0 = " + ToString(theta_a0) + ").";
+        throw impossible_error(msg);
+    }
+
+    if (fabs(s) < LOGISTIC_SELECTION_COEFFICIENT_EPSILON)
+        return lnWaitForTinyLogisticSelectionCoefficient(param, treedata);
+
+    if (fabs(s) >= DBL_BIG)
+        return -DBL_BIG; // unavoidable overflow
+
+    double factor_A(theta_a0/(s*theta_A0)), factor_a(theta_A0/(-s*theta_a0));
+    double t_e, t_s(0.0), dt, e_toThe_sts(1.0), e_toThe_ste, term;
+    double term_A(0.0), term_a(0.0);
+
+    for (treesum_it = treesum.begin(); treesum_it != treesum.end(); treesum_it++)
+    {
+        t_e = treesum_it->m_endtime;
+        dt = t_e - t_s;
+        term_A = term_a = 0.0;
+        const double& k_A = treesum_it->m_xpartlines[0]; // num. lineages, allele A
+        const double& k_a = treesum_it->m_xpartlines[1]; // num. lineages, allele a
+
+        if (s > 0.0)
+        {
+            // Use overflow protection for term_A; if term_a underflows, that's okay.
+
+            if (s*dt > LOG_ONEPLUSEPSILON)
+            {
+                term = SafeProductWithExp(factor_A, s*dt); // coefficient guaranteed finite and positive
+                if (term >= DBL_BIG)
+                    return -DBL_BIG; // unavoidable overflow
+                e_toThe_ste = (term/factor_A)*e_toThe_sts;
+                if (k_A > 0.0)
+                {
+                    term_A = k_A*(term - factor_A)*e_toThe_sts*((k_A - 1.0)/totalTheta_0 + mu_into_A_from_a)
+                        + k_A*(k_A - 1.0)*dt/totalTheta_0;
+                    // Note:  A factor of theta_A0 cancels in the "dt" term.
+                    if (term_A >= DBL_BIG)
+                        return -DBL_BIG; // unavoidable overflow
+                }
+            }
+            else
+            {
+                if (s*dt >= numeric_limits<double>::epsilon())
+                    e_toThe_ste = (1.0 + s*dt)*e_toThe_sts;
+                else
+                    e_toThe_ste = SafeProductWithExp(1.0,s*t_e);
+                if (k_A > 0.0)
+                {
+                    term_A = k_A*factor_A*s*dt*e_toThe_sts*((k_A - 1.0)/totalTheta_0 + mu_into_A_from_a)
+                        + k_A*(k_A - 1.0)*dt/totalTheta_0;
+                    // Note:  A factor of theta_A0 cancels in the "dt" term.
+                    if (term_A >= DBL_BIG)
+                        return -DBL_BIG; // unavoidable overflow
+                }
+            }
+            if (k_a > 0.0)
+                term_a = k_a * factor_a
+                    * (1.0/e_toThe_ste - 1.0/e_toThe_sts)
+                    * ((k_a - 1.0)/totalTheta_0 + mu_into_a_from_A)
+                    + k_a*(k_a - 1.0)*dt/totalTheta_0;
+            // Note:  A factor of theta_a0 cancels in the "dt" term.
+        }
+        else // s < 0
+        {
+            // Use overflow protection for term_a; if term_A underflows, that's okay.
+
+            if (-s*dt > LOG_ONEPLUSEPSILON)
+            {
+                term = SafeProductWithExp(factor_a,-s*dt); // coefficient guaranteed finite and positive
+                if (term >= DBL_BIG)
+                    return -DBL_BIG; // unavoidable overflow
+                e_toThe_ste = e_toThe_sts/(term/factor_a);
+                if (k_a > 0.0)
+                {
+                    term_a = k_a*((term - factor_a)/e_toThe_sts)*((k_a - 1.0)/totalTheta_0 + mu_into_a_from_A)
+                        + k_a*(k_a - 1.0)*dt/totalTheta_0;
+                    // Note:  A factor of theta_a0 cancels in the "dt" term.
+                    if (term_a >= DBL_BIG)
+                        return -DBL_BIG; // unavoidable overflow
+                }
+            }
+            else
+            {
+                if (-s*dt >= numeric_limits<double>::epsilon())
+                    e_toThe_ste = (1.0 + s*dt)*e_toThe_sts;
+                else
+                    e_toThe_ste = SafeProductWithExp(1.0,s*t_e);
+                if (k_a > 0.0)
+                {
+                    term_a = k_a*(factor_a*(-s)*dt/e_toThe_sts)*((k_a - 1.0)/totalTheta_0 + mu_into_a_from_A)
+                        + k_a*(k_a - 1.0)*dt/totalTheta_0;
+                    // Note:  A factor of theta_a0 cancels in the "dt" term.
+                    if (term_a >= DBL_BIG)
+                        return -DBL_BIG; // unavoidable overflow
+                }
+            }
+            if (k_A > 0.0)
+                term_A = k_A*factor_A*(e_toThe_ste - e_toThe_sts)*((k_A - 1.0)/totalTheta_0 + mu_into_A_from_a)
+                    + k_A*(k_A - 1.0)*dt/totalTheta_0;
+            // Note:  A factor of theta_a0 cancels in the "dt" term.
+        }
+
+        result -= term_A + term_a;
+        if (result <= -DBL_BIG)
+            return -DBL_BIG;
+
+        t_s = t_e;
+        e_toThe_sts = e_toThe_ste;
+    }
+
+    return result;
+}
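+
+//------------------------------------------------------------------------------------
+// For reference only: the dtau transformations quoted in the header comment of
+// lnWait() above, written naively with none of the over/underflow guards used in
+// the loop (an illustrative sketch, not part of the class; the helper name is
+// hypothetical).
+#if 0
+static void logisticDtauSketch(double theta_A0, double theta_a0, double s,
+                               double ts, double te,
+                               double& dtau_A, double& dtau_a)
+{
+    double dt = te - ts;
+    double total = theta_A0 + theta_a0;
+    dtau_A = (theta_A0 * dt + (theta_a0 / s) * (exp(s * te) - exp(s * ts))) / total;
+    dtau_a = (theta_a0 * dt - (theta_A0 / s) * (exp(-s * te) - exp(-s * ts))) / total;
+}
+#endif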
+
+//------------------------------------------------------------------------------------
+// See comment for lnWait(), above.
+// This function expands lnWait() in a Taylor series about s = 0,
+// so that we can smoothly handle the factor of 1/s in lnWait().
+// This version retains only the first two terms in
+// (exp(s*te) - exp(s*ts))/s = (te-ts) + (s/2!)(te^2 - ts^2) + (s^2/3!)(te^3 - ts^3) + ....
+
+double CoalesceLogisticSelectionPL::lnWaitForTinyLogisticSelectionCoefficient(const vector<double>& param,
+                                                                              const TreeSummary* treedata)
+{
+    double mu_into_A_from_a(param[3]), mu_into_a_from_A(param[4]);
+
+    if (0 != param[2] || 0 != param[5] || 0 == param[3] || 0 == param[4])
+        throw implementation_error("Error parsing disease rates in "
+                                   "CoalesceLogisticSelectionPL::lnWaitForTinyLogisticSelectionCoefficient()");
+
+    const list<Interval>& treesum = treedata->GetCoalSummary()->GetLongWait();
+    list<Interval>::const_iterator treesum_it;
+
+    double result(0.0), t_e, t_s(0.0), s(param[m_s_is_here]), dt, term;
+    double theta_A0(param[m_start]), theta_a0(param[m_start+1]), totalTheta_0(theta_A0+theta_a0);
+
+    for (treesum_it = treesum.begin(); treesum_it != treesum.end(); treesum_it++)
+    {
+        t_e = treesum_it->m_endtime;
+        const double& k_A = treesum_it->m_xpartlines[0]; // num. lineages, allele A
+        const double& k_a = treesum_it->m_xpartlines[1]; // num. lineages, allele a
+        dt = t_e - t_s;
+
+        term = 0.5*s*(t_e*t_e - t_s*t_s);
+
+        result += -(k_A*dt/theta_A0)*((k_A - 1.0) + theta_a0*mu_into_A_from_a)
+            - (k_A*theta_a0*term/theta_A0)*((k_A - 1.0)/totalTheta_0 + mu_into_A_from_a);
+
+        result += -(k_a*dt/theta_a0)*((k_a - 1.0) + theta_A0*mu_into_a_from_A)
+            + (k_a*theta_A0*term/theta_a0)*((k_a - 1.0)/totalTheta_0 + mu_into_a_from_A);
+
+        t_s = t_e;
+    }
+
+    return result;
+}
+
+//------------------------------------------------------------------------------------
+// This function computes the "point" contribution to the log-likelihood.
+// It computes and returns
+//
+// sum_over_all_coalescent_times_in_A( ln(2/theta_A(te_A)) )
+//  + sum_over_all_coalescent_times_in_a( ln(2/theta_a(te_a)) ),
+//
+// where te_A is the time (te = "endtime") at the bottom of a time interval
+// which ends with a coalescence in the population with allele "A," and
+// similarly for the pop. with allele "a."
+// theta_A(t) + theta_a(t) = theta_A(0) + theta_a(0) for all times t;
+// that is, the overall population size is assumed constant.  We have
+//
+// theta_A(t) = ((theta_A0+theta_a0)/(theta_A0+theta_a0*exp(s*t)))*theta_A0,
+// theta_a(t) = ((theta_A0+theta_a0)/(theta_A0+theta_a0*exp(s*t)))*theta_a0*exp(s*t).
+
+double CoalesceLogisticSelectionPL::lnPoint(const vector<double>& param, const vector<double>& lparam,
+                                            const TreeSummary* treedata)
+{
+    if (2 != m_nPop)
+    {
+        string msg = "CoalesceLogisticSelectionPL::lnPoint() called with m_nPop = ";
+        msg += ToString(m_nPop);
+        msg += ".  m_nPop must equal 2, reflecting one population with the major ";
+        msg += "allele and one population with the minor allele.";
+        throw implementation_error(msg);
+    }
+
+    const Interval *treesum = treedata->GetCoalSummary()->GetLongPoint();
+    const Interval *pTreesum;
+    double theta_A0(param[0]), theta_a0(param[1]), logTheta_A0(lparam[0]),
+        logTheta_a0(lparam[1]), logThetaTotal_0, s(param[m_s_is_here]);
+
+    if (theta_A0 > 0.0 && theta_a0 > 0.0)
+        logThetaTotal_0 = log(theta_A0 + theta_a0);
+    else
+    {
+        // This really, really should never ever happen.
+        string msg = "CoalesceLogisticSelectionPL::lnPoint(), received an invalid value ";
+        msg += "(theta_A0 = ";
+        msg += ToString(theta_A0);
+        msg += " and theta_a0 = " + ToString(theta_a0) + "); s = ";
+        msg += ToString(s) + ".";
+        throw impossible_error(msg);
+    }
+
+    double result = 0.0;
+
+    for(pTreesum = treesum; pTreesum != NULL; pTreesum = pTreesum->m_next)
+    {
+        double thetaDenominator = theta_A0 +
+            SafeProductWithExp(theta_a0, s * pTreesum->m_endtime); // sum is guaranteed to be > 0
+
+        if (thetaDenominator < DBL_BIG)
+            result += LOG2 - logThetaTotal_0 + log(thetaDenominator);
+        else
+            result += LOG2 - logThetaTotal_0 + logTheta_a0 + s * pTreesum->m_endtime;
+        if (0L == pTreesum->m_oldstatus)
+            result -= logTheta_A0;
+        else if (1L == pTreesum->m_oldstatus)
+            result -= logTheta_a0 + (pTreesum->m_endtime * s);
+        else
+        {
+            string msg = "CoalesceLogisticSelectionPL::lnPoint(), received a TreeSummary ";
+            msg += "containing an event with oldstatus = " + ToString(pTreesum->m_oldstatus);
+            msg += ".  \"oldstatus\" must be either 0 or 1, reflecting a coalescene in either ";
+            msg += "the population with allele A or the population with allele a.";
+            throw implementation_error(msg);
+        }
+    }
+
+    return result;
+}
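+
+//------------------------------------------------------------------------------------
+// For reference only: the logistic trajectories quoted in the header comment of
+// lnPoint() above, written naively with no overflow guard (an illustrative sketch,
+// not part of the class; the helper name is hypothetical).
+#if 0
+static void logisticThetasSketch(double theta_A0, double theta_a0, double s, double t,
+                                 double& theta_A_t, double& theta_a_t)
+{
+    double denom = theta_A0 + theta_a0 * exp(s * t);
+    theta_A_t = ((theta_A0 + theta_a0) / denom) * theta_A0;
+    theta_a_t = ((theta_A0 + theta_a0) / denom) * theta_a0 * exp(s * t);
+    // note:  theta_A_t + theta_a_t == theta_A0 + theta_a0 for all t
+}
+#endif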
+
+//------------------------------------------------------------------------------------
+
+double CoalesceLogisticSelectionPL::DlnWait(const vector<double>& param, const TreeSummary * treedata,
+                                            const long int & whichparam)
+{
+    if (2 != m_nPop)
+    {
+        string msg = "CoalesceLogisticSelectionPL::DlnWait() called with m_nPop = ";
+        msg += ToString(m_nPop);
+        msg += ".  m_nPop must equal 2, reflecting one population with the major ";
+        msg += "allele and one population with the minor allele.";
+        throw implementation_error(msg);
+    }
+
+    double s(param[m_s_is_here]);
+
+    if (fabs(s) < LOGISTIC_SELECTION_COEFFICIENT_EPSILON)
+        return DlnWaitForTinyLogisticSelectionCoefficient(param, treedata, whichparam);
+
+    bool derivativeWithRespectTo_A;
+
+    if (0 == whichparam)
+        derivativeWithRespectTo_A = true;
+    else if (1 == whichparam)
+        derivativeWithRespectTo_A = false;
+    else
+    {
+        string msg = "CoalesceLogisticSelectionPL::DlnWait() called with whichparam = ";
+        msg += ToString(whichparam);
+        msg += ".  \"whichparam\" must equal 0 or 1, reflecting one population with the favored ";
+        msg += "allele and one population with the disfavored allele.";
+        throw implementation_error(msg);
+    }
+
+    if (s >= DBL_BIG)
+        return derivativeWithRespectTo_A ? DBL_BIG : -DBL_BIG;
+    if (s <= -DBL_BIG)
+        return derivativeWithRespectTo_A ? -DBL_BIG : DBL_BIG;
+
+    if (0 != param[2] || 0 != param[5] || 0 == param[0] || 0 == param[1] ||
+        0 == param[3] || 0 == param[4])
+        throw implementation_error("Bad param received by CoalesceLogisticSelectionPL::DlnWait().");
+
+    const list<Interval>& treesum = treedata->GetCoalSummary()->GetLongWait();
+    list<Interval>::const_iterator treesum_it;
+    double theta_A0(param[0]), theta_a0(param[1]), totalTheta_0(theta_A0+theta_a0);
+    double term_A(0.0), term_a(0.0), term, result(0.0);
+    double t_s(0.0),t_e,dt,e_toThe_sts(1.0),e_toThe_ste;
+    double factor = 1.0/(totalTheta_0*totalTheta_0);
+    double thetaFactor_A = theta_a0/(theta_A0*theta_A0);
+    double thetaFactor_a = theta_A0/(theta_a0*theta_a0);
+    double mu_into_A_from_a(param[3]), mu_into_a_from_A(param[4]);
+
+    for (treesum_it = treesum.begin(); treesum_it != treesum.end(); treesum_it++)
+    {
+        t_e = treesum_it->m_endtime;
+        dt = t_e - t_s;
+        term_A = term_a = 0.0;
+        const double& k_A = treesum_it->m_xpartlines[0]; // num. lineages, allele A
+        const double& k_a = treesum_it->m_xpartlines[1]; // num. lineages, allele a
+
+        if (s > 0.0)
+        {
+            // Use overflow protection for term_A; if term_a underflows, that's okay.
+
+            if (s*dt > LOG_ONEPLUSEPSILON)
+            {
+                term = SafeProductWithExp(factor/s,s*dt); // quotient guaranteed finite and positive
+                if (term >= DBL_BIG)
+                    return derivativeWithRespectTo_A ? DBL_BIG : -DBL_BIG; // unavoidable overflow
+                e_toThe_ste = (term/(factor/s))*e_toThe_sts;
+                if (k_A > 0)
+                {
+                    if (derivativeWithRespectTo_A)
+                        term_A = (term - factor/s)*thetaFactor_A*e_toThe_sts;
+                    else
+                        term_A = (term - factor/s)*e_toThe_sts;
+                    if (term_A >= DBL_BIG)
+                        return derivativeWithRespectTo_A ? DBL_BIG : -DBL_BIG; // unavoidable overflow
+                }
+            }
+            else
+            {
+                if (s*dt >= numeric_limits<double>::epsilon())
+                    e_toThe_ste = (1.0 + s*dt)*e_toThe_sts;
+                else
+                    e_toThe_ste = SafeProductWithExp(1.0,s*t_e);
+                if (k_A > 0)
+                {
+                    if (derivativeWithRespectTo_A)
+                        term_A = factor*dt*thetaFactor_A*e_toThe_sts;
+                    else
+                        term_A = factor*dt*e_toThe_sts;
+                    if (term_A >= DBL_BIG)
+                        return derivativeWithRespectTo_A ? DBL_BIG : -DBL_BIG; // unavoidable overflow
+                }
+            }
+            if (k_a > 0)
+            {
+                if (derivativeWithRespectTo_A)
+                    term_a = (factor/(-s))*(1.0/e_toThe_ste - 1.0/e_toThe_sts);
+                else
+                    term_a = (factor/(-s))*thetaFactor_a*(1.0/e_toThe_ste - 1.0/e_toThe_sts);
+            }
+        }
+
+        else // s < 0
+        {
+            // Use overflow protection for term_a; if term_A underflows, that's okay.
+
+            if (-s*dt > LOG_ONEPLUSEPSILON)
+            {
+                term = SafeProductWithExp(factor/(-s),-s*dt); // quotient guaranteed finite and positive
+                if (term >= DBL_BIG)
+                    return derivativeWithRespectTo_A ? -DBL_BIG : DBL_BIG; // unavoidable overflow
+                e_toThe_ste = e_toThe_sts/(term/(factor/(-s)));
+                if (k_a > 0)
+                {
+                    if (derivativeWithRespectTo_A)
+                        term_a = (term - factor/(-s))/e_toThe_sts;
+                    else
+                        term_a = (term - factor/(-s))*thetaFactor_a/e_toThe_sts;
+                    if (term_a >= DBL_BIG)
+                        return derivativeWithRespectTo_A ? -DBL_BIG : DBL_BIG; // unavoidable overflow
+                }
+            }
+            else
+            {
+                if (-s*dt >= numeric_limits<double>::epsilon())
+                    e_toThe_ste = (1.0 + s*dt)*e_toThe_sts;
+                else
+                    e_toThe_ste = SafeProductWithExp(1.0,s*t_e);
+                if (k_a > 0)
+                {
+                    if (derivativeWithRespectTo_A)
+                        term_a = factor*dt/e_toThe_sts;
+                    else
+                        term_a = factor*dt*thetaFactor_a/e_toThe_sts;
+                    if (term_a >= DBL_BIG)
+                        return derivativeWithRespectTo_A ? -DBL_BIG : DBL_BIG; // unavoidable overflow
+                }
+            }
+            if (k_A > 0)
+            {
+                if (derivativeWithRespectTo_A)
+                    term_A = (factor/s)*thetaFactor_A*(e_toThe_ste - e_toThe_sts);
+                else
+                    term_A = (factor/s)*(e_toThe_ste - e_toThe_sts);
+            }
+        }
+
+        if (derivativeWithRespectTo_A)
+            result += k_A*term_A*((k_A-1.0)*(theta_A0+totalTheta_0) + mu_into_A_from_a/factor)
+                - k_a*term_a*((k_a-1.0) + mu_into_a_from_A/(factor*theta_a0))
+                + dt*factor*(k_A*(k_A-1.0) + k_a*(k_a-1.0));
+        else
+            result += -k_A*term_A*((k_A-1.0) + mu_into_A_from_a/(factor*theta_A0))
+                + k_a*term_a*((k_a-1.0)*(theta_a0+totalTheta_0) + mu_into_a_from_A/factor)
+                + dt*factor*(k_A*(k_A-1.0) + k_a*(k_a-1.0));
+
+        if (result >= DBL_BIG)
+            return DBL_BIG;
+        if (result <= -DBL_BIG)
+            return -DBL_BIG;
+
+        t_s = t_e;
+        e_toThe_sts = e_toThe_ste;
+    }
+
+    return result;
+}
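+// The guard pattern used above computes q * exp(s*dt) with q = factor/s, where
+// s*dt may be large enough to overflow.  A minimal sketch of the same idea,
+// assuming (as its uses here suggest) that SafeProductWithExp(a, b) returns
+// a*exp(b) clamped at DBL_BIG:
+//
+//     double q    = factor / s;                      // finite and positive
+//     double term = SafeProductWithExp(q, s * dt);   // q * exp(s*dt), clamped
+//     if (term >= DBL_BIG)
+//         /* propagate +/- DBL_BIG to the caller */;
+//     double e_toThe_ste = (term / q) * e_toThe_sts; // recover exp(s*t_e)
+//
+// When s*dt is at most LOG_ONEPLUSEPSILON the code instead uses the first-order
+// expansion exp(s*dt) ~= 1 + s*dt (or recomputes exp(s*t_e) directly when s*dt
+// is below machine epsilon).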
+
+//------------------------------------------------------------------------------------
+
+double CoalesceLogisticSelectionPL::DlnWaitForTinyLogisticSelectionCoefficient(const vector<double>& param,
+                                                                               const TreeSummary *treedata,
+                                                                               const long int & whichparam)
+{
+    bool derivativeWithRespectTo_A;
+
+    if (0 == whichparam)
+        derivativeWithRespectTo_A = true;
+    else if (1 == whichparam)
+        derivativeWithRespectTo_A = false;
+    else
+    {
+        string msg = "CoalesceLogisticSelectionPL::DlnWaitForTinyLogisticSelectionCoefficient() ";
+        msg += "called with whichparam = ";
+        msg += ToString(whichparam);
+        msg += ".  \"whichparam\" must equal 0 or 1, reflecting one subpopulation with the favored ";
+        msg += "allele and one subpopulation with the disfavored allele.";
+        throw implementation_error(msg);
+    }
+
+    if (0 != param[2] || 0 != param[5] || 0 == param[0] || 0 == param[1] ||
+        0 == param[3] || 0 == param[4])
+        throw implementation_error
+            ("Bad param received by CoalesceLogisticSelectionPL::DlnWaitForTinyLogisticSelectionCoefficient().");
+
+    const list<Interval>& treesum = treedata->GetCoalSummary()->GetLongWait();
+    list<Interval>::const_iterator treesum_it;
+    double theta_A0(param[0]), theta_a0(param[1]), totalTheta_0(theta_A0+theta_a0);
+    double t_e, t_s(0.0), resultTimesDenominator(0.0), s(param[m_s_is_here]);
+    double mu_into_A_from_a(param[3]), mu_into_a_from_A(param[4]);
+    double denominator(totalTheta_0*totalTheta_0), term_A, term_a;
+    double factor_A(theta_a0/(theta_A0*theta_A0)), factor_a(theta_A0/(theta_a0*theta_a0));
+
+    for(treesum_it = treesum.begin(); treesum_it != treesum.end(); treesum_it++)
+    {
+        t_e = treesum_it->m_endtime;
+        double dt = t_e - t_s;
+        const double& k_A = treesum_it->m_xpartlines[0]; // num. lineages, allele A
+        const double& k_a = treesum_it->m_xpartlines[1]; // num. lineages, allele a
+
+        if (derivativeWithRespectTo_A)
+        {
+            term_A = k_A*factor_A*(dt + 0.5*s*(t_e*t_e - t_s*t_s))*((k_A-1.0)*(theta_A0+totalTheta_0)
+                                                                    + mu_into_A_from_a*denominator);
+            term_a = -k_a*(dt - 0.5*s*(t_e*t_e - t_s*t_s))*((k_a-1.0) + mu_into_a_from_A*denominator);
+        }
+        else
+        {
+            term_A = -k_A*(dt + 0.5*s*(t_e*t_e - t_s*t_s))*((k_A-1.0) + mu_into_A_from_a*denominator);
+            term_a = k_a*factor_a*(dt - 0.5*s*(t_e*t_e - t_s*t_s))*((k_a-1.0)*(theta_a0+totalTheta_0)
+                                                                    + mu_into_a_from_A*denominator);
+        }
+
+        resultTimesDenominator += term_A + term_a + dt*(k_A*(k_A-1.0) + k_a*(k_a-1.0));
+
+        t_s = t_e;
+    }
+
+    return resultTimesDenominator/denominator;
+}
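+// The "tiny s" variant above relies on the first-order expansion
+// exp(+/- s*t) ~= 1 +/- s*t (presumably selected when |s| falls below
+// LOGISTIC_SELECTION_COEFFICIENT_EPSILON, cf. LogisticSelectionPL::DlnWait
+// below), so each interval contributes, in the same t_s/t_e notation:
+//
+//     integral over [t_s, t_e] of (1 + s*t) dt  =  dt + 0.5*s*(t_e^2 - t_s^2)
+//     integral over [t_s, t_e] of (1 - s*t) dt  =  dt - 0.5*s*(t_e^2 - t_s^2)
+//
+// which is why no overflow protection is needed here.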
+
+//------------------------------------------------------------------------------------
+
+double CoalesceLogisticSelectionPL::DlnPoint(const vector<double>& param, const TreeSummary *treedata,
+                                             const long int & whichparam)
+{
+    if (2 != m_nPop)
+    {
+        string msg = "CoalesceLogisticSelectionPL::DlnPoint() called with m_nPop = ";
+        msg += ToString(m_nPop);
+        msg += ".  m_nPop must equal 2, reflecting one population with the major ";
+        msg += "allele and one population with the minor allele.";
+        throw implementation_error(msg);
+    }
+
+    bool derivativeWithRespectTo_A;
+
+    if (0 == whichparam)
+        derivativeWithRespectTo_A = true;
+    else if (1 == whichparam)
+        derivativeWithRespectTo_A = false;
+    else
+    {
+        string msg = "CoalesceLogisticSelectionPL::DlnPoint() called with whichparam = ";
+        msg += ToString(whichparam);
+        msg += ".  \"whichparam\" must equal 0 or 1, reflecting one population with the major ";
+        msg += "allele and one population with the minor allele.";
+        throw implementation_error(msg);
+    }
+
+    const Interval *treesum = treedata->GetCoalSummary()->GetLongPoint();
+    const Interval *pTreesum;
+    double theta_A0(param[0]), theta_a0(param[1]), s(param[m_s_is_here]),
+        totalTheta_0(theta_A0+theta_a0), theta_A0squared(theta_A0*theta_A0),
+        theta_a0squared(theta_a0*theta_a0), result(0.0), x;
+
+    if (theta_a0 <= 0.0 || theta_A0 <= 0.0)
+    {
+        string msg = "CoalesceLogisticSelectionPL::DlnPoint(), received an invalid Theta value ";
+        msg += "(theta_A0 = " + ToString(theta_A0) + ", theta_a0 = " + ToString(theta_a0) + ").";
+        throw impossible_error(msg);
+    }
+
+    // First, add up the contributions from the coalescent events.
+
+    if (s >= 0.0)
+    {
+        for (pTreesum = treesum; pTreesum != NULL; pTreesum = pTreesum->m_next)
+        {
+            double t_e = pTreesum->m_endtime;
+            if (derivativeWithRespectTo_A)
+            {
+                if (0L == pTreesum->m_oldstatus) // coal. in allele A
+                {
+                    x = SafeProductWithExp(theta_a0*(theta_A0+totalTheta_0),s*t_e);
+                    if (x < DBL_BIG)
+                        result -= (theta_A0squared + x) /
+                            ((theta_A0*totalTheta_0)*(theta_A0 + x/(theta_A0+totalTheta_0)));
+                    else
+                        result -= (theta_A0 + totalTheta_0)/(theta_A0*totalTheta_0);
+                }
+                else if (1L == pTreesum->m_oldstatus) // coal. in allele a
+                {
+                    x = SafeProductWithExp(theta_a0,s*t_e);
+                    if (x < DBL_BIG)
+                        result -= (x - theta_a0)/(totalTheta_0*(theta_A0 + x));
+                    else
+                        result -= 1.0/totalTheta_0;
+                }
+                else
+                {
+                    string msg = "CoalesceLogisticSelectionPL::DlnPoint(), received a TreeSummary ";
+                    msg += "containing an event with oldstatus = " + ToString(pTreesum->m_oldstatus);
+                    msg += ".  \"oldstatus\" must be either 0 or 1, reflecting a coalescence in either ";
+                    msg += "the population with allele A or the population with allele a.";
+                    throw implementation_error(msg);
+                }
+            }
+            else // derivative with respect to theta_a0
+            {
+                if (0L == pTreesum->m_oldstatus) // coal. in allele A
+                {
+                    x = SafeProductWithExp(theta_A0,s*t_e);
+                    if (x < DBL_BIG)
+                        result += (x - theta_A0) /
+                            (totalTheta_0*(theta_A0 + x*theta_a0/theta_A0));
+                    else
+                        result += theta_A0/(theta_a0*totalTheta_0);
+                }
+                else if (1L == pTreesum->m_oldstatus) // coal. in allele a
+                {
+                    x = SafeProductWithExp(theta_a0squared,s*t_e);
+                    if (x < DBL_BIG)
+                        result -= (theta_A0*(theta_a0+totalTheta_0) + x) /
+                            (totalTheta_0*(theta_A0*theta_a0 + x));
+                    else
+                        result -= 1.0/totalTheta_0;
+                }
+                else
+                {
+                    string msg = "CoalesceLogisticSelectionPL::DlnPoint(), received a TreeSummary ";
+                    msg += "containing an event with oldstatus = " + ToString(pTreesum->m_oldstatus);
+                    msg += ".  \"oldstatus\" must be either 0 or 1, reflecting a coalescence in either ";
+                    msg += "the population with allele A or the population with allele a.";
+                    throw implementation_error(msg);
+                }
+            }
+        }
+    }
+    else // s < 0
+    {
+        for (pTreesum = treesum; pTreesum != NULL; pTreesum = pTreesum->m_next)
+        {
+            double t_e = pTreesum->m_endtime;
+            if (derivativeWithRespectTo_A)
+            {
+                if (0L == pTreesum->m_oldstatus) // coal. in allele A
+                {
+                    x = SafeProductWithExp(theta_A0squared,-s*t_e);
+                    if (x < DBL_BIG)
+                        result -= (theta_a0*(theta_A0+totalTheta_0) + x) /
+                            (totalTheta_0*(theta_A0*theta_a0 + x));
+                    else
+                        result -= 1.0/totalTheta_0;
+                }
+                else if (1L == pTreesum->m_oldstatus) // coal. in allele a
+                {
+                    x = SafeProductWithExp(theta_a0,-s*t_e);
+                    if (x < DBL_BIG)
+                        result += (x - theta_a0) / (totalTheta_0*(theta_a0 + x*theta_A0/theta_a0));
+                    else
+                        result += theta_a0/(theta_A0*totalTheta_0);
+                }
+                else
+                {
+                    string msg = "CoalesceLogisticSelectionPL::DlnPoint(), received a TreeSummary ";
+                    msg += "containing an event with oldstatus = " + ToString(pTreesum->m_oldstatus);
+                    msg += ".  \"oldstatus\" must be either 0 or 1, reflecting a coalescence in either ";
+                    msg += "the population with allele A or the population with allele a.";
+                    throw implementation_error(msg);
+                }
+            }
+            else // derivative with respect to theta_a0
+            {
+                if (0L == pTreesum->m_oldstatus) // coal. in allele A
+                {
+                    x = SafeProductWithExp(theta_A0,-s*t_e);
+                    if (x < DBL_BIG)
+                        result -= (x - theta_A0) / (totalTheta_0*(theta_a0 + x));
+                    else
+                        result -= 1.0/totalTheta_0;
+                }
+                else if (1L == pTreesum->m_oldstatus) // coal. in allele a
+                {
+                    x = SafeProductWithExp(theta_A0*(theta_a0+totalTheta_0),-s*t_e);
+                    if (x < DBL_BIG)
+                        result -= (x + theta_a0squared) /
+                            (theta_a0*totalTheta_0*(theta_a0 + x/(theta_a0+totalTheta_0)));
+                    else
+                        result -= (theta_a0+totalTheta_0)/(theta_a0*totalTheta_0);
+                }
+                else
+                {
+                    string msg = "CoalesceLogisticSelectionPL::DlnPoint(), received a TreeSummary ";
+                    msg += "containing an event with oldstatus = " + ToString(pTreesum->m_oldstatus);
+                    msg += ".  \"oldstatus\" must be either 0 or 1, reflecting a coalescence in either ";
+                    msg += "the population with allele A or the population with allele a.";
+                    throw implementation_error(msg);
+                }
+            }
+        }
+    }
+
+    // Now, add the contributions from the mutation events a --> A and A --> a.
+    treesum = treedata->GetDiseaseSummary()->GetLongPoint();
+
+    double one__over__theta_A0(1.0/theta_A0), one__over__theta_a0(1.0/theta_a0);
+    for(pTreesum = treesum; pTreesum != NULL; pTreesum = pTreesum->m_next)
+    {
+        if (0L == pTreesum->m_oldstatus)
+        {
+            if (derivativeWithRespectTo_A)
+                result -= one__over__theta_A0;
+            else
+                result += one__over__theta_a0;
+        }
+        else if (1L == pTreesum->m_oldstatus)
+        {
+            if (derivativeWithRespectTo_A)
+                result += one__over__theta_A0;
+            else
+                result -= one__over__theta_a0;
+        }
+        else
+        {
+            string msg = "CoalesceLogisticSelectionPL::DlnPoint(), received a TreeSummary ";
+            msg += "containing an event with oldstatus = " + ToString(pTreesum->m_oldstatus);
+            msg += ".  \"oldstatus\" must be either 0 or 1, reflecting a mutation to either ";
+            msg += "allele A or allele a.";
+            throw implementation_error(msg);
+        }
+    }
+
+    return result;
+
+}
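+// The "x = SafeProductWithExp(...); if (x < DBL_BIG) ... else ..." pattern above
+// substitutes the large-x limit of each ratio when the exponential overflows.
+// One worked instance (the allele-a coalescence branch for s >= 0, derivative
+// with respect to theta_A0), again assuming SafeProductWithExp(a, b) returns
+// a*exp(b) clamped at DBL_BIG:
+//
+//     x = theta_a0 * exp(s*t_e)
+//     (x - theta_a0) / (totalTheta_0*(theta_A0 + x))  ->  1/totalTheta_0  as x -> infinity
+//
+// which is exactly the value used in the corresponding else clause.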
+
+//------------------------------------------------------------------------------------
+
+double LogisticSelectionPL::lnWait(const vector<double>& param, const TreeSummary *treedata)
+{
+    return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double LogisticSelectionPL::lnPoint(const vector<double>& param, const vector<double>& lparam,
+                                    const TreeSummary *treedata)
+{
+    return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+
+double LogisticSelectionPL::DlnWait(const vector<double>& param, const TreeSummary *treedata,
+                                    const long int & whichparam)
+{
+    if (whichparam != m_s_is_here)
+    {
+        string msg = "LogisticSelectionPL::DlnWait() called with whichparam = ";
+        msg += ToString(whichparam);
+        msg += ".  \"whichparam\" must equal " + ToString(m_s_is_here);
+        msg += ", which corresponds to the selection coefficient \"s.\"";
+        throw implementation_error(msg);
+    }
+
+    double s(param[m_s_is_here]);
+
+    if (fabs(s) < LOGISTIC_SELECTION_COEFFICIENT_EPSILON)
+        return DlnWaitForTinyLogisticSelectionCoefficient(param, treedata, whichparam);
+
+    double theta_A0(param[0]), theta_a0(param[1]), mu_into_A_from_a(param[3]),
+        mu_into_a_from_A(param[4]);
+
+    if (0.0 == theta_A0 || 0.0 == theta_a0)
+    {
+        string msg = "LogisticSelectionPL::DlnWait(), theta_A0 = ";
+        msg += ToString(theta_A0) + ", theta_a0 = " + ToString(theta_a0);
+        msg += "; attempted to divide by zero.";
+        throw impossible_error(msg);
+    }
+
+    const list<Interval>& treesum = treedata->GetCoalSummary()->GetLongWait();
+    list<Interval>::const_iterator treesum_it;
+    double t_s(0.0), t_e, term_s, term_e, denominator(s*s*(theta_A0+theta_a0)),
+        dtau_A__ds(0.0), dtau_a__ds(0.0), result(0.0);
+    double mu_term_A(mu_into_A_from_a*(theta_A0+theta_a0)), mu_term_a(mu_into_a_from_A*(theta_A0+theta_a0));
+    term_s = s > 0.0 ? -theta_a0/denominator : theta_A0/denominator;
+
+    for(treesum_it = treesum.begin(); treesum_it != treesum.end(); treesum_it++)
+    {
+        t_e = treesum_it->m_endtime;
+        const double& k_A = treesum_it->m_xpartlines[0]; // num. lineages, allele A
+        const double& k_a = treesum_it->m_xpartlines[1]; // num. lineages, allele a
+        dtau_A__ds = dtau_a__ds = 0;
+
+        if (s > 0.0)
+        {
+            term_e = SafeProductWithExp(theta_a0*(s*t_e - 1.0)/denominator,s*t_e);
+            if (term_e >= DBL_BIG)
+                return -DBL_BIG;
+            if (k_A > 0)
+                dtau_A__ds = term_e - term_s;
+            if (k_a > 0)
+                dtau_a__ds = ((theta_A0*theta_a0)/(denominator*denominator)) *
+                    ((s*s*t_e*t_e - 1.0)/term_e - (s*s*t_s*t_s - 1.0)/term_s);
+        }
+        else
+        {
+            term_e = SafeProductWithExp(theta_A0*(s*t_e + 1.0)/denominator,-s*t_e);
+            if (term_e <= -DBL_BIG)
+                return DBL_BIG;
+            if (k_a > 0)
+                dtau_a__ds = term_e - term_s;
+            if (k_A > 0)
+                dtau_A__ds = ((theta_A0*theta_a0)/(denominator*denominator)) *
+                    ((s*s*t_e*t_e - 1.0)/term_e - (s*s*t_s*t_s - 1.0)/term_s);
+        }
+
+        result -= (k_A*(k_A - 1.0 + mu_term_A)/theta_A0)*dtau_A__ds
+            + (k_a*(k_a - 1.0 + mu_term_a)/theta_a0)*dtau_a__ds;
+        if (result <= -DBL_BIG)
+            return -DBL_BIG;
+        if (result >= DBL_BIG)
+            return DBL_BIG;
+
+        t_s = t_e;
+        term_s = term_e;
+    }
+
+    return result;
+
+}
+
+//------------------------------------------------------------------------------------
+
+double LogisticSelectionPL::DlnWaitForTinyLogisticSelectionCoefficient(const vector<double> & param,
+                                                                       const TreeSummary * treedata,
+                                                                       const long int & whichparam)
+{
+    const list<Interval>& treesum = treedata->GetCoalSummary()->GetLongWait();
+    list<Interval>::const_iterator treesum_it;
+    double theta_A0(param[0]), theta_a0(param[1]), theta_A0_over_theta_a0;
+    double ts(0.0), te, result(0.0), s(param[m_s_is_here]);
+    double ts_squared(0.0), te_squared;
+
+    if (0.0 == theta_A0 || 0.0 == theta_a0 || 0.0 == theta_A0 + theta_a0)
+    {
+        string msg = "LogisticSelectionPL::DlnWaitForTinyLogisticSelectionCoefficient(), theta_A0 = ";
+        msg += ToString(theta_A0) + ", theta_a0 = " + ToString(theta_a0);
+        msg += "; attempted to divide by zero.";
+        throw impossible_error(msg);
+    }
+
+    theta_A0_over_theta_a0 = theta_A0/theta_a0;
+    double mu_into_A_from_a(param[3]), mu_into_a_from_A(param[4]),
+        mu_term_A(mu_into_A_from_a*(theta_A0+theta_a0)), mu_term_a(mu_into_a_from_A*(theta_A0+theta_a0));
+
+    for(treesum_it = treesum.begin(); treesum_it != treesum.end(); treesum_it++)
+    {
+        te = treesum_it->m_endtime;
+        te_squared = te*te;
+        const double& k_A = treesum_it->m_xpartlines[0]; // num. lineages, allele A
+        const double& k_a = treesum_it->m_xpartlines[1]; // num. lineages, allele a
+
+        result -= 0.5*(te_squared - ts_squared)*(k_A*(k_A-1.0+mu_term_A)/theta_A0_over_theta_a0 -
+                                                 k_a*(k_a-1.0+mu_term_a)*theta_A0_over_theta_a0);
+        result -= (s/3.0)*(te_squared*te - ts_squared*ts)*(k_A*(k_A-1.0+mu_term_A)/theta_A0_over_theta_a0 +
+                                                           k_a*(k_a-1.0+mu_term_a)*theta_A0_over_theta_a0);
+        ts = te;
+        ts_squared = te_squared;
+    }
+
+    return result/(theta_A0 + theta_a0);
+}
+
+//------------------------------------------------------------------------------------
+
+double LogisticSelectionPL::DlnPoint(const vector<double> & param, const TreeSummary * treedata,
+                                     const long int & whichparam)
+{
+    if (whichparam != m_s_is_here)
+    {
+        string msg = "LogisticSelectionPL::DlnPoint() called with whichparam = ";
+        msg += ToString(whichparam);
+        msg += ".  \"whichparam\" must equal " + ToString(m_s_is_here);
+        msg += ", which corresponds to the selection coefficient \"s.\"";
+        throw implementation_error(msg);
+    }
+
+    const Interval *treesum = treedata->GetCoalSummary()->GetLongPoint();
+    const Interval *pTreesum;
+    double theta_A0(param[0]), theta_a0(param[1]), s(param[m_s_is_here]);
+    double result(0.0), term_a;
+
+    // First, add up the contributions from coalescent events.
+
+    for (pTreesum = treesum; pTreesum != NULL; pTreesum = pTreesum->m_next)
+    {
+        const double& te = pTreesum->m_endtime;
+        term_a = SafeProductWithExp(theta_a0, s*te);
+
+        if (0L == pTreesum->m_oldstatus) // coal. in allele A
+        {
+            if (term_a < DBL_BIG)
+                result += te*term_a/(theta_A0 + term_a); // rhs = 0 for s << 0
+            else
+                result += te;
+        }
+
+        else if (1L == pTreesum->m_oldstatus) // coal. in allele a
+            result -= te/(term_a/theta_A0 + 1.0);
+
+        else
+        {
+            string msg = "LogisticSelectionPL::DlnPoint(), received a TreeSummary ";
+            msg += "containing an event with oldstatus = " + ToString(pTreesum->m_oldstatus);
+            msg += ".  \"oldstatus\" must be either 0 or 1, reflecting a coalescence in either ";
+            msg += "the subpopulation with allele A or the subpopulation with allele a.";
+            throw implementation_error(msg);
+        }
+    }
+
+    // Now, add the contributions from the mutation events a --> A and A --> a.
+    treesum = treedata->GetDiseaseSummary()->GetLongPoint();
+
+    for(pTreesum = treesum; pTreesum != NULL; pTreesum = pTreesum->m_next)
+    {
+        const double& te = pTreesum->m_endtime;
+
+        if (0L == pTreesum->m_oldstatus)
+            result += te;
+        else if (1L == pTreesum->m_oldstatus)
+            result -= te;
+        else
+        {
+            string msg = "LogisticSelectionPL::DlnPoint(), received a TreeSummary ";
+            msg += "containing an event with oldstatus = " + ToString(pTreesum->m_oldstatus);
+            msg += ".  \"oldstatus\" must be either 0 or 1, reflecting a mutation to either ";
+            msg += "allele A or allele a.";
+            throw implementation_error(msg);
+        }
+    }
+
+    return result;
+}
+
+//------------------------------------------------------------------------------------
+
+StickSelectPL::StickSelectPL(const ForceSummary& fs)
+    : PLForces(0),  // MDEBUG arbitrary value, is that okay?
+      m_thetastart(0),
+      m_thetaend(0),
+      m_toBigA_here(FLAGLONG),
+      m_toSmallA_here(FLAGLONG),
+      m_s_here(0),
+      m_r_here(FLAGLONG),
+      m_disstart(FLAGLONG)
+{
+    m_minuslnsqrt2pi = -log(sqrt(2*PI));
+    m_nxparts = fs.GetNParameters(force_COAL);
+    // JDEBUG--when switch to allowing migration remove the assert
+    assert(fs.GetNonLocalPartitionIndexes().size() < 2);
+    m_partindex = fs.GetNonLocalPartitionIndexes();
+    // JDEBUG--this doesn't work due to invalid parameters
+    // m_ndis = fs.GetNParameters(force_DISEASE);
+    m_ndis = 2L;
+    m_disindex = fs.GetPartIndex(force_DISEASE);
+    assert(m_ndis == 2);
+    assert(m_nxparts == 2);
+}
+
+//------------------------------------------------------------------------------------
+
+StickSelectPL::~StickSelectPL()
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+void StickSelectPL::SetToBigAIndex(long index)
+{
+    m_toBigA_here = index;
+    if (m_disstart == FLAGLONG)
+        // if toBigA or toSmallA hasn't been set yet, m_disstart ends
+        // up as FLAGLONG again, since FLAGLONG is smaller than all legal values
+        m_disstart = min(m_toBigA_here, m_toSmallA_here);
+}
+
+//------------------------------------------------------------------------------------
+
+void StickSelectPL::SetToSmallAIndex(long index)
+{
+    m_toSmallA_here = index;
+    if (m_disstart == FLAGLONG)
+        // if toBigA or toSmallA hasn't been set yet, m_disstart ends
+        // up as FLAGLONG again, since FLAGLONG is smaller than all legal values
+        m_disstart = min(m_toBigA_here, m_toSmallA_here);
+}
+
+//------------------------------------------------------------------------------------
+
+void StickSelectPL::SetLocalPartForces(const ForceSummary& fs)
+{
+    // JDEBUG--:  This code is cut and paste from CoalescePL
+    // and smells pretty bad as a result.
+
+    m_localpartforces = fs.GetLocalPartitionForces();
+    assert(!m_localpartforces.empty());
+
+    // set up vector which answers the following question:
+    // for each local partition force
+    //   for any xpartition theta (identified by index),
+    //   what partition (of the given force) is it?
+
+    // This code is copied approximately from PartitionForce::
+    // SumXPartsToParts().
+
+    ForceVec::const_iterator pforce;
+    const ForceVec& partforces = fs.GetPartitionForces();
+    LongVec1d indicator(partforces.size(), 0L);
+    LongVec1d nparts(registry.GetDataPack().GetAllNPartitions());
+    DoubleVec1d::size_type xpart;
+    long partindex;
+    for (pforce = partforces.begin(), partindex = 0;
+         pforce != partforces.end(); ++pforce, ++partindex)
+    {
+        vector<DoubleVec1d::size_type> indices(m_nxparts, 0);
+        for(xpart = 0; xpart < static_cast<DoubleVec1d::size_type>(m_nxparts); ++xpart)
+        {
+            indices[xpart] = indicator[partindex];
+            long int part;
+            for (part = nparts.size() - 1; part >= 0; --part)
+            {
+                ++indicator[part];
+                if (indicator[part] < nparts[part]) break;
+                indicator[part] = 0;
+            }
+        }
+        // initialize xparts vectors
+        vector<DoubleVec1d::size_type> emptyvec;
+        vector<vector<DoubleVec1d::size_type> > partvec(nparts[partindex], emptyvec);
+
+        if ((*pforce)->IsLocalPartitionForce())
+        {
+            m_whichlocalpart.push_back(indices);
+            m_whichlocalxparts.push_back(partvec);
+        }
+        m_whichpart.push_back(indices);
+        m_whichxparts.push_back(partvec);
+    }
+
+    // now construct the vector mapping each partition to its set of xpartitions.
+    //
+    // we do this by walking the xpartition-to-partition vector we just
+    // constructed: its contents tell us which partition each parameter
+    // (xpartition) belongs to, while a running counter tells us which
+    // xpartition to add.
+    long lpindex = 0;
+    for (pforce = partforces.begin(), partindex = 0;
+         pforce != partforces.end(); ++pforce, ++partindex)
+    {
+        for(xpart = 0; xpart < m_whichpart[partindex].size(); ++xpart)
+            m_whichxparts[partindex][m_whichpart[partindex][xpart]].
+                push_back(xpart);
+        if ((*pforce)->IsLocalPartitionForce())
+        {
+            for(xpart = 0; xpart < m_whichpart[partindex].size(); ++xpart)
+            {
+                m_whichlocalxparts[lpindex][m_whichpart[partindex][xpart]].push_back(xpart);
+            }
+            ++lpindex;
+        }
+    }
+} // SetLocalPartForces
+
+//------------------------------------------------------------------------------------
+
+double StickSelectPL::StickMean(double freqA, double s, double toA, double toa) const
+{
+    return s*freqA*(1-freqA) + toA*(1-freqA) - toa*freqA;
+}
+
+//------------------------------------------------------------------------------------
+
+double StickSelectPL::StickVar(double freqA, double tipfreqA, double thetaA) const
+{
+    return 2.0*freqA*(1-freqA)*tipfreqA / thetaA;
+}
+
+//------------------------------------------------------------------------------------
+
+double StickSelectPL::CommonFunctional(double freqBigA, double prevfreqBigA, double s,
+                                       double toBigA, double toSmallA, double length) const
+{
+    // this implements the un-squared numerator of the posterior on stick selection:
+    return freqBigA - prevfreqBigA - length *
+        StickMean(prevfreqBigA,s,toBigA,toSmallA);
+}
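+// Taken together, StickMean(), StickVar() and CommonFunctional() supply the
+// pieces of the per-interval Gaussian term assembled in lnPoint() below.  A
+// sketch of one interval's contribution (step > 0; "freq" and "len" abbreviate
+// stairfreqs[...][0] and stairlengths as used in lnPoint()):
+//
+//     double var   = StickVar(freq[step-1], tipprobA, tipthetaA);
+//     double resid = CommonFunctional(freq[step], freq[step-1],
+//                                     s, toBigA, toSmallA, len[step-1]);
+//     double lnP   = m_minuslnsqrt2pi - 0.5*log(var*len[step-1])
+//                  - resid*resid / (2.0*var*len[step-1]);
+//
+// i.e. the current frequency is treated as approximately Normal with mean
+// freq[step-1] + StickMean(freq[step-1],...)*len[step-1] and variance var*len[step-1].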
+
+//------------------------------------------------------------------------------------
+
+double StickSelectPL::lnPTreeStick(const DoubleVec1d& param, const DoubleVec1d& lparam, const TreeSummary* treedata)
+{
+    double answ(lnWait(param,treedata));
+
+    answ += lnPointTreeStick(param,lparam,treedata);
+
+    return answ;
+}
+
+//------------------------------------------------------------------------------------
+
+double StickSelectPL::lnWait(const DoubleVec1d& param, const TreeSummary* treedata)
+{
+    assert(m_disstart != FLAGLONG);
+    double answ(0.0);
+
+    assert(m_ndis == 2);
+    assert(m_ndis == m_nxparts);
+
+    // compute waiting times for theta and disease
+    // all LongWait() are the same....
+    const list<Interval>& coalsum(treedata->GetCoalSummary()->GetLongWait());
+    list<Interval>::const_iterator interval(coalsum.begin());
+    const DoubleVec1d& stairlengths(treedata->GetStickSummary().lengths);
+    const DoubleVec2d& stairfreqs(treedata->GetStickSummary().freqs);
+    double tipthetaA(param[m_thetastart]);
+    double jointend(stairlengths[0]),intervalend(interval->m_endtime);
+    double jointstart(0.0),intervalstart(0.0);
+    long joint(0);
+
+    while (interval != coalsum.end())
+    {
+        // how long is the bit of tree where nothing changes?
+        double lengthininterval = min(jointend, intervalend) - max(jointstart, intervalstart);
+
+        // adjust for coalescent contribution
+        long xpart;
+        for(xpart = 0; xpart < m_nxparts; ++xpart)
+        {
+            answ -= lengthininterval * stairfreqs[0][0] *
+                (interval->m_xpartlines[xpart])*(interval->m_xpartlines[xpart]-1) /
+                // because there is no migration we can use xpart; otherwise
+                // we'd need to add a loop over local partitions
+                (stairfreqs[joint][xpart] * tipthetaA);
+        }
+
+        // adjust for trait mutation contribution
+        long part;
+        for(part = 0; part < m_ndis; ++part)
+        {
+            answ -= lengthininterval * interval->m_partlines[m_disindex][part] *
+                param[(part ? m_disstart+1 : m_disstart)] *
+                // both macros rely on only two disease states present
+                // equation relies on nxparts == nparts!
+                stairfreqs[joint][(part ? 0 : 1)]/stairfreqs[joint][part];
+        }
+
+        if (jointend < intervalend)     // joint ends first
+        {
+            ++joint;
+            jointstart = jointend;
+            jointend += stairlengths[joint];
+        }
+        else
+        {
+            if (intervalend < jointend) // else interval ends first
+            {
+                ++interval;
+                intervalstart = intervalend;
+                intervalend = interval->m_endtime;
+            }
+            else                          // they both ended at the exact same time!
+            {
+                ++joint;
+                jointstart = jointend;
+                jointend += stairlengths[joint];
+                ++interval;
+                intervalstart = intervalend;
+                intervalend = interval->m_endtime;
+            }
+        }
+    }
+
+    // compute rec-rate contrib
+    if (!m_localpartforces.empty())
+    {
+        const vector<double>& rWait = treedata->GetRecSummary()->GetShortWait();
+        answ -= rWait[0] * param[m_r_here];
+    }
+
+    // there aren't separate "wait" and "point" terms wrt. sticks,
+    // therefore all the actual calculations are handled in "point".
+
+    return answ;
+}
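+// The while loop above walks two interval lists (the stick "stairs" and the
+// coalescent intervals) in lockstep, integrating over each maximal stretch on
+// which neither changes.  The underlying merge is the usual two-pointer
+// pattern; a sketch with hypothetical endpoint arrays a[], b[] and an
+// accumulate() stand-in:
+//
+//     size_t i = 0, j = 0;
+//     double start = 0.0;
+//     while (i < a.size() && j < b.size())
+//     {
+//         double end = min(a[i], b[j]);
+//         accumulate(start, end);    // piece of tree where nothing changes
+//         if (a[i] <= end) ++i;      // advance whichever list ends here
+//         if (b[j] <= end) ++j;      // (a tie advances both)
+//         start = end;
+//     }
+//
+// The code above spells out the three cases (joint ends first, interval ends
+// first, tie) explicitly instead.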
+
+//------------------------------------------------------------------------------------
+
+double StickSelectPL::lnPointTreeStick(const DoubleVec1d& param, const DoubleVec1d& lparam,
+                                       const TreeSummary* treedata)
+// there aren't separate "wait" and "point" terms with stick selection,
+// all the actual calculations are handled in "point".
+//
+// The actual equation for the log tree posterior likelihood:
+// Sum_Over_All_Stick_Intervals[
+//    -log(sqrt(2*pi))-log(sqrt(Var(prev_freq)*int_length))-Exponent]
+//
+// Exponent =
+//    (curr_freq-prev_freq-Mean(prev_freq)*int_length)**2
+//    ------------------------------------------------------------
+//             2*Var(prev_freq)*int_length
+//
+// Mean(freq) = sel_coeff*freq*(1-freq)+mu_to*(1-freq)-mu_from*freq
+// Var(freq) = freq*(1-freq) / theta
+{
+    assert(m_disstart != FLAGLONG);
+    double answ(0.0);
+
+    // stuff useful for all stick terms
+    const Interval * treesum = treedata->GetCoalSummary()->GetLongPoint();
+    const Interval * tit(treesum);
+    const DoubleVec1d& stairlengths(treedata->GetStickSummary().lengths);
+    const DoubleVec2d& stairfreqs(treedata->GetStickSummary().freqs);
+    const DoubleVec2d& stairlnfreqs(treedata->GetStickSummary().lnfreqs);
+    double tipprobA(stairfreqs[0][0]);
+    double lnthA(lparam[m_thetastart]), lnprA(log(tipprobA));
+    double jointend(stairlengths[0]);
+    long joint(0);
+
+    // compute theta contrib
+    do {
+        while (jointend < tit->m_endtime)
+        {
+            ++joint;
+            jointend += stairlengths[joint];
+        }
+
+        answ +=  LOG2 + lnprA - lnthA - stairlnfreqs[joint][tit->m_oldstatus];
+
+        tit = tit->m_next;
+    } while(tit != NULL);
+
+    // compute trait mutation contrib
+    treesum = treedata->GetDiseaseSummary()->GetLongPoint();
+    tit = treesum;
+    jointend = stairlengths[0];
+    joint = 0;
+    do {
+        while (jointend < tit->m_endtime)
+        {
+            ++joint;
+            jointend += stairlengths[joint];
+        }
+        // JDEBUG--is the macro correct??
+        answ +=  lparam[(tit->m_newstatus ? m_disstart+1 : m_disstart)] +
+            stairlnfreqs[joint][tit->m_oldstatus] - stairlnfreqs[joint][tit->m_newstatus];
+
+        tit = tit->m_next;
+    } while(tit != NULL);
+
+    // compute rec-rate contrib--Sum_j(recevents * log(r))
+    // as noted earlier (see Likelihood::FillForces()), the existence
+    // of a localpartforces vector is used to flag the presence/absence
+    // of recombination
+
+    // MDEBUG MREVIEW let's review this code--Mary doesn't get it.
+    if (!m_localpartforces.empty())
+    {
+        treesum = treedata->GetRecSummary()->GetLongPoint();
+        tit = treesum;
+        jointend = stairlengths[0];
+        joint = 0;
+        do {
+            while (jointend < tit->m_endtime)
+            {
+                ++joint;
+                jointend += stairlengths[joint];
+            }
+            // We can use m_partnerpicks[0] because there is only one lpforce in existence.
+            answ += lparam[m_r_here] + stairlnfreqs[joint][tit->m_partnerpicks[0]];
+            tit = tit->m_next;
+        } while(tit != NULL);
+    }
+
+    return answ;
+}
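+// Note the correspondence with lnWait() above: reading the formulas as
+// written, the per-pair coalescence rate in state 'state' at stair 'joint' is
+// 2*tipprobA/(tipthetaA*stairfreqs[joint][state]); lnWait() integrates
+// k*(k-1)/2 times that rate over each interval, and the term added per
+// coalescence here is simply its logarithm:
+//
+//     LOG2 + log(tipprobA) - log(tipthetaA) - log(stairfreqs[joint][state])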
+
+//------------------------------------------------------------------------------------
+
+double StickSelectPL::lnPoint(const DoubleVec1d& param, const DoubleVec1d& lparam, const TreeSummary* treedata)
+// There aren't separate "wait" and "point" terms with stick selection,
+// all the actual calculations are handled in "point".
+//
+// The actual equation for the log tree posterior likelihood:
+// Sum_Over_All_Stick_Intervals[
+//    -log(sqrt(2*pi))-log(sqrt(Var(prev_freq)*int_length))-Exponent]
+//
+// Exponent =
+//    (curr_freq-prev_freq-Mean(prev_freq)*int_length)**2
+//    ------------------------------------------------------------
+//             2*Var(prev_freq)*int_length
+//
+// Mean(freq) = sel_coeff*freq*(1-freq)+mu_to*(1-freq)-mu_from*freq
+// Var(freq) = freq*(1-freq) / theta_total
+{
+    assert(m_disstart != FLAGLONG);
+    double answ(lnPointTreeStick(param,lparam,treedata));
+
+    const DoubleVec1d& stairlengths(treedata->GetStickSummary().lengths);
+    const DoubleVec2d& stairfreqs(treedata->GetStickSummary().freqs);
+    double tipthetaA(param[m_thetastart]);
+    double tipprobA(stairfreqs[0][0]);
+    double s(param[m_s_here]),toBigA(param[m_toBigA_here]),
+        toSmallA(param[m_toSmallA_here]);
+    assert(stairfreqs.size() == stairlengths.size());
+
+    DoubleVec1d::size_type step;
+    for(step = 1; step < stairfreqs.size(); ++step)
+    {
+        double variance = StickVar(stairfreqs[step-1][0],tipprobA,tipthetaA);
+        answ += m_minuslnsqrt2pi - 0.5 * log(variance*stairlengths[step-1]);
+        double numer = CommonFunctional(stairfreqs[step][0],
+                                        stairfreqs[step-1][0],s,toBigA,toSmallA,stairlengths[step-1]);
+        double denom(2.0*variance*stairlengths[step-1]);
+        numer = -1.0 * numer * numer;
+        answ += numer/denom;
+    }
+
+    return answ;
+}
+
+//------------------------------------------------------------------------------------
+
+double StickSelectPL::DlnWait(const DoubleVec1d& param, const TreeSummary* treedata, const long& whichparam)
+{
+    long which;
+    double answ(0.0);
+
+    // compute theta contrib
+    if (m_thetastart <= whichparam && whichparam < m_thetastart + m_nxparts)
+    {
+        const DoubleVec2d& stairfreqs(treedata->GetStickSummary().freqs);
+        const DoubleVec1d& stairlengths(treedata->GetStickSummary().lengths);
+        const list<Interval>& treesum(treedata->GetCoalSummary()->GetLongWait());
+        list<Interval>::const_iterator interval(treesum.begin());
+        double tipthetaA(param[m_thetastart]);
+        double tipprobA(stairfreqs[0][0]);
+        double jointend(stairlengths[0]),intervalend(interval->m_endtime);
+        double jointstart(0.0),intervalstart(0.0);
+        long joint(0);
+        which = whichparam-m_thetastart;
+        while (interval != treesum.end())
+        {
+            // how long is the bit of tree where nothing changes?
+            double lengthininterval = min(jointend, intervalend) - max(jointstart, intervalstart);
+
+            answ += lengthininterval * tipprobA *
+                (interval->m_xpartlines[which])*(interval->m_xpartlines[which] - 1)
+                // because there is no migration we can use xpart; otherwise
+                // we'd need to add a loop over local partitions
+                / (tipthetaA * tipthetaA * stairfreqs[joint][which]);
+
+            if (jointend < intervalend) // joint ends first
+            {
+                ++joint;
+                jointstart = jointend;
+                jointend += stairlengths[joint];
+            }
+            else
+                if (intervalend < jointend) // else interval ends first
+                {
+                    ++interval;
+                    intervalstart = intervalend;
+                    intervalend = interval->m_endtime;
+                }
+                else                      // they both ended at the exact same time!
+                {
+                    ++joint;
+                    jointstart = jointend;
+                    jointend += stairlengths[joint];
+                    ++interval;
+                    intervalstart = intervalend;
+                    intervalend = interval->m_endtime;
+                }
+        }
+
+        return answ;
+    }
+
+    // compute disease contrib
+    if (m_disstart <= whichparam && whichparam < m_disstart + m_ndis)
+    {
+        assert(m_disstart != FLAGLONG);
+        if (whichparam == m_disstart) which = 0;
+        else which = 1;
+        const DoubleVec2d& stairfreqs(treedata->GetStickSummary().freqs);
+        const DoubleVec1d& stairlengths(treedata->GetStickSummary().lengths);
+        const list<Interval>& treesum(treedata->GetDiseaseSummary()->GetLongWait());
+        list<Interval>::const_iterator interval(treesum.begin());
+        double jointend(stairlengths[0]),intervalend(interval->m_endtime);
+        double jointstart(0.0),intervalstart(0.0);
+        long joint(0);
+
+        while (interval != treesum.end())
+        {
+            // how long is the bit of tree where nothing changes?
+            double lengthininterval = min(jointend, intervalend) - max(jointstart, intervalstart);
+
+            answ -= lengthininterval * interval->m_partlines[m_disindex][which] *
+                // the macro relies on only two disease states present
+                // equation relies on nxparts == nparts!
+                stairfreqs[joint][(which ? 0 : 1)]/stairfreqs[joint][which];
+
+            if (jointend < intervalend) // joint ends first
+            {
+                ++joint;
+                jointstart = jointend;
+                jointend += stairlengths[joint];
+            }
+            else
+                if (intervalend < jointend) // else interval ends first
+                {
+                    ++interval;
+                    intervalstart = intervalend;
+                    intervalend = interval->m_endtime;
+                }
+                else                      // they both ended at the exact same time!
+                {
+                    ++joint;
+                    jointstart = jointend;
+                    jointend += stairlengths[joint];
+                    ++interval;
+                    intervalstart = intervalend;
+                    intervalend = interval->m_endtime;
+                }
+        }
+
+        return answ;
+    }
+
+    // compute recombination contrib
+    if (whichparam == m_r_here)
+    {
+        assert(!m_localpartforces.empty());
+        const vector<double>& rWait = treedata->GetRecSummary()->GetShortWait();
+        return -rWait[0];
+    }
+
+    // We compute all terms involving the stick in lnPoint and DlnPoint
+    // (arbitrarily) and thus return 0 if asked to take dWait of those parameters
+    return answ;
+}
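+// Parameter-index layout assumed by DlnWait() above and DlnPoint() below,
+// as implied by the range checks and by the constructor / Set*Index() calls:
+//
+//     [m_thetastart, m_thetastart + m_nxparts)   Thetas, one per allelic class
+//     [m_disstart,   m_disstart + m_ndis)        trait mutation rates (A <-> a)
+//     m_s_here                                   selection coefficient s
+//     m_r_here                                   recombination rate (FLAGLONG if absent)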
+
+//------------------------------------------------------------------------------------
+
+double StickSelectPL::DlnPoint(const DoubleVec1d& param, const TreeSummary* treedata, const long& whichparam)
+{
+    // NB Assumes only one localpartforce and no migration, no growth!
+    long which;
+
+    // compute recombination contrib (does not involve stick)
+    if (whichparam == m_r_here)
+    {
+        const vector<double>& nrecs = treedata->GetRecSummary()->GetShortPoint();
+        if(nrecs[0]==0)
+        {
+            return 0.0;
+        }
+        else
+        {
+            return SafeDivide(nrecs[0], *(param.begin() + m_r_here));
+        }
+    }
+
+    // useful variables for all stick computations
+    const DoubleVec2d& stairfreqs(treedata->GetStickSummary().freqs);
+    const DoubleVec1d& stairlengths(treedata->GetStickSummary().lengths);
+    double s(param[m_s_here]),toBigA(param[m_toBigA_here]),
+        toSmallA(param[m_toSmallA_here]);
+    double tipthetaA(param[m_thetastart]);
+    double tipprobA(stairfreqs[0][0]);
+    assert(stairfreqs.size() == stairlengths.size());
+    double answ(0.0);
+
+    // compute theta contrib
+    if (m_thetastart <= whichparam && whichparam < m_thetastart + m_nxparts)
+    {
+        which = whichparam-m_thetastart;
+        const DoubleVec1d& ncoal = treedata->GetCoalSummary()->GetShortPoint();
+
+        answ = -ncoal[which] / tipthetaA;
+
+        // deal with the stick contribution to theta
+        DoubleVec1d::size_type step;
+        for(step = 1; step < stairfreqs.size(); ++step)
+        {
+            double numer = CommonFunctional(stairfreqs[step][0],
+                                            stairfreqs[step-1][0],s,toBigA,toSmallA,stairlengths[step-1]);
+            numer *= numer;
+            answ -= numer / (4.0 * stairfreqs[step-1][0] * tipprobA *
+                             (1.0 - stairfreqs[step-1][0]) * stairlengths[step-1]);
+            answ += 1.0 / (2 * tipthetaA);
+        }
+
+        return answ;
+    }
+
+    // compute disease contrib
+    if (m_disstart <= whichparam && whichparam < m_disstart + m_ndis)
+    {
+        // non-stick terms
+        assert(m_disstart != FLAGLONG);
+        if (whichparam == m_disstart) which = 0;
+        else which = 1;
+
+        answ += (treedata->GetDiseaseSummary()->GetShortPoint()[which]) / param[whichparam];
+
+        // stick terms
+        const DoubleVec2d& stairfreqs(treedata->GetStickSummary().freqs);
+        DoubleVec1d::size_type step;
+        // terms in toBigA
+        if (whichparam == m_toBigA_here)
+        {
+            for(step = 1; step < stairfreqs.size(); ++step)
+            {
+                answ += CommonFunctional(stairfreqs[step][0],stairfreqs[step-1][0],s,
+                                         toBigA,toSmallA,stairlengths[step-1]) * tipthetaA /
+                    (2.0 * tipprobA * stairfreqs[step-1][0]);
+            }
+            return answ;
+        }
+        // terms in toSmallA
+        if (whichparam == m_toSmallA_here)
+        {
+            for(step = 1; step < stairfreqs.size(); ++step)
+            {
+                answ -= CommonFunctional(stairfreqs[step][0],stairfreqs[step-1][0],s,
+                                         toBigA,toSmallA,stairlengths[step-1]) * tipthetaA /
+                    ((2.0 * tipprobA) * (1.0 - stairfreqs[step-1][0]));
+            }
+            return answ;
+        }
+        assert(false); // neither toBigA nor toSmallA?!
+    }
+
+    // compute s contrib
+    if (whichparam == m_s_here)
+    {
+        DoubleVec1d::size_type step;
+        for(step = 1; step < stairfreqs.size(); ++step)
+        {
+            answ += CommonFunctional(stairfreqs[step][0],stairfreqs[step-1][0],
+                                     s,toBigA,toSmallA,stairlengths[step-1]) * tipthetaA /
+                (2.0 * tipprobA);
+        }
+        return answ;
+    }
+
+    assert(false); // should have computed something!
+    return answ;
+}
+
+//____________________________________________________________________________________
diff --git a/src/postlike/plforces.h b/src/postlike/plforces.h
new file mode 100644
index 0000000..2a5d9eb
--- /dev/null
+++ b/src/postlike/plforces.h
@@ -0,0 +1,721 @@
+// $Id: plforces.h,v 1.39 2011/04/23 02:02:49 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// PLforces class --------------------------------------------------
+//
+// know how to calculate waiting times and point probabilities
+// of the parameters
+//
+
+#ifndef PLFORCES_H
+#define PLFORCES_H
+
+#include <algorithm>
+#include <cassert>
+#include <cmath>
+#include <functional>
+#include <numeric>
+#include <string>
+#include <vector>
+
+#include "treesum.h"
+#include "summary.h"
+
+class PLForces
+{
+  private:
+    PLForces ();  // not implemented
+
+  protected:
+    PLForces(const PLForces& src) :
+        m_nPop(src.m_nPop), m_pTreedata(src.m_pTreedata), m_start(src.m_start),
+        m_end(src.m_end), m_growthstart(src.m_growthstart)
+    { };
+
+    long m_nPop;
+    TreeSummary *m_pTreedata;    // non-owning pointer
+    long m_start;        //offsets into the param vector that does not yet exist
+    long m_end;
+
+    long m_growthstart; //we need to know where the growth parameters are stored
+    // we need this in all forces to handle the scaled times.
+
+  public:
+    PLForces (long thisnpop)
+    {
+        m_nPop = thisnpop;
+        m_start = 0L;
+        m_end = 0L;
+        m_growthstart = 0L;
+    };
+
+    virtual ~ PLForces () {};
+
+    virtual PLForces* Clone() const = 0;
+
+    virtual double lnWait (const vector < double >&param,
+                           const TreeSummary * treedata) = 0;
+
+    virtual double lnPoint (const vector < double >&param,
+                            const vector < double >&lparam,
+                            const TreeSummary * treedata) = 0;
+
+    virtual double DlnWait (const vector < double >&param,
+                            const TreeSummary * treedata,
+                            const long &whichparam) = 0;
+
+    virtual double DlnPoint (const vector < double >&param,
+                             const TreeSummary * treedata,
+                             const long &whichparam) = 0;
+
+    virtual long GetNparam() { return m_nPop; };
+    long GetStart()  { return m_start; }; // used by Force::FindOrdinalPosition()
+
+    virtual void SetStart(const long& start) { m_start = start; };
+    virtual void SetEnd  (const long& end  ) { m_end   = end;   };
+
+    virtual void SetGrowthStart(const long& growthstart)
+    { m_growthstart = growthstart; };
+};
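+// A concrete force contributes lnWait() + lnPoint() to the log tree posterior
+// likelihood, and DlnWait() + DlnPoint() to its derivative with respect to one
+// parameter.  A minimal sketch of a caller, with hypothetical 'force',
+// 'param', 'lparam', 'tsum' and 'whichparam'
+// (cf. StickSelectPL::lnPTreeStick in plforces.cpp):
+//
+//     double lnL  = force->lnWait(param, tsum)
+//                 + force->lnPoint(param, lparam, tsum);
+//     double dlnL = force->DlnWait(param, tsum, whichparam)
+//                 + force->DlnPoint(param, tsum, whichparam);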
+
+//------------------------------------------------------------------------------------
+
+class CoalescePL:public PLForces
+{
+  private:
+    CoalescePL ();  // not implemented
+
+  protected:
+    // dim: lpforce X xpart
+    vector<vector<DoubleVec1d::size_type> > m_whichlocalpart;
+
+    // dim: pforce X xpart
+    vector<vector<DoubleVec1d::size_type> > m_whichpart;
+
+    // dim: lpforce X part X xpart-list
+    vector<vector<vector<DoubleVec1d::size_type> > > m_whichlocalxparts;
+
+    // dim: pforce X part X xpart-list
+    vector<vector<vector<DoubleVec1d::size_type> > > m_whichxparts;
+
+    ForceVec m_localpartforces;
+
+    double CalculateScaledLPCounts(const DoubleVec1d& params,
+                                   const LongVec2d& picks) const;
+
+    CoalescePL(const CoalescePL& src) : PLForces(src) { };
+
+  public:
+    // coalescence related wait, and point functions
+    // and derivatives of them
+    ~CoalescePL () { };
+
+    CoalescePL (long thisnpop) : PLForces(thisnpop) {};
+
+    virtual PLForces* Clone() const
+    {
+        return new CoalescePL(*this);
+    };
+
+    void SetLocalPartForces(const ForceSummary& fs);
+
+    double lnWait (const vector < double >&param,
+                   const TreeSummary * treedata);
+
+    double lnPoint (const vector < double >&param,
+                    const vector < double >&lparam,
+                    const TreeSummary * treedata);
+
+    double DlnWait (const vector < double >&param,
+                    const TreeSummary * treedata,
+                    const long &whichparam);
+
+    double DlnPoint (const vector < double >&param,
+                     const TreeSummary * treedata,
+                     const long &whichparam);
+};
+
+//------------------------------------------------------------------------------------
+
+// subclass of CoalescePL that uses long form with growth
+class CoalesceGrowPL:public CoalescePL
+{
+  private:
+    CoalesceGrowPL();  // not implemented
+
+  protected:
+    TimeManager* m_timesize;
+
+    CoalesceGrowPL(const CoalesceGrowPL& src) :
+        CoalescePL(src)
+    { };
+
+    double lnWaitForTinyGrowth(const double theta, const double growth,
+                               const unsigned long xpartition,
+                               const std::list<Interval>& treesummary);
+
+    double DlnWaitForTinyGrowth(const vector < double >&param,
+                                const TreeSummary * treedata,
+                                const long &whichparam);
+  public:
+    ~CoalesceGrowPL () { };
+
+    CoalesceGrowPL (long thisnpop) : CoalescePL(thisnpop) {};
+
+    virtual PLForces* Clone() const
+    {
+        return new CoalesceGrowPL(*this);
+    };
+
+    void SetTimeManager(TimeManager* newtimesize) {m_timesize = newtimesize;};
+
+    double lnWait (const vector < double >&param,
+                   const TreeSummary * treedata);
+
+    double lnPoint (const vector < double >&param,
+                    const vector < double >&lparam,
+                    const TreeSummary * treedata);
+
+    double DlnWait (const vector < double >&param,
+                    const TreeSummary * treedata, const long &whichparam);
+
+    //  DlnPoint is growth-independent, so just use CoalescePL::DlnPoint
+    //  NB:  not any more, with growth+disease+recombination!
+    double DlnPoint (const vector < double >&param,
+                     const TreeSummary * treedata, const long &whichparam);
+};
+
+//------------------------------------------------------------------------------------
+
+// exponential growth coalescence related wait, and point functions
+// and derivatives of them
+class GrowPL:public PLForces
+{
+  private:
+    GrowPL(); // not implemented
+
+  protected:
+    GrowPL(const GrowPL& src) :
+        PLForces(src), m_thetastart(src.m_thetastart)
+    { };
+
+    double DlnWaitForTinyGrowth(const vector < double >&param,
+                                const TreeSummary * treedata,
+                                const long &whichparam);
+
+    long m_thetastart;
+
+  public:
+    ~GrowPL () { };
+
+    GrowPL (long thisnpop) : PLForces(thisnpop)
+    {
+        m_thetastart = 0;
+    };
+
+    virtual PLForces* Clone() const
+    {
+        return new GrowPL(*this);
+    };
+
+    virtual void SetThetaStart(const long& thetastart)
+    { m_thetastart = thetastart; };
+
+    double lnWait (const vector < double >&param,
+                   const TreeSummary * treedata);
+
+    double lnPoint (const vector < double >&param,
+                    const vector < double >&lparam, const TreeSummary * treedata);
+
+    double DlnWait (const vector < double >&param,
+                    const TreeSummary * treedata, const long &whichparam);
+
+    double DlnPoint (const vector < double >&param,
+                     const TreeSummary * treedata, const long &whichparam);
+};
+
+//------------------------------------------------------------------------------------
+
+class DiseasePL:public PLForces
+{
+  private:
+    DiseasePL(); // not implemented
+
+  protected:
+    DoubleVec1d m_msum;
+    DiseasePL(const DiseasePL& src) : PLForces(src), m_msum(src.m_msum) { };
+
+  public:
+    // disease related wait, and point functions
+    // and derivatives of them
+
+    DiseasePL (long thisnstati) : PLForces(thisnstati), m_msum(thisnstati,0.0) {}
+
+    ~DiseasePL () { };
+
+    virtual PLForces* Clone() const
+    {
+        return new DiseasePL(*this);
+    };
+
+    double lnWait (const vector < double >&param, const TreeSummary * treedata);
+
+    double lnPoint (const vector < double >&param,
+                    const vector < double >&lparam, const TreeSummary * treedata);
+
+    double DlnWait (const vector < double >&param,
+                    const TreeSummary * treedata, const long &whichparam);
+
+    double DlnPoint (const vector < double >&param,
+                     const TreeSummary * treedata, const long &whichparam);
+};
+
+//------------------------------------------------------------------------------------
+
+class DiseaseLogisticSelectionPL:public DiseasePL
+{
+  private:
+    DiseaseLogisticSelectionPL(); // not implemented
+
+  protected:
+    long m_s_is_here;
+
+    DiseaseLogisticSelectionPL(const DiseaseLogisticSelectionPL& src) :
+        DiseasePL(src)
+    { };
+
+    double DlnWaitForTinyLogisticSelectionCoefficient(const vector<double>& param,
+                                                      const TreeSummary *treedata,
+                                                      const long &whichparam);
+  public:
+    // disease related wait, and point functions
+    // and derivatives of them
+    DiseaseLogisticSelectionPL(long thisnstati) : DiseasePL(thisnstati) { };
+
+    ~DiseaseLogisticSelectionPL() { };
+
+    virtual PLForces* Clone() const
+    {
+        return new DiseaseLogisticSelectionPL(*this);
+    };
+
+    void SetSelectionCoefficientLocation(long paramvecindex)
+    {
+        m_s_is_here = paramvecindex;
+    };
+
+    double lnWait (const vector < double >&param, const TreeSummary * treedata);
+
+    double lnPoint (const vector < double >&param,
+                    const vector < double >&lparam, const TreeSummary * treedata);
+
+    double DlnWait (const vector < double >&param,
+                    const TreeSummary * treedata, const long &whichparam);
+
+    // DlnPoint is selection-independent, so just use DiseasePL::DlnPoint().
+};
+
+//------------------------------------------------------------------------------------
+
+class MigratePL:public PLForces
+{
+  private:
+    MigratePL (); // not implemented
+
+  protected:
+    DoubleVec1d m_msum;
+    MigratePL(const MigratePL& src) : PLForces(src), m_msum(src.m_msum) { };
+
+  public:
+    // migration related wait, and point functions
+    // and derivatives of them
+
+    MigratePL (long thisnpop) : PLForces(thisnpop)
+    {
+        m_msum = CreateVec1d(m_nPop, static_cast<double>(0.0));
+    };
+
+    ~MigratePL () { };
+
+    virtual PLForces* Clone() const
+    {
+        return new MigratePL(*this);
+    };
+
+    double lnWait (const vector < double >&param,
+                   const TreeSummary * treedata);
+
+    double lnPoint (const vector < double >&param,
+                    const vector < double >&lparam,
+                    const TreeSummary * treedata);
+
+    double DlnWait (const vector < double >&param,
+                    const TreeSummary * treedata,
+                    const long &whichparam);
+
+    double DlnPoint (const vector < double >&param,
+                     const TreeSummary * treedata,
+                     const long &whichparam);
+};
+
+//------------------------------------------------------------------------------------
+
+class DivMigPL:public PLForces
+{
+  private:
+    DivMigPL ();  // not implemented
+
+  protected:
+    DivMigPL(const DivMigPL& src) : PLForces(src) { };
+
+  public:
+    // migration related wait, and point functions
+    // and derivatives of them
+
+    DivMigPL (long thisnpop) : PLForces(thisnpop) {};
+
+    ~DivMigPL () { };
+
+    virtual PLForces* Clone() const
+    {
+        return new DivMigPL(*this);
+    };
+
+    double lnWait (const vector < double >&param,
+                   const TreeSummary * treedata);
+
+    double lnPoint (const vector < double >&param,
+                    const vector < double >&lparam,
+                    const TreeSummary * treedata);
+    // NB:  Divergence is currently incompatible with likelihood
+    // evaluation, so the following two functions are not used.
+    // If they are ever needed, implementation is very similar to the
+    // MigPL versions.
+    double DlnWait (const vector < double >&param,
+                    const TreeSummary * treedata,
+                    const long &whichparam) { assert(false); return 0;};
+
+    double DlnPoint (const vector < double >&param,
+                     const TreeSummary * treedata,
+                     const long &whichparam) { assert(false); return 0;};
+};
+
+//------------------------------------------------------------------------------------
+
+class RecombinePL:public PLForces
+{
+  private:
+    RecombinePL ();  // not implemented
+
+  protected:
+    RecombinePL(const RecombinePL& src) : PLForces(src) { };
+
+  public:
+    // recombination related wait, and point functions
+    // and derivatives of them
+    RecombinePL(long thisnpop) : PLForces(thisnpop) {};
+
+    ~RecombinePL () { };
+
+    virtual PLForces* Clone() const
+    {
+        return new RecombinePL(*this);
+    };
+
+    double lnWait (const vector < double >&param,
+                   const TreeSummary * treedata);
+
+    double lnPoint (const vector < double >&param,
+                    const vector < double >&lparam,
+                    const TreeSummary * treedata);
+
+    double DlnWait (const vector < double >&param,
+                    const TreeSummary * treedata,
+                    const long &whichparam);
+
+    double DlnPoint (const vector < double >&param,
+                     const TreeSummary * treedata,
+                     const long &whichparam);
+};
+
+//------------------------------------------------------------------------------------
+
+class DivPL: public PLForces
+// Yes, this class does NOTHING!  (except for keeping track of its
+// parameters, which makes the maximizer happy)
+{
+  private:
+    DivPL(); // not implemented
+
+  protected:
+    DivPL(const DivPL& src) : PLForces(src) {};
+
+  public:
+    DivPL(long nparam) : PLForces(nparam) {};
+
+    ~DivPL() {};
+
+    virtual PLForces* Clone() const
+    { return new DivPL(*this); };
+
+    double lnWait (const vector < double >&param,
+                   const TreeSummary * treedata) { return 1.0; };
+
+    double lnPoint (const vector < double >&param,
+                    const vector < double >&lparam,
+                    const TreeSummary * treedata) { return 1.0; };
+
+    double DlnWait (const vector < double >&param,
+                    const TreeSummary * treedata,
+                    const long &whichparam) { return 1.0; };
+
+    double DlnPoint (const vector < double >&param,
+                     const TreeSummary * treedata,
+                     const long &whichparam) { return 1.0; };
+};
+
+//------------------------------------------------------------------------------------
+
+class SelectPL:public PLForces
+{
+  private:
+    SelectPL(); // not implemented
+
+  protected:
+    SelectPL(const SelectPL& src) : PLForces(src) { };
+
+  public:
+    // selection related wait, and point functions
+    // and derivatives of them
+    virtual PLForces* Clone() const
+    {
+        return new SelectPL(*this);
+    };
+
+    double lnWait (const vector < double >&param,
+                   const TreeSummary * treedata);
+
+    double lnPoint (const vector < double >&param,
+                    const vector < double >&lparam,
+                    const TreeSummary * treedata);
+
+    double DlnWait (const vector < double >&param,
+                    const TreeSummary * treedata,
+                    const long &whichparam);
+
+    double DlnPoint (const vector < double >&param,
+                     const TreeSummary * treedata,
+                     const long &whichparam);
+};
+
+//------------------------------------------------------------------------------------
+
+class CoalesceLogisticSelectionPL:public CoalescePL
+{
+  private:
+    CoalesceLogisticSelectionPL(); // not implemented
+
+  protected:
+    CoalesceLogisticSelectionPL(const CoalesceLogisticSelectionPL& src) : CoalescePL(src),
+                                                                          m_s_is_here(src.m_s_is_here) { };
+
+    long m_s_is_here;
+
+    double lnWaitForTinyLogisticSelectionCoefficient(const vector<double>& param,
+                                                     const TreeSummary *treedata);
+
+    double DlnWaitForTinyLogisticSelectionCoefficient(const vector<double>& param,
+                                                      const TreeSummary *treedata,
+                                                      const long &whichparam);
+
+  public:
+    ~CoalesceLogisticSelectionPL () { };
+
+    // perhaps move to the .cpp file?
+    CoalesceLogisticSelectionPL (long thisnpop) : CoalescePL(thisnpop)
+    {
+        if (2 != thisnpop)
+        {
+            string msg = "Attempted to create a CoalesceLogisticSelectionPL object ";
+            msg += "with " + ToString(thisnpop) + " populations.  This object can only ";
+            msg += "be used with two populations, one for the favored allele and ";
+            msg += "one for the disfavored allele.";
+            throw implementation_error(msg);
+        }
+    };
+
+    virtual PLForces* Clone() const
+    {
+        return new CoalesceLogisticSelectionPL(*this);
+    };
+
+    void SetSelectionCoefficientLocation(long paramvecindex)
+    {
+        m_s_is_here = paramvecindex;
+    };
+
+    double lnWait(const vector<double>& param,
+                  const TreeSummary *treedata);
+
+    double lnPoint(const vector<double>& param,
+                   const vector<double>& lparam,
+                   const TreeSummary *treedata);
+
+    double DlnWait(const vector<double>& param,
+                   const TreeSummary *treedata, const long &whichparam);
+
+    double DlnPoint(const vector<double>& param,
+                    const TreeSummary *treedata, const long &whichparam);
+};
+
+//------------------------------------------------------------------------------------
+
+class LogisticSelectionPL:public PLForces
+{
+  private:
+    LogisticSelectionPL(); // not implemented
+
+  protected:
+    LogisticSelectionPL(const LogisticSelectionPL& src) : PLForces(src), m_s_is_here(src.m_s_is_here) { };
+    long m_s_is_here;
+
+    double DlnWaitForTinyLogisticSelectionCoefficient(const vector<double>& param,
+                                                      const TreeSummary *treedata,
+                                                      const long &whichparam);
+
+  public:
+    ~LogisticSelectionPL () { };
+
+    // MDEBUG the 0 is arbitrary--apparently this class doesn't use npop
+    LogisticSelectionPL(long paramvecindex) : PLForces(0)
+    {
+        m_s_is_here = paramvecindex;
+    };
+
+    virtual PLForces* Clone() const
+    {
+        return new LogisticSelectionPL(*this);
+    };
+
+    double lnWait(const vector<double>& param,
+                  const TreeSummary *treedata);
+
+    double lnPoint(const vector<double>& param,
+                   const vector<double>& lparam,
+                   const TreeSummary *treedata);
+
+    double DlnWait(const vector<double>& param,
+                   const TreeSummary *treedata, const long &whichparam);
+
+    double DlnPoint(const vector<double>& param,
+                    const TreeSummary *treedata, const long &whichparam);
+};
+
+//------------------------------------------------------------------------------------
+
+class StickSelectPL:public PLForces
+{
+  private:
+    StickSelectPL(); // not implemented
+
+  public:
+    // stick selection related wait, and point functions
+    // and derivatives of them
+    StickSelectPL(const ForceSummary& fs);
+
+    virtual ~StickSelectPL();
+
+    // we accept the default copy ctor and operator=
+
+    virtual PLForces* Clone() const
+    {
+        return new StickSelectPL(*this);
+    };
+
+    double lnPTreeStick (const vector < double >&param,
+                         const vector < double >&lparam,
+                         const TreeSummary * treedata);
+
+    double lnWait (const vector < double >&param,
+                   const TreeSummary * treedata);
+
+    double lnPoint (const vector < double >&param,
+                    const vector < double >&lparam,
+                    const TreeSummary * treedata);
+
+    double DlnWait (const vector < double >&param,
+                    const TreeSummary * treedata,
+                    const long &whichparam);
+
+    double DlnPoint (const vector < double >&param,
+                     const TreeSummary * treedata,
+                     const long &whichparam);
+
+    void SetThetastart(long start) {m_thetastart = start;};
+    void SetThetaend(long end) {m_thetaend = end;};
+    void SetToSmallAIndex(long index);
+    void SetToBigAIndex(long index);
+    void SetSelCoeffIndex(long index) {m_s_here = index;};
+    void SetRecRateIndex(long index) {m_r_here = index;};
+    void SetLocalPartForces(const ForceSummary& fs);
+
+    // these are exposed as public so that Arranger.PStickParams() can find
+    // them -- the only alternative considered at this time was making them
+    // free functions, which Jon rejects for now.
+    double StickMean(double freqA, double s, double toA, double toa) const;
+    double StickVar(double freqA, double tipfreqA, double thetaA) const;
+
+  protected:
+    long m_thetastart, m_thetaend;
+    long m_toBigA_here;
+    long m_toSmallA_here;
+    long m_s_here;
+    long m_r_here;
+
+    // helper(s) used for stick selection
+    double m_minuslnsqrt2pi;
+
+    // helper(s) used for disease
+    long m_disstart;
+    long m_ndis;
+    long m_disindex;
+
+    // helpers(s) used for coalescence
+    long m_nxparts;
+    LongVec1d m_partindex;
+    ForceVec m_localpartforces;
+
+    // dim: lpforce X xpart
+    vector<vector<DoubleVec1d::size_type> > m_whichlocalpart;
+
+    // dim: pforce X xpart
+    vector<vector<DoubleVec1d::size_type> > m_whichpart;
+
+    // dim: lpforce X part X xpart-list
+    vector<vector<vector<DoubleVec1d::size_type> > > m_whichlocalxparts;
+
+    // dim: pforce X part X xpart-list
+    vector<vector<vector<DoubleVec1d::size_type> > > m_whichxparts;
+
+    // dim: xpart
+    vector<DoubleVec1d::size_type> m_whichmigpart;
+
+    double CommonFunctional(double freqBigA, double prevfreqBigA, double s,
+                            double toBigA, double toSmallA, double length) const;
+
+    // the ln point term covering P(Tree|Stick,params)
+    double lnPointTreeStick (const vector < double >&param,
+                             const vector < double >&lparam,
+                             const TreeSummary * treedata);
+};
+
+#endif // PLFORCES_H
+
+//____________________________________________________________________________________
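
Every concrete force class above exposes the same virtual Clone() hook, returning new <Derived>(*this), which is what lets a mixed collection of PLForces pointers be deep-copied without knowing each element's dynamic type.  A minimal sketch of that use follows; the container, the function name, and the ownership policy are illustrative, not the ones the rest of the tree uses:

    #include <vector>
    #include "plforces.h"   // header name inferred from the PLFORCES_H guard above

    // Deep-copy a heterogeneous set of force objects; the caller owns the clones.
    std::vector<PLForces*> DeepCopyForces(const std::vector<PLForces*>& src)
    {
        std::vector<PLForces*> copy;
        copy.reserve(src.size());
        for (std::vector<PLForces*>::const_iterator it = src.begin();
             it != src.end(); ++it)
        {
            // Clone() dispatches to the most-derived copy constructor
            // (GrowPL, MigratePL, RecombinePL, ...).
            copy.push_back((*it)->Clone());
        }
        return copy;
    }
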
diff --git a/src/postlike/plotstat.cpp b/src/postlike/plotstat.cpp
new file mode 100644
index 0000000..eb6fbd3
--- /dev/null
+++ b/src/postlike/plotstat.cpp
@@ -0,0 +1,45 @@
+// $Id: plotstat.cpp,v 1.10 2010/03/17 17:25:59 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <vector>
+#include "plotstat.h"
+
+//------------------------------------------------------------------------------------
+
+ProfileLineStruct::ProfileLineStruct()
+    : loglikelihood(0.0),
+      percentile(0.0),
+      profilevalue(0.0),
+      profparam(),
+      isExtremeHigh(false),
+      isExtremeLow(false),
+      maximizerWarning(false)
+{
+}
+
+ProfileLineStruct::~ProfileLineStruct()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+const ProfileLineStruct& ProfileStruct::GetProfileLine(double percentile) const
+{
+    vector<ProfileLineStruct>::const_iterator prof;
+    for(prof = profilelines.begin(); prof != profilelines.end(); ++prof)
+        if (percentile == prof->percentile)
+            return *prof;
+
+    assert(false);
+    return(*prof);
+}
+
+//____________________________________________________________________________________
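
GetProfileLine() above matches percentiles by exact floating-point equality, which works only because callers look lines up with the very same double that was stored in ProfileLineStruct::percentile; a recomputed percentile would fall through to the assert.  If a tolerant lookup were ever wanted, it could look like this sketch (FindProfileLine and its epsilon are hypothetical, not part of the code above):

    #include <cmath>
    #include <cstddef>
    #include <vector>
    #include "plotstat.h"

    // Hypothetical tolerance-based alternative to ProfileStruct::GetProfileLine().
    const ProfileLineStruct* FindProfileLine(const std::vector<ProfileLineStruct>& lines,
                                             double percentile, double eps = 1e-9)
    {
        for (std::vector<ProfileLineStruct>::const_iterator it = lines.begin();
             it != lines.end(); ++it)
        {
            if (std::fabs(it->percentile - percentile) < eps)
                return &(*it);
        }
        return NULL;   // caller handles "not found" instead of hitting an assert
    }
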
diff --git a/src/postlike/plotstat.h b/src/postlike/plotstat.h
new file mode 100644
index 0000000..602e601
--- /dev/null
+++ b/src/postlike/plotstat.h
@@ -0,0 +1,77 @@
+// $Id: plotstat.h,v 1.12 2011/04/23 02:02:49 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef PLOTSTAT_H
+#define PLOTSTAT_H
+
+#include <cmath>
+#include <string>
+#include <vector>
+
+#include "vectorx.h"
+
+class ForceSummary;
+class Parameter;
+
+enum plottype { log_ten, linear };
+enum analysistype { mle, profile };
+
+struct PlotStruct
+{
+  public:
+    Parameter *xaxis;
+    Parameter *yaxis;
+    DoubleVec2d plane;
+};
+
+class ProfileLineStruct
+{
+  public:
+    // class variables
+    ProfileLineStruct();
+    ~ProfileLineStruct();
+    double loglikelihood;
+    double percentile;
+    double profilevalue;
+    DoubleVec1d profparam;
+    bool isExtremeHigh;
+    bool isExtremeLow;
+    bool maximizerWarning;
+};
+
+class ProfileStruct
+{
+  public:
+    // class variables
+    vector < ProfileLineStruct > profilelines;
+
+    // class methods
+    const ProfileLineStruct& GetProfileLine(double percentile) const;
+};
+
+class LikeGraphs
+{
+  public:
+    LikeGraphs () {};
+    ~LikeGraphs () {};
+
+    string MakeBorder (long points, long breaks = 4);
+    vector < string > MakeInnards (const DoubleVec2d & likes);
+    vector < string > MakeLikePlot (const StringVec1d & innards,
+                                    const Parameter & paramX,
+                                    const Parameter & paramY, long breaks = 4);
+    DoubleVec2d AddGraphs (const DoubleVec2d & a, const DoubleVec2d & b);
+
+  private:
+    bool Divides (long x, long y);
+};
+
+#endif // PLOTSTAT_H
+
+//____________________________________________________________________________________
diff --git a/src/postlike/profile.cpp b/src/postlike/profile.cpp
new file mode 100644
index 0000000..f984745
--- /dev/null
+++ b/src/postlike/profile.cpp
@@ -0,0 +1,821 @@
+// $Id: profile.cpp,v 1.73 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// profile.cpp [part of the analyzer module]
+//
+// - profiles (fixed and percentile)
+//
+// Peter Beerli November 2000
+//
+
+#include <cassert>
+#include <iostream>
+#include <vector>
+
+#include "analyzer.h"
+#include "constants.h"
+#include "forcesummary.h"
+#include "mathx.h"
+#include "maximizer.h"
+#include "parameter.h"
+#include "registry.h"
+#include "runreport.h"
+#include "types.h"
+#include "vector_constants.h"
+#include "vectorx.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+const double PROFILE_EPSILON = 0.00001; // accuracy of the percentile search in profiles
+
+//------------------------------------------------------------------------------------
+// returns a structure containing all possible profiletables
+// according to ParamStruct *toDolist
+
+void Analyzer::CalcProfiles (const DoubleVec1d MLEs, double likelihood, long int region)
+{
+    ParamVector toDolist(false);
+    ParamVector::iterator i;
+    long int ii;
+
+    // setup and save the guides for the gradients
+    m_MLEparams = MLEs;
+    m_MLElparams = CreateVec1d(m_MLEparams.size(), static_cast<double>(0.0));
+    LogVec0(m_MLEparams, m_MLElparams);
+    m_likelihood = likelihood;
+
+    // crank up runtime progress reports
+    long int numprofiles = toDolist.NumProfiledParameters();
+    RunReport& runreport = registry.GetRunReport();
+    runreport.RecordProfileStart();
+    long int thisprofile = 0;
+
+    for (i = toDolist.begin (), //over all parameters in toDo list
+             ii = 0; i != toDolist.end (); ++i, ++ii)
+    {
+        ParamStatus mystatus = i->GetStatus();
+        ProfileStruct emptyprofile ;
+        if (!mystatus.Valid()) continue;
+        if (mystatus.Varies() && i->IsProfiled())
+        {
+            m_maximizer->ProfileGuideFix(ii);
+            CalcProfile (i, ii, region);
+            m_maximizer->ProfileGuideRestore();
+            runreport.PrognoseProfiles(thisprofile, numprofiles);
+            ++thisprofile;
+        }
+        else                          // constant or non-head grouped parameters
+        {
+            i->AddProfile(emptyprofile, m_maximizer->GetPostlikeTag());
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+// returns a structure containing a single profile table
+// according to ParamStruct *toDolist
+
+void Analyzer::CalcProfile (ParamVector::iterator guide, long int pnum, long int region)
+{
+    switch(guide->GetProfileType())
+    {
+        case profile_PERCENTILE :
+            CalcProfilePercentile (guide, pnum, region);
+            break;
+        case profile_FIX :
+            CalcProfileFixed (guide, pnum, region);
+            break;
+        case profile_NONE :
+            assert(false); //This should have been taken care of in CalcProfiles.
+            ProfileStruct emptyprofile ;
+            guide->AddProfile(emptyprofile, m_maximizer->GetPostlikeTag());
+            break;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Analyzer::CalcProfileFixed (ParamVector::iterator guide, long int pnum, long int region)
+{
+    ProfileStruct theprofile;
+    m_newparams = m_MLEparams;
+
+    vector < double >::const_iterator fix;
+    unsigned long int i = 0u;
+    DoubleVec1d modifiers = m_forcesummary.GetModifiers(pnum); // FS gets 'em from ParamVector
+
+    for (fix = modifiers.begin(); fix != modifiers.end (); ++fix, ++i)
+    {
+        if (m_likelihood == -DBL_BIG)
+        {
+            AddBlankProfileForModifier(*fix, theprofile.profilelines);
+            continue;
+        }
+        ProfileLineStruct profileline;
+        double mlevalue = m_MLEparams[pnum];
+        double newvalue = (*fix) * mlevalue;
+        if (((guide->IsForce(force_GROW) && (i >= vecconst::growthmultipliers.size())) ||
+             (guide->IsForce(force_LOGISTICSELECTION) &&
+              (i >= vecconst::logisticselectionmultipliers.size()))) &&
+            (registry.GetUserParameters().GetVerbosity() != CONCISE) &&
+            (registry.GetUserParameters().GetVerbosity() != NONE   ))
+        {
+            newvalue = (*fix);
+            // The second part of this vector is filled with values which we want
+            // to *set* the param to, not to multiply the param by. --LS
+        }
+        m_newparams = m_MLEparams;
+        bool islog = false;
+        registry.GetForceSummary().SetParamWithConstraints(pnum, newvalue, m_newparams, islog);
+        string message = "";
+        bool retval(true);
+        retval = m_maximizer->Calculate(m_newparams, profileline.loglikelihood, message);
+        if (!retval)
+        {
+            if (profileline.loglikelihood != -DBL_BIG)
+            {
+                profileline.loglikelihood = -DBL_BIG;
+                RunReport& runreport = registry.GetRunReport();
+                string msg = "The maximizer failed for position ";
+                msg += ToString(pnum) + ", without a corresponding catastrophically  "
+                    + "low likelihood.  Re-setting to -" + Pretty(DBL_BIG)
+                    + " and continuing.";
+                runreport.ReportChat(msg);
+            }
+        }
+        else if (message != "")
+        {
+            //We got a warning from the maximizer (not an error)
+            message = "Warning:  When calculating the maximum for other parameters "
+                "when " + guide->GetName() + " equals " + ToString(newvalue) +
+                ", we received the following warning from the maximizer:  " +
+                message;
+            registry.GetRunReport().ReportDebug(message);
+        }
+
+        if (region != FLAGLONG)
+        {
+            //We need to convert the regional parameters to global parameters.
+            ForceParameters fp(region);
+            fp.SetRegionalParameters(m_newparams);
+            m_newparams = fp.GetGlobalParameters();
+        }
+        profileline.profilevalue = m_newparams[pnum];
+        profileline.profparam = m_newparams;
+        profileline.percentile = (*fix);
+        theprofile.profilelines.push_back(profileline);
+    }
+
+    guide->AddProfile(theprofile, m_maximizer->GetPostlikeTag());
+}
+
+//------------------------------------------------------------------------------------
+
+void Analyzer::CalcProfilePercentile (ParamVector::iterator guide, long int pnum, long int region)
+{
+    ProfileStruct theprofile;
+    vector<ProfileLineStruct> localprofiles;
+    m_newparams = m_MLEparams;
+
+    //Figure out what log likelihoods we need to hit.  These come from
+    // the percentiles in m_forcesummary.
+    DoubleVec1d percents = m_forcesummary.GetModifiers(pnum);
+    DoubleVec1d lowTargetLikes;
+    DoubleVec1d highTargetLikes;
+    DoublesMap  percsForLowLikes, percsForHighLikes;
+    for (unsigned long int percnum = 0; percnum<percents.size(); percnum++)
+    {
+        long int df = 1;
+        // It's a bit weird, but when profiling, the degrees of freedom
+        //  is 1, since that's the *difference* in degrees of freedom
+        //  between letting all parameters vary and letting all but
+        //  one parameter vary (he says, gesticulating wildly) --LS
+        double percent = percents[percnum];
+        if (percent < 0.5)
+        {
+            double targetLike = m_likelihood - (find_chi(df, (percent * 2.0))/2.0);
+            if (!percsForLowLikes.insert(make_pair(targetLike, percent)).second)
+            {
+                //The new targetLike is the same as an old targetLike, perhaps because
+                // the maximum likelihood is very very low.
+                AddBlankProfileForModifier(percent, localprofiles);
+            }
+            lowTargetLikes.push_back(targetLike);
+        }
+        else
+        {
+            double targetLike = m_likelihood - (find_chi(df, (1.0 - percent)*2.0)/2.0);
+            if (!percsForHighLikes.insert(make_pair(targetLike, percent)).second)
+            {
+                //The new targetLike is the same as an old targetLike, perhaps because
+                // the maximum likelihood is very very low.
+                AddBlankProfileForModifier(percent, localprofiles);
+            }
+            highTargetLikes.push_back(targetLike);
+        }
+    }
+
+    // We now go through the low set first and the high set second.  We'll do
+    // some setup first in each that's different, then the rest of the algorithm
+    // needs to work for both types.
+    DoHalfTheProfile(false, lowTargetLikes, percsForLowLikes,  pnum, region, localprofiles, guide);
+    DoHalfTheProfile(true, highTargetLikes, percsForHighLikes, pnum, region, localprofiles, guide);
+    theprofile.profilelines = localprofiles;
+    guide->AddProfile(theprofile, m_maximizer->GetPostlikeTag());
+}
+
+//------------------------------------------------------------------------------------
+
+void Analyzer::DoHalfTheProfile(bool high, DoubleVec1d& targetLikes,
+                                DoublesMap& percsForLikes,
+                                long int pnum, long int region,
+                                vector<ProfileLineStruct>& localprofiles,
+                                ParamVector::iterator guide)
+{
+    DoublesMap foundLikesForVals;
+    DoubleToVecMap foundVecsForVals;
+    DoubleToStringMap messagesForVals;
+    string message;
+
+    // ****Put the maximum into our likes and vectors.****
+    foundLikesForVals.insert(make_pair(m_MLEparams[pnum], m_likelihood));
+    foundVecsForVals.insert(make_pair(m_MLEparams[pnum], m_MLEparams));
+    messagesForVals.insert(make_pair(m_MLEparams[pnum], ""));
+
+    // ****Put one extreme value into our likes and vectors.****
+    double extremeParam, extremeLike;
+    if (high == false)
+    {
+        extremeParam = LowParameter(pnum, m_MLEparams[pnum]);
+    }
+    else
+    {
+        extremeParam = HighParameter(pnum, m_MLEparams[pnum]);
+    }
+    m_newparams = m_MLEparams;
+    bool islog = false;
+    registry.GetForceSummary().SetParamWithConstraints(pnum, extremeParam, m_newparams, islog);
+
+    m_maximizer->Calculate(m_newparams, extremeLike, message);
+
+    //Note:  maximizer failure is fine--it's supposed to be extreme.
+    foundLikesForVals.insert(make_pair(extremeParam, extremeLike));
+    foundVecsForVals.insert(make_pair(extremeParam, m_newparams));
+    messagesForVals.insert(make_pair(extremeParam, message));
+
+    // ****Now we can start looking for our targets, starting from the closest to the MLE.****
+    sort(targetLikes.begin(), targetLikes.end());
+    reverse(targetLikes.begin(), targetLikes.end());
+
+    for (DoubleVec1d::iterator targetLike = targetLikes.begin();
+         targetLike != targetLikes.end(); targetLike++)
+    {
+        ProfileLineStruct localprofile;
+        double percent = percsForLikes.find(*targetLike)->second;
+        if (m_likelihood == -DBL_BIG)
+        {
+            AddBlankProfileForModifier(percent, localprofiles);
+            continue;
+        }
+        DoublesMapiter highValAndLike = GetLastHigher(foundLikesForVals, *targetLike, high);
+        DoublesMapiter lowValAndLike =  GetFirstLower(foundLikesForVals, *targetLike, high);
+        DoublesMapiter closestFoundValAndLike = ClosestFoundFor(foundLikesForVals, *targetLike);
+
+        bool keep_going = true;
+        while ((fabs(closestFoundValAndLike->second - *targetLike) > PROFILE_EPSILON) && keep_going)
+        {
+            if (lowValAndLike == foundLikesForVals.end())
+            {
+                keep_going = AddMoreExtremeValue(foundLikesForVals, foundVecsForVals,
+                                                 messagesForVals, high, pnum);
+                highValAndLike = GetLastHigher(foundLikesForVals, *targetLike, high);
+                lowValAndLike = GetFirstLower(foundLikesForVals, *targetLike, high);
+                closestFoundValAndLike = ClosestFoundFor(foundLikesForVals, *targetLike);
+                continue;
+            }
+            if (fabs((highValAndLike->first - lowValAndLike->first)
+                     / max(fabs(highValAndLike->first), fabs(lowValAndLike->first)))
+                > PROFILE_EPSILON)
+            {
+                m_newparams = (foundVecsForVals.find(highValAndLike->first))->second;
+                double newlike;
+                double newval = GetNewValFromBracket(highValAndLike, lowValAndLike, *targetLike);
+                registry.GetForceSummary().SetParamWithConstraints(pnum, newval, m_newparams,islog);
+                m_maximizer->Calculate(m_newparams, newlike, message);
+                //Again, we're not worrying about maximization failure here--we'll
+                // assume we're simply scootching in from the most extreme value so far.
+                foundLikesForVals.insert(make_pair(newval, newlike));
+                foundVecsForVals.insert(make_pair(newval, m_newparams));
+                messagesForVals.insert(make_pair(newval, message));
+                if (newlike < *targetLike)
+                {
+                    lowValAndLike = foundLikesForVals.find(newval);
+                }
+                else
+                {
+                    highValAndLike = foundLikesForVals.find(newval);
+                }
+            }
+            else
+            {
+                //The likelihoods are not near each other, but the absolute values
+                // of the parameters are.  The most likely cause of this situation
+                // is that the lower likelihood is wrong because the maximizer didn't
+                // have very good starting values when it tried it.  So, we re-try
+                // that lower value and see if we get a better likelihood.
+                keep_going = ExpandSearch(foundLikesForVals, foundVecsForVals,
+                                          lowValAndLike, highValAndLike,
+                                          messagesForVals, pnum, *targetLike);
+                highValAndLike = GetLastHigher(foundLikesForVals, *targetLike, high);
+                lowValAndLike = GetFirstLower(foundLikesForVals, *targetLike, high);
+            }
+            closestFoundValAndLike =
+                ClosestFoundFor(foundLikesForVals, *targetLike);
+        }
+
+        // ****We're as close as we're going to get to our target--add it.****
+        if (region != FLAGLONG)
+        {
+            //We need to convert the regional parameters to global parameters.
+            ForceParameters fp(region);
+            fp.SetRegionalParameters(m_newparams);
+            m_newparams = fp.GetGlobalParameters();
+        }
+        double foundVal = closestFoundValAndLike->first;
+        double foundLike = closestFoundValAndLike->second;
+        if (lowValAndLike == foundLikesForVals.end())
+        {
+            //All likelihoods are greater than our target likelihood.  Take the
+            // most extreme value and use that.  However, label the profile
+            // as being extreme so we can put a '<' or '>' in front of it for
+            // the output report.
+            if (high)
+            {
+                foundVal = foundLikesForVals.rbegin()->first;
+                foundLike = foundLikesForVals.rbegin()->second;
+                localprofile.isExtremeHigh = true;
+            }
+            else
+            {
+                foundVal = foundLikesForVals.begin()->first;
+                foundLike = foundLikesForVals.begin()->second;
+                localprofile.isExtremeLow = true;
+            }
+        }
+
+        string warning = messagesForVals.find(foundVal)->second;
+        if (warning != "")
+        {
+            //We got a warning from the maximizer (not an error)
+            string msg = "Warning:  When calculating the maximum for other parameters ";
+            msg+= "for the " + ToString(percent) + " profile for "
+                + guide->GetName() +
+                " (when it equals " + ToString(foundVal) +
+                "), we received the following warning from the maximizer:  " +
+                warning;
+            registry.GetRunReport().ReportDebug(msg);
+            localprofile.maximizerWarning = true;
+        }
+
+        localprofile.profilevalue = foundVal;
+        localprofile.loglikelihood = foundLike;
+        localprofile.profparam = foundVecsForVals.find(foundVal)->second;
+        localprofile.percentile = percent;
+
+        if (region != FLAGLONG)
+        {
+            //We might need to convert from regional values to global values
+            ForceParameters fp(region);
+            fp.SetRegionalParameters(localprofile.profparam);
+            localprofile.profparam = fp.GetGlobalParameters();
+            localprofile.profilevalue = (fp.GetGlobalParameters())[pnum];
+        }
+
+        localprofiles.push_back(localprofile);
+    }
+
+    CheckForMultipleMaxima(foundLikesForVals, high);
+}
+
+//------------------------------------------------------------------------------------
+
+bool Analyzer::AddMoreExtremeValue(DoublesMap& foundLikesForVals,
+                                   DoubleToVecMap& foundVecsForVals,
+                                   DoubleToStringMap& messagesForVals,
+                                   bool high, long int pnum)
+{
+    double oldextreme;
+    if (high)
+    {
+        oldextreme = foundLikesForVals.rbegin()->first;
+    }
+    else
+    {
+        oldextreme = foundLikesForVals.begin()->first;
+    }
+    double newextreme;
+    if (high)
+    {
+        newextreme = HighParameter(pnum, oldextreme);
+        if (newextreme <= oldextreme)
+        {
+            return false;
+        }
+    }
+    else
+    {
+        newextreme = LowParameter(pnum, oldextreme);
+        if (newextreme >= oldextreme)
+        {
+            return false;
+        }
+    }
+    m_newparams = foundVecsForVals.find(oldextreme)->second;
+    bool islog = false;
+    registry.GetForceSummary().SetParamWithConstraints(pnum, newextreme, m_newparams, islog);
+    double foundLike;
+    string message;
+    m_maximizer->Calculate(m_newparams, foundLike, message);
+    //We don't care if this fails.
+    foundLikesForVals.insert(make_pair(newextreme, foundLike));
+    foundVecsForVals.insert(make_pair(newextreme, m_newparams));
+    messagesForVals.insert(make_pair(newextreme, message));
+    return true;
+} // AddMoreExtremeValue
+
+//------------------------------------------------------------------------------------
+//ExpandSearch checks whether re-maximizing the value that had the lower
+// likelihood, starting from the parameter vector of the higher-likelihood
+// value, pushes its likelihood above the target.  If so, the stale entry is
+// replaced with the re-maximized one and the search continues; if not (or if
+// the maximizer fails), we return false and the caller stops expanding.
+
+bool Analyzer::ExpandSearch(DoublesMap& foundLikesForVals,
+                            DoubleToVecMap& foundVecsForVals,
+                            DoublesMapiter& lowValAndLike,
+                            DoublesMapiter& highValAndLike,
+                            DoubleToStringMap& messagesForVals,
+                            long int pnum, double targetLike)
+{
+    //First, find out which DoublesMapiter contains the greater likelihood.
+    DoublesMapiter highLike, lowLike;
+    if (lowValAndLike->second > highValAndLike->second)
+    {
+        assert(false); //I *think* this should never happen. Sadly, my brain is full.
+        highLike = lowValAndLike;
+        lowLike = highValAndLike;
+    }
+    else
+    {
+        highLike = highValAndLike;
+        lowLike = lowValAndLike;
+    }
+    m_newparams = (foundVecsForVals.find(highLike->first))->second;
+    double newvalue = (foundVecsForVals.find(lowLike->first))->second[pnum];
+    bool islog = false;
+    registry.GetForceSummary().SetParamWithConstraints(pnum, newvalue, m_newparams,islog);
+    double newlike;
+    string message;
+    bool retval(true);
+    retval = m_maximizer->Calculate(m_newparams, newlike, message);
+    if (!retval)
+        return false;
+    if (newlike <= targetLike)
+    {
+        return false;
+    }
+    //Replace the old set of parameters with the new one.
+    double oldval = lowLike->first;
+    foundLikesForVals.erase(oldval);
+    foundVecsForVals.erase(oldval);
+    messagesForVals.erase(oldval);
+    foundLikesForVals.insert(make_pair(newvalue, newlike));
+    foundVecsForVals.insert(make_pair(newvalue, m_newparams));
+    messagesForVals.insert(make_pair(newvalue, message));
+
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+// returns high/highest and low/lowest values for specific forces
+
+double Analyzer::LowParameter(long int pnum, double currentLow)
+{
+    double value = m_forcesummary.GetLowParameter(pnum);
+    if (value > m_MLEparams[pnum])
+    {
+        if (m_MLEparams[pnum] > 0)
+        {
+            //Take the next-lowest power of 10 from the MLE.
+            int exponent = static_cast<int>(log10(m_MLEparams[pnum]));
+            value = pow(10.0,exponent-1);
+        }
+        else if (m_MLEparams[pnum] < 0)
+        {
+            //Take the next-highest power of 10 from the MLE, but negative.
+            int exponent = static_cast<int>(log10(fabs(m_MLEparams[pnum])));
+            value = -pow(10.0,exponent+1);
+        }
+        else
+        {
+            //m_MLEparams[pnum] == 0, presumably in a case where going negative
+            // is fatal.  There can be no profiling curve below zero, so
+            // just set this to 0 and go on.
+            value = 0;
+            static bool firsttime = true;
+            if (firsttime)
+            {
+                RunReport& runreport = registry.GetRunReport();
+                runreport.ReportChat("The MLE for a parameter was zero, which means no profiling can be performed"
+                                     " below that value.  Normal profiles should result from values"
+                                     " greater than the parameter.");
+            }
+            firsttime = false;
+        }
+    }
+    for (long int nmult = 0; nmult < 5; nmult++)
+    {
+        if (value >= currentLow-EPSILON)
+        {
+            //The EPSILON is a fudge factor for optimized code.
+            value *= m_forcesummary.GetLowMult(pnum);
+        }
+        else
+        {
+            return value;
+        }
+    }
+    return value;
+}
+
+//------------------------------------------------------------------------------------
+
+double Analyzer::HighParameter(long int pnum, double currentHigh)
+{
+    double value = m_forcesummary.GetHighParameter(pnum);
+    if (value < m_MLEparams[pnum])
+    {
+        if (value > 0)
+        {
+            //Take the next-highest power of 10 from the MLE
+            int exponent = static_cast<int>(log10(m_MLEparams[pnum]));
+            value = pow(10.0,exponent+1);
+        }
+        else
+        {
+            //What the heck are we doing with a negative 'high parameter'??
+            assert(false);
+            value = 10;
+        }
+    }
+    for (long int nmult = 0; nmult < 5; nmult++)
+    {
+        if (value <= currentHigh+EPSILON)
+        {
+            //The EPSILON is a fudge factor for optimized code.
+            value *= m_forcesummary.GetHighMult(pnum);
+        }
+        else
+        {
+            return value;
+        }
+    }
+    return value;
+}
+
+//------------------------------------------------------------------------------------
+//GetLastHigher returns an iterator to the 'last' likelihood with a
+// higher likelihood than the target likelihood.  'Last' means 'The last going
+// away from the MLE', so if we're profiling the upper half of the profile,
+// this means we need to use a reverse iterator (since the map is stored
+// in the order of the values, not the likelihoods).  There should always be
+// at least one entry in the map with a higher likelihood, which means
+// that if there are no entries in the map with a lower likelihood, we return
+// an iterator to the most extreme value (the last map entry in the case of
+// the upper half of the profile, .begin() in the case of the lower half).
+
+DoublesMapiter Analyzer::GetLastHigher(DoublesMap& foundLikesForVals, double targetLike, bool high)
+{
+    DoublesMapiter upwardsLike = foundLikesForVals.begin();
+    DoublesMapReviter backwardsLike = foundLikesForVals.rbegin();
+    for (; upwardsLike != foundLikesForVals.end();
+         upwardsLike++, backwardsLike++)
+    {
+        if (high)
+        {
+            if (upwardsLike->second < targetLike)
+            {
+                upwardsLike--;
+                return upwardsLike;
+            }
+        }
+        else
+        {
+            if (backwardsLike->second < targetLike)
+            {
+                backwardsLike--;
+                return foundLikesForVals.find(backwardsLike->first);
+            }
+        }
+    }
+    //Nothing was lower--return the last value
+    if (high)
+    {
+        upwardsLike = foundLikesForVals.end();
+        upwardsLike--;
+        return upwardsLike;
+    }
+    else
+    {
+        return foundLikesForVals.begin();
+    }
+}
+
+//------------------------------------------------------------------------------------
+//GetFirstLower returns an iterator to the 'first' likelihood with a lower
+// likelihood than the target likelihood.  'First' means 'the first found
+// going away from the MLE'.  If we are profiling the lower half (high=false),
+// this means we have to use the reverse iterator.
+//
+// There will not always be a value with a lower likelihood than the
+//  target--in this case, return the .end() iterator, regardless of which
+//  direction we're going.
+
+DoublesMapiter Analyzer::GetFirstLower(DoublesMap& foundLikesForVals, double targetLike, bool high)
+{
+    DoublesMapiter upwardsLike = foundLikesForVals.begin();
+    DoublesMapReviter backwardsLike = foundLikesForVals.rbegin();
+    for (; upwardsLike != foundLikesForVals.end();
+         upwardsLike++, backwardsLike++)
+    {
+        if (high)
+        {
+            if (upwardsLike->second < targetLike)
+            {
+                return upwardsLike;
+            }
+        }
+        else
+        {
+            if (backwardsLike->second < targetLike)
+            {
+                return foundLikesForVals.find(backwardsLike->first);
+            }
+        }
+    }
+    return foundLikesForVals.end();
+}
+
+//------------------------------------------------------------------------------------
+
+DoublesMapiter Analyzer::ClosestFoundFor(DoublesMap& foundLikesForVals, double targetLike)
+{
+    DoublesMapiter retval = foundLikesForVals.begin();
+    for (DoublesMapiter valLike = foundLikesForVals.begin(); valLike != foundLikesForVals.end(); valLike++)
+    {
+        if (fabs(valLike->second - targetLike) < fabs(retval->second  - targetLike))
+        {
+            retval = valLike;
+        }
+    }
+    return retval;
+}
+
+//------------------------------------------------------------------------------------
+
+double Analyzer::GetNewValFromBracket(DoublesMapiter& highValAndLike, DoublesMapiter& lowValAndLike,
+                                      double targetLike)
+{
+    //LS NOTE:  I'm changing this to just return halfway between the y
+    // values instead of trying to be clever about it.  There were too many
+    // edge cases where being clever took way longer.
+    double y1 = lowValAndLike->first;
+    double y2 = highValAndLike->first;
+    return (y1+y2)/2;
+
+    //The disabled code below instead computed a linear interpolation toward
+    // the target point.  We might want to query the maximizer for the gradient
+    // at the high and low points for a better estimate.
+
+    //The formula used there is (y-y1) = m(x-x1), with m=(x2-x1)/(y2-y1)
+#if 0
+    return (( (highValAndLike->second - lowValAndLike->second) /
+              (highValAndLike->first  - lowValAndLike->first) )
+            * (targetLike - lowValAndLike->second)) + lowValAndLike->first;
+#endif
+#if 0
+    double x1 = lowValAndLike->second;
+    double x2 = highValAndLike->second;
+    double y1 = lowValAndLike->first;
+    double y2 = highValAndLike->first;
+    double x = targetLike;
+    if (x2 == -DBL_BIG || x1 == -DBL_BIG)
+    {
+        return (y1+y2)/2;
+    }
+    double retval = (( (y2-y1)/(x2-x1) ) * (x-x1) ) + y1;
+    assert ((x1 < x && x < x2) ||
+            (x1 > x && x > x2));
+    assert ((y1 < retval && retval < y2) ||
+            (y1 > retval && retval > y2));
+    return retval;
+#endif
+}
+
+//------------------------------------------------------------------------------------
+
+void Analyzer::AddBlankProfileForModifier(double modifier, vector<ProfileLineStruct>& localprofiles)
+{
+    ProfileLineStruct blankprofile;
+    blankprofile.isExtremeLow = true;
+    blankprofile.isExtremeHigh = true;
+    blankprofile.profilevalue = 0.0;
+    blankprofile.loglikelihood = -DBL_BIG;
+    blankprofile.profparam = m_MLEparams;
+    blankprofile.percentile = modifier;
+    localprofiles.push_back(blankprofile);
+}
+
+//------------------------------------------------------------------------------------
+//We don't do anything with this check currently except print out a debug
+// message when it detects a possible second maximum.  We're fairly late in the
+// process to try to go back and fix the MLE (if indeed we need to do that).
+
+void Analyzer::CheckForMultipleMaxima(DoublesMap& valLikeMap, bool high)
+{
+    bool foundmultiples = false;
+    double oldlike = valLikeMap.begin()->second;
+    for (DoublesMapiter valLike = valLikeMap.begin(); valLike != valLikeMap.end(); valLike++)
+    {
+        double newlike = valLike->second;
+        if (high)
+        {
+            if (newlike > oldlike)
+            {
+                foundmultiples = true;
+            }
+        }
+        else
+        {
+            if (newlike < oldlike)
+            {
+                foundmultiples = true;
+            }
+        }
+    }
+    if (foundmultiples)
+    {
+        string msg = "Multiple maxima?  A better maximum?  These likelihoods should ";
+        msg += (high ? "decrease" : "increase" );
+        msg += " as we go.\nParamVal\tLikelihood\n";
+        registry.GetRunReport().ReportDebug(msg);
+#ifndef NDEBUG
+        PrintDoublesMap(valLikeMap);
+#endif
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Analyzer::PrintDoublesMap(DoublesMap& printme)
+{
+    for (DoublesMapiter pmap = printme.begin(); pmap != printme.end(); pmap++)
+    {
+        PrintDoublesIter(pmap);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Analyzer::PrintDoublesIter(DoublesMapiter& printme)
+{
+    cout << Pretty(printme->first, 16) << "\t" << Pretty(printme->second, 16)
+         << endl;
+}
+
+//------------------------------------------------------------------------------------
+
+void Analyzer::PrintDoubleToStringMap(DoubleToStringMap& printme)
+{
+    for (DoubleToStringiter dsiter = printme.begin(); dsiter != printme.end(); dsiter++)
+    {
+        PrintDoubleToStringIter(dsiter);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Analyzer::PrintDoubleToStringIter(DoubleToStringiter& printme)
+{
+    cout << Pretty(printme->first, 16) << "\t" << Pretty(printme->second, 16)
+         << endl;
+}
+
+//____________________________________________________________________________________
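
For concreteness, the percentile-to-target arithmetic in CalcProfilePercentile() works out as follows, assuming find_chi(df, p) returns the chi-square critical value whose upper tail has probability p (which is what the percent*2.0 and (1.0-percent)*2.0 arguments imply):

    // Illustrative numbers for a 95% support interval, df = 1:
    //   percent = 0.025  ->  find_chi(1, 0.05) ~= 3.841
    //   percent = 0.975  ->  find_chi(1, 0.05) ~= 3.841
    //   targetLike = m_likelihood - 3.841/2 ~= m_likelihood - 1.92
    // so DoHalfTheProfile() brackets the parameter values whose re-maximized
    // log-likelihood lies about 1.92 units below the MLE, the standard
    // chi-square(1) profile-likelihood bound.
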
diff --git a/src/report/curvefiles.cpp b/src/report/curvefiles.cpp
new file mode 100644
index 0000000..2899058
--- /dev/null
+++ b/src/report/curvefiles.cpp
@@ -0,0 +1,273 @@
+// $Id: curvefiles.cpp,v 1.5 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <fstream>
+#include <iostream>
+
+#include "bayesanalyzer_1d.h"
+#include "curvefiles.h"
+#include "parameter.h"
+#include "registry.h"
+#include "spreadsheet.h"
+#include "stringx.h"
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+void writeCurveHeader(  std::ofstream &     curvefileStream,
+                        std::string         pname,
+                        long                paramnum,
+                        long                regions,
+                        long                replicates,
+                        BayesAnalyzer_1D &  ba)
+{
+
+    curvefileStream << "Bayesian likelihood curve for " << pname << std::endl;
+
+    curvefileStream << "Overall curve represents " << replicates
+                    << " replicate(s) for each of " << regions
+                    << " different genomic regions" << std::endl;
+
+    curvefileStream << "The prior for this parameter was ";
+    if(ba.GetIsLog(paramnum))
+    {
+        curvefileStream << "logarithmic";
+    }
+    else
+    {
+        curvefileStream << "flat";
+    }
+    curvefileStream << std::endl;
+
+    long numpoints = ba.GetNumUniquePoints(paramnum);
+    double width   = ba.GetKernelWidth(paramnum);
+    curvefileStream << "It was created from "
+                    << numpoints << " unique data points." << std::endl;
+    curvefileStream << "Its kernel width was " << width << "." << std::endl;
+
+    const ParamVector paramvec(true);
+    double low  = paramvec[paramnum].GetPrior().GetLowerBound();
+    double high = paramvec[paramnum].GetPrior().GetUpperBound();
+    curvefileStream << "The prior ranged from " << low << " to " << high << ".";
+    if(ba.GetIsLog(paramnum))
+    {
+        double llow  = SafeLog(low);
+        double lhigh = SafeLog(high);
+        curvefileStream << " (In log space: " << llow << " to " << lhigh << ").";
+    }
+    curvefileStream << std::endl;
+    curvefileStream << std::endl;
+
+}
+
+double getMinIndex( BayesAnalyzer_1D &  ba,
+                    const Parameter &   param,
+                    long                paramnum)
+{
+    double minIndex = ba.GetMinParamValFromCurve(FLAGLONG, paramnum);
+
+    long numRegions = registry.GetDataPack().GetNRegions();
+    for(long int i = 0; i < numRegions; i++)
+    {
+        double thisRegionMin = ba.GetMinParamValFromCurve(i, paramnum);
+        minIndex = (thisRegionMin < minIndex) ? thisRegionMin : minIndex;
+    }
+
+    return minIndex;
+}
+
+double getMaxIndex( BayesAnalyzer_1D &  ba,
+                    const Parameter &   param,
+                    long                paramnum)
+{
+    double maxIndex = ba.GetMaxParamValFromCurve(FLAGLONG, paramnum);
+
+    long numRegions = registry.GetDataPack().GetNRegions();
+    for(long int i = 0; i < numRegions; i++)
+    {
+        double thisRegionMax = ba.GetMaxParamValFromCurve(i, paramnum);
+        maxIndex = (thisRegionMax > maxIndex) ? thisRegionMax : maxIndex;
+    }
+
+    return maxIndex;
+}
+
+double getIncrement(BayesAnalyzer_1D &  ba,
+                    const Parameter &   param,
+                    long                paramnum)
+{
+    double increment = ba.GetBinWidthFromCurve(FLAGLONG, paramnum);
+
+#ifndef NDEBUG  // Needed only to run assert() in debug mode.
+    long numRegions = registry.GetDataPack().GetNRegions();
+    for(long int i = 0; i < numRegions; i++)
+    {
+        double thisIncrement = ba.GetBinWidthFromCurve(i, paramnum);
+        assert(increment == thisIncrement);
+    }
+#endif // NDEBUG
+
+    return increment;
+}
+
+void WriteOneConsolidatedCurveFile( std::string         filePrefix,
+                                    BayesAnalyzer_1D &  ba,
+                                    const Parameter &   param,
+                                    long                paramnum)
+{
+    assert(param.IsVariable());
+
+    // EWFIX.REFACTOR
+    string pname = param.GetShortName();
+    string::size_type i = pname.find("/");
+    while (i != string::npos)
+    {
+        pname.replace(i,1,"+");
+        i = pname.find("/");
+    }
+
+    long regions = registry.GetDataPack().GetNRegions();
+    long replicates = registry.GetChainParameters().GetNReps();
+
+    // find out min, max, and increment for regions in this param
+    // EWFIX -- doesn't handle replicates yet
+    double minIndex  = getMinIndex (ba,param,paramnum);
+    double maxIndex  = getMaxIndex (ba,param,paramnum);
+    double increment = getIncrement(ba,param,paramnum);
+
+    std::string fileName = makeFileName(filePrefix,pname);
+    std::ofstream curvefileStream;
+    curvefileStream.precision(10);
+    curvefileStream.open(fileName.c_str(),std::ios::out);
+
+    long rowCount = 2 + (maxIndex - minIndex) / increment;
+
+    bool hasReplicates = replicates > 1;
+    bool multiRegion =   regions > 1;
+    long colCount = hasReplicates ? regions * (replicates+1) : regions;
+    colCount = multiRegion ? colCount + 1 : colCount;
+
+    bool isLog = ba.GetIsLog(paramnum);
+
+    writeCurveHeader(curvefileStream,pname,paramnum,regions,replicates,ba);
+
+    std::string item;
+
+    // name of parameter, giving Ln or not as appropriate
+    if(isLog)
+    {
+        item = "\"Ln(" + pname + ")\"";
+    }
+    else
+    {
+        item = "\"" + pname + "\"";
+    }
+    curvefileStream << item;
+
+    // first comes the overall estimate
+    item = "\"Like(" + pname + ":Overall)\"";
+    curvefileStream << "," << item;
+
+    // then, each region in turn
+    if(multiRegion)
+    {
+        for(long regNo=0;regNo < regions; regNo++)
+        {
+            item = "\"Like(" + pname + ":reg" + ToString(regNo+1)+ ")\"";
+            curvefileStream << "," << item;
+
+            if(hasReplicates)
+            {
+                for(long repNo=0;repNo < replicates; repNo++)
+                {
+                    item = "\"Like(" + pname + ":reg" + ToString(regNo+1)
+                        + ":rep" + ToString(repNo+1)
+                        + ")\"";
+                    curvefileStream << "," << item;
+                }
+            }
+        }
+    }
+    else
+    {
+        if(hasReplicates)
+        {
+            for(long repNo=0;repNo < replicates; repNo++)
+            {
+                item = "\"Like(" + pname
+                    + ":rep" + ToString(repNo+1)
+                    + ")\"";
+                curvefileStream << "," << item;
+            }
+        }
+    }
+    curvefileStream << std::endl;
+
+    // Now, the values
+
+    for(long rowIndex = 1; rowIndex < rowCount; rowIndex++)
+    {
+        double pval = minIndex + (double)(rowIndex - 1) * increment;
+        double pvalOrExp = isLog ? exp(pval) : pval;
+        curvefileStream << pval;
+
+        double val = ba.GetLikeAtValForAllRegions(pvalOrExp,paramnum);
+        curvefileStream << "," << val;
+
+        if(multiRegion)
+        {
+            for(long regNo=0;regNo<regions;regNo++)
+            {
+                val = ba.GetLikeAtValForRegion(pvalOrExp,regNo,paramnum);
+                curvefileStream << "," << val;
+                if(hasReplicates)
+                {
+                    for(long repNo=0;repNo < replicates; repNo++)
+                    {
+                        val = ba.GetLikeAtValForReplicate(pvalOrExp,regNo,repNo,paramnum);
+                        curvefileStream << "," << val;
+                    }
+                }
+            }
+        }
+        else
+        {
+            if(hasReplicates)
+            {
+                for(long repNo=0;repNo < replicates; repNo++)
+                {
+                    val = ba.GetLikeAtValForReplicate(pvalOrExp,0,repNo,paramnum);
+                    curvefileStream << "," << val;
+                }
+            }
+        }
+        curvefileStream << std::endl;
+    }
+
+    curvefileStream.close();
+    registry.GetUserParameters().AddCurveFileName(fileName);
+
+}
+
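+// Illustrative sketch (hypothetical parameter name and numbers, not taken from
+// any particular run) of the CSV layout written by WriteOneConsolidatedCurveFile
+// above, for a log-scaled parameter "Theta1" with two regions and replicates,
+// after the header lines written by writeCurveHeader():
+//
+//   "Ln(Theta1)","Like(Theta1:Overall)","Like(Theta1:reg1)","Like(Theta1:reg1:rep1)",...
+//   -9.2103404,0.0012,0.0009,0.0011,...
+//   -9.1103404,0.0015,0.0011,0.0013,...
+//   ...
+//
+// Each data row holds one point on the curve: the (possibly log-scaled)
+// parameter value, then the curve value from the BayesAnalyzer_1D for the
+// overall estimate, then for each region and each of that region's replicates.
+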
+void WriteConsolidatedCurveFiles(std::string filePrefix, BayesAnalyzer_1D& ba)
+{
+
+    const ParamVector paramvec(true);
+    for (long pnum=0; pnum<static_cast<long>(paramvec.size()); pnum++)
+    {
+        const Parameter & param = paramvec[pnum];
+        if (!param.IsVariable()) continue;
+        WriteOneConsolidatedCurveFile(filePrefix,ba,param,pnum);
+
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/report/curvefiles.h b/src/report/curvefiles.h
new file mode 100644
index 0000000..43586c9
--- /dev/null
+++ b/src/report/curvefiles.h
@@ -0,0 +1,41 @@
+// $Id: curvefiles.h,v 1.2 2010/03/02 23:12:29 bobgian Exp $
+
+/*
+  Copyright 2008  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef CURVEFILES_H
+#define CURVEFILES_H
+
+#include <fstream>
+#include <iostream>
+#include <string>
+#include "vectorx.h"
+
+class BayesAnalyzer_1D;
+class Force;
+class Parameter;
+
+void writeCurveHeader(  std::ofstream &     curvefileStream,
+                        std::string         pname,
+                        long                paramnum,
+                        long                regions,
+                        long                replicates,
+                        BayesAnalyzer_1D &  ba);
+
+double getMinIndex(BayesAnalyzer_1D&,long,long);
+
+void WriteOneConsolidatedCurveFile( std::string         filePrefix,
+                                    BayesAnalyzer_1D &  bayesAnalyzer,
+                                    const Parameter &   param,
+                                    long                paramnum);
+void WriteConsolidatedCurveFiles(   std::string         filePrefix,
+                                    BayesAnalyzer_1D &  bayesAnalyzer);
+
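+// Usage sketch (illustrative only; the real call site lives elsewhere in the
+// program, and the prefix string here is hypothetical):
+//
+//   BayesAnalyzer_1D& analyzer = ...;              // filled in by the Bayesian run
+//   WriteConsolidatedCurveFiles("curvefile", analyzer);
+//
+// One CSV curve file is written per variable parameter, and each file name is
+// registered with the UserParameters via AddCurveFileName().
+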
+#endif // CURVEFILES_H
+
+//____________________________________________________________________________________
diff --git a/src/report/outputfile.cpp b/src/report/outputfile.cpp
new file mode 100644
index 0000000..79f9def
--- /dev/null
+++ b/src/report/outputfile.cpp
@@ -0,0 +1,161 @@
+// $Id: outputfile.cpp,v 1.21 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <exception>
+#include <iostream>
+
+#include "outputfile.h"
+#include "errhandling.h"
+#include "registry.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+OutputFile::OutputFile(const string& fname)
+{
+
+    m_outf.open(fname.c_str());
+
+    if (!m_outf)
+    {
+        file_error e("Unable to open output file\n");
+        throw e ;
+    }
+
+} // OutputFile::ctor
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+ResultsFile::ResultsFile()
+    : OutputFile(registry.GetUserParameters().GetResultsFileName())
+{
+
+} // ResultsFile
+
+//------------------------------------------------------------------------------------
+
+void ResultsFile::AddReport(ReportPage &report)
+{
+
+    m_reports.push_back(&report);
+
+} // AddReport
+
+//------------------------------------------------------------------------------------
+
+void ResultsFile::ShowReports()
+{
+    vector<ReportPage *>::iterator rpit;
+    for(rpit = m_reports.begin(); rpit != m_reports.end(); ++rpit)
+        (*rpit)->Show();
+
+} // ShowReports
+
+//------------------------------------------------------------------------------------
+
+void ResultsFile::Display()
+{
+    verbosity_type verbosity = registry.GetUserParameters().GetVerbosity();
+
+    MlePage estimatepage(m_outf);
+    if (registry.GetChainParameters().IsBayesian())
+    {
+        estimatepage.Setup("Most Probable Estimates (MPEs) of Parameters");
+    }
+    else
+    {
+        estimatepage.Setup("Maximum Likelihood Estimates (MLEs) of Parameters");
+    }
+
+    MapPage mappingpage(m_outf);
+    mappingpage.Setup("Mapping results");
+
+    ProfPage profilepage(m_outf);
+    profilepage.Setup("Profile Likelihoods");
+
+    UsrPage userpage(m_outf);
+    userpage.Setup("User Specified Options");
+
+    DataPage echopage(m_outf);
+    echopage.Setup("Data summary");
+
+    RunPage runreportpage(m_outf);
+    runreportpage.Setup("Run Reports by Region");
+
+    AddReport(estimatepage);
+    AddReport(profilepage);
+    AddReport(mappingpage);
+    AddReport(userpage);
+    if (verbosity != CONCISE && verbosity != NONE)
+    {
+        AddReport(echopage);  //Echo data
+        // We've taken the 'Echo data' option out of the menu, and just tied it
+        // to the verbosity level of the output instead. --LS
+        AddReport(runreportpage);
+    }
+
+    ShowReports();
+
+} // ResultsFile::Display
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+XMLOutfile::XMLOutfile() :
+    OutputFile(registry.GetUserParameters().GetXMLOutFileName())
+{
+} // XMLOutfile::ctor
+
+XMLOutfile::XMLOutfile(string outfileName) :
+    OutputFile(outfileName)
+{
+} // XMLOutfile::ctor
+
+//------------------------------------------------------------------------------------
+
+// Any global model the user may have had will not be replicated
+// by this code.  Instead, every region contains its own data model.
+void XMLOutfile::Display()
+{
+    m_outf << "<lamarc version=\"" << VERSION << "\">" << endl;
+    m_outf << "<!-- Created by the Lamarc program -->" << endl;
+
+    unsigned long nindentspaces(INDENT_DEPTH);
+    StringVec1d xmllines;
+
+    StringVec1d chainxml(registry.GetChainParameters().ToXML(nindentspaces));
+    xmllines.insert(xmllines.end(),chainxml.begin(),chainxml.end());
+
+    StringVec1d userxml(registry.GetUserParameters().ToXML(nindentspaces));
+    xmllines.insert(xmllines.end(),userxml.begin(),userxml.end());
+
+    StringVec1d forcexml(registry.GetForceSummary().ToXML(nindentspaces));
+    xmllines.insert(xmllines.end(),forcexml.begin(),forcexml.end());
+
+    StringVec1d dataxml(registry.GetDataPack().ToXML(nindentspaces));
+    xmllines.insert(xmllines.end(),dataxml.begin(),dataxml.end());
+
+    StringVec1d::iterator line;
+    for(line = xmllines.begin(); line != xmllines.end(); ++line)
+    {
+        m_outf << *line << endl;
+    }
+
+    m_outf << "</lamarc>" << endl;
+
+} // XMLOutfile::Display
+
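+// Rough shape of the file written above (the element names inside depend on
+// the ToXML() methods of the chain parameters, user parameters, forces and
+// data pack, so this is only an outline):
+//
+//   <lamarc version="...">
+//   <!-- Created by the Lamarc program -->
+//     ... chain-parameter XML ...
+//     ... user-parameter XML ...
+//     ... force XML ...
+//     ... data XML ...
+//   </lamarc>
+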
+//____________________________________________________________________________________
diff --git a/src/report/outputfile.h b/src/report/outputfile.h
new file mode 100644
index 0000000..8bd453a
--- /dev/null
+++ b/src/report/outputfile.h
@@ -0,0 +1,90 @@
+// $Id: outputfile.h,v 1.10 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef OUTPUTFILE_H
+#define OUTPUTFILE_H
+
+#include <fstream>
+#include <string>
+#include "stringx.h"
+#include "reportpage.h"
+#include "constants.h"
+
+// Class OutputFile is the abstract base class for dealing with file i/o
+// at the end of the program.
+//
+// This class is not default-constructible or copyable.
+//
+// The class ctor will throw a file_error if ofstream.open() fails
+class OutputFile
+{
+  private:
+    // these are deliberately never implemented
+    OutputFile();
+    OutputFile(const OutputFile &src);
+    OutputFile &operator=(const OutputFile &src);
+
+  protected:
+    std::ofstream m_outf;
+
+  public:
+    OutputFile(const std::string& fname);
+    virtual ~OutputFile() { m_outf.close(); };
+
+    virtual void Display() = 0;
+};
+
+// Class ResultsFile manages the collection of report pages that make
+// up the final file output for the program.  It is expected to be a
+// singleton.
+//
+// This class is not default-constructible or copyable.
+//
+// The class ctor does not catch any exceptions thrown by the base class.
+class ResultsFile : public OutputFile
+{
+  private:
+    // these are deliberately never implemented
+    ResultsFile(const ResultsFile &src);
+    ResultsFile &operator=(const ResultsFile &src);
+
+    std::vector<ReportPage *> m_reports; // this points at local variables of Display()
+
+    void AddReport(ReportPage &report);
+    void ShowReports();
+
+  public:
+    ResultsFile();
+    virtual ~ResultsFile() {};
+
+    virtual void Display();
+
+};
+
+// Class XMLOutfile manages the creation of a correct xml input file from the
+// current data structures of the program.
+class XMLOutfile : public OutputFile
+{
+  private:
+    // these are deliberately never implemented
+    XMLOutfile(const XMLOutfile &src);
+    XMLOutfile &operator=(const XMLOutfile &src);
+
+  public:
+    XMLOutfile();
+    XMLOutfile(std::string outfileName);
+    virtual ~XMLOutfile() {};
+
+    virtual void Display();
+};
+
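+// Usage sketch (illustrative only): both concrete classes follow the same
+// construct-then-Display() pattern; file names come from the registry unless
+// one is passed explicitly, and the file name shown here is hypothetical.
+//
+//   ResultsFile results;            // opens the results file named in UserParameters
+//   results.Display();              // builds and shows the report pages
+//
+//   XMLOutfile xmlout("lamarc_out.xml");   // hypothetical explicit file name
+//   xmlout.Display();               // writes an XML input file for a re-run
+//
+// The OutputFile constructor throws a file_error if the file cannot be opened.
+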
+#endif // OUTPUTFILE_H
+
+//____________________________________________________________________________________
diff --git a/src/report/reportpage.cpp b/src/report/reportpage.cpp
new file mode 100644
index 0000000..2b3576a
--- /dev/null
+++ b/src/report/reportpage.cpp
@@ -0,0 +1,3017 @@
+// $Id: reportpage.cpp,v 1.129 2013/10/25 17:00:53 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// All versions of SetupColhdr() used for PrintTable() must
+//   put "\n" at EOL for each line.  PrintTable() will not do it!
+//
+// UserOptions
+//    infilenames, if > 1, print ugly
+
+#include <cassert>
+#include <iostream>
+
+#include "analyzer.h"
+#include "calculators.h"
+#include "datatype.h"
+#include "defaults.h"
+#include "dlmodel.h"
+#include "force.h"
+#include "mathx.h"
+#include "parameter.h"
+#include "plotstat.h"
+#include "region.h"
+#include "registry.h"
+#include "reportpage.h"
+#include "runreport.h"
+#include "stringx.h"
+#include "timex.h"                      // for retrieving ending time stamp in MlePage::Show()
+#include "types.h"
+#include "vector_constants.h"
+#include "vectorx.h"
+#include "priorreport.h"                // for MlePage
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+typedef vector < pair<unsigned long, double> > LDpairVec;
+typedef vector < pair<unsigned long, double> >::iterator LDpairVecIt;
+
+#define NOVAL  "*n/a*"                  //What to print if there's no value for a spot in the table.
+#define ERRVAL "*err*"                  //What to print if a value could not be computed (e.g. the maximizer failed).
+
+//------------------------------------------------------------------------------------
+
+ReportPage::ReportPage(ofstream& pf, long pglngth, long pgwdth)
+    : outf(pf)
+{
+    verbosity = registry.GetUserParameters().GetVerbosity();
+    pagelength = pglngth;
+    pagewidth = pgwdth;
+    current_length = 0;
+    m_bayesanalysis = registry.GetChainParameters().IsBayesian();
+    if (m_bayesanalysis)
+    {
+        m_MLE = "MPE";
+    }
+    else
+    {
+        m_MLE = "MLE";
+    }
+
+} // ReportPage::ReportPage
+
+//------------------------------------------------------------------------------------
+
+string ReportPage::MakeLineOf(const char ch, long length, long indent)
+{
+    string line;
+    long pos;
+
+    for(pos = 0; pos < indent; ++pos)
+        line += " ";
+
+    for( ; pos < length; ++pos)
+        line += ch;
+
+    line += string("\n");
+
+    return(line);
+
+} // ReportPage::MakeLineOf
+
+//------------------------------------------------------------------------------------
+
+string ReportPage::MakePageBreak()
+{
+    string line = string("
\n");
+
+    return(line);
+
+} // ReportPage::MakePageBreak
+
+//------------------------------------------------------------------------------------
+
+string ReportPage::MakeBlankLine()
+{
+    return(string("\n"));
+
+} // ReportPage::MakeBlankLine
+
+// GetOneRow takes 2d data and pulls out cross-slice of it.  Since the data
+//  is normally stored in columns, this routine gives us a row.
+StringVec1d ReportPage::GetOneRow(const StringVec2d table, const long rownum)
+{
+    StringVec1d row;
+    for (unsigned long i=0; i<table.size(); ++i)
+        row.push_back(table[i][rownum]);
+    return row;
+}
+
+//------------------------------------------------------------------------------------
+
+string ReportPage::MakeSimpleRow(vector<long>& colwdth,
+                                 vector<string>& contents)
+{
+    string line;
+    long col, ncols = colwdth.size();
+    for(col = 0; col < ncols; ++col)
+    {
+        line += MakeJustified(contents[col],colwdth[col]);
+    }
+    line += "\n";
+
+    return(line);
+
+} // ReportPage::MakeSimpleRow
+
+//------------------------------------------------------------------------------------
+
+string ReportPage::MakeTwoCol(const vector<long>& colwdth,
+                              const string& col1, const string& col2)
+{
+    // Left column; left justified
+    string line = MakeJustified(col1,-1*colwdth[0]);
+    // Right column; right justified
+    line += " " + MakeJustified(col2,colwdth[1]);
+    line += "\n";
+
+    return(line);
+
+} // ReportPage::MakeTwoCol
+
+//------------------------------------------------------------------------------------
+
+string ReportPage::MakeTwoCol(const vector<long>& colwdth,
+                              const string& col1, const char* col2)
+{
+    return(MakeTwoCol(colwdth,string(col1),string(col2)));
+
+} // ReportPage::MakeTwoCol
+
+//------------------------------------------------------------------------------------
+
+string ReportPage::MakeTwoCol(const vector<long>& colwdth,
+                              const char* col1, const string& col2)
+{
+
+    return(MakeTwoCol(colwdth,string(col1),string(col2)));
+
+} // ReportPage::MakeTwoCol
+
+//------------------------------------------------------------------------------------
+
+string ReportPage::MakeTwoCol(const vector<long>& colwdth,
+                              const char* col1, const char* col2)
+{
+    return(MakeTwoCol(colwdth,string(col1),string(col2)));
+
+} // ReportPage::MakeTwoCol
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintLineOf(const char ch, long length, long indent)
+{
+    outf << MakeLineOf(ch,length,indent);
+    ++current_length;
+
+} // ReportPage::PrintLineOf
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintBlankLine()
+{
+    //LS NOTE:  This is a very crude approximation of a decent page-break
+    // algorithm, but that'd require a fair overhaul of the system as it is.
+    if (current_length >= pagelength-3)
+    {
+        PrintPageBreak(); //resets current_length
+        PrintTitle();
+        PrintBlankLine();
+    }
+    else
+    {
+        outf << MakeBlankLine();
+        ++current_length;
+    }
+} // ReportPage::PrintBlankLine
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintSimpleRow(vector<long> &colwdth,
+                                vector<string> &contents)
+{
+    outf << MakeSimpleRow(colwdth,contents);
+    ++current_length;
+
+} // ReportPage::PrintSimpleRow
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintTwoCol(const vector<long> &colwdth, const string &col1,
+                             const string &col2)
+{
+    outf << MakeTwoCol(colwdth, col1, col2);
+    ++current_length;
+
+} // ReportPage::PrintTwoCol
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintTwoCol(const vector<long> &colwdth, const char *col1,
+                             const string &col2)
+{
+    outf << MakeTwoCol(colwdth, col1, col2);
+    ++current_length;
+
+} // ReportPage::PrintTwoCol
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintTwoCol(const vector<long> &colwdth, const string &col1,
+                             const char *col2)
+{
+    outf << MakeTwoCol(colwdth, col1, col2);
+    ++current_length;
+
+} // ReportPage::PrintTwoCol
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintTwoCol(const vector<long> &colwdth, const char *col1,
+                             const char *col2)
+{
+    outf << MakeTwoCol(colwdth, col1, col2);
+    ++current_length;
+
+} // ReportPage::PrintTwoCol
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintPageBreak()
+{
+    outf << "
" << endl;
+    current_length = 0;
+
+} // ReportPage::PrintPageBreak
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::Setup(vector<string> &title, long pglength,
+                       long pgwidth, char tdlm)
+{
+    pagetitle = title;
+    titledlm = tdlm;
+    pagelength = pglength;
+    pagewidth = pgwidth;
+
+} // ReportPage::Setup
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::Setup(string &title, long pglength,
+                       long pgwidth, char tdlm)
+{
+    vector<string> title1;
+
+    title1.push_back(title);
+    pagetitle = title1;
+    titledlm = tdlm;
+    pagelength = pglength;
+    pagewidth = pgwidth;
+
+} // ReportPage::Setup
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::Setup(const char *title, long pglength,
+                       long pgwidth, char tdlm)
+{
+    vector<string> title1;
+    string chartitle(title);
+
+    title1.push_back(chartitle);
+    pagetitle = title1;
+    titledlm = tdlm;
+    pagelength = pglength;
+    pagewidth = pgwidth;
+
+} // ReportPage::Setup
+
+//------------------------------------------------------------------------------------
+
+vector<string> ReportPage::MakeTitle()
+{
+    vector<string> title = pagetitle;
+    vector<string>::iterator sit;
+
+    // add linefeeds to the end of each element of title.
+    for(sit = title.begin(); sit != title.end(); ++sit)
+        *sit += "\n";
+
+    // put title delimiter lines at begin and end of title
+    title.insert(title.begin(),MakeLineOf(titledlm,pagewidth));
+    title.push_back(MakeLineOf(titledlm,pagewidth));
+
+    return(title);
+
+} // ReportPage::MakeTitle
+
+//------------------------------------------------------------------------------------
+
+vector<string> ReportPage::MakeTableTitle(const string &title)
+{
+    vector<string> titlelines;
+    titlelines.push_back(title+":");
+    unsigned long linelength = title.size()+1;
+    if (linelength > pagewidth)
+        linelength = pagewidth-2;
+    titlelines.push_back(MakeLineOf('-',linelength));
+
+    return(titlelines);
+
+} // ReportPage::MakeTableTitle
+
+//------------------------------------------------------------------------------------
+
+vector<string> ReportPage::MakeTableTitle(const char *title)
+{
+    return(MakeTableTitle(string(title)));
+
+} // ReportPage::MakeTableTitle
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintTitle()
+{
+    PrintLineOf(titledlm,pagewidth);
+    vector<string>::iterator sit;
+    for(sit = pagetitle.begin(); sit != pagetitle.end(); ++sit)
+    {
+        outf << *sit << endl;
+        ++current_length;
+    }
+    PrintLineOf(titledlm,pagewidth);
+
+    sit = pagetitle.begin();
+    if (sit->find("(cont.)") == string::npos)
+        *sit += "\t(cont.)";
+} // ReportPage::PrintTitle
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintCenteredString(const string &str, long width,
+                                     long indent, bool trunc)
+{
+    string chunk = MakeCentered(str,width,indent,trunc);
+
+    outf << chunk;
+    ++current_length;
+
+} // ReportPage::PrintCenteredString
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintCenteredString(const char *str, long width,
+                                     long indent, bool trunc)
+{
+    const string pstr(str);
+
+    PrintCenteredString(pstr,width,indent,trunc);
+
+} // ReportPage::PrintCenteredString
+
+void ReportPage::PrintWrapped(const string &line)
+{
+    StringVec1d wrappedline;
+    wrappedline.push_back(line);
+    wrappedline = Linewrap(wrappedline, pagewidth-2);
+    outf << wrappedline[0] << endl;
+    ++current_length;
+    for (unsigned long i=1; i<wrappedline.size(); ++i)
+    {
+        outf << "  " << wrappedline[i] << endl;
+        ++current_length;
+        if (wrappedline[i].find("\n") != string::npos)
+            ++current_length;
+    }
+} // PrintWrapped
+
+void ReportPage::PrintWrapped(const StringVec1d& lines)
+{
+    for (unsigned long nline=0; nline<lines.size(); nline++)
+    {
+        PrintWrapped(lines[nline]);
+    }
+} // PrintWrapped
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintTableTitle(const string &title)
+{
+    vector<string> titlelines = MakeTableTitle(title);
+    vector<string>::iterator tit;
+
+    for(tit = titlelines.begin(); tit != titlelines.end(); ++tit)
+        PrintWrapped(*tit);
+
+} // ReportPage::PrintTableTitle
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintTableTitle(const char *title)
+{
+    PrintTableTitle(string(title));
+
+} // ReportPage::PrintTableTitle
+
+//------------------------------------------------------------------------------------
+
+string ReportPage::MakeSectionBreak(const char dlm, long width,
+                                    long indent)
+{
+    return(MakeLineOf(dlm,width,indent));
+
+} // ReportPage::MakeSectionBreak
+
+//------------------------------------------------------------------------------------
+
+void ReportPage::PrintSectionBreak(const char dlm, long width,
+                                   long indent)
+{
+    outf << MakeSectionBreak(dlm,width,indent);
+    ++current_length;
+
+} // ReportPage::PrintSectionBreak
+
+//------------------------------------------------------------------------------------
+
+// colhdr is dimensioned: line
+// rowhdr is dimensioned: section X row
+// colwdth is dimensioned: col
+// innards is dimensioned: section X row X col
+
+vector<string> ReportPage::MakeTable(StringVec1d& colhdr, StringVec2d& rowhdr,
+                                     LongVec1d& colwdth, StringVec3d& innards)
+{
+    StringVec1d table;
+    long hdrwdth = colwdth[0];
+
+    long ncols = colwdth.size();
+    long totlength = 0;
+    for(long col = 0; col < ncols; ++col)
+        totlength += colwdth[col];
+
+    vector<string>::iterator lit;
+    for(lit = colhdr.begin(); lit != colhdr.end(); ++lit)
+        table.push_back(*lit);
+    //table.push_back(MakeLineOf('-',totlength));
+
+    long sect, nsect = innards.size(), sectindent=3;
+    for(sect = 0; sect < nsect; ++sect)
+    {
+        if (sect != 0)
+            table.push_back(MakeSectionBreak('-',totlength,sectindent));
+        table.push_back(rowhdr[sect][0]+"\n");
+        long line, nlines = rowhdr[sect].size();
+        for(line = 1; line < nlines; ++line)
+        {
+            string tline;
+            tline = MakeCentered(rowhdr[sect][line],hdrwdth);
+            long col, ncols = innards[sect][line].size();
+            for(col = 0; col < ncols; ++col)
+            {
+                tline += MakeCentered(innards[sect][line][col],colwdth[col+1]);
+            }
+            table.push_back(tline + "\n");
+        }
+    }
+
+    table.push_back(MakeLineOf('-',totlength));
+
+    return(table);
+
+} // ReportPage::MakeTable
+
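+// Worked example (hypothetical labels and numbers) of the layout MakeTable
+// returns: with one colhdr line, a single section whose rowhdr is
+// {"Theta", "pop1", "pop2"} and innards holding one value column, the lines
+// come out roughly as
+//
+//   Param     MLE        <- colhdr line(s), passed through as given
+//   Theta                <- rowhdr[sect][0], the section title
+//    pop1    0.0123      <- centred row header plus centred cell values
+//    pop2    0.0456
+//   ---------------      <- closing line of '-' across the total column width
+//
+// Additional sections are separated by indented dashed section breaks.
+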
+//------------------------------------------------------------------------------------
+
+// colhdr is dimensioned: line
+// rowhdr is dimensioned: section X row
+// colwdth is dimensioned: col
+// innards is dimensioned: section X row X col
+
+void ReportPage::PrintTable(vector<string> &colhdr, StringVec2d &rowhdr,
+                            vector<long> &colwdth,  StringVec3d &innards)
+{
+    vector<string> table = MakeTable(colhdr,rowhdr,colwdth,innards);
+    vector<string>::iterator line;
+
+    for(line = table.begin(); line != table.end(); ++line)
+    {
+        outf << *line;
+        ++current_length;
+    }
+} // ReportPage::PrintTable
+
+//------------------------------------------------------------------------------------
+
+double ReportPage::GetCentile(const vector<centilepair>& centiles,
+                              double pcent)
+{
+    vector<centilepair>::const_iterator cent;
+    for(cent = centiles.begin(); cent != centiles.end(); ++cent)
+    {
+        if (!CloseEnough(cent->first, pcent)) continue;
+        return cent->second;
+    }
+
+    assert(false);  // never found a needed centile!
+    return FLAGDOUBLE;
+
+} // ReportPage::GetCentile
+
+double ReportPage::GetReverseCentile(const vector<centilepair>& centiles,
+                                     double pcent)
+{
+    vector<centilepair>::const_iterator cent;
+    //For a bayesian fixed analysis, we need to search the other way 'round.
+    for(cent = centiles.begin(); cent != centiles.end(); ++cent)
+    {
+        if (!CloseEnough(cent->second, pcent)) continue;
+        return cent->first;
+    }
+
+    assert(false);  // never found a needed centile!
+    return FLAGDOUBLE;
+
+} // ReportPage::GetReverseCentile
+
+//------------------------------------------------------------------------------------
+
+StringVec2d ReportPage::SortTable(StringVec2d intable, unsigned long sortcol,
+                                  unsigned long headerrows)
+{
+    if(intable.size() < sortcol)
+    {
+        registry.GetRunReport().ReportDebug("What the hey?  Tried to sort on a nonexistant column.  Last time this"
+                                            " happened was because a force was on that had no valid parameters.");
+        return intable;
+    }
+    sortcol--; //to change to an index instead of a column number.
+    if (intable[sortcol].size() <= headerrows)
+        return intable;
+
+    StringVec2d outtable;
+
+    string sortme = "";
+    for (unsigned long row=headerrows; row<intable[sortcol].size(); ++row)
+    {
+        sortme += intable[sortcol][row] + " ";
+    }
+    DoubleVec1d tosort;
+    try
+    {
+        tosort = StringToDoubleVecOrBarf(sortme);
+    }
+    catch(const exception& ex)
+    {
+        string msg = ex.what();
+        msg += ":  Unable to sort this table.  We probably did this on purpose.";
+        RunReport& runreport = registry.GetRunReport();
+        runreport.ReportDebug(msg);
+        return intable;
+    }
+
+    LDpairVec unsorted;
+    LongVec1d sorted;
+    for (unsigned long i=0; i<tosort.size(); ++i)
+        unsorted.push_back(make_pair(i, tosort[i]));
+
+    while (unsorted.size() > 0)
+    {
+        LDpairVecIt smallest_it = unsorted.begin();
+        for (LDpairVecIt i = unsorted.begin(); i != unsorted.end(); ++i)
+            if (i->second < smallest_it->second)
+                smallest_it = i;
+        sorted.push_back(smallest_it->first);
+        unsorted.erase(smallest_it);
+    }
+
+    //LS TEST:  the sorted lines
+#if 0
+    for (unsigned long row=headerrows; row<intable[sortcol].size(); ++row)
+        cout << row << ":  " << sorted[row-headerrows] << ", "
+             << intable[sortcol][sorted[row-headerrows]+headerrows] << endl;
+#endif
+
+    for (unsigned long col=0; col<intable.size(); ++col)
+    {
+        StringVec1d newcolumn;
+        for (unsigned long row=0; row<headerrows; ++row)
+            newcolumn.push_back(intable[col][row]);
+        for (unsigned long row=headerrows; row<intable[sortcol].size(); ++row)
+            newcolumn.push_back(intable[col][sorted[row-headerrows]+headerrows]);
+        outtable.push_back(newcolumn);
+    }
+    return outtable;
+}
+
+// TrimString makes a column label a bit shorter, but in a way that doesn't
+//  reduce the amount of information in the title.  In other words, TrimString
+//  is used to make a column header that *doesn't* need a legend describing
+//  what was trimmed.  If you do need a legend, use MakeItShorter.
+void ReportPage::TrimString(string& title)
+{
+    string::size_type i;
+
+    //First try taking out double spaces.
+    i = title.find("  ");
+    while (i != string::npos)
+    {
+        title.erase(i, 1);
+        i = title.find("  ");
+    }
+
+    //Now take out leading or trailing spaces.
+    i = title.find(" ");
+    while (i == 0)
+    {
+        title.erase(0,1);
+        i = title.find(" ");
+    }
+
+    if (title.size()>0)
+        while ((title[title.size()-1]==' ') && title.size()>1)
+            title.erase(title.size()-1,1);
+
+    //Now try taking out the string "Population " from the title.
+    i = title.find("Population ");
+    while (i != string::npos)
+    {
+        title.erase(i, 11);
+        i = title.find("Population ");
+    }
+
+    return;
+}
+
+// MakeItShorter takes a section header and tries to make it fit into the
+//  given width.  If that doesn't work, it just truncates and returns false,
+//  telling the calling routine that it was not able to reduce the length
+//  of the string without the possibility of losing important information
+//  (if, say, the given title matches another title for the first 'width'
+//  characters, and only differs in the now-truncated bit.)  If you can think
+//  of other ways to truncate titles, be my guest and add 'em here.
+bool ReportPage::MakeItShorter(string& title, const unsigned long width)
+{
+    string::size_type i, j;
+    if (title.size() == 0)
+        return false;
+
+    //Try removing the "From " / " to " found in migration titles.
+    i = title.find("From ");
+    j = title.find(" to ");
+    while ((i != string::npos) && (j != string::npos))
+    {
+        title.replace(j, 4, "-");
+        title.erase(i, 5);
+        i = title.find("From ");
+        j = title.find(" to ");
+    }
+    if (title.size() <= width)
+        return true;
+
+    if (title.find("/") != string::npos)
+    {
+        //This is probably the shortname of a combined parameter.  We check
+        // this again to make sure there are actually multiples.
+        //Change 'Theta' to 'T'
+        i = title.find("Theta");
+        if (i != title.rfind("Theta"))
+        {
+            while (i != string::npos)
+            {
+                title.erase(i+1,4);
+                i = title.find("Theta");
+            }
+            if (title.size() <= width) return true;
+        }
+        //Change 'Growth' to 'G'
+        i = title.find("Growth");
+        if (i != title.rfind("Growth"))
+        {
+            while (i != string::npos)
+            {
+                title.erase(i+1,5);
+                i = title.find("Growth");
+            }
+            if (title.size() <= width) return true;
+        }
+    }
+    //If there's a '/N#' (where N is a capital letter and # is a number),
+    // reduce it to just '/#'.
+    i = title.find("/");
+    if (i != string::npos) i++;
+    while (i != string::npos)
+    {
+        if ((title[i] >= 'A') && (title[i] <= 'Z'))
+        {
+            i++;
+            if (i != string::npos)
+            {
+                if ((title[i] >= '0') && (title[i] <= '9'))
+                {
+                    title.erase(i-1,1);
+                }
+            }
+        }
+        i = title.find("/",i);
+        if (i != string::npos) i++;
+    }
+    if (title.size() <= width)
+        return true;
+
+    //Give up.
+    title.assign(title, 0, width);
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+MlePage::MlePage(ofstream& pf, long pglngth, long pgwdth)
+    : ReportPage(pf,pglngth,pgwdth), hdrindent(3)
+{
+    label_colwidth = 14;
+    colwidth = 10; //Columns will be printed with spaces between 'em.
+
+} // MlePage::MlePage
+
+//------------------------------------------------------------------------------------
+
+void MlePage::Show()
+{
+    // General Program header goes here for now
+    {
+        long shortlinelength = 60;
+        string line;
+
+        PrintLineOf('*',shortlinelength);
+        line.assign("LAMARC:  Maximum Likelihood Parameter Estimation\n");
+        PrintCenteredString(line,shortlinelength);
+        line.assign("using Hastings-Metropolis Markov Chain Monte Carlo\n");
+        PrintCenteredString(line,shortlinelength);
+        PrintLineOf('*',shortlinelength);
+        outf << "version " << VERSION << endl;
+        ++current_length;
+
+        // Appends an indication of any debugging options which may differ from the "usual" configuration.
+        if (DebuggingOptionsRunning())
+        {
+            outf << DebuggingOptionsString(current_length);
+        }
+
+        PrintBlankLine();
+        PrintBlankLine();
+        line = "Program started on " +
+            PrintTime(registry.GetUserParameters().GetProgramStartTime(),"%c")
+            + "\n";
+        PrintCenteredString(line,shortlinelength);
+        line = "finished on " + PrintTime(GetTime(),"%c") + "\n";
+        PrintCenteredString(line,shortlinelength,3L);
+        PrintBlankLine();
+    }
+
+    // Begin actual MlePage output
+    PrintTitle();
+    PrintBlankLine();
+    PrintBlankLine();
+
+    // print a MLE table for each force
+#ifndef JSIM
+    WriteBody();
+#else
+    WriteSimMles();
+#endif
+    PrintPageBreak();
+
+} // MlePage::Show
+
+// WriteBody is the main engine for creating the MLE output page.  It loops
+//  over all the forces, sticking them in the 3d string vector 'allforcetable'
+//  and when it's done, calls the routines that format and print it.  Both
+//  the data and the labels are stored in 'allforcetable'.
+void MlePage::WriteBody()
+{
+    const ForceVec forces = registry.GetForceSummary().GetAllForces();
+    StringVec3d allforcetable;
+    //All the data gets put in here.  The dimensions are [table][column][row].
+
+    Strmap namemap;
+    // The name map has a long name first, and a short name second.  If the
+    // short name is longer than colwidth, another entry with that name first
+    // and an even shorter name second should be provided.  The shorter names
+    // are used when the amount of space of a column or columns is too small.
+    // The namemap is used to create legends for the tables.
+
+    ForceVec::const_iterator fit;
+    for(fit = forces.begin(); fit != forces.end(); ++fit)
+    {
+        if ((*fit)->HasNoVaryingParameters()) continue;
+
+        string forcename = (*fit)->GetFullparamname();
+        string shortforcename = (*fit)->GetShortparamname();
+        force_type tag = (*fit)->GetTag();
+        namemap.insert(make_pair(forcename, shortforcename));
+
+        bool usepercentiles = true;
+        if ((*fit)->SummarizeProfTypes() != profile_PERCENTILE) usepercentiles = false;
+
+        DoubleVec1d modifiers = registry.GetForceSummary().GetModifiersByForce(tag);
+
+        StringVec1d subtypes = (*fit)->GetAxisname();
+        //Probably ("Population", "Region")
+        long titleindex = 0;
+        StringVec1d firstcolumn = MakeLabels(subtypes, modifiers, usepercentiles);
+        StringVec2d forcetable;
+        forcetable.push_back(firstcolumn);
+
+        const vector<Parameter>& parameters = (*fit)->GetParameters();
+        vector<Parameter>::const_iterator param;
+        for (param = parameters.begin(); param != parameters.end(); ++param)
+        {
+            ParamStatus mystatus = param->GetStatus();
+            if (!mystatus.Varies()) continue;
+
+            string paramname = param->GetName();
+            titleindex++;
+            //It would be nice if the 'section title' was stored in the parameter
+            // itself instead of in the force, but we do what we can do.
+
+            // Now store a 'chain' of progressively shorter names in the namemap.
+            // When printing it out, we have an as-of-now-unknown amount of space
+            // in which to print it.  See MakeItShorter for details.
+
+            TrimString(paramname);
+            string shortname = paramname;
+            string lastname = paramname;
+            while (MakeItShorter(shortname, shortname.size()-1))
+            {
+                namemap.insert(make_pair(lastname, shortname));
+                lastname = shortname;
+            }
+            shortname = param->GetShortName();
+            namemap.insert(make_pair(lastname, shortname));
+            lastname = shortname;
+            //We don't go to GetShortName immediately because the parameter
+            // short name is often somewhat cryptic, e.g. 'Theta1' when we're
+            // actually looking for a short version of the *population* name.
+            if (shortname.size() > colwidth-1)
+            {
+                lastname = shortname;
+                MakeItShorter(shortname, colwidth-1);
+                namemap.insert(make_pair(lastname, shortname));
+            }
+            //That's as short as we'd ever want, though it's a straight truncation.
+
+            long nregs = registry.GetDataPack().GetNRegions();
+            for (long reg = 0; reg <= nregs; ++reg)
+            {
+                bool do_overall(false);
+                if (reg==nregs)
+                {
+                    if (nregs <= 1)
+                    {
+                        break;
+                    }
+                    else
+                    {
+                        do_overall = true;
+                    }
+                }
+                string regionname = "Overall";
+                double MLE = param->GetOverallMLE();
+                if (!do_overall)
+                {
+                    regionname = registry.GetDataPack().GetRegion(reg).GetRegionName();
+                    if (namemap.find(regionname) == namemap.end())
+                    {
+                        string shortregionname = "reg";
+                        shortregionname += indexToKey(reg);
+                        namemap.insert(make_pair(regionname, shortregionname));
+                    }
+                    MLE = param->GetMLE(reg);
+                }
+
+                StringVec1d percentiles;
+                if (usepercentiles)
+                    //if (usepercentiles && (param->GetProfileType() != profile_NONE) )
+                {
+                    percentiles = DoPercentiles(reg, param, modifiers, namemap);
+                }
+                StringVec1d newcolumn = MakeColumn(forcename, paramname, regionname,
+                                                   MLE, percentiles, usepercentiles);
+                forcetable.push_back(newcolumn);
+            }
+        }
+        AddForceToOutput(allforcetable, forcetable);
+    }
+
+    const RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+    if (pRegionGammaInfo)
+    {
+        DoRegionGammaInfo(allforcetable, namemap, pRegionGammaInfo);
+    }
+
+    WrapOutput(allforcetable);
+    WriteOutput(allforcetable, namemap);
+}
+
+void MlePage::DoRegionGammaInfo(StringVec3d& allforcetable, Strmap& namemap,
+                                const RegionGammaInfo *pRegionGammaInfo)
+{
+    if (!pRegionGammaInfo)
+        throw implementation_error("MlePage::DoRegionGammaInfo() received a NULL RegionGammaInfo pointer.");
+    const ParamVector pvec(true);
+    vector<Parameter> dummy;
+    long indexOfAlpha = pvec.size() - 1;
+    dummy.push_back(pvec[indexOfAlpha]);
+    vector<Parameter>::const_iterator alpha_it = dummy.begin();
+    DoubleVec1d modifiers = registry.GetForceSummary().GetModifiers(indexOfAlpha);
+    StringVec1d percentiles;
+    bool usePercentiles = profile_PERCENTILE == pRegionGammaInfo->GetProfType();
+    StringVec1d subtypes;
+    subtypes.push_back("");
+    subtypes.push_back(""); // deliberately empty for gamma
+    StringVec1d labelColumn = MakeLabels(subtypes, modifiers, usePercentiles);
+    StringVec2d forcetable;
+    forcetable.push_back(labelColumn);
+    if (usePercentiles)
+        percentiles = DoPercentiles(registry.GetDataPack().GetNRegions(),
+                                    alpha_it, modifiers, namemap);
+    StringVec1d dataColumn = MakeColumn("RegGamma", "alpha", "Overall",
+                                        pRegionGammaInfo->GetMLE(),
+                                        percentiles, usePercentiles);
+    forcetable.push_back(dataColumn);
+    AddForceToOutput(allforcetable, forcetable);
+}
+
+// erynes extracted this method from MlePage::WriteBody(),
+// so that it could be called a second time to report the
+// gamma results, when gamma is present.
+StringVec1d MlePage::DoPercentiles(long region,
+                                   vector<Parameter>::const_iterator param,
+                                   const DoubleVec1d& modifiers,
+                                   Strmap& namemap)
+{
+    StringVec1d percentiles;
+    vector<centilepair> CIvec;
+    if (region == registry.GetDataPack().GetNRegions() &&
+        region > 1)
+        CIvec = param->GetOverallCIs();
+    else
+        CIvec = param->GetCIs(region);
+
+    for (unsigned long ind = 0; ind < modifiers.size(); ++ind)
+    {
+        if (profile_NONE == param->GetProfileType())
+        {
+            percentiles.push_back(MakeCentered(NOVAL,colwidth-2));
+            continue;
+        }
+        double centile = GetCentile(CIvec, modifiers[ind]);
+        string s_centile = Pretty(centile, colwidth-2);
+
+        //If the profiler gave up before finding values with as low a
+        // log likelihood as it would have liked, that information was
+        // saved in the analyzer, and we can get it out here to tell
+        // users that the value is "<1.53" instead of "1.53".
+        if (param->CentileIsExtremeLow(modifiers[ind], region))
+        {
+            s_centile = "<" + Pretty(centile, colwidth-3);
+            if (param->CentileIsExtremeHigh(modifiers[ind], region))
+            {
+                //Special flag condition when maximization failed.
+                s_centile = ERRVAL;
+            }
+        }
+        else if (param->CentileIsExtremeHigh(modifiers[ind], region))
+        {
+            s_centile = ">" + Pretty(centile, colwidth-3);
+        }
+        else if (param->CentileHadWarning(modifiers[ind], region))
+        {
+            s_centile = "*" + Pretty(centile, colwidth-3);
+            namemap.insert(make_pair("*", "This profile value had a warning from the maximizer, "
+                                     "probably a failure to converge after a large number of iterations."));
+        }
+        percentiles.push_back(s_centile);
+    }
+    return percentiles;
+}
+
+// MakeLabels makes a 1D vector that will act as the first column for a table
+//  in WriteBody out of 'subtypes' (from force.GetAxisName()), and a vector
+//  of modifiers, if indeed percentiles are being used.
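+//
+//  Illustrative example (hypothetical modifier values): with modifiers
+//  {0.005, 0.025, ..., 0.975, 0.995} the column comes out roughly as
+//    "", "Population", "Region", "Best Val (MLE)", "Percentile",
+//    "99%   0.005", "95%   0.025", ..., "MLE", ..., "95%   0.975", "99%   0.995"
+//  (with "MPE" in place of "MLE" for a Bayesian run); the best-value label is
+//  repeated in the middle whenever there are more than five modifiers.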
+StringVec1d MlePage::MakeLabels(const StringVec1d subtypes, const DoubleVec1d modifiers, const bool usepercentiles)
+{
+    assert (subtypes.size() == 2);
+
+    StringVec1d column;
+    column.push_back(""); // Or could be "Parameter"; it's the force title line.
+    column.push_back(subtypes[0]);
+    column.push_back(subtypes[1]);
+    string MLEtitle = "Best Val (" + m_MLE + ")";
+    column.push_back(MLEtitle);
+    if (usepercentiles)
+    {
+        column.push_back("Percentile");
+        StringVec1d percentiles(VecElemToString(modifiers));
+        unsigned long length = percentiles.size();
+        for (unsigned long i=0; i<length; ++i)
+        {
+            string fullnum = "0.000";
+            fullnum.replace(0, percentiles[i].size(),percentiles[i]);
+            double percent = 100 * fabs(.5 - modifiers[i])*2;
+            string final = ToString(percent);
+            if (percent > 0)
+                final += "%   " + fullnum;
+            else
+                final = fullnum;
+            column.push_back(final);
+            if ((length > 5) && (i==length/2-1))
+                column.push_back(m_MLE);
+        }
+    }
+    return column;
+}
+
+// MakeColumn makes a 1D vector that acts as the body column for a table in
+//  in WriteBody.  All columns have all their labels; they aren't concatenated
+//  until later, so we can accurately wrap the table first.
+
+StringVec1d MlePage::MakeColumn(const string& forcename, const string& paramname,
+                                const string& regionname, const double MLE,
+                                const StringVec1d& percentiles,
+                                const bool usepercentiles)
+{
+    StringVec1d column;
+    column.push_back(forcename);
+    column.push_back(paramname);
+    column.push_back(regionname);
+    column.push_back(Pretty(MLE, colwidth-2));
+    if (usepercentiles)
+    {
+        column.push_back("");
+        unsigned long length = percentiles.size();
+        for (unsigned long i=0; i<length; ++i)
+        {
+            column.push_back(percentiles[i]);
+            if ((length > 5) && (i==length/2-1))
+                column.push_back(Pretty(MLE, colwidth-2));
+        }
+    }
+    return column;
+}
+
+// AddForceToOutput takes a 2D table containing all the information for a
+//  particular force, and adds it to 'allforcetable'.  The trick is that if
+//  the first column of the new table matches the first column in one of the
+//  existing tables, it's appended to the end of the old table, to be wrapped
+//  later.  If nothing matches, it's added as a new table in the third
+//  dimension.
+
+void MlePage::AddForceToOutput(StringVec3d& allforcetable, StringVec2d forcetable)
+{
+    for (unsigned long table=0; table<allforcetable.size(); ++table)
+    {
+        if (DoColumnsMatch(allforcetable[table][0], forcetable[0]))
+        {
+            //The new force has the same labels as a previous force, so can
+            //be added as a new column.
+            for (unsigned long col=1; col<forcetable.size(); ++col)
+                allforcetable[table].push_back(forcetable[col]);
+            return;
+        }
+    }
+    //The new force has new labels, so cannot go in the same table.
+    allforcetable.push_back(forcetable);
+    return;
+}
+
+// DoColumnsMatch checks if, well, the columns match.  Exactly.
+bool MlePage::DoColumnsMatch(const StringVec1d col1, const StringVec1d col2)
+{
+    if (col1.size() != col2.size())
+        return false;
+    for (unsigned long row=0; row<col1.size(); ++row)
+        if (col1[row] != col2[row])
+            return false;
+
+    return true;
+}
+
+// WrapOutput is a fairly complex routine that takes a bunch of 2d tables and
+//  makes sure each table can fit widthwise on a page.  If it doesn't, it
+//  examines the content of the first three rows, which are all labels (the
+//  first saying which force it is, the second which partition or cross-
+//  partition (aka 'Population'), and the third which region.  If a row is
+//  found with identical labels across it, the wrapping routine is constrained
+//  to not break up those columns as to create a 'widow' or 'orphan'--a column
+//  broken off from its logical neighbors.  This means that 2-column sections
+//  may not be broken up, nor may 3-column sections, that 4-column sections may
+//  be broken exactly in their center, and so on.  Given the nature of the
+//  data, there should be no possible input here that allows no wrapping at
+//  all, but should such data be encountered, it will print a warning to the
+//  user, and simply wrap the data at the max number of columns.
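+//
+//  Illustrative example (hypothetical labels): with maxcols == 6 and a header
+//  row reading
+//      PopA  PopA | PopB  PopB  PopB | PopC  PopC  PopC  PopC
+//  the size-2 and size-3 groups must stay together and the size-4 group may
+//  only be split into 2+2, so the wrap that would fall after data column 6 is
+//  backed up to the PopB/PopC boundary after data column 5.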
+void MlePage::WrapOutput(StringVec3d& allforcetable)
+{
+    StringVec3d newforcetable;
+
+    unsigned long maxcols = (pagewidth - label_colwidth + 1)/(colwidth+1);
+    //maxcols should be 6 for pagewidth 75, label_colwidth=14, colwidth=10
+    //
+    //Note that the +1 is to compensate for the (unprinted) last space after
+    // the last column (normally accounted for with colwidth+1)
+    //
+    // Use +5 instead of +1 to get a full-width column
+
+    for (unsigned long table = 0; table < allforcetable.size(); ++table)
+    {
+        if (allforcetable[table][1].size() > 6)
+            allforcetable[table] = SortTable(allforcetable[table], 2, 5);
+        if (allforcetable[table].size()-1 < maxcols)
+        {
+            newforcetable.push_back(allforcetable[table]);
+        }
+        else
+        {
+            LongVec2d allbreaks;
+            for (unsigned long row=0; row < 3; ++row)
+            {
+                LongVec1d breaks;
+                string prev = allforcetable[table][1][row];
+                long samelen = 1;
+                for (unsigned long column=2; column < allforcetable[table].size(); ++column)
+                {
+                    if (allforcetable[table][column][row] == prev)
+                    {
+                        samelen++;
+                    }
+                    else
+                    {
+                        breaks.push_back(samelen);
+                        samelen = 1;
+                    }
+                    prev = allforcetable[table][column][row];
+                }
+                breaks.push_back(samelen);
+                allbreaks.push_back(breaks);
+            }
+            //'allbreaks' now contains three vectors of where the 'natural' break
+            // points are.  Given the widow/orphan rules:
+            //   size-1 groups can be broken up wherever.
+            //   size-2 and -3 groups must be contiguous.
+            //   size-4 and greater groups may be broken in such a way as to
+            //     always keep at least 2 columns together.
+            //
+            // We now make a boolean vector that tells us where we may break up our
+            // columns.
+
+            vector<bool> allowedbreaks(allforcetable[table].size()-2);
+            //'allowedbreaks' indicate if the column after the ab index (in the
+            // body of the data; not the label column) may be wrapped.
+            for (unsigned long col=0; col<allowedbreaks.size(); ++col)
+                allowedbreaks[col]=true;
+            for (long row=0; row<3; row++)
+            {
+                long actualcol=0;
+                for (unsigned long breakcol=0; breakcol < allbreaks[row].size(); ++breakcol)
+                {
+                    long grouplen = allbreaks[row][breakcol];
+                    if (grouplen > 1)
+                    {
+                        allowedbreaks[actualcol] = false;
+                        allowedbreaks[actualcol + grouplen - 2] = false;
+                        //grouplen is a size, not an index; hence the -2.
+                    }
+                    actualcol += grouplen;
+                }
+            }
+
+            //LS TEST -- printing routine
+#if 0
+            cout << "Allowed breaks vector:" << endl;
+            for (unsigned long i=0; i<allowedbreaks.size(); ++i)
+                cout << allowedbreaks[i];
+            cout << endl;
+#endif
+
+            //Now to actually wrap the darn table.  Start by checking at maxcols
+            // and go down from there.
+
+            unsigned long lastwrap = 0;
+
+            for (unsigned long column = maxcols; column < allforcetable[table].size()+maxcols-1; column += maxcols)
+            {
+                StringVec2d newtable;
+                if (column < allforcetable[table].size()-1)
+                {
+                    while ((column > lastwrap) && (allowedbreaks[column-1]==false))
+                        --column;
+                    if (column==lastwrap)
+                    {
+                        RunReport& runreport = registry.GetRunReport();
+                        runreport.ReportChat("Couldn't find a place to wrap columns for output."
+                                             "  Just using the max instead.");
+                        column += maxcols;
+                    }
+                }
+                else
+                    column = allforcetable[table].size()-1;
+                newtable.push_back(allforcetable[table][0]);
+                for (unsigned long newcol = lastwrap+1; newcol<=column; ++newcol)
+                    newtable.push_back(allforcetable[table][newcol]);
+                lastwrap = column;
+                newforcetable.push_back(newtable);
+            }
+        }
+    }
+    allforcetable = newforcetable;
+}
+
+// WriteOutput takes correctly-wrapped input data, formats the labels,
+//  creates a legend if needed, and then prints the data to the output file.
+//  It attempts a very crude page-wrapping mechanism wherein if the number
+//  of columns would put us over the page limit, it prints a page break
+//  and the title again.  This needs work if we want appropriate line
+//  breaks in the output, but probably not here.
+void MlePage::WriteOutput(StringVec3d& allforcetable, Strmap namemap)
+{
+    for (unsigned long table=0; table<allforcetable.size(); ++table)
+    {
+        if (pagelength < current_length)
+        {
+            registry.GetRunReport().ReportDebug
+                ("Printed too many lines per page in the output file.");
+        }
+        if (pagelength < current_length + allforcetable[table][0].size())
+        {
+            PrintPageBreak();
+            PrintTitle();
+        }
+        StrPairVec legend;
+        StrPairVecIter legendline;
+
+        // pull warning message out of namemap and munge onto legend
+        // note: putting the warning into namemap in the first place
+        // is a sad, sad hack which results in the error being repeated
+        // on every table
+        Strmapiter asterisk = namemap.find("*");
+        if (asterisk != namemap.end())
+        {
+            legend.push_back(make_pair(asterisk->second, asterisk->first));
+        }
+
+        //We go through the first three rows individually, making sure not to
+        // 'double-print' any labels.  This gives us enough space to print
+        // the long name instead of resorting to the short name all the time.
+        vector<bool> breaks;
+        for (unsigned long i=0; i<allforcetable[table].size(); ++i)
+        {
+            breaks.push_back(false);
+        }
+        for (long titlerow=0; titlerow<3; ++titlerow)
+        {
+            if ((titlerow !=2) || (registry.GetDataPack().GetNRegions() > 1))
+            {
+                StringVec1d newrow = GetOneRow(allforcetable[table], titlerow);
+                NixRedundancy(newrow, legend, namemap, breaks);
+                WriteLine(newrow);
+            }
+        }
+
+        for (unsigned long row=3; row<allforcetable[table][0].size(); ++row)
+        {
+            outf << MakeJustified(allforcetable[table][0][row], label_colwidth);
+            for (unsigned long column=1; column<allforcetable[table].size(); ++column)
+            {
+                if (breaks[column])
+                    outf << "|";
+                else
+                    outf << " ";
+                outf << MakeCentered(allforcetable[table][column][row], colwidth);
+            }
+            outf << endl;
+            ++current_length;
+        }
+        for (legendline=legend.begin(); legendline != legend.end(); legendline++)
+        {
+            string line = legendline->second;
+            line += ":  " + legendline->first;
+            PrintWrapped(line);
+        }
+        PrintBlankLine();
+        PrintBlankLine();
+    }
+}
+
+// NixRedundancy takes a row of strings, many of which will be the same, and
+//  collapses each run of redundant titles into a single centered title.
+//
+//  If all that's left is one title, the first string in the vector is also
+//  replaced by that redundant title.
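+//
+//  Illustrative sketch (values invented purely for illustration):  given the
+//  row {"<label>", "Pop1", "Pop1", "Pop2"}, the two adjacent "Pop1" cells
+//  collapse into one "Pop1" title centered over both columns, "Pop2" keeps
+//  its own column, and 'breaks' records where each collapsed group ends so
+//  that WriteOutput can draw a "|" separator there.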
+
+void MlePage::NixRedundancy(StringVec1d& row, StrPairVec& legend,
+                            Strmap namemap, vector<bool>& breaks)
+{
+    string test = row[1];
+    unsigned long width = colwidth;
+
+    StringVec1d newrow;
+    string title = MakeJustified(row[0], -label_colwidth);
+    title += " ";
+    newrow.push_back(title);
+    vector<unsigned long> widths;
+    StringVec1d pipesorspaces;
+    long legendlevel = 0;
+    widths.push_back(label_colwidth);
+    pipesorspaces.push_back(" ");
+    for (unsigned long column=2; column<row.size(); ++column)
+    {
+        if (row[column] == test)
+            width += colwidth+1;
+        else
+        {
+            //We have a row with more than one title
+            title = test;
+            if (title.size() > width)
+            {
+                long thislevel = 1;
+                string old = title;
+                Strmapiter mapline=namemap.find(title);
+                while (mapline != namemap.end() && title.size() > width)
+                {
+                    title = mapline->second;
+                    mapline = namemap.find(title);
+                    thislevel++;
+                }
+                if (thislevel > legendlevel)
+                    legendlevel = thislevel;
+            }
+            //title = MakeCentered(title, width);
+            if (width > colwidth)
+            {
+                breaks[column] = true;
+                pipesorspaces.push_back("|");
+            }
+            else if (breaks[column])
+                pipesorspaces.push_back("|");
+            else
+                pipesorspaces.push_back(" ");
+            newrow.push_back(test);
+            widths.push_back(width);
+            test = row[column];
+            width = colwidth;
+        }
+    }
+    //The last row's title still needs to be added to newrow
+    title = test;
+    if (title.size() > width)
+    {
+        long thislevel = 1;
+        string old = title;
+        Strmapiter mapline=namemap.find(title);
+        while (mapline != namemap.end() && title.size() > width)
+        {
+            title = mapline->second;
+            mapline = namemap.find(title);
+            thislevel++;
+        }
+        if (thislevel > legendlevel)
+            legendlevel = thislevel;
+    }
+    newrow.push_back(test);
+    widths.push_back(width);
+    pipesorspaces.push_back("");
+    for (unsigned long i=1; i<newrow.size(); i++)
+    {
+        string old = newrow[i];
+        for (long ll=1; ll < legendlevel; ll++)
+        {
+            Strmapiter mapline = namemap.find(newrow[i]);
+            if (mapline != namemap.end())
+                newrow[i] = mapline->second;
+        }
+        Strmapiter mapline = namemap.find(newrow[i]);
+        while ((newrow[i].size() > widths[i]) && (mapline != namemap.end()))
+        {
+            //This should never be reached, but just in case.
+            newrow[i] = mapline->second;
+            mapline = namemap.find(newrow[i]);
+        }
+        if (old != newrow[i])
+            legend.push_back(make_pair(old, newrow[i]));
+        newrow[i] = MakeCentered(newrow[i],widths[i]) + pipesorspaces[i];
+    }
+
+    row = newrow;
+    //Warning:  This routine requires that the 'depth' of the chained legend
+    // associations in 'namemap' be equivalent for each title on the row.  If
+    // not, it will still work, but may look odd and/or have mismatched titles
+    // across the row.  For behavior that is reliable but potentially
+    // mismatched, use the version of this routine from just before 5/28/04.
+
+} // MlePage::NixRedundancy()
+
+// WriteLine takes a string vector and prints it, adding no spaces between
+//  the columns and appending a line return at the end.
+void MlePage::WriteLine(const StringVec1d line)
+{
+    if (line.size())
+        outf << line[0];
+    for (unsigned long column=1; column<line.size(); ++column)
+        outf << line[column];
+    outf << endl;
+    ++current_length;
+}
+
+//------------------------------------------------------------------------------------
+
+void MlePage::WriteSimMles()
+{
+    // loop over all forces, printing a set of lines:
+    //    forcename, parameter name, region/overall, mle value, 95% CI's
+    //    forcename, parameter name, 90% CI's, 95% exclude, 90% exclude
+    //
+    //  finally print the number of variable sites (it may not be present
+    //  if "verbose" output was not chosen).
+
+    const ForceVec forces(registry.GetForceSummary().GetAllForces());
+    const StringVec1d regnames(registry.GetDataPack().GetAllRegionNames());
+
+    ForceVec::const_iterator fit;
+    for(fit = forces.begin(); fit != forces.end(); ++fit)
+    {
+        DoubleVec2d mles((*fit)->GetMles());
+        DoubleVec1d overallmles((*fit)->GetPopmles());
+        string fname((*fit)->GetShortparamname());
+        StringVec1d pnames((*fit)->GetAllParamNames());
+
+        assert(mles.size() == overallmles.size() &&
+               mles.size() == static_cast<unsigned long>((*fit)->GetNParams()));
+
+        const vector<Parameter>& parameters = (*fit)->GetParameters();
+        bool usepercentiles = true;
+        if ((*fit)->SummarizeProfTypes() != profile_PERCENTILE) usepercentiles = false;
+
+        unsigned long param, nparam((*fit)->GetNParams());
+        for(param = 0; param < nparam; ++param)
+        {
+            if (pnames[param].empty())  // flag for invalid parameter
+                continue;                // supported by Force::GetAllParamNames()
+            string baseline(fname + " | " + pnames[param]);
+            string baseline2(fname + " | " + pnames[param]);
+            assert(mles[param].size() == regnames.size());
+            unsigned long region;
+            vector<centilepair> CIvec;
+            for(region = 0; region < mles[param].size(); ++region)
+            {
+                string line(baseline);
+                line +=  " | " + regnames[region];
+                line += ": " + ToString(mles[param][region]);
+
+                string line2(baseline2);
+                if (usepercentiles)
+                {
+                    CIvec = parameters[param].GetCIs(region);
+                    double lower(GetCentile(CIvec,0.025)), upper(GetCentile(CIvec,0.975));
+                    double truevalue(parameters[param].GetTruth());
+                    bool exclude95(lower < truevalue && truevalue < upper);
+                    line += "; " + ToString(lower) + "->" + ToString(upper);
+                    double lower2(GetCentile(CIvec,0.05)), upper2(GetCentile(CIvec,0.95));
+                    bool exclude90(lower2 < truevalue && truevalue < upper2);
+                    line2 += " 90% " + ToString(lower2) + "=>" + ToString(upper2);
+                    line2 += "$ x95=" + ToString(exclude95);
+                    if (!exclude95)
+                    {
+                        if (truevalue < lower) line2 += "Below";
+                        else line2 += "Above";
+                    }
+                    line2 += "# x90=" + ToString(exclude90);
+                }
+
+                outf << line << endl;
+                if (usepercentiles) outf << line2 << endl;
+            }
+            string line(baseline);
+            line += " | Overall: " + ToString(overallmles[param]);
+
+            string line2(baseline2);
+            if (usepercentiles)
+            {
+                CIvec = parameters[param].GetOverallCIs();
+                double lower(GetCentile(CIvec,0.025)), upper(GetCentile(CIvec,0.975));
+                line += "; " + ToString(lower) + "->" + ToString(upper);
+                double truevalue(parameters[param].GetTruth());
+                bool exclude95(lower < truevalue && truevalue < upper);
+                double lower2(GetCentile(CIvec,0.05)), upper2(GetCentile(CIvec,0.95));
+                bool exclude90(lower2 < truevalue && truevalue < upper2);
+                line2 += " 90% " + ToString(lower2) + "=>" + ToString(upper2);
+                line2 += "$ x95=" + ToString(exclude95);
+                if (!exclude95)
+                {
+                    if (truevalue < lower) line2 += "Below";
+                    else line2 += "Above";
+                }
+                line2 += "# x90=" + ToString(exclude90);
+            }
+
+            outf << line << endl;
+            if (usepercentiles) outf << line2 << endl;
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+MapPage::MapPage(ofstream& pf, long pglngth, long pgwdth)
+    : ReportPage(pf,pglngth,pgwdth)
+{
+} // MapPage::constructor
+
+void MapPage::Show()
+{
+    if (!registry.GetDataPack().AnyMapping())
+    {
+        return;
+    }
+
+    PrintTitle();
+
+    //The results of the mapping algorithm are stored in the data pack.
+    for (long reg=0; reg<registry.GetDataPack().GetNRegions(); reg++)
+    {
+        const Region& region = registry.GetDataPack().GetRegion(reg);
+        for (long mloc=0; mloc<region.GetNumMovingLoci(); mloc++)
+        {
+            PrintBlankLine();
+            const Locus& locus = region.GetMovingLocus(mloc);
+            switch (locus.GetAnalysisType())
+            {
+                case mloc_mapjump:
+                    PrintWrapped("The analysis for this trait was performed by allowing the location of"
+                                 " the trait marker to move from place to place as trees were created.");
+                    break;
+                case mloc_mapfloat:
+                    PrintWrapped("This analysis for this trait was performed by collecting trees, then calculating"
+                                 " the data likelihood of the trait marker at all allowed sites"
+                                 " on those trees, and then averaging.");
+                    break;
+                case mloc_data:
+                case mloc_partition:
+                    assert(false);      //These loci should not be in the moving locus vector.
+                    return;
+            }
+            PrintBlankLine();
+
+            PrintWrapped("Mapping results for " + locus.GetName()
+                         + " from the region \""
+                         + region.GetRegionName() + "\".");
+            long regoffset = region.GetSiteSpan().first;
+            PrintWrapped(locus.ReportMappingInfo(regoffset));
+            PrintBlankLine();
+        }
+    }
+    PrintPageBreak();
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+ProfPage::ProfPage(ofstream& pf, long pglngth, long pgwdth)
+    : ReportPage(pf,pglngth,pgwdth)
+{
+    colwidth = 10;
+} // ProfPage::constructor
+
+//------------------------------------------------------------------------------------
+
+void ProfPage::Show()
+{
+    const ParamVector overallparams(true);
+    RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+
+    if (overallparams.CheckCalcProfiles() == paramlist_NO) return;
+
+    PrintTitle();
+    PrintBlankLine();
+    if (overallparams.size() > 1)
+    {
+        outf << "The first listed parameter is the parameter held constant."
+             << endl;
+        ++current_length;
+        PrintBlankLine();
+    }
+
+    PrintLineOf('=');
+    outf << MakeCentered("Overall Profile Tables") << endl;
+    ++current_length;
+    PrintLineOf('=');
+    PrintBlankLine();
+    pagetitle.push_back("(overall profile tables)");
+
+    DisplayParameters(overallparams,FLAGLONG); // print the overall profiles
+
+    if (pRegionGammaInfo)
+        pRegionGammaInfo->Deactivate(); // to avoid attempting to print alpha in the
+    // regional profiles; see regionalparams below
+
+    unsigned long nregions = registry.GetDataPack().GetNRegions();
+    if (verbosity == VERBOSE && nregions > 1)
+    {
+        const ParamVector regionalparams(true); // will not contain alpha
+        PrintLineOf('=');
+        outf << MakeCentered("Regional Profile Tables") << endl;
+        ++current_length;
+        PrintLineOf('=');
+        PrintBlankLine();
+        for(unsigned long region = 0; region < nregions; ++region)
+        {
+            string rname = registry.GetDataPack().GetRegion(region).GetRegionName();
+            pagetitle[1] = "(regional profile tables for region " + rname + ")";
+            PrintBlankLine();
+            outf << "****** Estimates for region " << rname;
+            outf << " ******" << endl;
+            ++current_length;
+            PrintBlankLine();
+            DisplayParameters(regionalparams,region);
+        }
+    }
+
+    // If desired, at this point one could return the registry's "RegionGammaInfo"
+    // to its former state by calling pRegionGammaInfo->Activate().
+
+    PrintPageBreak();
+
+} // ProfPage::Show
+
+// "region" is FLAGLONG if the overall profiles are being asked for
+// "region" is the region number, numbering from zero, of the region
+//    being profiled.
+void ProfPage::DisplayParameters(const ParamVector& params, long region)
+{
+    const ForceSummary& forcesummary = registry.GetForceSummary();
+
+    string regionname = "Overall";
+    if (region != FLAGLONG)
+    {
+        regionname = registry.GetDataPack().GetRegion(region).GetRegionName();
+    }
+
+    unsigned long pindex, nparams = params.size();
+    for(pindex = 0; pindex < nparams; ++pindex)
+    {
+        const Parameter param = params[pindex];
+        proftype ptype = param.GetProfileType();
+        if (!param.IsValidParameter() || ptype == profile_NONE) continue;
+
+        StringVec2d forcetable;
+        string line, explanation;
+        if (profile_FIX == ptype)
+        {
+            if (m_bayesanalysis)
+                explanation = "Fixed profile: Points shown are pre-determined multiples of MPE.\n";
+            else
+                explanation = "Fixed profile: Points shown are pre-determined multiples of MLE.\n";
+        }
+        else // profile_PERCENTILE == ptype
+        {
+            if (m_bayesanalysis)
+                explanation = "Percentile profile: Points shown indicate credibility intervals.\n";
+            else
+                explanation = "Percentile profile: Points shown indicate approximate confidence intervals.\n";
+        }
+        if (current_length + 4 + 12 > pagelength)
+        {
+            PrintPageBreak();
+            PrintTitle();
+            PrintBlankLine();
+        }
+        line = regionname + ": " + param.GetName() + " (" + param.GetShortName() + ")";
+        PrintTableTitle(line);
+        PrintWrapped(explanation);
+        //Set up the first three columns.
+        StringVec1d column;
+        string label;
+        string mle;
+        double mleval, mleperc = 0.0; //Set to zero to avoid compiler warning.
+        DoubleVec1d modifiers;
+        StringVec1d mod_strings;
+        bool mixedmult = false;
+
+        vector<vector<centilepair> > centiles;
+        vector<centilepair> CIvec;
+
+        if (region==FLAGLONG)
+        {
+            mleval = param.GetOverallMLE();
+            centiles = param.GetOverallProfile();
+            CIvec = param.GetOverallCIs();
+        }
+        else
+        {
+            mleval = param.GetMLE(region);
+            centiles = param.GetProfiles(region);
+            CIvec = param.GetCIs(region);
+        }
+        if (m_bayesanalysis)
+        {
+            mleperc = GetReverseCentile(CIvec, mleval);
+        }
+        //LS TEST
+#if 0
+        for (unsigned long i=0; i<CIvec.size(); i++)
+            cout << CIvec[i].first << ", " << CIvec[i].second << endl;
+#endif
+
+        // The first column:  Percentile/Multiplier labels.
+        if (ptype == profile_FIX)
+        {
+            label = "Multiplier";
+        }
+        else
+        {
+            label = "Percentile";
+        }
+        mle = m_MLE;
+        modifiers = forcesummary.GetModifiers(pindex); // FS gets 'em from ParamVector
+        //we need the pindex modifier in case we're fixed and have growth.
+        mod_strings = (VecElemToString(modifiers));
+        if (ptype == profile_PERCENTILE)
+        {
+            for (unsigned long i=0; i<mod_strings.size(); ++i)
+            {
+                string fullnum = "0.000";
+                fullnum.replace(0, mod_strings[i].size(),mod_strings[i]);
+                mod_strings[i] = fullnum;
+            }
+            if (m_bayesanalysis)
+            {
+                double percval = GetReverseCentile(CIvec,mleval);
+                // code to get the percentile as a string of form d.ddd
+                char myBuffer[6];
+                if (sprintf(myBuffer,"%04.3f",percval) != 5)
+                {
+                    assert(false);
+                }
+                mle = myBuffer;
+                mle = mle + "-" + m_MLE;
+            }
+        }
+        else
+        {
+            //ptype == fixed, we've already assured ptype != none
+            if ((param.IsForce(force_GROW)) && (ptype == profile_FIX) && (verbosity != CONCISE) && (verbosity != NONE))
+            {
+                mod_strings = MakeGrowFixedColumn();
+                mixedmult = true;
+                if (m_bayesanalysis)
+                    modifiers = MakeGrowFixedModifiers(mleval);
+            }
+            else if ((param.IsForce(force_LOGISTICSELECTION)) &&
+                     (ptype == profile_FIX) &&(verbosity != CONCISE) && (verbosity != NONE))
+            {
+                mod_strings = MakeLogisticSelectionFixedColumn();
+                mixedmult = true;
+                if (m_bayesanalysis)
+                    modifiers = MakeLogisticSelectionFixedModifiers(mleval);
+            }
+            else
+            {
+                for (unsigned long i=0; i<mod_strings.size(); ++i)
+                {
+                    mod_strings[i] = MakeJustified(mod_strings[i],-5);
+                    if (m_bayesanalysis)
+                        modifiers[i] = modifiers[i]*mleval;
+                }
+            }
+        }
+        column = MakeColumn(label, mle, mod_strings, mixedmult);
+        if (mixedmult)
+        {
+            column[0] = MakeCentered("[Value] or", colwidth);
+        }
+        forcetable.push_back(column);
+
+        //The second column:  The fixed parameter
+        label = param.GetShortName();
+        MakeItShorter(label, colwidth-2);
+        mle = Pretty(mleval, colwidth-2);
+
+        if (m_bayesanalysis && ptype==profile_FIX)
+        {
+            //We have to report both the likes and the percentiles.  Also,
+            // our modifiers string holds the values, not the mults.
+
+            column = MakeColumn(label, mle, VecElemToString(modifiers, colwidth-2), mixedmult);
+            forcetable.push_back(column);
+
+            //Now make a third column of percentiles.
+            label = "Percentiles";
+            mle = Pretty(GetReverseCentile(CIvec, mleval), colwidth-2);
+        }
+
+        vector<bool> badLnLs(column.size(), false);
+        bool bayesfixed = (m_bayesanalysis && ptype == profile_FIX);
+        mod_strings = MakeModColFrom(CIvec, modifiers, badLnLs, bayesfixed);
+
+        column = MakeColumn(label, mle, mod_strings, mixedmult);
+        forcetable.push_back(column);
+
+        //The third column:  Log likelihoods
+        if (m_bayesanalysis)
+        {
+            label = "Point Prob";
+            if (ptype==profile_FIX)
+                TradeValsForPercs(modifiers, CIvec);
+        }
+        else
+            label = "Ln(L)";
+
+        if (region==FLAGLONG)
+        {
+            mle = Pretty(forcesummary.GetOverallLlikeMle(), colwidth-2);
+            CIvec = param.GetOverallPriorLikes();
+        }
+        else
+        {
+            mle = Pretty(forcesummary.GetLlikeMle(region), colwidth-2);
+            CIvec = param.GetPriorLikes(region);
+        }
+        if (m_bayesanalysis)
+        {
+            mle = Pretty(GetCentile(CIvec, mleperc));
+        }
+
+        mod_strings = MakeLnLColFrom(CIvec,modifiers,badLnLs);
+        column = MakeColumn(label, mle, mod_strings, mixedmult);
+        forcetable.push_back(column);
+
+        //Print a table of just the log likelihoods
+        if (m_bayesanalysis)
+        {
+            PrintWrapped("Point Probabilities:");
+        }
+        else
+        {
+            PrintWrapped("Log Likelihoods:");
+        }
+        if (mixedmult)
+        {
+            line = "Values used for growth are a combination of multiples of the "
+                + m_MLE + " and fixed values.  Fixed values are shown in [brackets].";
+            PrintWrapped(line);
+        }
+        PrintBlankLine();
+        StringVec2d sortedtable = SortTable(forcetable, 2, (mixedmult ? 2 : 1));
+        StringVec3d wrappedtable = WrapTable(sortedtable);
+        PrintTable(wrappedtable);
+
+        //Now remove the log likelihoods column again.
+        forcetable.pop_back();
+
+        long nvariable_params = params.NumVariableParameters();
+        //Now loop through all the rest of the parameters.
+        if ((nvariable_params > 1) && !m_bayesanalysis)
+        {
+            line = "Best fit parameters with " + param.GetShortName()
+                + " held constant:";
+            PrintWrapped(line);
+            PrintBlankLine();
+            for(unsigned long p2index = 0; p2index < nparams; ++p2index)
+            {
+                const Parameter floatparam = params[p2index];
+                if (p2index == pindex || !floatparam.IsVariable()) continue;
+                //Note:  comment out the IsVariable condition above if you wish to
+                // display those parameters in profiling.
+                label = floatparam.GetShortName();
+                MakeItShorter(label, colwidth-1);
+                if (region == FLAGLONG)
+                    mle = Pretty(params[p2index].GetOverallMLE(), colwidth-2);
+                else
+                    mle = Pretty(params[p2index].GetMLE(region), colwidth-2);
+
+                mod_strings = MakeModColFrom(centiles[p2index], modifiers, badLnLs);
+
+                column = MakeColumn(label, mle, mod_strings, mixedmult);
+                forcetable.push_back(column);
+            }
+
+            sortedtable = SortTable(forcetable, 2, (mixedmult ? 2 : 1));
+            wrappedtable = WrapTable(sortedtable);
+            PrintTable(wrappedtable);
+            PrintBlankLine();
+        }
+    }
+} // ProfPage::DisplayParameters
+
+StringVec1d ProfPage::MakeColumn(const string& name, const string& mle,
+                                 const StringVec1d mod_strings, bool mixedmult)
+{
+    StringVec1d column;
+    if (mixedmult)
+        column.push_back("");
+    column.push_back(name);
+    unsigned long length = mod_strings.size();
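+    // Splice the MLE/MPE entry into the middle of the column of modifiers,
+    //  so the point estimate appears roughly halfway down the column.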
+    for (unsigned long i=0; i<length; ++i)
+    {
+        column.push_back(mod_strings[i]);
+        if (i==length/2-1)
+            column.push_back(mle);
+    }
+    return column;
+}
+
+//A special case to end all special cases.  This code should trigger if and
+// only if we're trying to make a long (not concise) list of growth modifiers
+// with fixed profiling.  The reason for this is that this case does some
+// multiplying and some set-value-to-X'ing.
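+//
+// For illustration only (the actual numbers live in vecconst::growthmultipliers
+// and vecconst::growthfixed):  a multiplier entry such as 0.5 is displayed as a
+// plain multiple of the MLE ("0.5"), while a fixed entry such as 0 is displayed
+// in brackets ("[0]") to show it is a set value rather than a multiple.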
+StringVec1d ProfPage::MakeGrowFixedColumn()
+{
+    StringVec1d gmult  = VecElemToString(vecconst::growthmultipliers);
+    StringVec1d gfixed = VecElemToString(vecconst::growthfixed);
+    StringVec1d combined;
+    for (unsigned long i=0; i<gmult.size(); ++i)
+        combined.push_back(MakeJustified(gmult[i], -7));
+    for (unsigned long i=0; i<gfixed.size(); ++i)
+        combined.push_back(MakeJustified(("[" + gfixed[i]) + "]", -7));
+    return combined;
+}
+
+DoubleVec1d ProfPage::MakeGrowFixedModifiers(double mle)
+{
+    DoubleVec1d gmult  = vecconst::growthmultipliers;
+    DoubleVec1d gfixed = vecconst::growthfixed;
+    DoubleVec1d combined;
+    for (unsigned long i=0; i<gmult.size(); ++i)
+        combined.push_back(gmult[i]*mle);
+    for (unsigned long i=0; i<gfixed.size(); ++i)
+        combined.push_back(gfixed[i]);
+    return combined;
+}
+
+// Ha-ha!  A new special case, perfectly analogous to the previous one,
+// just a different force.
+StringVec1d ProfPage::MakeLogisticSelectionFixedColumn()
+{
+    StringVec1d lsmult  = VecElemToString(vecconst::logisticselectionmultipliers);
+    StringVec1d lsfixed = VecElemToString(vecconst::logisticselectionfixed);
+    StringVec1d combined;
+    for (unsigned long i=0; i<lsmult.size(); ++i)
+        combined.push_back(MakeJustified(lsmult[i], -7));
+    for (unsigned long i=0; i<lsfixed.size(); ++i)
+        combined.push_back(MakeJustified(("[" + lsfixed[i]) + "]", -7));
+    return combined;
+}
+
+DoubleVec1d ProfPage::MakeLogisticSelectionFixedModifiers(double mle)
+{
+    DoubleVec1d lsmult  = vecconst::logisticselectionmultipliers;
+    DoubleVec1d lsfixed = vecconst::logisticselectionfixed;
+    DoubleVec1d combined;
+    for (unsigned long i=0; i<lsmult.size(); ++i)
+        combined.push_back(lsmult[i]*mle);
+    for (unsigned long i=0; i<lsfixed.size(); ++i)
+        combined.push_back(lsfixed[i]);
+    return combined;
+}
+
+StringVec1d ProfPage::MakeModColFrom(const vector<centilepair>& numbers,
+                                     const DoubleVec1d modifiers,
+                                     vector<bool> badLnLs,
+                                     bool bayesfixed)
+{
+    if (badLnLs.size() < modifiers.size())
+        badLnLs.assign(modifiers.size(), false);
+    StringVec1d line;
+    for(unsigned long perc = 0; perc < modifiers.size(); ++perc)
+        if (numbers.empty()) line.push_back(NOVAL);
+        else
+        {
+            if (badLnLs[perc])
+                line.push_back(ERRVAL);
+            else
+            {
+                if (!bayesfixed)
+                {
+                    line.push_back(Pretty(GetCentile(numbers,modifiers[perc]),colwidth-2));
+                }
+                else
+                {
+                    line.push_back(Pretty(GetReverseCentile(numbers,modifiers[perc]),colwidth-2));
+                }
+            }
+        }
+    return line;
+
+} // ProfPage::MakeModColFrom
+
+StringVec1d ProfPage::MakeLnLColFrom(const vector<centilepair>& numbers,
+                                     const DoubleVec1d modifiers,
+                                     vector<bool>& badLnLs)
+{
+    StringVec1d line;
+    for(unsigned long perc = 0; perc < modifiers.size(); ++perc)
+        if (numbers.empty()) line.push_back(NOVAL);
+        else
+        {
+            double val = GetCentile(numbers, modifiers[perc]);
+            if (val == -DBL_BIG)
+            {
+                line.push_back(ERRVAL);
+                badLnLs[perc] = true;
+            }
+            else
+            {
+                line.push_back(Pretty(val,colwidth-2));
+            }
+        }
+
+    return line;
+
+} // ProfPage::MakeLnLColFrom
+
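+// TradeValsForPercs swaps each parameter value in 'modifiers' for its
+//  corresponding percentile, looked up in CIvec (percentile/value pairs);
+//  it asserts if a value cannot be matched.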
+void ProfPage::TradeValsForPercs(DoubleVec1d& modifiers, vector<centilepair> CIvec)
+{
+    for (unsigned long i=0; i<modifiers.size(); i++)
+    {
+        vector<centilepair>::const_iterator cent;
+        bool found=false;
+        for(cent = CIvec.begin(); cent != CIvec.end() && !found; ++cent)
+        {
+            if (CloseEnough(cent->second, modifiers[i]))
+            {
+                modifiers[i] = cent->first;
+                found = true;
+            }
+        }
+        assert (found); //Didn't find a needed number to trade out.
+    }
+}
+
+StringVec3d ProfPage::WrapTable(StringVec2d& forcetable)
+{
+    StringVec3d wrappedtable;
+    StringVec2d subtable;
+    unsigned long maxcols = (pagewidth-2) / (colwidth+1);
+    //The -2 is for an extra couple spaces between the fixed parameter and the
+    //rest of the parameters.
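+    // For example (hypothetical sizes):  with pagewidth 80 and colwidth 10,
+    //  maxcols = 78/11 = 7 columns fit on one line of the sub-table.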
+    subtable.push_back(forcetable[0]);
+    subtable.push_back(forcetable[1]);
+    unsigned long nextwrap = maxcols;
+    for (unsigned long column=2; column < forcetable.size(); ++column)
+    {
+        if (column==nextwrap)
+        {
+            nextwrap += maxcols-2;
+            wrappedtable.push_back(subtable);
+            subtable.clear();
+            subtable.push_back(forcetable[0]);
+            subtable.push_back(forcetable[1]);
+        }
+        subtable.push_back(forcetable[column]);
+    }
+    wrappedtable.push_back(subtable);
+    return wrappedtable;
+
+} // ProfPage::WrapTable
+
+void ProfPage::PrintTable(StringVec3d& wrappedtable)
+{
+    for (unsigned long table=0; table<wrappedtable.size(); ++table)
+    {
+        if (wrappedtable[table][0].size() + current_length > pagelength)
+        {
+            PrintPageBreak();
+            PrintTitle();
+            PrintBlankLine();
+        }
+        // outf << current_length;
+        for (unsigned long row=0; row<wrappedtable[table][0].size(); ++row)
+        {
+            StringVec1d line = GetOneRow(wrappedtable[table], row);
+            PrintProfileLine(line);
+        }
+        PrintBlankLine();
+    }
+} // ProfPage::PrintTable
+
+void ProfPage::PrintProfileLine(StringVec1d& line)
+{
+    for (unsigned long col=0; col<line.size(); ++col)
+    {
+        if (col==2)
+            outf << " | ";
+        outf << MakeCentered(line[col], colwidth);
+    }
+    outf << endl;
+    ++current_length;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+UsrPage::UsrPage(ofstream& pf, long pglngth, long pgwdth)
+    : ReportPage(pf,pglngth,pgwdth)
+{
+
+    if (registry.GetForceSummary().CheckForce(force_DIVERGENCE))
+    {
+        npops = registry.GetDataPack().GetNPartitionsByForceType(force_DIVMIG);
+    }
+    else
+    {
+        npops = registry.GetDataPack().GetNPartitionsByForceType(force_MIG);
+    }
+
+} // UsrPage::UsrPage
+
+//------------------------------------------------------------------------------------
+
+void UsrPage::Show()
+{
+    const ForceSummary& forcesum = registry.GetForceSummary();
+    vector<string>::iterator sit;
+
+    PrintTitle();
+    PrintBlankLine();
+    string line = "Force specific options";
+
+    if (verbosity == NORMAL || verbosity == VERBOSE)
+    {
+        PrintTableTitle(line);
+
+        PrintWrapped("Starting Parameters:");
+        PrintBlankLine();
+
+        const ForceVec forces = forcesum.GetAllForces();
+        ForceVec::const_iterator fit;
+        for(fit = forces.begin(); fit != forces.end(); ++fit)
+        {
+            vector<string> str = Linewrap((*fit)->MakeStartParamReport(),pagewidth);
+
+            for(sit = str.begin(); sit != str.end(); ++sit)
+                outf << sit->c_str() << endl;
+            PrintBlankLine();
+        }
+        PrintBlankLine();
+        PrintBlankLine();
+        // Write a report on priors used
+        if (m_bayesanalysis)
+        {
+            PrintWrapped("Priors for Parameters:");
+            PrintBlankLine();
+            const ForceVec forces = registry.GetForceSummary().GetAllForces();
+            ForceVec::const_iterator fit;
+            for(fit = forces.begin(); fit != forces.end(); ++fit)
+            {
+                PriorReport preport(**fit);
+                preport.WriteTo(outf);
+            }
+            PrintBlankLine();
+            PrintBlankLine();
+        }
+
+    }
+
+    // WARNING v2--Custom force matrix reports go here!!
+
+    const ChainParameters& chainparams = registry.GetChainParameters();
+    line.assign("Search Strategy");
+    PrintTableTitle(line);
+    line = "Type of analysis:  ";
+    if (registry.GetChainParameters().IsBayesian())
+    {
+        line += "Bayesian";
+    }
+    else
+    {
+        line += "Likelihood";
+    }
+    PrintWrapped(line);
+    PrintBlankLine();
+
+    line = "Number of replicates:  ";
+    line += ToString(registry.GetChainParameters().GetNReps());
+    PrintWrapped(line);
+    PrintBlankLine();
+
+    PrintWrapped("Markov Chain Parameters:");
+    m_colwidths.clear();
+    m_colwidths.push_back(18);
+    m_colwidths.push_back(16);
+    m_colwidths.push_back(16);
+    vector<string> strvec;
+    strvec.push_back(" ");
+    strvec.push_back("Initial");
+    strvec.push_back("Final");
+    PrintSimpleRow(m_colwidths,strvec);
+    strvec.clear();
+    strvec.push_back("Number of Chains");
+    strvec.push_back(ToString(chainparams.GetNChains(0)));
+    strvec.push_back(ToString(chainparams.GetNChains(1)));
+    PrintSimpleRow(m_colwidths,strvec);
+    strvec.clear();
+    strvec.push_back("Trees Sampled");
+    strvec.push_back(ToString(chainparams.GetNSamples(0)));
+    strvec.push_back(ToString(chainparams.GetNSamples(1)));
+    PrintSimpleRow(m_colwidths,strvec);
+    strvec.clear();
+    strvec.push_back("Sampling Increment");
+    strvec.push_back(ToString(chainparams.GetInterval(0)));
+    strvec.push_back(ToString(chainparams.GetInterval(1)));
+    PrintSimpleRow(m_colwidths,strvec);
+    strvec.clear();
+    strvec.push_back("Trees Discarded");
+    strvec.push_back(ToString(chainparams.GetNDiscard(0)));
+    strvec.push_back(ToString(chainparams.GetNDiscard(1)));
+    PrintSimpleRow(m_colwidths,strvec);
+    PrintBlankLine();
+
+    // all remaining entries are 2 column entries with this format
+    m_colwidths.clear();
+    m_colwidths.push_back(30);
+    m_colwidths.push_back(-40);
+
+    DoubleVec1d chtemps = chainparams.GetAllTemperatures();
+    long temp, ntemps = chtemps.size();
+    if (ntemps > 1)
+    {
+        line.erase();
+        for (temp = 0; temp < ntemps; ++temp)
+        {
+            line += " "+ToString(chtemps[temp]);
+            if (temp == ntemps-1) continue;
+            if (temp == ntemps-2)
+            {
+                line += " and";
+                continue;
+            }
+            line += ',';
+        }
+        PrintTwoCol(m_colwidths,"Chain Temperatures",line);
+    }
+
+#if 0  // WARNING warning -- No haplotype model interface stuff yet
+    line.assign(iobag.GetHapmodel());
+    if (line != "none")
+    {
+        PrintTwoCol(m_colwidths,"Haplotype rearrangement",line);
+    }
+#endif
+
+    const UserParameters& userparams = registry.GetUserParameters();
+    line = ToString(userparams.GetRandomSeed());
+    PrintTwoCol(m_colwidths,"Random number seed",line);
+    PrintBlankLine();
+    PrintBlankLine();
+
+    if (verbosity != CONCISE && (verbosity != NONE))
+    {
+        PrintTableTitle("File options");
+        line.assign("Read data from file:");
+        PrintTwoCol(m_colwidths,line,userparams.GetDataFileName());
+        if (userparams.GetReadSumFile())
+        {
+            line.assign("Read summary file:");
+            PrintTwoCol(m_colwidths, line, userparams.GetTreeSumInFileName());
+        }
+        if (userparams.GetWriteSumFile())
+        {
+            line.assign("Wrote summary file:");
+            PrintTwoCol(m_colwidths, line, userparams.GetTreeSumOutFileName());
+        }
+        StringVec1d curvefilenames = userparams.GetCurveFileNames();
+        if (curvefilenames.size() > 0)
+        {
+            //LS NOTE: not GetWriteCurveFiles() because if not bayesian, that
+            // parameter is meaningless.  This version is much safer.
+            line.assign("Wrote to curve file(s):");
+            for (size_t i=0; i<curvefilenames.size(); i++)
+            {
+                PrintTwoCol(m_colwidths, line, curvefilenames[i]);
+                line.assign("");
+            }
+        }
+        StringVec1d mapfilenames = userparams.GetMapFileNames();
+        if (mapfilenames.size() > 0)
+        {
+            line.assign("Wrote to mapping file(s):");
+            for (size_t i=0; i<mapfilenames.size(); i++)
+            {
+                PrintTwoCol(m_colwidths, line, mapfilenames[i]);
+                line.assign("");
+            }
+        }
+        StringVec1d profilenames = userparams.GetProfileNames();
+        if (profilenames.size() > 0)
+        {
+            line.assign("Wrote to profile file(s):");
+            for (size_t i=0; i<profilenames.size(); i++)
+            {
+                PrintTwoCol(m_colwidths, line, profilenames[i]);
+                line.assign("");
+            }
+        }
+        set<string> tracefilenames = userparams.GetTraceFileNames();
+        if (tracefilenames.size() > 0)
+        {
+            line.assign("Wrote to Tracer file(s):");
+            for (set<string>::iterator tname = tracefilenames.begin();
+                 tname != tracefilenames.end(); tname++)
+            {
+                PrintTwoCol(m_colwidths, line, *tname);
+                line.assign("");
+            }
+        }
+        set<string> newicktreefilenames = userparams.GetNewickTreeFileNames();
+        if (newicktreefilenames.size() > 0)
+        {
+            line.assign("Wrote to Newick Tree file(s):");
+            for (set<string>::iterator nname = newicktreefilenames.begin();
+                 nname != newicktreefilenames.end(); nname++)
+            {
+                PrintTwoCol(m_colwidths, line, *nname);
+                line.assign("");
+            }
+        }
+        PrintBlankLine();
+        PrintBlankLine();
+
+        PrintTableTitle("Output summary options");
+        //   line = ((userparams.GetEchoData()) ? "Yes" : "No");
+        //   PrintTwoCol(m_colwidths,"Echo data?",line);
+
+        ParamVector paramvec(false);
+#if 0 //WARNING no plotting of likelihood curves yet, 2001/11/12
+        paramlistcondition test = paramvec.CheckCalcPProfiles();
+        line = (test==paramlist_NO ? "No" : "Yes");
+        PrintTwoCol(m_colwidths, "Plot likelihood curves?", line);
+#endif
+        paramlistcondition test = paramvec.CheckCalcProfiles();
+        line = (test==paramlist_NO ? "None" : "Yes");
+        PrintTwoCol(m_colwidths, "Calculate profile likelihoods?", line);
+    }
+
+    PrintPageBreak();
+
+} // UsrPage::Show
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+DataPage::DataPage(ofstream& pf, long pglngth, long pgwdth)
+    : ReportPage(pf,pglngth,pgwdth),
+      table1(1), table2(2)
+{
+    nxparts = registry.GetDataPack().GetNCrossPartitions();
+
+} // DataPage::DataPage
+
+//------------------------------------------------------------------------------------
+
+vector<string> DataPage::SetupColhdr(long whichtable)
+{
+    long totlength=0;
+    string line;
+    vector<string> hdr;
+    vector<long>::iterator lit;
+
+    for(lit = m_colwidths.begin(); lit != m_colwidths.end(); ++lit)
+        totlength += *lit;
+
+    if (whichtable == table1)
+    {
+        line.clear();
+        line.append(m_colwidths[0]+m_colwidths[1],' ');
+        line.append(MakeCentered("Datatype",m_colwidths[2]));
+        line.append(MakeCentered("MuRate",m_colwidths[3]));
+        line += "\n";
+        hdr.push_back(line);
+    }
+
+    if (whichtable == table2)
+    {
+        line = MakeJustified("Population",-1*m_colwidths[0]);
+        line += MakeCentered("Variable",m_colwidths[1]);
+        line += MakeCentered("Relative",m_colwidths[2]);
+        line += MakeCentered("Relative",m_colwidths[2]);
+        line += MakeCentered("Pairwise",m_colwidths[3]);
+        line += MakeCentered("Sample",m_colwidths[4]);
+        line += "\n";
+        hdr.push_back(line);
+        line = "   ";
+        line += MakeJustified("Region",-1*(m_colwidths[0]-3));
+        line += MakeCentered("markers",m_colwidths[1]);
+        line += MakeCentered("Ne",m_colwidths[2]);
+        line += MakeCentered("rec rate",m_colwidths[2]);
+        line += MakeCentered("theta",m_colwidths[3]);
+        line += MakeCentered("size",m_colwidths[4]);
+        line += "\n";
+        hdr.push_back(line);
+    }
+
+    return(hdr);
+
+} // DataPage::SetupColhdr
+
+//------------------------------------------------------------------------------------
+
+StringVec2d DataPage::SetupRowhdr(long whichtable)
+{
+    const DataPack& datap = registry.GetDataPack();
+    long nreg = datap.GetNRegions(), totlength = 0;
+    string line;
+    vector<string> vecline;
+    StringVec2d hdr;
+    vector<long>::iterator lit;
+
+    for(lit = m_colwidths.begin(); lit != m_colwidths.end(); ++lit)
+        totlength += *lit;
+
+    if (whichtable == table1)
+    {
+        long reg;
+        for(reg = 0; reg < nreg; ++reg)
+        {
+            const Region& region = datap.GetRegion(reg);
+            vecline.clear();
+            char spc(' ');
+            string line(region.GetRegionName());
+            // pad only when the name is narrower than the column
+            // (avoids unsigned wraparound in the subtraction)
+            if (static_cast<long>(line.size()) < m_colwidths[0])
+                line.append(m_colwidths[0]-line.size()-1,spc);
+            line += "|";
+            vecline.push_back(line);
+            line.assign(m_colwidths[0]-1,spc);
+            line += "|";
+            vecline.insert(vecline.end(),region.GetNumAllLoci(),line);
+            hdr.push_back(vecline);
+        }
+    }
+
+    if (whichtable == table2)
+    {
+        vector<string> xpartn = datap.GetAllCrossPartitionNames();
+        long xpart, reg;
+        for(xpart = 0; xpart < nxparts; ++xpart)
+        {
+            vecline.clear();
+            line = indexToKey(xpart)+" "+xpartn[xpart];
+            vecline.push_back(line);
+            for(reg = 0; reg < nreg; ++reg)
+            {
+                line = indexToKey(reg)+" "+datap.GetRegion(reg).GetRegionName();
+                vecline.push_back(line);
+            }
+            hdr.push_back(vecline);
+        }
+    }
+
+    return(hdr);
+
+} // DataPage::SetupRowhdr
+
+//------------------------------------------------------------------------------------
+
+StringVec3d DataPage::SetupInnards(long whichtable)
+{
+    string tmp;
+    StringVec1d line;
+    StringVec2d line2;
+    StringVec3d innards;
+    const DataPack& datapack = registry.GetDataPack();
+
+    if (whichtable == table1)           //Multi-locus
+    {
+        LongVec1d locreg = datapack.GetNumAllLociPerRegion();
+        StringVec2d locn = datapack.GetAllLociNames();
+        StringVec1d regnames = datapack.GetAllRegionNames();
+        StringVec2d locd = datapack.GetAllLociDataTypes();
+        StringVec2d locm = datapack.GetAllLociMuRates();
+        long reg, nreg = datapack.GetNRegions();
+        for(reg = 0; reg < nreg; ++reg)
+        {
+            line2.push_back(line);
+            long loc;
+            for(loc = 0; loc < locreg[reg]; ++loc)
+            {
+                line.push_back(locn[reg][loc]+" |");
+                line.push_back(locd[reg][loc]);
+                line.push_back(locm[reg][loc]);
+                line2.push_back(line);
+                line.clear();
+            }
+            innards.push_back(line2);
+            line2.clear();
+        }
+    }
+
+    if (whichtable == table2)
+    {
+        long xpart, reg, nreg = datapack.GetNRegions();
+        DoubleVec1d popsizescalars(datapack.GetRegionalPopSizeScalars());
+        for(xpart = 0; xpart < nxparts; ++xpart)
+        {
+            line2.clear();
+            line2.push_back(line);
+            for(reg = 0; reg < nreg; ++reg)
+            {
+                const Region& regs = datapack.GetRegion(reg);
+                deque<bool> isCalculated;
+                DoubleVec1d qthetas(nxparts);
+                DoubleVec1d muratios = regs.GetMuRatios();
+                ThetaWattersonRegion(datapack,regs,popsizescalars[reg],muratios,
+                                     qthetas,isCalculated);
+                line.push_back(ToString((regs.CalcNVariableMarkers())[xpart]));
+                line.push_back(ToString(popsizescalars[reg]));
+                line.push_back(/* region relative rec-rate goes here */ "1.0");
+                line.push_back(ToString(qthetas[xpart]));
+                line.push_back(ToString(regs.GetNXTips(xpart)));
+                // line.push_back(ToString(regs.GetNIndividuals()));
+                line2.push_back(line);
+                line.clear();
+            }
+            innards.push_back(line2);
+        }
+    }
+
+    return(innards);
+
+} // DataPage::SetupInnards
+
+//------------------------------------------------------------------------------------
+
+void DataPage::Show()
+{
+    const DataPack& datapack = registry.GetDataPack();
+    long nreg = datapack.GetNRegions();
+
+    PrintTitle();
+    PrintBlankLine();
+
+    if (verbosity != CONCISE && (verbosity != NONE))
+    {
+        m_colwidths.clear();
+        m_colwidths.push_back(22);
+        m_colwidths.push_back(38);
+        long npops(0L);
+        if (registry.GetForceSummary().CheckForce(force_DIVERGENCE))
+        {
+            npops = datapack.GetNPartitionsByForceType(force_DIVMIG);
+        }
+        else
+        {
+            npops = datapack.GetNPartitionsByForceType(force_MIG);
+        }
+        if (!npops) ++npops;
+        string line = ToString(npops);
+        PrintTwoCol(m_colwidths,"Number of populations:",line);
+        line = ToString(nreg);
+        PrintTwoCol(m_colwidths,"Number of regions:",line);
+        m_colwidths.clear();
+        m_colwidths.push_back(42);
+        m_colwidths.push_back(18);
+        line = "Total number of samples in all regions";
+        string line1 = ToString(datapack.GetNTips());
+        PrintTwoCol(m_colwidths,line,line1);
+        PrintBlankLine();
+        PrintBlankLine();
+
+        if (datapack.HasMultiLocus())
+        {
+            PrintTableTitle("Linked-segments by region");
+            m_colwidths.clear();
+            m_colwidths.push_back(18);  // for region name + 1
+            m_colwidths.push_back(11);  // for locus name + 1
+            m_colwidths.push_back(10);  // for datatype
+            m_colwidths.push_back(18);  // for murate
+            StringVec1d colhdr = SetupColhdr(table1);
+            StringVec2d rowhdr = SetupRowhdr(table1);
+            StringVec3d innards = SetupInnards(table1);
+            PrintTable(colhdr,rowhdr,m_colwidths,innards);
+            PrintBlankLine();
+            PrintBlankLine();
+        }
+
+        PrintTableTitle("Region summary");
+        m_colwidths.clear();
+        m_colwidths.push_back(15);      // col for names
+        m_colwidths.push_back(10);      // col for # variable markers
+        m_colwidths.push_back(10);      // col for relative population size
+        m_colwidths.push_back(10);      // col for relative recombination rate
+        m_colwidths.push_back(10);      // col for simple theta value
+        m_colwidths.push_back(10);      // col for # of individuals
+        StringVec1d colhdr = SetupColhdr(table2);
+        StringVec2d rowhdr = SetupRowhdr(table2);
+        StringVec3d innards = SetupInnards(table2);
+        PrintTable(colhdr,rowhdr,m_colwidths,innards);
+        PrintBlankLine();
+        PrintBlankLine();
+
+        PrintTableTitle("Summary of Data Model Parameters");
+        PrintBlankLine();
+        PrintLineOf('-');
+        long reg;
+        for(reg = 0; reg < nreg; ++reg)
+        {
+            const Region& region = datapack.GetRegion(reg);
+            StringVec2d dataModelsReports = region.CreateAllDataModelReports();
+            for (StringVec2d::iterator oneReport = dataModelsReports.begin();
+                 oneReport != dataModelsReports.end(); oneReport++)
+            {
+                PrintBlankLine();
+                for (StringVec1d::iterator oneLine = (*oneReport).begin();
+                     oneLine != (*oneReport).end(); oneLine++)
+                {
+                    outf << (*oneLine).c_str() << endl;
+                }
+                PrintLineOf('-');
+            }
+        }
+    }
+
+    if (verbosity == VERBOSE)
+    {
+        PrintBlankLine();
+        outf << "Input Genetic Data" << endl;
+        long reg, nregs = datapack.GetNRegions();
+        for(reg = 0; reg < nregs; ++reg)
+        {
+            PrintLineOf('-');
+            const Region& region = datapack.GetRegion(reg);
+            outf << "   For the " << region.GetRegionName() << " region" << endl;
+            PrintLineOf('-');
+            StringVec2d dataecho(region.GetMarkerDataWithLabels());
+            unsigned long loc, nloc(region.GetNumAllLoci());
+            assert(nloc == dataecho.size());
+            for(loc = 0; loc < nloc; ++loc)
+            {
+                StringVec1d lines(LinewrapCopy(dataecho[loc], 10));
+                StringVec1d::const_iterator line;
+                for(line = lines.begin(); line != lines.end(); ++line)
+                    outf << *line << endl;
+                PrintBlankLine();
+            }
+        }
+    }
+
+    PrintPageBreak();
+
+} // DataPage::Show
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+RunPage::RunPage(ofstream& pf, long pglngth, long pgwdth)
+    : ReportPage(pf,pglngth,pgwdth)
+{
+
+} // RunPage::RunPage
+
+//------------------------------------------------------------------------------------
+
+void RunPage::Show()
+{
+    PrintTitle();
+    PrintBlankLine();
+    string msg = "\"Accepted\" is the observed rate at which any change to the proposal trees ";
+    if ((registry.GetChainParameters().GetAllTemperatures()).size() > 1)
+    {
+        msg += "in the coldest chain ";
+    }
+    msg += "was accepted.";
+    PrintWrapped(msg);
+
+    const RunReport& runreport = registry.GetRunReport();
+    StringVec1d messages = runreport.GetMessages();
+    KeyToLongMap keyFrequencies = runreport.GetKeyFrequencies();
+    LongToKeyMap keyIndices = runreport.GetKeyIndices();
+
+    pagewidth = 80;
+    for (long strindex = 0; strindex<static_cast<long>(messages.size()); strindex++)
+    {
+        PrintWrapped(messages[strindex]);
+        LongToKeyMapiter keyindex = keyIndices.find(strindex);
+        if (keyindex != keyIndices.end())
+        {
+            //This is a 'ReportOnce' message; tell the user how often it was triggered.
+            OnceKey key = keyindex->second;
+            long freq = keyFrequencies.find(key)->second;
+            string msg = "[Note:  the conditions that triggered the above message happened a total of "
+                + ToString(freq) + " time(s) in this run of LAMARC.]";
+            PrintWrapped(msg);
+            PrintBlankLine();
+        }
+    }
+
+    PrintPageBreak();
+
+} // RunPage::Show
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+LikePage::LikePage(ofstream& pf, long pglngth, long pgwdth)
+    : ReportPage(pf,pglngth,pgwdth)
+{
+
+} // LikePage::LikePage
+
+//------------------------------------------------------------------------------------
+
+vector<string> LikePage::MakeInnards(const DoubleVec2d& likes)
+{
+    typedef pair<double, string> sympair;
+
+    long x, y;
+    double maxlike = likes[0][0];
+
+    // find highest value in table
+    for (x = 0; x < (long) likes.size(); ++x)
+    {
+        for (y = 0; y < (long) likes[x].size(); ++y)
+        {
+            if (likes[x][y] > maxlike) maxlike = likes[x][y];
+        }
+    }
+
+    // establish critical values
+    long i;
+    vector<sympair> critvalues;
+
+    critvalues.push_back(sympair(maxlike, string("X")));
+    critvalues.push_back(sympair(maxlike - DF, string("*")));
+    critvalues.push_back(sympair(maxlike - 2*DF, string("+")));
+    critvalues.push_back(sympair(maxlike - 3*DF, string("-")));
+    critvalues.push_back(sympair(NEG_MAX, string(" ")));
+
+    // fill up table with symbols
+
+    StringVec1d innards;
+    string line;
+
+    for (x = 0; x < (long) likes.size(); ++x)
+    {
+        for (y = 0; y < (long) likes[x].size(); ++y)
+        {
+            for (i = 0; i < (long) critvalues.size(); ++i)
+            {
+                if (likes[x][y] >= critvalues[i].first)
+                {
+                    line += critvalues[i].second;
+                    break;
+                }
+            }
+        }
+        innards.push_back(line);
+        line.erase();
+    }
+
+    return(innards);
+
+} // LikePage::MakeInnards
+
+//------------------------------------------------------------------------------------
+
+StringVec1d LikePage::MakeLikePlot(const StringVec1d& innards, const
+                                   Parameter& paramX, const Parameter& paramY, long breaks)
+{
+    StringVec1d plot;
+
+#if 0
+    // not till V2
+    string line;
+    long v, x, y;
+    long linelength = 4 + paramX.plotpoints;
+
+    // make figure legplotend
+    // header lines
+    line = "          X Axis    Y Axis";
+    plot.push_back(Pretty(line, linelength));
+    line = Pretty("    Tick",10);
+    line += Pretty(paramX.name, 10);
+    line += Pretty(paramY.name, 10);
+    plot.push_back(Pretty(line, linelength));
+
+    line = "          ";
+    if (paramX.style == log_ten) line += "(log10)   ";
+    else line += "(linear)  ";
+
+    if (paramY.style == log_ten) line += "(log10)   ";
+    else line += "(linear)  ";
+    plot.push_back(Pretty(line, linelength));
+
+    // value lines
+    double val;
+    long xvals = paramX.plotpoints/breaks;
+    long yvals = paramY.plotpoints/breaks;
+    long maxvals;
+    if (xvals > yvals) maxvals = xvals;
+    else maxvals = yvals;
+
+    for (v = 0; v < maxvals; ++v)
+    {
+        line = Pretty(v+1L);
+
+        if (v <= xvals)                 // print x values
+        {
+            val = paramX.plotstart +
+                (double)v * (paramX.plotend - paramX.plotstart) / (double) paramX.plotpoints;
+            if (paramX.style == log_ten) val = pow(10.0, val);
+            line += "  " + Pretty(val);
+        }
+        else
+            line += "          ";
+
+        if (v <= yvals)                 // print y values
+        {
+            val = paramY.plotstart +
+                (double)v * (paramY.plotend - paramY.plotstart) / (double) paramY.plotpoints;
+            if (paramY.style == log_ten) val = pow(10.0, val);
+            line += "  " + Pretty(val);
+        }
+        else
+            line += "          ";
+
+        plot.push_back(Pretty(line, linelength));
+    }
+
+    line.erase();
+
+    // make actual figure
+    plot.push_back(MakeBorder(paramX.plotpoints));
+    for (x = 0; x < (long) innards.size(); ++x)
+    {
+        if (Divides(x+1, breaks)) line += indexToKey(breaks/paramY.plotpoints - x) + " +";
+        else line += "  |";
+        for (y = 0; y < (long) innards[x].size(); ++y)
+        {
+            line += innards[x][y];
+        }
+        if (Divides(x+1, breaks)) line += "+";
+        else line += "|";
+        plot.push_back(line);
+        line.erase();
+    }
+    plot.push_back(MakeBorder(paramX.plotpoints));
+    line.erase();
+    for (x = 0; x < paramX.plotpoints; ++x)
+    {
+        if (Divides(x+1, breaks)) line += indexToKey(x);
+        else line += " ";
+    }
+    plot.push_back(line);
+#endif
+    return(plot);
+
+}  // LikePage::MakeLikePlot
+
+//------------------------------------------------------------------------------------
+
+bool LikePage::Divides(long x, long y)
+{
+    return (x/y * y == x);
+} // LikePage::Divides
+
+//------------------------------------------------------------------------------------
+
+string LikePage::MakeBorder(long points, long breaks)
+{
+
+    string line;
+    long i;
+    line += "+";                            // left border
+    for (i = 0; i < points; ++i)
+    {
+        if (Divides(i+1, breaks)) line += "+";
+        else line += "-";
+    }
+
+    line += "+";                            // right border
+    return(line);
+} // LikePage::MakeBorder
+
+//------------------------------------------------------------------------------------
+
+DoubleVec2d LikePage::AddGraphs(const DoubleVec2d& a, const
+                                DoubleVec2d& b)
+{
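+    // Combines two log-likelihood surfaces cell by cell: each entry becomes
+    // log(exp(a[i][j] - max) + exp(b[i][j] - max)), where max is the largest
+    // value in either graph; rescaling by max keeps exp() from overflowing,
+    // and cells whose rescaled terms fail to exceed EXPMIN are clamped to EXPMIN.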
+    // Find highest value in either graph
+    assert(a.size() == b.size());
+    assert(a[0].size() == b[0].size()); // can't add graphs of different sizes
+
+    double maxlike = a[0][0];
+    long i, j;
+    for (i = 0; i < (long) a.size(); ++i)
+    {
+        for (j = 0; j < (long) a[0].size(); ++j)
+        {
+            if (a[i][j] > maxlike) maxlike = a[i][j];
+            if (b[i][j] > maxlike) maxlike = b[i][j];
+        }
+    }
+
+    DoubleVec2d newgraph;
+    DoubleVec1d newline;
+    double aval, bval, newval = 0.0;
+
+    // Normalize all values to the maximum
+    for (i = 0; i < (long) a.size(); ++i)
+    {
+        for (j = 0; j < (long) a[0].size(); ++j)
+        {
+            aval = a[i][j] - maxlike;
+            bval = b[i][j] - maxlike;
+            if (aval > EXPMIN) newval += exp(aval);
+            if (bval > EXPMIN) newval += exp(bval);
+            if (newval != 0) newline.push_back(log(newval));
+            else newline.push_back(EXPMIN);
+            newval = 0.0;
+        }
+        newgraph.push_back(newline);
+        newline.clear();
+    }
+    return(newgraph);
+
+} // LikePage::AddGraphs
+
+//------------------------------------------------------------------------------------
+
+bool LikePage::EmptyPlot(PlotStruct& plot)
+{
+
+    return(plot.plane.empty());
+
+} // LikePage::EmptyPlot
+
+//------------------------------------------------------------------------------------
+
+void LikePage::Show()
+{
+
+#if 0    // DEBUG !!  not till V3
+    PrintTitle();
+    PrintBlankLine();
+
+    ParamVector parameters(false);
+
+    // DEBUG vector<PlotStruct> plots = registry.GetPostoutPack().GetPriorgraphs();
+
+    ParamVector::iterator pit;
+
+    for(pit = parameters.begin(); pit != parameters.end(); ++pit)
+    {
+        if (!(pit->IsValidParameter())) continue;
+
+        DoubleVec2d data = pit->plane;
+        Parameter& xaxis = *(pit->xaxis);
+        Parameter& yaxis = *(pit->yaxis);
+        bool twoplots = false;
+        StringVec1d innards = MakeInnards(data);
+        StringVec1d plot = MakeLikePlot(innards, xaxis, yaxis);
+        StringVec1d plot2;
+
+        ++pit;
+        if (pit != plots.end())
+        {
+            data = pit->plane;
+            innards = MakeInnards(data);
+            plot2 = MakeLikePlot(innards, xaxis, yaxis);
+            twoplots = true;
+        }
+
+        if (twoplots)
+        {
+            long line;
+            for (line = 0; line < (long)plot.size(); ++line)
+                cout << plot[line] << "  " << plot2[line] << endl;
+        }
+        else
+        {
+            long line;
+            for (line = 0; line < (long)plot.size(); ++line)
+                cout << plot[line] << endl;
+        }
+    }
+
+    PrintPageBreak();
+
+#endif // 0
+
+} // LikePage::Show
+
+//____________________________________________________________________________________
diff --git a/src/report/reportpage.h b/src/report/reportpage.h
new file mode 100644
index 0000000..c888a44
--- /dev/null
+++ b/src/report/reportpage.h
@@ -0,0 +1,306 @@
+// $Id: reportpage.h,v 1.38 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/******************************************************************
+ The base class ReportPage represents a single page of the output
+ report; subclasses produce specific pages.
+
+ Written by Jon Yamato
+*******************************************************************/
+
+#ifndef REPORTPAGE_H
+#define REPORTPAGE_H
+
+#include <fstream>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "types.h"
+
+using std::map;
+using std::ofstream;
+using std::string;
+class PlotStruct;
+class Parameter;
+class RegionGammaInfo;
+
+const long DEFLENGTH = 60;
+const char DEFDLM='=';
+
+typedef std::map<string, string> Strmap;
+typedef std::map<string, string>::iterator Strmapiter;
+typedef std::vector< std::pair < string, string> > StrPairVec;
+typedef std::vector< std::pair < string, string> >::iterator StrPairVecIter;
+
+//------------------------------------------------------------------------------------
+
+class ReportPage
+{
+
+  private:
+    ReportPage();  // deliberately undefined
+
+  protected:
+    ofstream &outf;
+    vector<string> pagetitle;
+    char titledlm;
+    unsigned long pagelength, pagewidth, current_length;
+    verbosity_type verbosity;
+    bool m_bayesanalysis;
+    string m_MLE;
+
+    string MakeLineOf(const char ch, long length=DEFLINELENGTH,
+                      long indent=DEFINDENT);
+    string MakePageBreak();
+    string MakeBlankLine();
+    vector<string> MakeTitle();
+    virtual vector<string> MakeTableTitle(const string &title);
+    virtual vector<string> MakeTableTitle(const char *title);
+    StringVec1d GetOneRow(const StringVec2d table, const long rownum);
+    virtual string MakeSimpleRow(vector<long>& colwdth, vector<string>& contents);
+    virtual string MakeTwoCol(const vector<long>& colwdth, const string& col1,
+                              const string& col2);
+    virtual string MakeTwoCol(const vector<long>& colwdth, const string& col1,
+                              const char* col2);
+    virtual string MakeTwoCol(const vector<long>& colwdth, const char* col1,
+                              const string& col2);
+    virtual string MakeTwoCol(const vector<long>& colwdth, const char* col1,
+                              const char* col2);
+
+    virtual string MakeSectionBreak(const char dlm=DEFDLM,
+                                    long width=DEFLINELENGTH, long indent=DEFINDENT);
+    virtual vector<string> MakeTable(vector<string> &colhdr, StringVec2d &rowhdr,
+                                     vector<long>& colwdth, StringVec3d &innards);
+
+    void PrintTitle();
+    void PrintLineOf(const char ch, long length=DEFLINELENGTH,
+                     long indent=DEFINDENT);
+    void PrintPageBreak();
+    void PrintBlankLine();
+
+    virtual void PrintSimpleRow(vector<long> &colwdth, vector<string> &contents);
+    virtual void PrintTwoCol(const vector<long> &colwdth, const string &col1,
+                             const string &col2);
+    virtual void PrintTwoCol(const vector<long> &colwdth, const char *col1,
+                             const string &col2);
+    virtual void PrintTwoCol(const vector<long> &colwdth, const string &col1,
+                             const char *col2);
+    virtual void PrintTwoCol(const vector<long> &colwdth, const char *col1,
+                             const char *col2);
+    virtual void PrintCenteredString(const string &str, long width=DEFLINELENGTH,
+                                     long indent=DEFINDENT, bool trunc = true);
+    virtual void PrintWrapped(const string &line);
+    virtual void PrintWrapped(const StringVec1d& line);
+    virtual void PrintCenteredString(const char *str, long width=DEFLINELENGTH,
+                                     long indent=DEFINDENT, bool trunc = true);
+    virtual void PrintTableTitle(const string &title);
+    virtual void PrintTableTitle(const char *title);
+    virtual void PrintSectionBreak(const char dlm=DEFDLM,
+                                   long width=DEFLINELENGTH, long indent=DEFINDENT);
+    virtual void PrintTable(vector<string> &colhdr, StringVec2d &rowhdr,
+                            vector<long> &colwdth,  StringVec3d &innards);
+
+    // helper functions for the MlePage and ProfPage
+    StringVec2d SortTable(StringVec2d intable, unsigned long sortcol,
+                          unsigned long headerrows);
+    bool MakeItShorter(string& title, const unsigned long width);
+
+  public:
+    ReportPage(ofstream& pf, long pglngth=DEFLENGTH, long pgwdth=DEFLINELENGTH);
+    //  We accept the compiler-generated defaults for these, here and in the
+    //  other ReportPage classes.
+    //ReportPage(const ReportPage &src);
+    //virtual ReportPage &operator=(const ReportPage &src);
+    virtual ~ReportPage() {};
+
+    static  double GetCentile(const vector<centilepair>& centiles,
+                              double pcent);
+    static  double GetReverseCentile(const vector<centilepair>& centiles,
+                                     double pcent);
+    virtual void Setup(vector<string> &title, long pglength=DEFLENGTH,
+                       long pgwidth=DEFLINELENGTH, char tdlm=DEFDLM);
+    virtual void Setup(string &title, long pglength=DEFLENGTH,
+                       long pgwidth=DEFLINELENGTH, char tdlm=DEFDLM);
+    virtual void Setup(const char *title, long pglength=DEFLENGTH,
+                       long pgwidth=DEFLINELENGTH, char tdlm=DEFDLM);
+    virtual void Show() = 0;
+
+    static void TrimString(string& title);
+};
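+
+// A concrete page type just supplies a constructor and an override of the pure
+// virtual Show(); an illustrative sketch (MyPage is hypothetical, not part of
+// LAMARC):
+//
+//     class MyPage : public ReportPage
+//     {
+//       public:
+//         MyPage(ofstream& pf) : ReportPage(pf) {};
+//         virtual ~MyPage() {};
+//         virtual void Show() { PrintTitle(); PrintPageBreak(); };
+//     };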
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class MlePage : public ReportPage
+{
+  protected:
+    const long hdrindent;
+    long npops;
+    unsigned long colwidth;
+    long label_colwidth;
+    StringVec3d allforcetable;
+
+    //  virtual void CopyMembers(const MlePage &src);
+    void WritePriors();
+    void WriteBody();
+    StringVec1d DoPercentiles(long region,
+                              vector<Parameter>::const_iterator param,
+                              const DoubleVec1d& modifiers,
+                              Strmap& namemap);
+    StringVec1d MakeLabels(const StringVec1d subtypes, const DoubleVec1d modifiers, const bool usepercentile);
+    StringVec1d MakeColumn(const string& forcename, const string& paramname,
+                           const string& regionname, const double MLE,
+                           const StringVec1d& percentiles, const bool usepercentiles);
+    void DoRegionGammaInfo(StringVec3d& allforcetable, Strmap& namemap,
+                           const RegionGammaInfo *pRegionGammaInfo);
+    void AddForceToOutput(StringVec3d& allforcetable, StringVec2d forcetable);
+    bool DoColumnsMatch(const StringVec1d col1, const StringVec1d col2);
+    void WrapOutput(StringVec3d& allforcetable);
+    void WriteOutput(StringVec3d& allforcetable, Strmap namemap);
+    void NixRedundancy(StringVec1d& row, StrPairVec& legend, Strmap namemap,
+                       vector<bool>& breaks);
+    void WriteLine(const StringVec1d line);
+
+    // simulation-specific code--to print something easily machine-parsable
+    void WriteSimMles();
+
+  public:
+    MlePage(ofstream& pf, long pglngth=DEFLENGTH, long pgwdth=DEFLINELENGTH);
+    MlePage(const MlePage &src); //Undefined
+    virtual ~MlePage() {};
+    virtual void Show();
+
+};
+
+class MapPage : public ReportPage
+{
+  public:
+    MapPage(ofstream& pf, long pglngth=DEFLENGTH, long pgwdth=DEFLINELENGTH);
+    virtual ~MapPage() {};
+
+    virtual void Show();
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class ProfPage : public ReportPage
+{
+  protected:
+    unsigned long colwidth;
+
+    //  virtual void CopyAllMembers(const ProfPage& src);
+    virtual void DisplayParameters(const ParamVector& params, long region);
+    StringVec1d MakeColumn(const string& name, const string& mle,
+                           const StringVec1d mod_strings, const bool mixedmult);
+    StringVec1d MakeGrowFixedColumn();
+    StringVec1d MakeLogisticSelectionFixedColumn();
+    DoubleVec1d MakeLogisticSelectionFixedModifiers(double mle);
+    StringVec1d MakeModColFrom(const vector<centilepair>& numbers,
+                               const DoubleVec1d modifiers,
+                               vector<bool> badLnLs,
+                               bool bayesfixed = false);
+    StringVec1d MakeLnLColFrom(const vector<centilepair>& numbers,
+                               const DoubleVec1d modifiers,
+                               vector<bool>& badLnLs);
+    StringVec3d WrapTable(StringVec2d& forcetable);
+    void PrintTable(StringVec3d& wrappedtable);
+    void PrintProfileLine(StringVec1d& line);
+
+  public:
+    ProfPage(ofstream& pf, long pglngth=DEFLENGTH, long pgwdth=DEFLINELENGTH);
+    static void TradeValsForPercs(DoubleVec1d& modifiers, vector<centilepair> CIvec);
+    static DoubleVec1d MakeGrowFixedModifiers(double mle);
+
+    virtual void Show();
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class UsrPage : public ReportPage
+{
+  protected:
+    long npops;
+    vector<long> m_colwidths;
+
+    //virtual void CopyMembers(const UsrPage &src);
+
+  public:
+    UsrPage(ofstream& pf, long pglngth=DEFLENGTH, long pgwdth=DEFLINELENGTH);
+    virtual ~UsrPage() {};
+
+    virtual void Show();
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class DataPage : public ReportPage
+{
+
+  private:
+    const long table1, table2;
+    long nxparts;
+    vector<long> m_colwidths;
+
+  protected:
+    //virtual void CopyMembers(const DataPage &src);
+    virtual vector<string> SetupColhdr(long whichtable);
+    virtual StringVec2d SetupRowhdr(long whichtable);
+    virtual StringVec3d SetupInnards(long whichtable);
+
+  public:
+    DataPage(ofstream& pf, long pglngth=DEFLENGTH, long pgwdth=DEFLINELENGTH);
+    virtual ~DataPage() {};
+
+    virtual void Show();
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class RunPage : public ReportPage
+{
+
+  private:
+    vector<long> m_colwidths;
+
+  public:
+    RunPage(ofstream& pf, long pglngth=DEFLENGTH, long pgwdth=DEFLINELENGTH);
+    virtual ~RunPage() {};
+
+    virtual void Show();
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class LikePage : public ReportPage
+{
+  private:
+    bool Divides(long x, long y);
+    bool EmptyPlot(PlotStruct& plot);
+
+  public:
+    LikePage(ofstream& pf, long pglngth=DEFLENGTH, long pgwdth=DEFLINELENGTH);
+    virtual ~LikePage() {};
+
+    string MakeBorder(long points, long breaks = 4);
+    vector<string> MakeInnards(const DoubleVec2d& likes);
+    vector<string> MakeLikePlot(const StringVec1d& innards,
+                                const Parameter& paramX, const Parameter& paramY, long breaks = 4);
+    DoubleVec2d AddGraphs(const DoubleVec2d& a, const DoubleVec2d& b);
+
+    virtual void Show();
+};
+
+#endif // REPORTPAGE_H
+
+//____________________________________________________________________________________
diff --git a/src/report/runreport.cpp b/src/report/runreport.cpp
new file mode 100644
index 0000000..1923f0b
--- /dev/null
+++ b/src/report/runreport.cpp
@@ -0,0 +1,727 @@
+// $Id: runreport.cpp,v 1.60 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>
+
+#include "runreport.h"
+#include "chainout.h"
+#include "chainpack.h"
+#include "force.h"
+#include "forcesummary.h"
+#include "stringx.h"
+#include "types.h"
+#include "registry.h"  // for GetDataPack() in MakeReport()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+string UNKNOWN_STRING = "<unknown>";
+
+//------------------------------------------------------------------------------------
+
+RunReport::RunReport(const ForceSummary& fs, verbosity_type progress)
+    : m_level(progress),
+      m_filelevel(registry.GetUserParameters().GetVerbosity()),
+      m_forcesum(fs)
+{
+    m_profiletotal = static_cast<time_t>(0);
+}
+
+/****************************************************************
+   PrognoseRegion predicts the end of the chain process, given
+   the number of steps so far and the total required number of
+   steps.  It is meant to be called by the chain manager.
+*****************************************************************/
+void RunReport::PrognoseRegion(const ChainPack& chpack, long int region,
+                               long int steps, long int total)
+{
+
+    if (m_level == CONCISE || m_level == NONE) return;
+    if (steps == total) return; //Let's not predict the current time.
+
+    string timestr;
+
+#ifdef NOTIME_FUNC
+    // system does not provide a clock so no prognosis can be made
+    timestr = "unknown";
+#else
+    time_t tnow = GetTime();
+    time_t tstart = chpack.GetStartTime(region);   // start of the whole thing
+    if (steps == 0) timestr = "unknown";
+    else
+    {
+        double proportion = static_cast<double>(total) / static_cast<double>(steps);
+        time_t predict = static_cast<time_t>(((tnow-tstart) * proportion) + tstart);
+        timestr = PrintTime(predict,string("%c"));
+    }
+#endif
+
+    string msg = "Predicted end of chains for this region:  ";
+    msg += timestr + "\n";
+    ReportNormal(msg);
+
+} // PrognoseRegion
+
+//------------------------------------------------------------------------------------
+
+// These functions are used to predict the end of the profile
+// process and print a report on progress so far.  When first
+// beginning a profile, call RecordProfileStart() to set the time;
+// then call PrognoseProfiles to report on progress.
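+//
+// A minimal usage sketch (illustrative only; the real call sites live in the
+// profiling code, and DoOneProfile is a hypothetical stand-in):
+//
+//     runreport.RecordProfileStart();
+//     for (long prof = 0; prof < totprofs; ++prof)
+//     {
+//         DoOneProfile(prof);
+//         runreport.PrognoseProfiles(prof, totprofs);
+//     }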
+
+void RunReport::RecordProfileStart()
+{
+    // Don't even try if there is no system clock.
+#ifndef NOTIME_FUNC
+    m_profilestart = GetTime();
+#endif
+    ReportUrgent("Beginning profiling, please be patient");
+} // RecordProfileStart
+
+//------------------------------------------------------------------------------------
+
+void RunReport::PrognoseProfiles(long int thisprof, long int totprofs)
+{
+    if (m_level == NONE || m_level == CONCISE) return;
+    long int profile = thisprof + 1;   // elsewhere they are counted starting from 0
+    if (profile==totprofs)
+    {
+        m_profiletotal += (GetTime() - m_profilestart);
+        string msg = "Finished profile "+ ToString(profile)
+            + " of " + ToString(totprofs) + ".\n";
+        ReportNormal(msg);
+        return;
+    }
+    string timestr;
+
+#ifdef NOTIME_FUNC
+    timestr = "unknown";
+#else
+    time_t tnow = GetTime();
+    double proportion = static_cast<double>(totprofs) / static_cast<double>(profile);
+    time_t predict = static_cast<time_t>(((tnow - m_profilestart) * proportion) + m_profilestart);
+    timestr = PrintTime(predict, string("%c"));
+#endif
+    string msg = "Finished profile " + ToString(profile) + " of " + ToString(totprofs)
+        + ".  Predicted end of this set of profiles:  " + timestr + "\n";
+    ReportNormal(msg);
+
+} // PrognoseProfiles
+
+void RunReport::PrognoseAll(const ChainPack& chpack, long int thisregion, long int totalregions)
+{
+    if (m_level == CONCISE || m_level == NONE) return;
+    thisregion++;
+    if ((thisregion == totalregions) && (totalregions == 1)) return;
+    //The program is already done, since there's no 'overall' region to profile.
+
+    string timestr;
+
+#ifdef NOTIME_FUNC
+    // system does not provide a clock so no prognosis can be made
+    timestr = "unknown";
+#else
+    time_t tnow = GetTime();
+    time_t tstart = chpack.GetStartTime();   // start of the whole thing
+    double proportion = static_cast<double>(totalregions) / static_cast<double>(thisregion);
+    time_t predict = static_cast<time_t>(((tnow-tstart) * proportion) + tstart);
+    if (totalregions > 1)
+    {
+        //We will probably spend as much time profiling over all regions as
+        // we spend profiling all the regions individually.
+        predict += static_cast<time_t>(m_profiletotal * proportion);
+    }
+    timestr = PrintTime(predict,string("%c"));
+#endif
+
+    string msg = "Predicted end of this LAMARC run: ";
+    msg += timestr;
+    ReportNormal(msg);
+
+} // PrognoseAll
+
+/**************************************************************
+   This function prints the "moving bar" which shows progress
+   of the run.  It is meant to be called by the chain.
+   You must call SetBarParameters before calling this.  Also,
+   for PrintBar to work properly it must be called for the
+   first time with steps = 0 and for the last time with steps = m_totalsteps.
+**************************************************************/
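+//
+// Illustrative call pattern (a sketch only; the chain itself drives these
+// calls, and DoOneStep is a hypothetical stand-in):
+//
+//     runreport.SetBarParameters(totalsteps, burnin, chainno, chaintype);
+//     runreport.PrintBar(0);                      // draw the empty bar
+//     for (long step = 1; step <= totalsteps; ++step)
+//     {
+//         DoOneStep();
+//         runreport.PrintBar(step);               // advance the bar
+//     }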
+
+void RunReport::PrintBar(long int steps)
+{
+
+#ifdef NOPROGRESSBAR
+    if (steps == 0)
+    {
+        string msg = m_chaintype + " Chain " + ToString(m_chainno+1) + " of "
+            + ToString(registry.GetChainParameters().GetNChains(m_chaintype_num)) + ":";
+        ReportNormal(msg, false);
+    }
+    return;
+#else
+    if (m_level == CONCISE || m_level == NONE) return;
+
+    long int i;
+    static long int previous = 0;
+    long int width = 12;
+
+    if (steps == 0)
+    { // Initial display of bar
+        long int chaintot = registry.GetChainParameters().GetNChains(m_chaintype_num);
+        string msg = m_chaintype + " Chain " + ToString(m_chainno+1) + " of "
+            + ToString(chaintot) + ":";
+        SaveOutput(msg, false);
+        cout << endl << PrintTime(GetTime()) << "  ";
+        cout << MakeJustified(m_chaintype + string(" chain "),-14)
+             << Pretty(m_chainno + 1L, 3)
+             << " of " << Pretty(chaintot, 3) << ":  ";
+        cout << "[|                   ] ";
+        cout << Pretty(steps + 1L, width);
+        cout.flush();
+        previous = 0;
+        return;
+    }
+
+    long int counterDisplay = steps - m_burnin;
+
+    if(counterDisplay % m_counter_display_increment == 0)
+    {
+        long int percent = static_cast<long int>(100.0 * static_cast<double>(steps)
+                                                 / static_cast<double>(m_totalsteps));
+
+        if (percent > previous && ((percent/5) * 5) - percent < EPSILON)
+        {
+            for (i = 0; i < width+22; i++) cout << "\b";   // back up
+            for (i = 0; i < percent/5; i++)
+            {
+                cout << "=";
+            }
+            if (percent != 100) cout << "|";
+            for (i = 0; i < 19-(percent/5); i++) cout << " ";
+            cout << "] ";
+        }
+        else
+        {
+            for (i = 0; i < width; ++i) cout << "\b";
+        }
+        cout << Pretty(counterDisplay, width);
+        if (steps == m_totalsteps) cout << " steps " << endl;
+        cout.flush();
+        previous = percent;
+    }
+#endif
+} // PrintBar
+
+//------------------------------------------------------------------------------------
+
+void RunReport::AddForceToTable(const Force* force, StringVec1d& table,
+                                const ChainOut& chout) const
+{
+    unsigned long int i;
+
+    StringVec1d temptable = force->MakeChainParamReport(chout);
+
+    // add the header
+    table[0] += MakeCentered(force->GetShortparamname(), temptable[0].size());
+
+    // Code added to deal with cases where the tables are not the same
+    // size, as with divergence
+
+    if (temptable.size() + 1 > table.size())
+    {
+        // put in blank entries
+        assert(table.size() > 0);  // no entries at all??
+        string blankentry(table[1].size(), ' ');
+        for (i = table.size(); i < temptable.size() + 1; ++i)
+        {
+            table.push_back(blankentry);
+        }
+    }
+    // add the parameter values
+    for (i = 1; i < table.size() && i < temptable.size() + 1; ++i)
+    {
+        table[i] += "  " + temptable[i-1];
+    }
+
+} // AddForceToTable
+
+//------------------------------------------------------------------------------------
+
+void RunReport::MakeReport(const ChainOut& chout) const
+{
+    // Prepare header lines
+    unsigned long int i;
+
+    // Prepare tables of chain MLEs
+    m_scalars.erase();
+    m_tables.clear();
+
+    const vector<Force *>& forces = m_forcesum.GetAllForces();
+
+    // initialize the cross partition tables with naming info
+    // the partition tables will be handled in the loop below
+    const DataPack& datapack = registry.GetDataPack();
+    StringVec1d xpartnames = datapack.GetAllCrossPartitionNames();
+
+    StringVec1d tables2d;
+    StringVec1d::iterator name;
+    if (xpartnames.size() > 1)
+    {
+        // all cross-partitions will use the Class name "Class".
+        tables2d.push_back(MakeJustified("Class",-22));
+        for (name = xpartnames.begin(); name != xpartnames.end(); ++name)
+            tables2d.push_back(MakeJustified(*name, -20));
+    }
+
+    StringVec2d tables3d;
+
+    for (i = 0; i < forces.size(); ++i) // fill up all tables
+    {
+        if (forces[i]->ReportDimensionality() == 1) // scalar
+        {
+            m_scalars += MakeJustified(forces[i]->GetShortparamname(), -10) + "  ";
+            m_scalars += forces[i]->MakeChainParamReport(chout)[0] + "  ";
+        }
+        else
+        {                               // tabular
+            // crosswise tables
+            if (forces[i]->ReportDimensionality() == 2)
+            {
+                AddForceToTable(forces[i],tables2d,chout);
+            }
+            else
+            {
+                // partition tables
+                assert (forces[i]->ReportDimensionality() == 3);
+                StringVec1d table3d;
+                StringVec1d partnames = datapack.
+                    GetAllPartitionNames(forces[i]->GetTag());
+                table3d.push_back(MakeJustified(forces[i]->GetClassName(),-23));
+                for (name = partnames.begin(); name != partnames.end(); ++name)
+                    table3d.push_back(MakeJustified(*name, -22));
+                AddForceToTable(forces[i],table3d,chout);
+                tables3d.push_back(table3d);
+            }
+        }
+    }
+
+    // put all the tables together into the big table
+    m_tables.push_back(string("")); // add an empty line between m_scalars
+    // and tables
+    m_tables.insert(m_tables.end(), tables2d.begin(), tables2d.end());
+    m_tables.push_back(string("")); // add an empty line between tables
+    StringVec2d::iterator tab3d;
+    for(tab3d = tables3d.begin(); tab3d != tables3d.end(); ++tab3d)
+    {
+        m_tables.insert(m_tables.end(), tab3d->begin(), tab3d->end());
+        m_tables.push_back(string("")); // add an empty line between tables
+    }
+
+    // if heating, make swapping report
+    long int numtemps = chout.GetNumtemps();
+    if (numtemps > 1)
+    {
+        DoubleVec1d swaprates = chout.GetSwaprates();
+        DoubleVec1d temperatures = chout.GetTemperatures();
+        assert(swaprates.size() == static_cast<unsigned long int>(numtemps));
+        assert(temperatures.size() == static_cast<unsigned long int>(numtemps));
+        string tableline = "Temperature: ";
+        long int temp;
+        for (temp = 0; temp < numtemps; ++temp)
+        {
+            tableline += Pretty(temperatures[temp],7) + "  ";
+        }
+        m_tables.push_back(tableline);
+        tableline = "  Swap rate: ";
+        for (temp = 0; temp < numtemps-1; ++temp)
+        {
+            if (swaprates[temp] >= 0.0)
+            {
+                tableline += Pretty(100.0 * swaprates[temp], 7) + "  ";
+            }
+            else
+            {
+                tableline += "-------  ";
+            }
+        }
+        tableline += "-------";
+        m_tables.push_back(tableline);
+    }
+} // MakeReport
+
+//------------------------------------------------------------------------------------
+
+vector<string> RunReport::FormatReport(const ChainOut& chout, bool current,
+                                       long int linelength) const
+{
+    bool bayesanalysis = registry.GetChainParameters().IsBayesian();
+    string skiptimestamp;
+    if (current) skiptimestamp = "  ";
+    else skiptimestamp = "";
+
+    vector<string> report;
+    vector<string> temptable;
+
+    // header
+    //If this is a summary over regions or replicates, we do not want the
+    // header information at all.  Everything is set to 'FLAGLONG' or 'FLAGDOUBLE',
+    // so we check for that.  It's sort of a hack, but enh.
+    if (!(chout.GetAccrate() == FLAGDOUBLE &&
+          chout.GetLlikedata() == FLAGDOUBLE &&
+          chout.GetNumBadTrees() == FLAGDOUBLE &&
+          chout.GetTinyPopTrees() == FLAGDOUBLE &&
+          chout.GetZeroDLTrees() == FLAGDOUBLE &&
+          chout.GetStretchedTrees() == FLAGDOUBLE ))
+    {
+        string tempstring = "";
+        if (current) tempstring = PrintTime(GetTime()) + "  ";
+        tempstring += "Accepted ";
+        if (chout.GetAccrate() == FLAGDOUBLE)
+        {
+            tempstring += UNKNOWN_STRING;
+        }
+        else
+        {
+            tempstring += MakeJustified(ToString(100.0 * chout.GetAccrate()), 5) + "%";
+        }
+        if (bayesanalysis)
+            tempstring += " | Point Likelihood " + Pretty(chout.GetLlikemle(), 10);
+        else
+            tempstring += " | Posterior lnL " + Pretty(chout.GetLlikemle(), 10);
+        tempstring += " | Data lnL ";
+        if (chout.GetLlikedata() == NEG_MAX)
+        {
+            tempstring += UNKNOWN_STRING;
+        }
+        else
+        {
+            tempstring += Pretty(chout.GetLlikedata(), 10);
+        }
+        report.push_back(tempstring);
+        long int badtrees = chout.GetNumBadTrees();
+        long int tinytrees = chout.GetTinyPopTrees();
+        long int zerodltrees = chout.GetZeroDLTrees();
+        long int stretchedtrees = chout.GetStretchedTrees();
+        if (badtrees == 0 && tinytrees == 0 &&
+            (zerodltrees == 0 || zerodltrees == FLAGLONG) &&
+            (stretchedtrees == 0 || stretchedtrees == FLAGLONG))
+        {
+            tempstring = "No trees discarded due to limit violations.";
+            report.push_back(tempstring);
+        }
+        else
+        {
+            tempstring = "Trees discarded due to too many events: ";
+            if (badtrees == FLAGLONG)
+            {
+                tempstring += UNKNOWN_STRING;
+            }
+            else
+            {
+                tempstring += Pretty(badtrees);
+            }
+            report.push_back(tempstring);
+
+            tempstring = "Trees discarded due to too small population sizes: ";
+            if (tinytrees == FLAGLONG)
+            {
+                tempstring += UNKNOWN_STRING;
+            }
+            else
+            {
+                tempstring += Pretty(tinytrees);
+            }
+            report.push_back(tempstring);
+
+            tempstring = "Trees discarded due to an infinitesimal data likelihood: ";
+            if (zerodltrees == FLAGLONG)
+            {
+                tempstring += UNKNOWN_STRING;
+            }
+            else
+            {
+                tempstring += Pretty(zerodltrees);
+            }
+            report.push_back(tempstring);
+
+            tempstring = "Trees discarded due to extremely long branch lengths: ";
+            if (stretchedtrees == FLAGLONG)
+            {
+                tempstring += UNKNOWN_STRING;
+            }
+            else
+            {
+                tempstring += Pretty(stretchedtrees);
+            }
+            report.push_back(tempstring);
+        }
+        // If multiple Arrangers, print an Arranger report
+        ratemap arrates = chout.GetAllAccrates();
+        ratemap::const_iterator rate;
+        if (arrates.size() > 1)
+        {
+            for (rate = arrates.begin(); rate != arrates.end(); ++rate)
+            {
+                report.push_back(MakeJustified(rate->first + " accepted ", -28) + " " +
+                                 MakeJustified(ToString(rate->second.first), 8) + "/" +
+                                 ToString(rate->second.second) + " proposals");
+            }
+        }
+    }
+
+    LongVec1d bayesunique = chout.GetBayesUnique();
+    if (bayesunique.size()>0)
+    {
+        //We have bayesian stuff.
+        report.push_back("");
+        report.push_back("Number of unique sampled values for each parameter:");
+        const ParamVector paramvec(true);
+        assert(bayesunique.size() == paramvec.size());
+        bool toofew = false;
+        for (unsigned long int pnum = 0; pnum < paramvec.size(); pnum++)
+        {
+            ParamStatus mystatus = paramvec[pnum].GetStatus();
+            if (mystatus.Varies())
+            {
+                string numbayes = ToString(bayesunique[pnum]);
+                if (bayesunique[pnum] < 50)
+                {
+                    toofew = true;
+                    numbayes = numbayes + "*";
+                }
+                report.push_back(MakeJustified(numbayes, max(static_cast<int>(numbayes.size()), 6))
+                                 + ": " + MakeJustified(paramvec[pnum].GetName(),-70));
+            }
+        }
+        if (toofew)
+        {
+            report.push_back("* Note!  This parameter has too few sampled values to provide a reasonable estimate.  "
+                             "Consider collecting more samples or narrowing your prior for this value.");
+        }
+        report.push_back("");
+    }
+
+    // Actually print the estimates
+    if (m_scalars.size())
+    {
+        report.push_back(skiptimestamp + m_scalars);
+    }
+    temptable.insert(temptable.end(),m_tables.begin(),m_tables.end());
+
+    temptable = Linewrap(temptable, linelength);
+
+    transform(temptable.begin(),temptable.end(),temptable.begin(),
+              bind1st(plus<string>(),skiptimestamp));
+    report.insert(report.end(),temptable.begin(),temptable.end());
+
+    return(report);
+} // FormatReport
+
+//------------------------------------------------------------------------------------
+
+void RunReport::DisplayReport(const ChainOut& chout)
+{
+    MakeReport(chout);
+    vector<string> rpt = FormatReport(chout,true,80);
+    for (unsigned long int it = 0; it < rpt.size(); ++it)
+    {
+        ReportNormal(rpt[it], false, 80);
+    }
+} // DisplayReport
+
+//------------------------------------------------------------------------------------
+
+void RunReport::SetBarParameters(long int totsteps, long int burn, long int chno, long int chtype)
+{
+    m_totalsteps = totsteps;
+    m_chainno = chno;
+    m_burnin = burn;
+    m_counter_display_increment = 10;
+    if (m_totalsteps % 10L != 0)
+    {
+        m_counter_display_increment = registry.GetChainParameters().GetInterval(chtype);
+    }
+    m_burnpercent = 100.0 * static_cast<double>(burn)/static_cast<double>(totsteps);
+    m_chaintype_num = chtype;
+    switch (chtype)
+    {
+        case 0:
+            m_chaintype = "Initial";
+            break;
+        case 1:
+            m_chaintype = "Final";
+            break;
+        default:
+            assert(false);
+            m_chaintype = "Unknown";
+            break;
+    }
+} // SetBarParameters
+
+//------------------------------------------------------------------------------------
+
+void RunReport::ReportUrgent(const string& msg, bool printtime, long int linelength)
+{
+    if (m_level != NONE)
+    {
+        PrettyPrint(msg, printtime, linelength);
+    }
+    if (m_filelevel != NONE)
+    {
+        SaveOutput(msg, printtime);
+    }
+}
+
+void RunReport::ReportNormal(const string& msg, bool printtime, long int linelength)
+{
+    if (m_level == NORMAL || m_level==VERBOSE)
+    {
+        PrettyPrint(msg, printtime, linelength);
+    }
+    if (m_filelevel == NORMAL || m_filelevel==VERBOSE)
+    {
+        SaveOutput(msg, printtime);
+    }
+}
+
+void RunReport::ReportNormal(const StringVec1d& msgs, bool printtime, long int linelength)
+{
+    if (msgs.size() > 0)
+    {
+        ReportNormal(msgs[0], printtime, linelength);
+    }
+    for (unsigned long int line = 1; line<msgs.size(); line++)
+    {
+        ReportNormal("          " + msgs[line], false, linelength);
+    }
+}
+
+void RunReport::ReportChat(const string& msg, bool printtime, long int linelength)
+{
+    if (m_level==VERBOSE)
+    {
+        PrettyPrint(msg, printtime, linelength);
+    }
+    if (m_filelevel==VERBOSE)
+    {
+        SaveOutput(msg, printtime);
+    }
+}
+
+void RunReport::ReportDebug(const string& msg, bool printtime, long int linelength)
+{
+#ifdef NDEBUG
+    return;
+#else
+    string debugmsg = "Debug message: " + msg;
+    PrettyPrint(debugmsg, printtime, linelength);
+    SaveOutput(debugmsg, printtime);
+#endif
+}
+
+void RunReport::ReportOnce(const string& msg, OnceKey key, bool doNormal)
+{
+    if (m_keyfrequencies.find(key) == m_keyfrequencies.end())
+    {
+        //This is the first time we've seen this message.  Add it, and report
+        // it to the user, either with ReportNormal or ReportDebug.
+        m_keyfrequencies.insert(make_pair(key, 1L));
+        if (doNormal)
+        {
+            ReportNormal(msg);
+            m_keyindices.insert(make_pair(static_cast<long int>(m_messages.size())-1, key));
+        }
+        else
+        {
+            ReportDebug(msg);
+#ifndef NDEBUG
+            m_keyindices.insert(make_pair(static_cast<long int>(m_messages.size())-1, key));
+#endif
+        }
+        //Whether we actually reported it or not, we stick it in the index.  This
+        // forces each OnceKey to come from exactly one call (or that all such
+        // calls be the same version of doNormal); at present, there
+        // are only four such calls, and they're all unique.
+    }
+    else
+    {
+        //We've seen this message before. Increment its count, but don't report.
+        KeyToLongMapiter oldkey = m_keyfrequencies.find(key);
+        long int count = oldkey->second;
+        count++;
+        m_keyfrequencies.erase(oldkey);
+        m_keyfrequencies.insert(make_pair(key, count));
+    }
+}
+
+//Private--use one of the above routines instead.
+void RunReport::PrettyPrint(const string& msg, bool printtime, long int linelength)
+{
+    vector<string> msgline;
+    msgline.push_back(msg);
+    long int full_line = linelength - 10;
+
+    string timestr, skiptime;
+#ifdef NOTIME_FUNC
+    // system does not provide a clock
+    timestr = "";
+    skiptime = "";
+    full_line = linelength;
+#else
+    if (printtime)
+    {
+        time_t tnow = GetTime();
+        timestr = PrintTime(tnow) + "  ";
+        skiptime = "          ";
+    }
+    else
+    {
+        timestr = "";
+        skiptime = "";
+        full_line = linelength;
+    }
+#endif
+
+    vector<string> msglines = Linewrap(msgline, full_line);
+
+    vector<string>::iterator oneline = msglines.begin();
+    cout << timestr << (*oneline) << endl;
+    for(oneline++; oneline != msglines.end(); ++oneline)
+        cout << skiptime << (*oneline) << endl;
+}
+
+void RunReport::SaveOutput(const string& msg, bool printtime)
+{
+    string timestr;
+#ifdef NOTIME_FUNC
+    // system does not provide a clock
+    timestr = "";
+#else
+    if (printtime)
+    {
+        time_t tnow = GetTime();
+        timestr = PrintTime(tnow) + "  ";
+    }
+    else
+    {
+        timestr = "";
+    }
+#endif
+    m_messages.push_back(timestr + msg);
+}
+
+//____________________________________________________________________________________
diff --git a/src/report/runreport.h b/src/report/runreport.h
new file mode 100644
index 0000000..eb6dbc5
--- /dev/null
+++ b/src/report/runreport.h
@@ -0,0 +1,136 @@
+// $Id: runreport.h,v 1.20 2011/04/23 02:02:49 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef RUNREPORT_H
+#define RUNREPORT_H
+
+#include <cmath>
+#include <ctime>
+#include <iostream>
+#include <map>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "vectorx.h"
+#include "stringx.h"
+#include "timex.h"
+#include "constants.h"
+
+class ChainOut;
+class ChainPack;
+class Force;
+class ForceSummary;
+
+enum OnceKey{oncekey_OverflowOfSafeExpDiff,
+             oncekey_UnderflowOfSafeExpDiff,
+             oncekey_OverflowOfProductWithExp,
+             oncekey_UnderflowOfProductWithExp};
+
+typedef std::map<OnceKey, long> KeyToLongMap;
+typedef std::map<OnceKey, long>::iterator KeyToLongMapiter;
+typedef std::map<long, OnceKey> LongToKeyMap;
+typedef std::map<long, OnceKey>::iterator LongToKeyMapiter;
+
+/******************************************************************
+This class manages runtime reports and provides runtime-report
+information to the output manager.  It takes input from the
+chain manager.  Key routines are PrintBar, meant to be called
+repeatedly by the chain itself (for the scrolling bar);
+MakeReport, which constructs a chain summary report, and
+FormatReport, which prints it.  (The two are separate
+so that the output reporter can take the results of MakeReport and
+reformat them to its liking.)
+
+Mary Kuhner       October 2000
+
+Added prognosis of profiles October 2001 -- Mary Kuhner
+
+******************************************************************/
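+//
+// The MakeReport/FormatReport split looks like this in use (the pattern is
+// taken from DisplayReport in runreport.cpp):
+//
+//     runreport.MakeReport(chout);                       // build the summary tables
+//     vector<string> rpt = runreport.FormatReport(chout, true, 80);
+//     for (unsigned long i = 0; i < rpt.size(); ++i)
+//         runreport.ReportNormal(rpt[i], false, 80);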
+
+class RunReport
+{
+  public:
+
+    RunReport(const ForceSummary& fs, verbosity_type progress=NORMAL);
+    ~RunReport() {};
+    void SetBarParameters(long totsteps, long burn, long chno, long chtype);
+    void PrintBar(long steps);
+    void PrognoseRegion(const ChainPack& chpack, long region,
+                        long steps, long total);
+    void RecordProfileStart();
+    void PrognoseProfiles(long thisprof, long totprofs);
+    void PrognoseAll(const ChainPack& chpack, long thisreg, long totregs);
+    void MakeReport(const ChainOut& chout) const;
+    vector<string> FormatReport(const ChainOut& chout, bool current,
+                                long linelength) const;
+    void DisplayReport(const ChainOut& chout);
+
+    //These functions are used by the rest of the program to communicate with
+    // the user, according to the verbosity level set by the user, and sometimes
+    // whether or not the NDEBUG flag is on.
+    void ReportUrgent(const string& msg, bool printtime = true, long linelength=DEFLINELENGTH);
+    void ReportNormal(const string& msg, bool printtime = true, long linelength=DEFLINELENGTH);
+    void ReportNormal(const StringVec1d& msgs, bool printtime = true, long linelength=DEFLINELENGTH);
+    void ReportChat(const string& msg, bool printtime = true, long linelength=DEFLINELENGTH);
+    void ReportDebug(const string& msg, bool printtime = true, long linelength=DEFLINELENGTH);
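+    // (ReportUrgent prints at every level except NONE; ReportNormal at NORMAL and
+    //  VERBOSE; ReportChat only at VERBOSE; ReportDebug only in builds without NDEBUG.)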
+
+    void ReportOnce(const string& msg, OnceKey key, bool doNormal);
+    void SaveOutput(const string& msg, bool printtime = true);
+
+    StringVec1d GetMessages() const {return m_messages;};
+    KeyToLongMap GetKeyFrequencies() const {return m_keyfrequencies;};
+    LongToKeyMap GetKeyIndices() const {return m_keyindices;};
+
+  private:
+
+    verbosity_type m_level;
+    verbosity_type m_filelevel;
+    // the following three are mutable so that a report can be prepared
+    // and stored internally even in a const RunReport object
+    mutable string       m_scalars;            // these three store run reports
+    mutable StringVec1d  m_tables;
+    const ForceSummary& m_forcesum;
+    time_t       m_profilestart;       // time at which profiling began
+    time_t       m_profiletotal;
+
+    // these are used to control the scrolling bar
+    long         m_totalsteps;
+    long         m_chainno;
+    long         m_counter_display_increment;
+    string       m_chaintype;
+    long         m_chaintype_num;
+    long         m_burnin;
+    double       m_burnpercent;
+
+    //These are for saving the messages we get.
+    StringVec1d  m_messages;
+    KeyToLongMap m_keyfrequencies;
+    LongToKeyMap m_keyindices;
+
+    void AddForceToTable(const Force* force, StringVec1d& table,
+                         const ChainOut& chout) const;
+    void PrettyPrint(const string& msg, bool printtime, long linelength);
+
+}; // RunReport
+
+//------------------------------------------------------------------------------------
+// These are tools for marking output (both header printed on screen at LAMARC startup
+// and beginning of output file "outfile.txt") to indicate unambiguously whether any
+// "unusual" debugging options are activated.  If all the relevant pre-processor flags
+// are all in their usual states, no extra strings are printed.
+
+bool DebuggingOptionsRunning();
+string DebuggingOptionsString(unsigned long int & current_linecount);
+
+//------------------------------------------------------------------------------------
+
+#endif // RUNREPORT_H
+
+//____________________________________________________________________________________
diff --git a/src/report/spreadsheet.cpp b/src/report/spreadsheet.cpp
new file mode 100644
index 0000000..22cca89
--- /dev/null
+++ b/src/report/spreadsheet.cpp
@@ -0,0 +1,240 @@
+// $Id: spreadsheet.cpp,v 1.4 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <fstream>
+#include <iostream>
+
+#include "registry.h"
+#include "spreadsheet.h"
+#include "stringx.h"
+#include "tinyxml.h"
+#include "tixml_base.h"
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+std::string makeFileName(std::string filePrefix, std::string paramName)
+{
+    return (filePrefix + "_" + paramName + ".csv");
+}
+
+size_t getNumSlices(TiXmlElement * estimateElem)
+{
+    TiXmlElement * profElem = ti_requiredChild(estimateElem,"profile");
+    std::vector<TiXmlElement*> slices = ti_requiredChildren(profElem,"profile-slice");
+    return slices.size();
+}
+
+void addFirstCol(   StringVec2d &   table,
+                    TiXmlElement *  estimateElem,
+                    std::string &   profileTypeOut,
+                    std::string &   analysisTypeOut)
+{
+    TiXmlElement * profElem = ti_requiredChild(estimateElem,"profile");
+    profileTypeOut = ti_attributeValue(profElem,"type");
+    analysisTypeOut= ti_attributeValue(profElem,"analysis");
+
+    std::string profileIndexLabel = profileTypeOut == "fixed" ? "multiplier" : "percentile";
+    table[0][0] = "\"" + profileIndexLabel + "\"";
+
+    std::vector<TiXmlElement*> slices = ti_requiredChildren(profElem,"profile-slice");
+    std::vector<TiXmlElement*>::const_iterator siter;
+    size_t index;
+    for(siter = slices.begin(), index = 1; siter != slices.end(); siter++, index++)
+    {
+        std::string labelVal = "";
+
+        TiXmlElement * perc = ti_optionalChild(*siter,profileIndexLabel);
+        if(perc != NULL)
+        {
+            labelVal = ti_attributeValue(perc,"value");
+        }
+        if(ti_hasAttribute(*siter,"special"))
+        {
+            labelVal = ti_attributeValue(*siter,"special");
+        }
+        table[index][0] = labelVal;
+    }
+}
+
+void addEstimate(   StringVec2d &   table,
+                    size_t          estIndex,
+                    TiXmlElement *  estimateElem,
+                    std::string     paramName,
+                    std::string     regionName,
+                    std::string     expectedProfileType,
+                    std::string     expectedAnalysisType)
+{
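+    // Layout: column 0 (filled by addFirstCol) holds the multiplier/percentile
+    // labels; each estimate then occupies two adjacent columns, 2*estIndex+1 for
+    // the parameter values and 2*estIndex+2 for the matching log-likelihood
+    // (or point probability in a Bayesian run).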
+    TiXmlElement * profElem = ti_requiredChild(estimateElem,"profile");
+    std::string profileType = ti_attributeValue(profElem,"type");
+    std::string analysisType = ti_attributeValue(profElem,"analysis");
+    assert(profileType == expectedProfileType);     // EWFIX.BUG.838 -- should throw
+    assert(analysisType == expectedAnalysisType);   // EWFIX.BUG.838 -- should throw
+
+    size_t paramIndex = estIndex * 2 + 1;
+    size_t probIndex  = paramIndex + 1;
+
+    std::string probTag   = (expectedAnalysisType == "bayesian") ? "point-probability" : "log-likelihood";
+    std::string probLabel = (expectedAnalysisType == "bayesian") ? "PointProb" : "LnLike";
+
+    table[0][paramIndex] = "\"" + paramName + ":" + regionName + "\"";
+    table[0][probIndex ] = "\"" + probLabel + "(" + paramName + ":" + regionName + ")\"";
+
+    std::vector<TiXmlElement*> slices = ti_requiredChildren(profElem,"profile-slice");
+    std::vector<TiXmlElement*>::const_iterator siter;
+    size_t sliceIndex;
+    for(siter = slices.begin(), sliceIndex = 1; siter != slices.end(); siter++, sliceIndex++)
+    {
+        TiXmlElement * paramElem = ti_requiredChild(*siter,"param-value");
+        std::string paramVal = ti_attributeValue(paramElem,"value");
+
+        TiXmlElement * probElem = ti_requiredChild(*siter,probTag);
+        std::string probVal = ti_attributeValue(probElem,"value");
+
+        table[sliceIndex][paramIndex] = paramVal;
+        table[sliceIndex][probIndex]  = probVal;
+    }
+}
+
+void reorderEstimates(std::vector<TiXmlElement*> & estimates)
+// if there is an "overall" estimate, move it to the front
+{
+    std::vector<TiXmlElement*>::iterator eiter;
+    std::vector<TiXmlElement*>::iterator overall = estimates.end();
+    for(eiter = estimates.begin(); eiter != estimates.end(); eiter++)
+    {
+        TiXmlElement * estElem = *eiter;
+        std::string typeVal = ti_attributeValue(estElem,"type");    // single region or overall
+        if(typeVal == "overall")
+        {
+            overall = eiter;
+            break;
+        }
+    }
+
+    if(overall != estimates.end())
+    {
+        TiXmlElement * overallElem = *overall;
+        estimates.erase(overall);
+        estimates.insert(estimates.begin(),overallElem);
+    }
+}
+
+void WriteOneProfileSpread( std::string     filePrefix,
+                            TiXmlElement *  forceElem,
+                            TiXmlElement *  paramElem)
+{
+    std::string forceName = ti_attributeValue(forceElem,"short-name");
+    std::string paramName = ti_attributeValue(paramElem,"short-name");
+
+    string::size_type i = paramName.find("/");
+    while (i != string::npos)
+    {
+        paramName.replace(i,1,"+");
+        i = paramName.find("/");
+    }
+
+    std::string fileName  = makeFileName(filePrefix,paramName);
+
+    std::ofstream profileStream;
+    profileStream.open(fileName.c_str(),std::ios::out);
+
+    // open file
+    std::vector<TiXmlElement*> estimates = ti_requiredChildren(paramElem,"estimate");
+    size_t numEstimates = estimates.size();
+
+    assert(estimates.begin() != estimates.end());
+    size_t numSlices = getNumSlices(*(estimates.begin()));
+
+    size_t rowCount = 1 + numSlices;         // header + slices
+    size_t colCount = 1 + 2 * numEstimates;  // multiplier/percentile + for each estimate, param and prob/like
+
+    reorderEstimates(estimates);
+
+    StringVec2d table = CreateVec2d(rowCount,colCount,std::string(""));
+    std::string expectedProfileType;
+    std::string expectedAnalysisType;
+    addFirstCol(table,*(estimates.begin()),expectedProfileType,expectedAnalysisType);
+
+    std::vector<TiXmlElement*>::const_iterator eiter;
+    size_t index;
+    for(eiter = estimates.begin(), index=0; eiter != estimates.end(); eiter++,index++)
+    {
+        TiXmlElement * estElem = *eiter;
+        std::string regionName;
+        std::string typeVal = ti_attributeValue(estElem,"type");    // single region or overall
+        if(typeVal == "overall")
+        {
+            regionName = "overall";
+        }
+        else
+        {
+            regionName = ti_attributeValue(estElem,"region-name");
+        }
+        addEstimate(table,index,*eiter,paramName,regionName,expectedProfileType,expectedAnalysisType);
+    }
+
+    // header lines
+    std::string intervalTypes = (expectedAnalysisType == "bayesian")
+        ? "credibility intervals"
+        : "confidence intervals";
+    // parameter name
+    profileStream << "Profile Information" << std::endl;
+    profileStream << "Parameter: " << paramName << std::endl;
+    profileStream << "Analysis type: " << expectedAnalysisType
+                  << " which produces " << intervalTypes << std::endl;
+    profileStream << "Data points are from " << expectedProfileType << " profiling." << std::endl;
+
+    size_t row;
+    size_t col;
+    for(row = 0; row < rowCount; row++)
+    {
+        for(col = 0; col < colCount; col++)
+        {
+            if(col != 0)
+            {
+                profileStream << ",";
+            }
+            profileStream << table[row][col];
+        }
+        profileStream << std::endl;
+    }
+
+    profileStream.close();
+    registry.GetUserParameters().AddProfileName(fileName);
+}
+
+void WriteProfileSpreads(std::string filePrefix, TiXmlElement * reportTop)
+{
+
+    // <lamarc-run-report>
+    //     <parameters>
+    //         <force>
+    //             <parameter>
+    TiXmlElement * params = ti_requiredChild(reportTop,"parameters");
+    std::vector<TiXmlElement *> forces = ti_requiredChildren(params,"force");
+    std::vector<TiXmlElement *>::const_iterator fiter;
+    for(fiter = forces.begin(); fiter != forces.end(); fiter++)
+    {
+        std::vector<TiXmlElement *> params = ti_requiredChildren(*fiter,"parameter");
+        std::vector<TiXmlElement *>::const_iterator piter;
+        for(piter = params.begin(); piter != params.end(); piter++)
+        {
+            std::string profType = ti_attributeValue((*piter),"type");
+            if(profType == "percentile" || profType == "fixed")
+            {
+                WriteOneProfileSpread(filePrefix,*fiter,*piter);
+            }
+        }
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/report/spreadsheet.h b/src/report/spreadsheet.h
new file mode 100644
index 0000000..88c7476
--- /dev/null
+++ b/src/report/spreadsheet.h
@@ -0,0 +1,55 @@
+// $Id: spreadsheet.h,v 1.2 2010/03/02 23:12:29 bobgian Exp $
+
+/*
+  Copyright 2008  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef SPREADSHEET_H
+#define SPREADSHEET_H
+
+#include <string>
+#include "vectorx.h"
+
+class Force;
+class Parameter;
+class TiXmlElement;
+
+/******************************************************************
+Routines for writing spreadsheet-ready (.csv) versions of profile data
+from outfile.txt
+
+Elizabeth Walkup  September 2009
+
+******************************************************************/
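+
+// Each generated .csv file (written by WriteOneProfileSpread in spreadsheet.cpp)
+// has the shape sketched below; "Theta1", the bayesian analysis, and the
+// percentile profile type are illustrative choices, not fixed values:
+//
+//     Profile Information
+//     Parameter: Theta1
+//     Analysis type: bayesian which produces credibility intervals
+//     Data points are from percentile profiling.
+//     "percentile","Theta1:overall","PointProb(Theta1:overall)"
+//     <percentile>,<param-value>,<point-probability>
+//     ...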
+
+std::string makeFileName(   std::string     filePrefix,
+                            std::string     paramName);
+size_t      getNumSlices(   TiXmlElement *  estimateElem);
+void        addFirstCol(    StringVec2d &   table,
+                            TiXmlElement *  estimateElem,
+                            std::string &   profileTypeOut,
+                            std::string &   analysisTypeOut);
+void        addEstimate(    StringVec2d &   table,
+                            size_t          estimateIndex,
+                            TiXmlElement *  estimateElem,
+                            std::string     paramName,
+                            std::string     regionName,
+                            std::string     expectedProfileType,
+                            std::string     expectedAnalysisType);
+
+void        reorderEstimates(   std::vector<TiXmlElement*> &    estimates);
+
+void WriteOneProfileSpread( std::string     filePrefix,
+                            TiXmlElement *  forceElem,
+                            TiXmlElement *  paramElem);
+
+void WriteProfileSpreads(   std::string     filePrefix,
+                            TiXmlElement *  reportTop);
+
+#endif // SPREADSHEET_H
+
+//____________________________________________________________________________________
diff --git a/src/report/xml_report.cpp b/src/report/xml_report.cpp
new file mode 100644
index 0000000..e06d954
--- /dev/null
+++ b/src/report/xml_report.cpp
@@ -0,0 +1,646 @@
+// $Id: xml_report.cpp,v 1.16 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2008  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "chainmanager.h"
+#include "chainout.h"
+#include "force.h"
+#include "mathx.h"
+#include "parameter.h"
+#include "region.h"
+#include "reportpage.h"
+#include "timex.h"
+#include "vector_constants.h"
+#include "xml_report.h"
+
+//------------------------------------------------------------------------------------
+
+XMLReport::XMLReport(std::string filename)
+    :   m_filename(filename),
+        m_doc(filename.c_str())
+{
+}
+
+XMLReport::~XMLReport()
+{
+}
+
+void
+XMLReport::AddSlicesTo( TiXmlElement * parentElem,
+                        const Parameter& param,
+                        force_type tag,
+                        long regNo,
+                        bool do_overall)
+{
+
+    bool isBayes = registry.GetChainParameters().IsBayesian();
+    proftype ptype = param.GetProfileType();
+    verbosity_type verbosity = registry.GetUserParameters().GetVerbosity();
+
+    TiXmlElement * profileElem = new TiXmlElement("profile");
+    std::string profTypeString = ToString(ptype);
+    profileElem->SetAttribute("type",profTypeString.c_str());
+    profileElem->SetAttribute("analysis", isBayes ? "bayesian" : "likelihood");
+    parentElem->LinkEndChild(profileElem);
+
+    double mleval = -DBL_BIG;
+    double mleperc = -DBL_BIG;      // only for bayesian
+    double llikeAtMle = -DBL_BIG;   // log likelihood for like, point prob for bayes
+
+#if 0 // EWFIX.REMOVE
+    vector<vector<centilepair> > centiles;
+#endif
+
+    vector<centilepair> CIvec;
+    vector<centilepair> priorLikeVec;
+
+    unsigned long paramindex = param.GetParamVecIndex();
+    DoubleVec1d modifiers = registry.GetForceSummary().GetModifiers(paramindex);
+    DoubleVec1d special_modifiers;  // used for fixed w/ growth
+
+    if(do_overall)
+    {
+        mleval = param.GetOverallMLE();
+        llikeAtMle = registry.GetForceSummary().GetOverallLlikeMle();
+#if 0   // EWFIX.REMOVE
+        centiles = param.GetOverallProfile();
+#endif
+        if(param.IsProfiled())
+        {
+            CIvec = param.GetOverallCIs();
+            priorLikeVec = param.GetOverallPriorLikes();
+        }
+    }
+    else
+    {
+        mleval = param.GetMLE(regNo);
+        llikeAtMle = registry.GetForceSummary().GetLlikeMle(regNo);
+#if 0   // EWFIX.REMOVE
+        centiles = param.GetProfiles(regNo);
+#endif
+        if(param.IsProfiled())
+        {
+            CIvec = param.GetCIs(regNo);
+            priorLikeVec = param.GetPriorLikes(regNo);
+        }
+    }
+
+    // EWFIX -- need to sort on double val
+    std::map< std::pair<double, double>, TiXmlElement *> slices;
+
+    if(param.IsProfiled())
+    {
+        if(isBayes)
+        {
+            mleperc = ReportPage::GetReverseCentile(CIvec,mleval);
+            llikeAtMle = ReportPage::GetCentile(priorLikeVec,mleperc);
+        }
+
+        DoubleVec1d modTimesMleToPercs;
+        if(ptype == profile_FIX )
+        {
+            if (param.IsForce(force_GROW) && verbosity != CONCISE && verbosity != NONE)
+            {
+                special_modifiers = vecconst::growthmultipliers;
+                special_modifiers.insert(special_modifiers.end(),vecconst::growthfixed.begin(),vecconst::growthfixed.end());
+            }
+            if (isBayes)
+            {
+                if (param.IsForce(force_GROW) && verbosity != CONCISE && verbosity != NONE)
+                {
+                    modifiers = ProfPage::MakeGrowFixedModifiers(mleval);
+                    for (unsigned long ind = 0; ind < modifiers.size(); ++ind)
+                    {
+                        modTimesMleToPercs.push_back(modifiers[ind]);
+                    }
+                }
+                else
+                {
+                    for (unsigned long ind = 0; ind < modifiers.size(); ++ind)
+                    {
+                        modTimesMleToPercs.push_back(modifiers[ind]*mleval);
+                    }
+                }
+                ProfPage::TradeValsForPercs(modTimesMleToPercs,CIvec);
+            }
+        }
+
+        for (unsigned long ind = 0; ind < modifiers.size(); ++ind)
+        {
+            if (ptype == profile_NONE) continue;
+
+            TiXmlElement * oneSlice = new TiXmlElement("profile-slice");
+
+            double paramValue = -DBL_BIG;
+            if(ptype == profile_FIX && (isBayes || verbosity == CONCISE || verbosity == NONE))
+            {
+                if (param.IsForce(force_GROW) && verbosity != CONCISE && verbosity != NONE)
+                {
+                    paramValue = modifiers[ind];
+                }
+                else
+                {
+                    paramValue = mleval * modifiers[ind];
+                }
+            }
+            else
+            {
+                paramValue = ReportPage::GetCentile(CIvec,modifiers[ind]);
+            }
+
+            ///////////////////////////////////////////////////////////
+            // <multiplier> -- only for fixed
+            ///////////////////////////////////////////////////////////
+            if(ptype == profile_FIX)
+            {
+                std::string modString = "multiplier";
+                double modValue = modifiers[ind];
+
+                if(verbosity != CONCISE && verbosity != NONE && param.IsForce(force_GROW))
+                {
+                    modValue = special_modifiers[ind];
+                    if(paramValue == special_modifiers[ind])
+                    {
+                        modString = "fixed-value";
+                    }
+                }
+
+                TiXmlElement * modElem = new TiXmlElement(modString);
+                modElem->SetAttribute("value",ToString(modValue).c_str());
+                oneSlice->LinkEndChild(modElem);
+            }
+
+            ///////////////////////////////////////////////////////////
+            // <param-value>
+            ///////////////////////////////////////////////////////////
+            TiXmlElement * paramValueElem = new TiXmlElement("param-value");
+            oneSlice->LinkEndChild(paramValueElem);
+            paramValueElem->SetAttribute("value",ToString(paramValue).c_str());
+            if(ptype == profile_PERCENTILE)
+            {
+                //If the profiler gave up before finding values with as low a
+                // log likelihood as it would have liked, that information was
+                // saved in the analyzer, and we can get it out here to tell
+                // users that the value is "<1.53" instead of "1.53".
+                if (param.CentileIsExtremeLow(modifiers[ind], regNo))
+                {
+                    if (param.CentileIsExtremeHigh(modifiers[ind], regNo))
+                    {
+                        // EWFIX -- do we have better warn status?
+                        paramValueElem->SetAttribute("warning","failed");
+                        paramValueElem->SetAttribute("value","***");
+                    }
+                    else
+                    {
+                        paramValueElem->SetAttribute("warning","lower");
+                    }
+                }
+                else if (param.CentileIsExtremeHigh(modifiers[ind], regNo))
+                {
+                    paramValueElem->SetAttribute("warning","higher");
+                }
+                else if (param.CentileHadWarning(modifiers[ind], regNo))
+                {
+                    paramValueElem->SetAttribute("warning","other");
+                }
+            }
+
+            ///////////////////////////////////////////////////////////
+            // <percentile>
+            ///////////////////////////////////////////////////////////
+            if(ptype == profile_PERCENTILE)
+            {
+                TiXmlElement * percElem = new TiXmlElement("percentile");
+                percElem->SetAttribute("value",ToString(modifiers[ind]));
+                oneSlice->LinkEndChild(percElem);
+            }
+            if(ptype == profile_FIX && isBayes)
+            {
+                TiXmlElement * percElem = new TiXmlElement("percentile");
+
+                double modVal = modifiers[ind];
+                if (! (param.IsForce(force_GROW) && verbosity != CONCISE && verbosity != NONE))
+                {
+                    modVal *= mleval;
+                }
+
+                double val = ReportPage::GetReverseCentile(CIvec,modVal);
+                percElem->SetAttribute("value",ToString(val));
+                oneSlice->LinkEndChild(percElem);
+            }
+
+            ///////////////////////////////////////////////////////////
+            // <log-likelihood> -- not for bayes
+            ///////////////////////////////////////////////////////////
+            if(!isBayes)
+            {
+                TiXmlElement * logLike = new TiXmlElement("log-likelihood");
+                double lnl = ReportPage::GetCentile(priorLikeVec,modifiers[ind]);
+
+#if 0
+                // EWFIX.HACK.HACK.HACK -- I don't understand what's going
+                // on in reportpage.cpp, but at least this gets the right
+                // values out
+                if(lnl == FLAGLONG && ptype == profile_FIX && param.IsForce(force_GROW) && verbosity != CONCISE && verbosity != NONE)
+                {
+                    lnl = ReportPage::GetCentile(priorLikeVec,modifiers[ind]/mleval);
+                }
+#endif
+
+                logLike->SetAttribute("value",ToString(lnl).c_str());
+                oneSlice->LinkEndChild(logLike);
+            }
+
+            ///////////////////////////////////////////////////////////
+            // <point-probability> -- only for bayes
+            ///////////////////////////////////////////////////////////
+            if(isBayes)
+            {
+                TiXmlElement * pointProb = new TiXmlElement("point-probability");
+                double lnl = -DBL_BIG;
+                if(ptype == profile_FIX)
+                {
+                    lnl = ReportPage::GetCentile(priorLikeVec,modTimesMleToPercs[ind]);
+                }
+                else
+                {
+                    lnl = ReportPage::GetCentile(priorLikeVec,modifiers[ind]);
+                }
+                std::string likeStr = (lnl == -DBL_BIG) ? "***" : ToString(lnl);
+                pointProb->SetAttribute("value",likeStr.c_str());
+                oneSlice->LinkEndChild(pointProb);
+            }
+
+            slices[std::pair<double, double>(paramValue,modifiers[ind])]=oneSlice;
+
+        }
+    }
+
+    // add in element at MLE/MPE
+    bool haveMle = false;
+
+    // element may already be there, but not marked as MLE
+    // I believe this only happens for profile_FIX && isBayes
+    // It can also be there because we've hit the maximum or
+    // minimum value -- but in that case we want the duplicate
+    // entries
+    std::map< std::pair<double, double>, TiXmlElement *>::const_iterator sliter;
+    for(sliter = slices.begin(); sliter != slices.end(); sliter++)
+    {
+        std::pair<double, double> sortVal = (*sliter).first;
+        double paramValue = sortVal.first;
+        if(isBayes && ptype == profile_FIX && CloseEnough(paramValue,mleval))
+        {
+            haveMle = true;
+            assert(sortVal.second == 1.0 || param.IsForce(force_GROW));
+            ((*sliter).second)->SetAttribute("special", "mpe");
+        }
+    }
+
+    if (!haveMle)
+    {
+        TiXmlElement * newSlice = new TiXmlElement("profile-slice");
+        double secondSort = ptype == profile_FIX ? 1.0 : 0.5;
+        slices[std::pair<double, double>(mleval,secondSort)] = newSlice;
+
+        newSlice->SetAttribute("special", isBayes ? "mpe" : "mle");
+
+        if(ptype == profile_FIX)
+        {
+            TiXmlElement * multiplierElem = new TiXmlElement("multiplier");
+            newSlice->LinkEndChild(multiplierElem);
+            multiplierElem->SetAttribute("value",ToString(1.0).c_str());
+        }
+
+        TiXmlElement * paramValueElem = new TiXmlElement("param-value");
+        newSlice->LinkEndChild(paramValueElem);
+        paramValueElem->SetAttribute("value",ToString(mleval).c_str());
+
+        if(param.IsProfiled())
+        {
+            if(isBayes)
+            {
+                TiXmlElement * percElem = new TiXmlElement("percentile");
+                newSlice->LinkEndChild(percElem);
+                percElem->SetAttribute("value",ToString(mleperc).c_str());
+
+                // EWFIX -- wrong for bayes/fix/grow ?? test this !!
+                TiXmlElement * pointProbElem = new TiXmlElement("point-probability");
+                newSlice->LinkEndChild(pointProbElem);
+                pointProbElem->SetAttribute("value",ToString(llikeAtMle).c_str());
+            }
+            else
+            {
+                TiXmlElement * logLikeElem = new TiXmlElement("log-likelihood");
+                newSlice->LinkEndChild(logLikeElem);
+                logLikeElem->SetAttribute("value",ToString(llikeAtMle).c_str());
+            }
+        }
+
+    }
+
+    for(sliter = slices.begin(); sliter != slices.end(); sliter++)
+    {
+        profileElem->LinkEndChild((*sliter).second);
+    }
+}
+
+TiXmlElement *
+XMLReport::MakeParameter(const Parameter& param, force_type force_tag)
+{
+
+    string paramname = param.GetName();
+    ReportPage::TrimString(paramname);
+    string shortparamname = param.GetShortName();
+    ReportPage::TrimString(shortparamname);
+    unsigned long pindex = param.GetParamVecIndex();
+    proftype ptype = param.GetProfileType();
+
+    TiXmlElement * paramElem = new TiXmlElement("parameter");
+    paramElem->SetAttribute("short-name",shortparamname.c_str());
+    paramElem->SetAttribute("name",paramname.c_str());
+    paramElem->SetAttribute("index",ToString(pindex));
+    paramElem->SetAttribute("type",ToString(ptype));
+
+    long nregs = registry.GetDataPack().GetNRegions();
+    for (long reg = 0; reg <= nregs; ++reg)
+    {
+        if (reg == nregs && nregs <= 1) break;
+        bool do_overall = (reg==nregs && nregs > 1);
+
+        TiXmlElement * estimateElem = new TiXmlElement("estimate");
+        paramElem->LinkEndChild(estimateElem);
+        if(do_overall)
+        {
+            estimateElem->SetAttribute("type","overall");
+        }
+        else
+        {
+            estimateElem->SetAttribute("type","single-region");
+            std::string regionname = registry.GetDataPack().GetRegion(reg).GetRegionName();
+            estimateElem->SetAttribute("region-name",regionname.c_str());
+        }
+
+        AddSlicesTo(estimateElem,param,force_tag,reg,do_overall);
+    }
+    return paramElem;
+}
+
+TiXmlElement *
+XMLReport::MakeParameters()
+{
+
+    TiXmlElement * parametersElem = new TiXmlElement("parameters");
+
+    const ForceVec forces = registry.GetForceSummary().GetAllForces();
+
+    ForceVec::const_iterator fit;
+    for(fit = forces.begin(); fit != forces.end(); ++fit)
+    {
+        // organizing parameters by force
+        TiXmlElement * forceElem = new TiXmlElement("force");
+        parametersElem->LinkEndChild(forceElem);
+
+        // identify the force
+        string forcename = (*fit)->GetFullparamname();
+        string shortforcename = (*fit)->GetShortparamname();
+        force_type tag = (*fit)->GetTag();
+
+        forceElem->SetAttribute("long-name",forcename.c_str());
+        forceElem->SetAttribute("short-name",shortforcename.c_str());
+
+        const vector<Parameter>& parameters = (*fit)->GetParameters();
+        vector<Parameter>::const_iterator param;
+        for (param = parameters.begin(); param != parameters.end(); ++param)
+        {
+            ParamStatus mystatus = param->GetStatus();
+            if (!mystatus.Valid()) continue;
+
+            TiXmlElement * paramElem = MakeParameter(*param,tag);
+            forceElem->LinkEndChild(paramElem);
+        }
+    }
+    // RegionGammaInfo should go here
+    return parametersElem;
+}
+
+TiXmlElement *
+XMLReport::MakeChainReport(const ChainOut& co,size_t regNo, size_t repNo, size_t chainNo)
+{
+    TiXmlElement * elem = new TiXmlElement("chain-info");
+    elem->SetAttribute("region",ToString(regNo).c_str());
+    elem->SetAttribute("replicate",ToString(repNo).c_str());
+    elem->SetAttribute("chain",ToString(chainNo).c_str());
+
+    // info on timing
+    TiXmlElement * timeElem = new TiXmlElement("runtime");
+    time_t starttime = co.GetStarttime();
+    time_t endtime = co.GetEndtime();
+    timeElem->SetAttribute("start",PrintTime(starttime).c_str());
+    timeElem->SetAttribute("end",PrintTime(endtime).c_str());
+    timeElem->SetAttribute("seconds",ToString(endtime-starttime).c_str());
+    elem->LinkEndChild(timeElem);
+
+    // info on discarded trees
+    TiXmlElement * discardsElem = new TiXmlElement("discarded-trees");
+
+    TiXmlElement * badTreesElem = new TiXmlElement("bad-trees");
+    badTreesElem->SetAttribute("value",ToString(co.GetNumBadTrees()).c_str());
+    discardsElem->LinkEndChild(badTreesElem);
+
+    TiXmlElement * tinyPopTreesElem = new TiXmlElement("tiny-pop-trees");
+    tinyPopTreesElem->SetAttribute("value",ToString(co.GetTinyPopTrees()).c_str());
+    discardsElem->LinkEndChild(tinyPopTreesElem);
+
+    TiXmlElement * zeroDlTreesElem = new TiXmlElement("zero-dl-trees");
+    zeroDlTreesElem->SetAttribute("value",ToString(co.GetZeroDLTrees()).c_str());
+    discardsElem->LinkEndChild(zeroDlTreesElem);
+
+    TiXmlElement * longBranchTreesElem = new TiXmlElement("long-branch-trees");
+    longBranchTreesElem->SetAttribute("value",ToString(co.GetStretchedTrees()).c_str());
+    discardsElem->LinkEndChild(longBranchTreesElem);
+
+    elem->LinkEndChild(discardsElem);
+
+    // info on acceptance rates
+    TiXmlElement * acceptRatesElem = new TiXmlElement("acceptance-rates");
+    elem->LinkEndChild(acceptRatesElem);
+
+    TiXmlElement * overallAcceptRate = new TiXmlElement("overall-acceptance");
+    overallAcceptRate->SetAttribute("value",ToString(co.GetAccrate()).c_str());
+    acceptRatesElem->LinkEndChild(overallAcceptRate);
+
+    ratemap::const_iterator iter;
+    ratemap rates = co.GetAllAccrates();
+    for(iter=rates.begin(); iter != rates.end(); iter++)
+    {
+        std::string rateType = (*iter).first;
+        std::pair<long,long> values = (*iter).second;
+        long accepted = values.first;
+        long proposed = values.second;
+
+        TiXmlElement * rateElem = new TiXmlElement("arranger-rate");
+        rateElem->SetAttribute("type",rateType);
+        rateElem->SetAttribute("accepted",ToString(accepted).c_str());
+        rateElem->SetAttribute("proposed",ToString(proposed).c_str());
+        acceptRatesElem->LinkEndChild(rateElem);
+    }
+
+    // unique sampled values (for Bayesian)
+    vector<long> bayesunique = co.GetBayesUnique();
+    if (!bayesunique.empty())
+    {
+        TiXmlElement * uniqBayesElem = new TiXmlElement("unique-sampled-values");
+        elem->LinkEndChild(uniqBayesElem);
+
+        const ParamVector paramvec(true);
+        assert(paramvec.size() == bayesunique.size());
+        for (unsigned long pnum=0; pnum<paramvec.size(); pnum++)
+        {
+            ParamStatus mystatus = paramvec[pnum].GetStatus();
+            if (mystatus.Inferred())
+            {
+                TiXmlElement * pElem = new TiXmlElement("parameter");
+                uniqBayesElem->LinkEndChild(pElem);
+
+                pElem->SetAttribute("name",paramvec[pnum].GetName());
+                pElem->SetAttribute("samples",bayesunique[pnum]);
+
+                if (bayesunique[pnum] < 50)
+                {
+                    pElem->SetAttribute("warning","too few");
+                }
+            }
+        }
+    }
+
+    return elem;
+}
+
+TiXmlElement *
+XMLReport::MakeWarnings()
+{
+    TiXmlElement * warnElem = new TiXmlElement("warnings");
+
+    TiXmlElement * frontEndElem = new TiXmlElement("front-end-warnings");
+    warnElem->LinkEndChild(frontEndElem);
+    TiXmlComment * warnComment = new TiXmlComment();
+    warnComment->SetValue("foo!");
+    warnElem->LinkEndChild(warnComment);
+
+    TiXmlElement * backEndElem = new TiXmlElement("back-end-warnings");
+    warnElem->LinkEndChild(backEndElem);
+    warnComment = new TiXmlComment();
+    warnComment->SetValue("fie!");
+    warnElem->LinkEndChild(warnComment);
+
+    return warnElem;
+}
+
+void
+XMLReport::AddRangeElements(TiXmlElement * traitElem,rangeset sites,std::string label)
+{
+    TiXmlElement * range = new TiXmlElement("range");
+    traitElem->LinkEndChild(range);
+    range->SetAttribute("type",label.c_str());
+
+    for(rangeset::iterator i = sites.begin(); i != sites.end(); i++)
+    {
+        rangepair rp = *i;
+        TiXmlElement * sites = new TiXmlElement("sites");
+        range->LinkEndChild(sites);
+        sites->SetAttribute("start",ToString(rp.first).c_str());
+        sites->SetAttribute("stop",ToString(rp.second-1).c_str());
+    }
+}
+
+TiXmlElement *
+XMLReport::Write(const ChainManager& chainMan)
+{
+    TiXmlElement * topItem = new TiXmlElement("lamarc-run-report");
+    m_doc.LinkEndChild( topItem );
+
+    // parameters
+    TiXmlElement * parameters = MakeParameters();
+    topItem->LinkEndChild(parameters);
+
+    // mapping
+    if (registry.GetDataPack().AnyMapping())
+    {
+        TiXmlElement * traitsElem = new TiXmlElement("traits");
+        topItem->LinkEndChild(traitsElem);
+
+        for (long reg=0; reg<registry.GetDataPack().GetNRegions(); reg++)
+        {
+            const Region& region = registry.GetDataPack().GetRegion(reg);
+            for (long mloc=0; mloc<region.GetNumMovingLoci(); mloc++)
+            {
+                const Locus& locus = region.GetMovingLocus(mloc);
+
+
+                TiXmlElement * traitElem = new TiXmlElement("trait");
+                traitsElem->LinkEndChild(traitElem);
+
+                traitElem->SetAttribute("name",locus.GetName().c_str());
+                traitElem->SetAttribute("type",ToString(locus.GetAnalysisType()).c_str());
+
+
+                long regoffset = region.GetSiteSpan().first;
+                std::set<std::pair<double, long int> > orderedsites = locus.MakeOrderedSites(regoffset);
+
+                rangeset bestsites = locus.GetBestSites(orderedsites);
+                AddRangeElements(traitElem,bestsites,"best");
+
+                rangeset top5 = locus.GetTopSites(orderedsites,0.05);
+                AddRangeElements(traitElem,top5,"0.05");
+
+                rangeset top50 = locus.GetTopSites(orderedsites,0.50);
+                AddRangeElements(traitElem,top50,"0.50");
+
+                rangeset top95 = locus.GetTopSites(orderedsites,0.95);
+                AddRangeElements(traitElem,top95,"0.95");
+            }
+        }
+    }
+
+    // chains and arrangers report
+    TiXmlElement * chainPackElem = new TiXmlElement("chains");
+    topItem->LinkEndChild(chainPackElem);
+    vector<vector<vector<ChainOut > > > chains = chainMan.GetChainPack().GetAllChains();
+    for(size_t i = 0; i < chains.size(); i++)
+    {
+        for(size_t j = 0; j < chains[i].size(); j++)
+        {
+            for(size_t k = 0; k < chains[i][j].size(); k++)
+            {
+                const ChainOut & co = chains[i][j][k];
+                chainPackElem->LinkEndChild(MakeChainReport(co,i,j,k));
+            }
+        }
+    }
+
+    // EWFIX.ADD full paths to output files ??
+
+    // EWFIX.ADD warnings given
+#if 0
+    TiXmlElement * warnings = MakeWarnings();
+    topItem->LinkEndChild(warnings);
+#endif
+
+    // this writes it all out -- file name already set at construction
+    // of m_doc
+    m_doc.SaveFile();
+
+    return topItem;
+}
+
+//____________________________________________________________________________________
diff --git a/src/report/xml_report.h b/src/report/xml_report.h
new file mode 100644
index 0000000..8fa0c35
--- /dev/null
+++ b/src/report/xml_report.h
@@ -0,0 +1,62 @@
+// $Id: xml_report.h,v 1.7 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2008  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef XML_REPORT_H
+#define XML_REPORT_H
+
+#include <string>
+#include "tinyxml.h"
+
+class ChainManager;
+class Parameter;
+
+/******************************************************************
+This class produces an XML version of outfile.txt.
+At initial writing, it is not a complete version, just
+what I need to evaluate simulations of the new lamarc
+data uncertainty model.
+
+Elizabeth Walkup  December 2008
+
+******************************************************************/
+
+class ChainOut;
+
+class XMLReport
+{
+  private:
+    std::string     m_filename;
+    TiXmlDocument   m_doc;
+
+  protected:
+    void            AddSlicesTo(TiXmlElement *,
+                                const Parameter&,
+                                force_type,
+                                long regNo,
+                                bool doOverall);
+    TiXmlElement *  MakeParameter(const Parameter&, const force_type);
+    TiXmlElement *  MakeParameters();
+    TiXmlElement *  MakeChainReport(const   ChainOut&,
+                                    size_t  regNo,
+                                    size_t  repNo,
+                                    size_t  chainNo);
+    TiXmlElement *  MakeWarnings();
+
+    void            AddRangeElements(TiXmlElement *, rangeset sites, std::string label);
+
+  public:
+    XMLReport(std::string filename);
+    virtual ~XMLReport();
+    TiXmlElement * Write(const ChainManager&);
+};
+
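+// A minimal usage sketch (hypothetical; the real call site presumably lives
+// elsewhere in the LAMARC sources): Write() both assembles the report tree and
+// saves the file named at construction, so a caller only needs a finished
+// ChainManager.
+#if 0 // illustrative only, not compiled
+void ExampleXmlReport(const ChainManager& chainman)
+{
+    XMLReport report("lamarc_xml_report.xml");  // hypothetical output name
+    report.Write(chainman);                     // builds <lamarc-run-report> and calls SaveFile()
+}
+#endif
+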
+#endif // XML_REPORT_H
+
+//____________________________________________________________________________________
diff --git a/src/tools/mathx.cpp b/src/tools/mathx.cpp
new file mode 100644
index 0000000..40430de
--- /dev/null
+++ b/src/tools/mathx.cpp
@@ -0,0 +1,1740 @@
+// $Id: mathx.cpp,v 1.54 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <cmath>
+#include <iostream>
+#include <numeric>
+#include <stdio.h>
+
+#include "constants.h" // for use of EXPMIN in UnderFlowExp()
+#include "definitions.h"
+#include "errhandling.h"
+#include "mathx.h"
+#include "registry.h"
+#include "runreport.h"
+#include "stringx.h"
+#include "vectorx.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+#ifdef HAVE_LGAMMA
+#define LGAMMA lgamma
+#else
+#define LGAMMA mylgamma
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+const double EigenCalculator::accuracy = 1e-15;
+const double SMALL_v0 = 0.01; // erynes heuristic used by BesselK and DvBesselK
+const double NUDGE_AMOUNT = 1.0e-09; // erynes heuristic used by BesselK and DvBesselK
+
+//------------------------------------------------------------------------------------
+
+double mylgamma (double z);
+
+//------------------------------------------------------------------------------------
+// Calculation of rate values following a gamma distribution for
+// given probability values.
+
+DoubleVec1d gamma_rates(double alpha, long int ratenum)
+{
+    vector<double> values;
+    double x, low, mid, high, xlow, xhigh, freq, inc, mid0;
+    long int i, j;
+
+    values.reserve(ratenum);
+
+    x    = 10.0;
+    inc  = 1.0 / (double)ratenum;
+    freq = -inc / 2.0;
+    mid0 = incompletegamma(alpha, 10.0);
+
+    for (i = 0; i < ratenum; i++)
+    {
+        low   = 0;
+        mid   = mid0;
+        high  = 1.0;
+        freq += inc;
+
+        if (freq < mid)
+        {
+            high  = mid;
+            xlow  = 0;
+            xhigh = 10.0;
+            x     = 5.0;
+        }
+
+        else
+        {
+            low   = mid;
+            xlow  = 10.0;
+            xhigh = 1e10;
+            x     = 1e5;
+        }
+
+        for (j = 0; j < 1000 && fabs(low - high) > 0.0001 && x > 0.000000001; j++)
+        {
+            mid = incompletegamma(alpha, x);
+            if (freq < mid)
+            {
+                high  = mid;
+                xhigh = x;
+                x     = (x + xlow) / 2.0;
+            }
+
+            else
+            {
+                low  = mid;
+                xlow = x;
+                x    = (x + xhigh) / 2.0;
+            }
+        }
+
+        if (x >= 10e10)
+        {
+            values.clear();
+            break;
+        }
+
+        values.push_back(x / alpha);
+    }
+
+    return values;
+}
+
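+// A small illustrative call (hypothetical values, not compiled): discretize a
+// Gamma(alpha = 0.5) distribution of relative rates into four categories; an
+// empty return vector signals that the search above gave up.
+#if 0
+DoubleVec1d fourRates = gamma_rates(0.5, 4L);
+if (fourRates.empty())
+{
+    // alpha too extreme for the bisection to bracket a solution
+}
+#endif
+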
+//------------------------------------------------------------------------------------
+
+#define MIN(a,b) (((a)<(b)) ? (a) : (b))
+
+double my_gamma(double x)
+{
+    double result = log_gamma(x);
+    if (result < EXPMAX)
+        return exp(log_gamma(x));
+    return EXP_OF_EXPMAX;
+}
+
+double
+alnorm (double x, int up)
+{
+    /* Initialized data */
+    /* *** machine dependent constants ????????????? */
+    /*static */ double zero = 0.0;
+    /*static */ double a1 = 5.75885480458;
+    /*static */ double a2 = 2.62433121679;
+    /*static */ double a3 = 5.92885724438;
+    /*static */ double b1 = -29.8213557807;
+    /*static */ double b2 = 48.6959930692;
+    /*static */ double c1 = -3.8052e-8;
+    /*static */ double c2 = 3.98064794e-4;
+    /*static */ double c3 = -.151679116635;
+    /*static */ double c4 = 4.8385912808;
+    /*static */ double c5 = .742380924027;
+    /*static */ double one = 1.0;
+    /*static */ double c6 = 3.99019417011;
+    /*static */ double d1 = 1.00000615302;
+    /*static */ double d2 = 1.98615381364;
+    /*static */ double d3 = 5.29330324926;
+    /*static */ double d4 = -15.1508972451;
+    /*static */ double d5 = 30.789933034;
+    /*static */ double half = 0.5;
+    /*static */ double ltone = 7.0;
+    /*static */ double utzero = 18.66;
+    /*static */ double con = 1.28;
+    /*static */ double p = .398942280444;
+    /*static */ double q = .39990348504;
+    /*static */ double r = .398942280385;
+
+    /*static */ double y, result;
+
+    if (x < zero)
+    {
+        up = !up;
+        x = -x;
+    }
+    if (x <= ltone || (up && x <= utzero))
+    {
+        y = half * x * x;
+        if (x > con)
+        {
+            result =
+                r * exp (-y) / (x + c1 +
+                                d1 / (x + c2 +
+                                      d2 / (x + c3 +
+                                            d3 / (x + c4 +
+                                                  d4 / (x + c5 +
+                                                        d5 / (x + c6))))));
+            return ((!up) ? one - result : result);
+        }
+        result =
+            half - x * (p - q * y / (y + a1 + b1 / (y + a2 + b2 / (y + a3))));
+        return ((!up) ? one - result : result);
+    }
+    else
+    {
+        return ((!up) ? 1.0 : 0.0);
+    }
+    /*fake */ return -99;
+}                               // alnorm
+
+// The "complementary error function."
+double erfc(double x)
+{
+    if (x >= 0.0)
+        return incompletegamma(0.5, x*x);
+    return 2.0 - incompletegamma(0.5, x*x); // a mathematical fact
+}
+
+//  ALGORITHM AS239  APPL. STATIST. (1988) VOL. 37, NO. 3
+//  Computation of the Incomplete Gamma Integral
+//  Auxiliary functions required: LogG() = logarithm of the gamma function,
+//  and Tail() = algorithm AS66
+//  In Mathematica, this is GammaRegularized[a,x] == Gamma[a,x]/Gamma[a].
+//  erynes note: The code below implements
+//  incompletegamma(a,x) = (1/G(a)) times the integral of exp(-t)*t^(a-1)
+//  from t = x to t = infinity for a > 0, where G(a) is the gamma function
+//  (if "a" is a positive integer, then G(a) = (a-1)!).
+//  erynes note number 2:
+//  incompletegamma(1/2,x^2) == erfc(x) for x >= 0,
+//  where erfc(x) is the complementary error function.
+double incompletegamma (double alpha, double x)
+{
+    double gama, d_1, d_2, d_3;
+    /*static */ double a, b, c, an, rn;
+    /*static */ double pn1, pn2, pn3, pn4, pn5, pn6, arg;
+
+    gama = 0.0;
+    /*  Check that we have valid values for X and P */
+    if (alpha <= 0.0 || x < 0.0)
+    {
+        logic_error e("Failure in incomplete-gamma calculation");
+        throw e;
+    }
+    if (fabs (x) < DBL_EPSILON)
+        return gama;
+
+    /*  Use a normal approximation if P > PLIMIT */
+    if (alpha > 1e3)
+    {
+        pn1 =
+            sqrt (alpha) * 3.0 * (pow (x / alpha, (1.0 / 3.0)) + 1.0 / (alpha * 9.0) -
+                                  1.0);
+        gama = alnorm(pn1, false);
+        return gama;
+    }
+
+    /*  If X is extremely large compared to P then set GAMMAD = 1 */
+    if (x > 1e8)
+    {
+        gama = 1.0;
+        return gama;
+    }
+
+    if (x <= 1.0 || x < alpha)
+    {
+        /*  Use Pearson's series expansion. */
+        /*  (Note that P is not large enough to force overflow in lgamma()). */
+        arg = alpha * log (x) - x - LGAMMA (alpha + 1.0);
+        c = 1.0;
+        gama = 1.0;
+        a = alpha;
+        while (c > 1e-14)
+        {
+            a += 1.0;
+            c = c * x / a;
+            gama += c;
+        }
+        arg += log (gama);
+        gama = 0.0;
+        if (arg >= -88.0)
+        {
+            gama = exp(arg);
+        }
+
+    }
+    else
+    {
+        /*  Use a continued fraction expansion */
+        arg = alpha * log (x) - x - LGAMMA (alpha);
+        a = 1.0 - alpha;
+        b = a + x + 1.0;
+        c = 0.0;
+        pn1 = 1.0;
+        pn2 = x;
+        pn3 = x + 1.0;
+        pn4 = x * b;
+        gama = pn3 / pn4;
+        for (;;)
+        {
+            a += 1.0;
+            b += 2.0;
+            c += 1.0;
+            an = a * c;
+            pn5 = b * pn3 - an * pn1;
+            pn6 = b * pn4 - an * pn2;
+            if (fabs (pn6) > 0.0)
+            {
+                rn = pn5 / pn6;
+                /* Computing MIN */
+                d_2 = 1e-14, d_3 = rn * 1e-14;
+                if ((d_1 = gama - rn, fabs (d_1)) <= MIN (d_2, d_3))
+                {
+                    arg += log (gama);
+                    gama = 1.0;
+                    if (arg >= -88.0)
+                    {
+                        gama = 1.0 - exp (arg);
+                    }
+                    return gama;
+                }
+                gama = rn;
+            }
+            pn1 = pn3;
+            pn2 = pn4;
+            pn3 = pn5;
+            pn4 = pn6;
+            if (fabs (pn5) >= 1e37)
+            {
+                /*  Re-scale terms in continued fraction if terms are large */
+                pn1 /= 1e37;
+                pn2 /= 1e37;
+                pn3 /= 1e37;
+                pn4 /= 1e37;
+            }
+        }
+    }
+    return gama;
+}                               // incompletegamma()
+
+//------------------------------------------------------------------------------------
+// Uses Lanczos-type approximation to ln(gamma) for z > 0.
+//  Reference: Lanczos, C. 'A precision approximation of the gamma function',
+//      J. SIAM Numer. Anal., B, 1, 86-96, 1964.
+//  Accuracy: About 14 significant digits except for small regions
+//      in the vicinity of 1 and 2.
+//  Programmer: Alan Miller CSIRO Division of Mathematics & Statistics
+//  Latest revision - 17 April 1988
+
+double log_gamma(double z)
+{
+    if (z <= 0.0)
+        return DBL_MAX; // This will kill the receiving calculation.
+
+    long int i;
+    double result, denom;
+
+    double a[] = { 1.659470187408462e-7, 9.934937113930748e-6,
+                   -0.1385710331296526,  12.50734324009056,
+                   -176.6150291498386,    771.3234287757674,
+                   -1259.139216722289,     676.5203681218835 };
+
+    result = 0.0;
+    denom  = z + 7.0;
+    for (i = 0; i < 8; i++)
+    {
+        result += a[i] / denom;
+        denom  -= 1.0;
+    }
+
+    result += 0.9999999999995183;
+    result = log(result) - (z + 6.5) + (z - 0.5) * log(z + 6.5) + 0.9189385332046727;
+    return result;
+}
+
+// The function for chi is:
+//  f(x) = 1/[2^(df/2) * gamma(df/2)] * x^[(df/2) - 1] * e^(-x/2)
+
+double
+find_chi (long int df, double prob)
+{
+    double a, b, m;
+    double xb = 200.0;
+    double xa = 0.0;
+    double xm = 5.0;
+    a = probchi (df, xa);
+    m = probchi (df, xm);
+    b = probchi (df, xb);
+    while (fabs (m - prob) > EPSILON)
+    {
+        if (m < prob)
+        {
+            b = m;
+            xb = xm;
+        }
+        else
+        {
+            a = m;
+            xa = xm;
+        }
+        xm = (-(b * xa) + prob * xa + a * xb - prob * xb) / (a - b);      //(xa + xb)/2.0;
+
+        m = probchi (df, xm);
+    }
+    return xm;
+}
+
+double
+probchi (long int df, double chi)
+{
+    double prob;
+    double v = ((double) df) / 2.0;
+    if (chi > DBL_EPSILON && v > DBL_EPSILON)
+    {
+        //lg = EXP (LGAMMA (v));
+        prob = 1.0 - incompletegamma (v, chi / 2.0);
+    }
+    else
+    {
+        prob = 1.0;
+        // printf("prob=%f v=%f chi=%f lg(v/2)=%f  ig(chi/2,v/2)=%f\n",
+        // prob,v,chi,lg, incompletegamma(chi/2.0,v/2.0));
+    }
+
+    return prob;
+}
+
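+// An illustrative spot check (hypothetical, not compiled): probchi() returns the
+// upper-tail probability of a chi-square variate and find_chi() inverts it, so
+// the familiar 95% critical value for one degree of freedom comes out near 3.84.
+#if 0
+double crit = find_chi(1L, 0.05);   // expect roughly 3.841
+double tail = probchi(1L, crit);    // expect roughly 0.05
+#endif
+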
+//       Uses Lanczos-type approximation to ln(gamma) for z > 0.
+//       Reference:
+//            Lanczos, C. 'A precision approximation of the gamma
+//                    function', J. SIAM Numer. Anal., B, 1, 86-96, 1964.
+//       Accuracy: About 14 significant digits except for small regions
+//                 in the vicinity of 1 and 2.
+//       Programmer: Alan Miller
+//                   CSIRO Division of Mathematics & Statistics
+//       Latest revision - 17 April 1988
+// translated and modified into C by Peter Beerli 1997
+
+double
+mylgamma (double z)
+{
+    double a[9] = { 0.9999999999995183, 676.5203681218835,
+                    -1259.139216722289, 771.3234287757674, -176.6150291498386,
+                    12.50734324009056, -0.1385710331296526, 9.934937113930748e-6,
+                    1.659470187408462e-7
+    };
+    double lnsqrt2pi = 0.9189385332046727;
+    double result;
+    long int j;
+    double tmp;
+    if (z <= 0.0)
+    {
+        return DBL_MAX;           //this will kill the receiving calculation
+    }
+    result = 0.0;
+    tmp = z + 7.0;
+    for (j = 9; j >= 2; --j)
+    {
+        result += a[j - 1] / tmp;
+        tmp -= 1.0;
+    }
+    result += a[0];
+    result = log (result) + lnsqrt2pi - (z + 6.5) + (z - 0.5) * log (z + 6.5);
+    return result;
+}                               // lgamma
+
+//------------------------------------------------------------------------------------
+
+double SafeDivide(double num, double denom)
+{
+    if (denom)
+    {
+        return num/denom;
+    }
+    else
+    {
+        return num > 0.0 ? DBL_BIG : -DBL_BIG;
+    }
+
+} // SafeDivide
+
+//------------------------------------------------------------------------------------
+
+double logfac (long int n)
+{
+    /* log(n!) values were calculated with Mathematica
+       with a precision of 30 digits */
+    switch (n)
+    {
+        case 0:
+            return 0.0;
+        case 1:
+            return 0.0;
+        case 2:
+            return 0.693147180559945309417232121458;
+        case 3:
+            return 1.791759469228055000812477358381;
+        case 4:
+            return 3.1780538303479456196469416013;
+        case 5:
+            return 4.78749174278204599424770093452;
+        case 6:
+            return 6.5792512120101009950601782929;
+        case 7:
+            return 8.52516136106541430016553103635;
+        case 8:
+            return 10.60460290274525022841722740072;
+        case 9:
+            return 12.80182748008146961120771787457;
+        case 10:
+            return 15.10441257307551529522570932925;
+        case 11:
+            return 17.50230784587388583928765290722;
+        case 12:
+            return 19.98721449566188614951736238706;
+        case 13:
+            return 22.5521638531234228855708498286;
+        case 14:
+            return 25.1912211827386815000934346935;
+        case 15:
+            return 27.8992713838408915660894392637;
+        case 16:
+            return 30.6718601060806728037583677495;
+        case 17:
+            return 33.5050734501368888840079023674;
+        case 18:
+            return 36.3954452080330535762156249627;
+        case 19:
+            return 39.3398841871994940362246523946;
+        case 20:
+            return 42.3356164607534850296598759707;
+        case 21:
+            return 45.3801388984769080261604739511;
+        case 22:
+            return 48.4711813518352238796396496505;
+        case 23:
+            return 51.6066755677643735704464024823;
+        case 24:
+            return 54.7847293981123191900933440836;
+        case 25:
+            return 58.0036052229805199392948627501;
+        case 26:
+            return 61.2617017610020019847655823131;
+        case 27:
+            return 64.5575386270063310589513180238;
+        case 28:
+            return 67.8897431371815349828911350102;
+        case 29:
+            return 71.2570389671680090100744070426;
+        case 30:
+            return 74.6582363488301643854876437342;
+        default:
+            return log(factorial(static_cast<double>(n)));
+            //return LGAMMA (n + 1.0);
+    }
+} // logfac
+
+double factorial(double number)
+{
+    double temp;
+
+    if(number <= 1.0) return 1.0;
+
+    temp = number * factorial(number - 1.0);
+    return temp;
+}
+
+//------------------------------------------------------------------------------------
+
+double UnderFlowExp(double pow)
+{
+    return ((pow < EXPMIN) ? 0.0 : exp(pow));
+} // UnderFlowExp
+
+//------------------------------------------------------------------------------------
+
+bool IsEven(long int n)
+{
+    // an even number divided by 2 is not truncated
+    return ((n / 2L) * 2L == n);
+} // IsEven
+
+//------------------------------------------------------------------------------------
+
+void ScaleLargestToZero(DoubleVec1d& unscaled)
+{
+    double big(*max_element(unscaled.begin(),unscaled.end()));
+    for (unsigned long int wnum = 0; wnum < unscaled.size(); wnum++)
+    {
+        if (big <= EXPMIN)
+        {
+            assert(false); //Why is this?  We probably have an error somewhere.
+            unscaled[wnum] = 0.0;
+        }
+        else if (unscaled[wnum] <= EXPMIN)
+        {
+            unscaled[wnum] = EXPMIN;
+        }
+        else
+        {
+            unscaled[wnum] -= big;
+        }
+    }
+}
+
+void ScaleToSumToOne(DoubleVec1d& vec)
+{
+    double sum = accumulate(vec.begin(), vec.end(), 0.0);
+    transform(vec.begin(),
+              vec.end(),
+              vec.begin(),
+              bind2nd(divides<double>(),sum));
+}
+
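+// One plausible way to combine the two helpers above (hypothetical, not
+// compiled): turn a vector of log-scale weights into normalized probabilities
+// without overflowing exp().
+#if 0
+DoubleVec1d logwts;             // filled with log-scale weights elsewhere
+ScaleLargestToZero(logwts);     // largest entry becomes 0.0
+DoubleVec1d wts = SafeExp(logwts);
+ScaleToSumToOne(wts);           // now sums to 1.0
+#endif
+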
+// AddValsOfLogs takes vectors that are logs, scales and converts them to
+//  normal values, adds them, takes their logs again, and re-scales them back.
+DoubleVec1d AddValsOfLogs(DoubleVec1d vec1, DoubleVec1d vec2)
+{
+    assert(vec1.size() == vec2.size());
+    double big(*max_element(vec1.begin(), vec1.end()));
+    big = max(big, *max_element(vec2.begin(), vec2.end()));
+    for (unsigned long int wnum=0; wnum<vec1.size(); wnum++)
+    {
+        if (big <= EXPMIN)
+        {
+            assert(false); //Why is this?  We probably have an error somewhere.
+            vec1[wnum] = EXPMIN;
+            vec2[wnum] = EXPMIN;
+        }
+        else
+        {
+            if (vec1[wnum] <= EXPMIN)
+            {
+                vec1[wnum] = EXPMIN;
+            }
+            else
+            {
+                vec1[wnum] -= big;
+            }
+            if (vec2[wnum] <= EXPMIN)
+            {
+                vec2[wnum] = EXPMIN;
+            }
+            else
+            {
+                vec2[wnum] -= big;
+            }
+        }
+    }
+    vec1 = SafeExp(vec1);
+    vec2 = SafeExp(vec2);
+    DoubleVec1d retvec = vec1;
+    transform(vec2.begin(),
+              vec2.end(),
+              retvec.begin(),
+              retvec.begin(),
+              plus<double>());
+    retvec = SafeLog(retvec);
+    for (unsigned long int wnum = 0; wnum<retvec.size(); wnum++)
+    {
+        if ((retvec[wnum] <= EXPMIN) || (retvec[wnum] + big <=EXPMIN))
+        {
+            retvec[wnum] = EXPMIN;
+        }
+        else if ((retvec[wnum] >= EXPMAX) || (retvec[wnum] + big >= EXPMAX))
+        {
+            retvec[wnum] = EXPMAX;
+        }
+        else
+        {
+            retvec[wnum] += big;
+        }
+    }
+    return retvec;
+}
+
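+// A tiny worked case for the function above (hypothetical values, not
+// compiled): adding single-entry log vectors holding log(2) and log(3) should
+// return log(5), since the routine rescales, exponentiates, sums, and re-logs.
+#if 0
+DoubleVec1d v1(1, log(2.0)), v2(1, log(3.0));
+DoubleVec1d summed = AddValsOfLogs(v1, v2);   // summed[0] is approximately log(5.0)
+#endif
+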
+pair<double, double> EigenCalculator::Coeffs(double x, double y)
+// cosine and sine of angle between the origin and (x,y)
+// pair.first is cosine, pair.second is sine
+{
+    double root = sqrt(pow(x,2.0)+pow(y,2.0));
+    pair<double, double> cs;
+    if (root < accuracy)
+    {
+        cs.first = 1.0;
+        cs.second = 0.0;
+    }
+    else
+    {
+        cs.first = x/root;
+        cs.second = y/root;
+    }
+    return cs;
+} // Coeffs
+
+//------------------------------------------------------------------------------------
+
+void EigenCalculator::Givens(DoubleVec2d& a, long int x, long int y, long int size,
+                             const pair<double, double> cs, bool userow)
+// Givens method.  Modifies input matrix.
+{
+    long int k;
+    for (k = 0; k < size; ++k)
+    {
+        if (userow)
+        {
+            double d = cs.first * a[x][k] + cs.second * a[y][k];
+            a[y][k] = cs.first * a[y][k] - cs.second * a[x][k];
+            a[x][k] = d;
+        }
+        else
+        {
+            double d = cs.first * a[k][x] + cs.second * a[k][y];
+            a[k][y] = cs.first * a[k][y] - cs.second * a[k][x];
+            a[k][x] = d;
+        }
+    }
+    // cases:  we pass array.size()=4
+    // Pascal:  k from 1 to 4 (4 times)
+    // C: k from 0 to 3 (4 times) (fine)
+    // case:  we pass i (at its top value)
+    // Pascal:  k from 1 to 4 (4 times)
+    // C: k from 0 to 2 (3 times) (not fine)
+} // Givens
+
+//------------------------------------------------------------------------------------
+
+void EigenCalculator::Tridiag(DoubleVec2d& a, DoubleVec2d& eigvecs)
+{
+    unsigned long int i, j;
+    pair<double, double> cs;
+    for (i = 1; i < a.size()-1; ++i)
+    {
+        for (j = i+1; j < a.size(); ++j)
+        {
+            cs = Coeffs(a[i-1][i], a[i-1][j]);
+            Givens(a, i, j, a.size(), cs, true);
+            Givens(a, i, j, a.size(), cs, false);
+            Givens(eigvecs, i, j, eigvecs.size(), cs, true);
+        }
+    }
+} // Tridiag
+
+//------------------------------------------------------------------------------------
+
+void EigenCalculator::Shiftqr(DoubleVec2d& a, DoubleVec2d& eigvecs)
+{
+    long int i;
+    for (i = a.size()-1; i > 0; --i)
+    {
+        do {
+            double d = sqrt(pow(a[i-1][i-1] - a[i][i], 2.0) +
+                            pow(a[i][i-1], 2.0));
+            double approx = a[i-1][i-1] + a[i][i];
+            if (a[i][i] < a[i-1][i-1])
+                approx = (approx-d)/2.0;
+            else
+                approx = (approx+d)/2.0;
+            long int j;
+            for (j = 0; j <= i; ++j)
+                a[j][j] = a[j][j] - approx;
+            for (j = 0; j <= i-1; ++j)
+            {
+                pair<double, double> cs = Coeffs(a[j][j], a[j+1][j]);
+                // in the following two calls, Pascal's i has been
+                // converted to i+1 because it is standing in for a
+                // size.
+                Givens(a, j, j+1, i+1, cs, true);
+                Givens(a, j, j+1, i+1, cs, false);
+                Givens(eigvecs, j, j+1, eigvecs.size(), cs, true);
+            }
+            for (j = 0; j <= i; ++j)
+                a[j][j] += approx;
+        } while (fabs(a[i][i-1]) > accuracy);
+    }
+} // Shiftqr
+
+//------------------------------------------------------------------------------------
+
+pair<DoubleVec1d, DoubleVec2d> EigenCalculator::Eigen(DoubleVec2d a)
+// return a pair containing eigenvalues and eigenvectors
+{
+    unsigned long int i;
+    DoubleVec2d eigvecs;
+    for (i = 0; i < a.size(); ++i)
+    {
+        // ones along the diagonal, zeroes elsewhere
+        DoubleVec1d row(a.size(), 0.0);
+        row[i] = 1.0;
+        eigvecs.push_back(row);
+    }
+
+    Tridiag(a, eigvecs);
+    Shiftqr(a, eigvecs);
+    DoubleVec1d eigvals(a.size(), 0.0);
+    for (i = 0; i < a.size(); ++i)
+    {
+        eigvals[i] = a[i][i];
+    }
+    return make_pair(eigvals, eigvecs);
+} // Eigen
+
+//------------------------------------------------------------------------------------
+
+void EigenCalculator::DotProduct(const DoubleVec2d& first, const DoubleVec2d& second,
+                                 DoubleVec2d& answer)
+// dot product of first and second put into PRE-EXISTING answer!
+// second has been PRE-TRANSPOSED!!
+{
+    // should be square
+    assert(first.size() == first[0].size());
+    // and all the same size!
+    assert(first.size() == second.size());
+    assert(first.size() == answer.size());
+    double initial = 0.0;
+
+    long int i, j, n = first.size();
+    for (i = 0; i < n; ++i)
+    {
+        for (j = 0; j < n; ++j)
+        {
+            answer[i][j] = inner_product(first[i].begin(), first[i].end(), second[j].begin(), initial);
+        }
+    }
+
+} // DotProduct
+
+//------------------------------------------------------------------------------------
+
+DoubleVec2d Invert(const DoubleVec2d& src)
+// Invert square matrix via Gauss-Jordan reduction
+{
+    // copy the target vector
+    DoubleVec2d a(src);
+
+    // invert it in place (that's what I have code for!)
+    unsigned long int i, j, k;
+    unsigned long int n = a.size();
+    double temp;
+
+    for (i = 0; i < n; ++i)
+    {
+        // DEBUG:  need to do something if matrix is singular!
+        temp = 1.0 / a[i][i];
+        a[i][i] = 1.0;
+        for (j = 0; j < n; ++j)
+        {
+            a[i][j] *= temp;
+        }
+        for (j = 0; j < n; ++j)
+        {
+            if (j != i)
+            {
+                temp = a[j][i];
+                a[j][i] = 0.0;
+                for (k = 0; k < n; ++k)
+                {
+                    a[j][k] -= temp * a[i][k];
+                }
+            }
+        }
+    }
+    return a;
+} // Invert
+
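+// A small illustrative check (hypothetical values, not compiled): Gauss-Jordan
+// inversion of a well-conditioned 2x2 matrix; note the routine assumes non-zero
+// pivots, as the DEBUG comment above observes.
+#if 0
+DoubleVec2d m(2, DoubleVec1d(2, 0.0));
+m[0][0] = 4.0; m[0][1] = 7.0;
+m[1][0] = 2.0; m[1][1] = 6.0;
+DoubleVec2d minv = Invert(m);   // expect { {0.6, -0.7}, {-0.2, 0.4} }
+#endif
+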
+//------------------------------------------------------------------------------------
+
+DoubleVec2d Transpose(const DoubleVec2d& src)
+// Transpose a square matrix
+{
+    DoubleVec2d a(src);
+    unsigned long int i, j, n=a.size();
+    for (i = 0; i < n; ++i)
+    {
+        for (j = 0; j < n; ++j)
+        {
+            a[i][j] = src[j][i];
+        }
+    }
+    return a;
+} // Transpose
+
+//------------------------------------------------------------------------------------
+
+// This is a free function for approximate comparison of doubles.
+// It is used to help find the correct profile modifier value in
+// the face of rounding error.
+
+bool CloseEnough(double a, double b)
+{
+    if (a == 0)
+    {
+        if (b == 0) return true;
+        else return false;
+    }
+    if (a<0)
+    {
+        if (b<0)
+        {
+            a = -a;
+            b = -b;
+        }
+        else return false;
+    }
+    double la = log(a);
+    double lb = log(b);
+    double test = fabs(la - lb);
+    if (test < EPSILON) return true;
+    else return false;
+
+} // CloseEnough
+
+double SafeExpAndSum(const DoubleVec1d& src)
+{
+    assert(!src.empty());  // don't call me on an empty vector!
+    double sum(0.0), biggest(*(max_element(src.begin(),src.end())));
+    DoubleVec1d::const_iterator lns;
+    for(lns = src.begin(); lns != src.end(); ++lns)
+    {
+        if ((*lns) - biggest > EXPMIN)
+            sum += exp((*lns) - biggest);
+    }
+
+    return SafeLog(sum) + biggest;
+
+} // SafeExpAndSum
+
+DoubleVec1d SafeExp(const DoubleVec1d& src)
+{
+    assert(!src.empty());  // don't call me on an empty vector!
+    DoubleVec1d retvec;
+    DoubleVec1d::const_iterator lns;
+    for(lns = src.begin(); lns != src.end(); ++lns)
+    {
+        retvec.push_back(SafeExp(*lns));
+    }
+
+    return retvec;
+
+} // SafeExp (vector overload)
+
+double SafeExp(double src)
+{
+    if (src <= EXPMIN)
+    {
+        return 0.0;
+    }
+    else if (src >= EXPMAX)
+    {
+        return DBL_BIG;
+    }
+    return exp(src);
+
+} // SafeExp
+
+// Technique to avoid overflow and underflow of a*exp(x).
+// In the description that follows, we set a = -a for a < 0,
+// and we use "M" to represent the maximum feasible number.
+// This means that numbers > M overflow, and numbers < 1/M underflow.
+//
+// First we consider the case of overflow.
+// a*exp(x) will not overflow if a*exp(x) <= M, meaning exp(x) <= M/a.
+// Defining EXPMAX = log(M), we see a*exp(x) will not overflow if
+//     x <= EXPMAX - log(a).
+// For the case in which exp(x) overflows but a*exp(x) does not, we compute
+// a*exp(x) == (a*exp(EXPMAX)) * exp(x - EXPMAX), so that neither of the two
+// terms on the right-hand side of this equation overflows.
+// (We use the pre-computed EXP_OF_EXPMAX instead of computing exp(EXPMAX).)
+// In practical terms, if EXPMAX = 200 and x = 212 so that exp(212) overflows,
+// we can compute a*exp(x) if fabs(a) < 6.14e-6.
+//
+// Underflow is analogous to overflow.  In this case, we have
+// fabs(a*exp(x)) < 1.0/M becoming identically 0 for sufficiently large M.
+// If "a" is sufficiently greater than 1.0, then the product will not underflow.
+// (Again, we set a = -a for a > 0 to avoid writing fabs(a).)
+// For a*exp(x) to avoid underflow, we have the condition
+// a*exp(x) >= 1.0/M, meaning exp(x) >= 1.0/(M*a), or x > log(1.0/(M*a)), so
+// a*exp(x) will not underflow if
+//    x >= EXPMIN - log(a).
+// For the case in which exp(x) underflows but a*exp(x) does not, we compute
+// a*exp(x) == (a*exp(EXPMIN)) * exp(x - EXPMIN), so that neither of the two
+// terms on the right-hand side of this equation underflows.
+// (We use the pre-computed EXP_OF_EXPMIN instead of computing exp(EXPMIN).)
+//
+// erynes 2003/11/21 -- devised and implemented this function
+double SafeProductWithExp(double a, double x)
+{
+    if (0.0 == a)  // must reject this at the outset to avoid log(0) and 1/0
+        return 0.0; // keep going if 0 < a < DBL_EPSILON, in case x is large
+    bool a_is_negative = a < 0 ? true : false;
+    if (a_is_negative)
+        a *= -1.0;
+
+    if (x > 0.0)
+    {
+        if (x <= EXPMAX - log(a))
+        {
+            // computable
+            if (x <= EXPMAX)
+                return a_is_negative ? -a*exp(x) : a*exp(x);
+            // else 0.0 < a < 1.0
+            double a_exp_x1 = EXP_OF_EXPMAX / (1.0/a);
+            return a_is_negative ? -a_exp_x1*exp(x - EXPMAX)
+                :  a_exp_x1*exp(x - EXPMAX);
+        }
+        return OverflowOfProductWithExp(a_is_negative ? -a : a, x);
+    }
+
+    // else x < 0.
+    if (x >= EXPMIN - log(a))
+    {
+        // computable
+        if (x >= EXPMIN)
+            return a_is_negative ? -a*exp(x) : a*exp(x);
+        // else a > 1.0
+        double a_exp_x1 = a * EXP_OF_EXPMIN;
+        return a_is_negative ? -a_exp_x1*exp(x - EXPMIN)
+            :  a_exp_x1*exp(x - EXPMIN);
+    }
+    return UnderflowOfProductWithExp(a_is_negative ? -a : a, x);
+
+} // double SafeProductWithExp(double a, double x)
+
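+// An illustrative use (hypothetical numbers, not compiled), following the worked
+// case in the comment above, which assumes EXPMAX = 200: the small coefficient
+// keeps the first product representable, while the second overflows and is
+// clamped and reported once.
+#if 0
+double ok      = SafeProductWithExp(1.0e-7, 212.0);  // finite: ~1.0e-7 * e^212
+double clamped = SafeProductWithExp(10.0, 1000.0);   // returns EXP_OF_EXPMAX
+#endif
+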
+// This function, when possible, computes exp(x1) - exp(x2)
+// without overflow or underflow, although exp(x1) and/or exp(x2)
+// may overflow or underflow individually.
+// The technique is as follows:  Define "result" = exp(x1) - exp(x2).
+// Then
+//     result == exp(x2) * ( exp(x1)/exp(x2) - 1.0 )
+//            == exp(x2) * ( exp(x1 - x2) - 1.0 ),
+// and
+//     log(result) == x2 + log(exp(x1 - x2) - 1.0),
+// so that the right-hand side of this equation is computable.
+// If EXPMIN < log(result) < EXPMAX, then we can return
+//     result = exp(x2 + log(exp(x1 - x2) - 1.0).
+// Note that, in practice, for x1 > EXPMAX, x2 must be extremely
+// close to x1 in order for this function to be useful.
+// For example, for EXPMAX = 200 and x2 = 212 with exp(212) overflowing,
+// SafeExpDiff(x1, x2) will overflow and return EXP_OF_EXPMAX
+// unless 212.000006 >= x1 >= 211.999994.
+//
+// erynes 2003/11/21 -- implemented and half-devised this function
+double SafeExpDiff(double x1, double x2)
+{
+    if (x1 >= EXPMIN && x2 >= EXPMIN &&
+        x1 <= EXPMAX && x2 <= EXPMAX)
+        return exp(x1) - exp(x2);
+
+    bool result_is_negative = x2 > x1 ? true : false;
+    double x_larger  = result_is_negative ? x2 : x1,
+        x_smaller = result_is_negative ? x1 : x2,
+        arg_diff = x_larger - x_smaller;
+
+    if (arg_diff < DBL_EPSILON)
+        return 0.0;
+
+    if (arg_diff > EXPMAX)
+    {
+        if (x_smaller < 0.0 && x_larger < EXPMAX)
+            return result_is_negative ? -exp(x_larger) : exp(x_larger);
+        return OverflowOfSafeExpDiff(x1, x2);
+    }
+
+    if (arg_diff < EXPMIN)
+        return UnderflowOfSafeExpDiff(x1, x2); // recall x_smaller < x_larger
+
+    double log_of_result = x_smaller + log(exp(arg_diff) - 1);
+
+    if (log_of_result > EXPMAX)
+        return OverflowOfSafeExpDiff(x1, x2);
+
+    if (log_of_result < EXPMIN)
+        return UnderflowOfSafeExpDiff(x1, x2);
+
+    return result_is_negative ? -exp(log_of_result) : exp(log_of_result);
+}
+
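+// A minimal, disabled usage sketch of SafeExpDiff(), again assuming
+// EXPMAX = 200 as in the comment above; illustrative only.
+#if 0
+static void SketchSafeExpDiff()
+{
+    // exp(212) and exp(211.999999) overflow individually, but their
+    // difference, roughly exp(212)*1e-6, does not, so the log-space
+    // rearrangement above yields a finite value.
+    double small_diff = SafeExpDiff(212.0, 211.999999);
+
+    // Arguments this far apart make the difference itself overflow, so
+    // OverflowOfSafeExpDiff() reports once and returns -EXP_OF_EXPMAX
+    // (negative because x1 < x2).
+    double capped = SafeExpDiff(210.0, 230.0);
+
+    (void)small_diff; (void)capped;
+}
+#endif
+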
+// This function is called when SafeExpDiff(x1, x2) overflows,
+// which means exp(x1) - exp(x2) overflows.
+// It can easily be modified to also track how often this overflows.
+// 2003/11/21 added by erynes
+double OverflowOfSafeExpDiff(double x1, double x2)
+{
+    RunReport& runreport = registry.GetRunReport();
+    string msg="Overflow error:  Attempted to compute exp(";
+    msg += ToString(x1) + ") - exp(" + ToString(x2) + ").  Returning "
+        + ToString(x1 < x2 ? -EXP_OF_EXPMAX : EXP_OF_EXPMAX)
+        + " = " + (x1 < x2 ? "-" : "") + "EXP_OF_EXPMAX.  "
+        + "(Further overflow errors of this type will not be reported.)\n";
+    runreport.ReportOnce(msg, oncekey_OverflowOfSafeExpDiff, true);
+
+    return x1 < x2 ? -EXP_OF_EXPMAX : EXP_OF_EXPMAX;
+}
+
+// This function is called when SafeExpDiff(x1, x2) underflows,
+// which means exp(x1) - exp(x2) underflows.
+// It can easily be modified to track how often this underflows.
+// 2003/11/21 added by erynes
+double UnderflowOfSafeExpDiff(double x1, double x2)
+{
+    RunReport& runreport = registry.GetRunReport();
+    string msg = "Underflow error:  Attempted to compute exp(";
+    msg += ToString(x1) + ") - exp(" + ToString(x2) + ").  Returning 0.  "
+        + "(Further underflow errors of this type will not be reported.)\n";
+    runreport.ReportOnce(msg, oncekey_UnderflowOfSafeExpDiff, false);
+
+    return 0.0;
+}
+
+// This function is called when SafeProductWithExp(a, x) overflows,
+// which means a*exp(x) overflows.
+// It can easily be modified to also track how often a*exp(x) overflows.
+// 2003/11/21 added by erynes
+double OverflowOfProductWithExp(double a, double x)
+{
+    RunReport& runreport = registry.GetRunReport();
+    string msg = "Overflow error:  Attempted to compute ";
+    msg += ToString(a) + " * exp(" + ToString(x) + ").  Returning "
+        + ToString(a < 0 ? -EXP_OF_EXPMAX : EXP_OF_EXPMAX)
+        + " = " + (a < 0 ? "-" : "") + "EXP_OF_EXPMAX.  "
+        + "(Further overflow errors of this type will not be reported.)\n";
+    runreport.ReportOnce(msg, oncekey_OverflowOfProductWithExp, true);
+
+    return a < 0 ? -EXP_OF_EXPMAX : EXP_OF_EXPMAX;
+}
+
+// This function is called when SafeProductWithExp(a, x) underflows,
+// which means a*exp(x) underflows.
+// It can easily be modified to track how often a*exp(x) underflows.
+// 2003/11/21 added by erynes
+double UnderflowOfProductWithExp(double a, double x)
+{
+    RunReport& runreport = registry.GetRunReport();
+    string msg = "Underflow error:  Attempted to compute ";
+    msg += ToString(a) + " * exp(" + ToString(x) + ").  Returning 0.  "
+        + "(Further underflow errors of this type will not be reported.)\n";
+    runreport.ReportOnce(msg, oncekey_UnderflowOfProductWithExp, false);
+
+    return 0;
+}
+
+double LogZero(const double x)
+{
+    return ((x > 0.0) ? log(x) : 0.0);
+}
+
+double SafeLog(const double x)
+{
+    return ((x > 0.0) ? log(x) : -DBL_BIG);
+}
+
+DoubleVec1d SafeLog(const DoubleVec1d src)
+{
+    DoubleVec1d retvec;
+    for (DoubleVec1d::const_iterator i=src.begin(); i != src.end(); ++i)
+    {
+        retvec.push_back(SafeLog(*i));
+    }
+    return retvec;
+}
+
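+// Normalizes the weights in place so that they sum to one, then draws a
+// uniform random number and walks the cumulative distribution until it is
+// used up.  Returns the zero-based index of the chosen weight.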
+long int ChooseRandomFromWeights(DoubleVec1d& weights)
+{
+    double sumweights = accumulate(weights.begin(),weights.end(),0.0);
+    transform(weights.begin(),weights.end(),weights.begin(),
+              bind2nd(divides<double>(),sumweights));
+
+    long int chosen = -1;
+    double chance = registry.GetRandom().Float();
+    while (chance > 0 && chosen < static_cast<long int>(weights.size()-1))
+    {
+        chance -= weights[++chosen];
+    }
+    return chosen;
+}
+
+// ExpE1(x) -- added 2004/10/06 by erynes to calculate the expectation value
+//                                           of "endtime" with positive growth
+//
+// Receives a positive real number x, and returns exp(x)*E1(x), where E1(x) is
+// the exponential integral function En(x) for n = 1.  The function returns the
+// product of exp(x) and E1(x), rather than E1(x) or En(x) alone, because at
+// present we only use the product of exp(x) and E1(x), and since the most
+// common form of E1(x) includes an internal factor of exp(-x), we can avoid two
+// calls to exp() by allowing exp(x) and exp(-x) to cancel one another first.
+//
+double ExpE1(const double& x)
+{
+    if (x <= 0.0)
+    {
+        string msg = " Nonpositive number (" + ToString(x)
+            + ") received by math function ExpE1(); this is illegal.";
+        implementation_error e(msg);
+        throw e;
+    }
+
+    const double epsilon = 1.0e-07; // convergence criterion
+    const unsigned long int max_iterations = 15UL; // surrender criterion
+    double result;
+
+    if (x >= 1.0)
+    {
+        // Continued fraction representation:  Lentz's algorithm.
+        double a, b, c, d, h, delta;
+        unsigned long int nIter(0UL);
+        b = x + 1.0;
+        c = 1.0/DBL_EPSILON;
+        d = 1.0/b;
+        h = d;
+        for (unsigned long int i = 1; i <= max_iterations; i++)
+        {
+            a = -1.0*i*i;
+            b += 2.0;
+            d = 1.0/(a*d + b);
+            c = b + a/c;
+            delta = c*d;
+            h *= delta;
+            nIter++;
+            if (fabs(delta - 1.0) < epsilon)
+                return h;
+        }
+        // Failed to converge after nIter iterations.
+        // Apparently this can only happen when x is very close to 1.
+        // In that case, our result will be close enough.
+        return h;
+    }
+
+    else
+    {
+        //series representation
+        double factor, term;
+        unsigned long int nIter(0UL);
+        result = -log(x) - EULERS_CONSTANT;
+        factor = 1.0;
+        for (unsigned long int i = 1; i <= max_iterations; i++)
+        {
+            factor *= -x/i;
+            term = -factor/i;
+            result += term;
+            nIter++;
+            if (fabs(term) < fabs(result) * epsilon)
+                return SafeProductWithExp(result, x);
+        }
+        // Failed to converge after nIter iterations.
+        // Apparently this can only happen when x is very close to 1.
+        // In that case, our result will be close enough.
+        return SafeProductWithExp(result, x);
+    }
+}
+
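+// A minimal, disabled sanity-check sketch for ExpE1().  The quoted values are
+// rounded reference values of exp(x)*E1(x) from standard tables of the
+// exponential integral; they are for orientation only.
+#if 0
+static void SketchExpE1()
+{
+    // Series branch (x < 1):  exp(0.5)*E1(0.5) is roughly 0.923.
+    double small_arg = ExpE1(0.5);
+
+    // Continued-fraction branch (x >= 1):  exp(1)*E1(1) is roughly 0.596,
+    // and for large x the value approaches 1/x (about 0.0916 at x = 10).
+    double at_one = ExpE1(1.0);
+    double at_ten = ExpE1(10.0);
+
+    (void)small_arg; (void)at_one; (void)at_ten;
+}
+#endif
+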
+// Function to compute the modified Bessel function of the second kind,
+// of order v, at argument x.  It is also sometimes called the MacDonald
+// function.  It is denoted in the literature as K(v,x), where "v" is
+// written as a subscript, rather than a parameter.  Mathematica denotes
+// this function BesselK, so we'll do the same.
+//
+// Because K(v,x) is calculated iteratively, and because
+// dK(v,x)/dx is a simple function of both K(v,x) and K(v+1,x),
+// this function returns both of these results, since the latter
+// can be obtained essentially for free once the former is computed.
+// The return value of this function is K(v,x).  K(v+1,x) is returned
+// by reference in argument "resultFor_vPlusOne."
+//
+// Implemented by erynes in July and October 2005, adapted from:
+// Shanjie Zhang and Jianming Jin, _Computation of Special Functions._
+// New York: Wiley-Interscience (1996).
+//
+// Contains an original approximation for x < 9 when v is very close
+// to an integer.  No special approximation appears to be needed
+// for any other case, including when v is equal to an integer.
+//
+double BesselK(double v, double x, double& resultFor_vPlusOne)
+{
+    if (x <= 0.0)
+        throw implementation_error("BesselK() received illegal argument:  "
+                                   + ToString(x) + ".");
+
+    bool orderIsNegative(false), mustUseTwo_v0s(false),
+        first_v0_calculation(true);
+    double result(0.0); // used ONLY if -1 < v < 0
+
+    if (v < 0.0)
+    {
+        if (v > -1.0)
+        {
+            // Generally, we calculate K(v0,x) and K(v0+1,x),
+            // where v0 is the "fractional part" of v, and then
+            // we calculate K(v,x) and K(v+1,x) iteratively.
+            // If v <= -1, we use the property K(-v,x) = K(v,x),
+            // and swap the results at the end.  But if -1 < v < 0,
+            // then v < 0 and v+1 > 0, so we have to compute K(v0,x)
+            // twice--once for v (say, -0.7), and a separate time
+            // for v+1 (say, 0.3).
+            v = -v;
+            mustUseTwo_v0s = true;
+        }
+        else
+        {
+            // K(-v, x) == K(v, x) and K(-v + 1, x) == K(v - 1, x).
+            v = -v - 1.0;
+        }
+        orderIsNegative = true; // later, we'll swap K(v+1,x) and K(v,x)
+    }
+
+    // For tiny x, K(v,x) = 0.5*gamma(v)*pow(0.5*x, -v).
+    double gamma_vPlus1__DividedBy__DBL_BIG = my_gamma(v+1.0)/DBL_BIG;
+    if (x <= 2.0*pow(gamma_vPlus1__DividedBy__DBL_BIG*0.5, 1.0/(v+1.0)))
+    {
+        // K(v+1,x) overflows, and hence K(v,x) overflows too.
+        resultFor_vPlusOne = DBL_BIG;
+        return DBL_BIG;
+    }
+
+    // For huge x, K(v,x) = sqrt(PI/(2x))*exp(-x).
+    if (x > -EXPMIN || 0.0 == exp(-x)*sqrt(0.5*PI/x))
+    {
+        // K(v+1,x) underflows, for all v.
+        resultFor_vPlusOne = 0.0;
+        return 0.0;
+    }
+
+    const double EPSILON_ZJ = 1.0e-15; // Zhang & Jin's convergence criterion
+    unsigned long int n = static_cast<unsigned long int>(floor(v)); // closest integral order
+    double v0 = v - n; // the "fractional part" of the order
+    const unsigned long int MAX_ITERATIONS = 50; // Zhang & Jin's heuristic cutoff
+
+    double half_x_squared = 0.25*x*x; // avoid repeated recalculations
+    double Kv0 = 0.0;      // == K(v0, x)
+    double Kv0plus1 = 0.0; // == K(v0+1, x)
+
+    // First, calculate K(v0, x) and K(v0+1, x).
+
+  Calculate_Kv0:
+    if (x <= 9.0)
+    {
+        // Small argument:  Use the series expansion.
+        if (0.0 == v0)
+        {
+            // K(0,x) = -(ln(x/2) + EULERS_CONSTANT)*I(0,x)
+            //          + sum(coeff_i*term_i, from i=1 to i=large),
+            //
+            // where I(0,x) = modified Bessel fn. of the 1st kind of order 0
+            //              = sum(term_i, from i = 0 to i = large),
+            // with
+            //
+            // term_i = ((x/2)^(2*i)) / (i!)^2
+            //
+            // and
+            //
+            // coeff_i = sum(1/j, from j=1 to j=i), or 1 if i=0.
+            //
+            // K(1,x) = (1/x) + (ln(x/2) + EULERS_CONSTANT)*I(1,x)
+            //                - (x/2)*sum(coeff_i_2*term_i_2, from i=1 to i=large),
+            // where I(1,x) = modified Bessel fn. of the 1st kind of order 1
+            //              = (x/2)*sum(term_i_2, from i=1 to i=large),
+            //
+            // with
+            //
+            // term_i_2 = ((x/2)^(2*i)) / (i!*(i+1)!)
+            //
+            // and
+            //
+            // coeff_i_2 = sum(1/j, from j=1 to j=i) + (1/2)*(1/(i+1)), or 1/2 if i=0.
+
+            double prevKv0 = DBL_BIG, prevKv0plus1 = DBL_BIG;
+            double term_A = log(0.5*x) + EULERS_CONSTANT;
+            double term_i = 1.0, term_i_2 = 1.0;
+            double coeff_i = 0.0, coeff_i_2 = 0.0;
+            Kv0 = -term_A; // i=0 term
+            Kv0plus1 = 1.0/x + term_A*0.5*x - 0.25*x; // i=0 term
+
+            for (unsigned long int i = 1; i <= MAX_ITERATIONS; i++)
+            {
+                term_i *= half_x_squared/(i*i);
+                term_i_2 *= half_x_squared/(i*(i+1));
+                coeff_i += 1.0/i;
+                coeff_i_2 = coeff_i + 1.0/(2.0*(i+1.0));
+                Kv0 += (-term_A + coeff_i)*term_i;
+                Kv0plus1 += (term_A - coeff_i_2)*0.5*x*term_i_2;
+                if (fabs((prevKv0 - Kv0)/Kv0) < EPSILON_ZJ &&
+                    fabs((prevKv0plus1 - Kv0plus1)/Kv0plus1) < EPSILON_ZJ)
+                    break; // convergence achieved
+                prevKv0 = Kv0;
+                prevKv0plus1 = Kv0plus1;
+            }
+        }
+
+        else // v0 > 0
+        {
+            // K(v0, x) = (PI/2)*(I(-v0, x) - I(v0, x))/sin(v0*PI),
+            // where
+            // I(v0,x) = modified Bessel fn. of the 1st kind of order v0
+            //         = sum(((x/2)^(2i+v0))/(i!*gamma(i+v0+1)),
+            //                from i=0 to i=large)
+            //
+            // and gamma(x+1) = x*gamma(x).
+            //
+            // For v0 close to 0 or 1, the above formula for K(v0,x) approaches 0/0,
+            // and behaves quasi-randomly.  An excellent example is K(1.999999,7).
+            // Increasing the number of iterations and increasing the strictness
+            // of the convergence criterion (i.e., decreasing EPSILON_ZJ)
+            // does not seem to help.  We thus employ the following approximation,
+            // derived by erynes in autumn 2005.
+
+            if (v0 <= SMALL_v0 || v0 >= 1.0 - SMALL_v0)
+            {
+                double K0(0.0), K1(0.0); // K(0,x) and K(1,x)
+                K0 = BesselK(0.0, x, K1);
+                if (v0 >= 1.0 - SMALL_v0)
+                {
+                    v0 = -(1.0 - v0); // calculate K(-eps, x), K(1 - eps, x), etc.
+                    n += 1; // An example:  Redefining 2 + .95 to be 3 - .05, n=2 --> n=3.
+                }
+
+                Kv0 = K0 * (v0*v0/(2.0*x + 1.0) + 1.0);
+                Kv0plus1 = ((2.0*x-1.0)*(4.0*x+1.0)*K1/(2.0*(x+1.0)*(x+1.0)))*v0*v0 + (K0/x)*v0 + K1;
+
+                if (0 == n)
+                {
+                    resultFor_vPlusOne = Kv0plus1;
+                    if (Kv0 >= DBL_BIG)
+                    {
+                        // Try to avoid returning "inf" in either variable.
+                        resultFor_vPlusOne = DBL_BIG;
+                        return DBL_BIG;
+                    }
+                    if (resultFor_vPlusOne >= DBL_BIG)
+                        resultFor_vPlusOne = DBL_BIG;
+                    return Kv0;
+                }
+                if (1 == n)
+                {
+                    resultFor_vPlusOne = Kv0 + (2.0*(1.0 + v0)/x)*Kv0plus1;
+                    if (Kv0plus1 >= DBL_BIG)
+                    {
+                        // Try to avoid returning "inf" in either variable.
+                        resultFor_vPlusOne = DBL_BIG;
+                        return DBL_BIG;
+                    }
+                    if (resultFor_vPlusOne >= DBL_BIG)
+                        resultFor_vPlusOne = DBL_BIG;
+                    return Kv0plus1;
+                }
+            }
+
+            else // the size of v0 makes K(v0, x) = (PI/2)*(I(-v0, x) - I(v0, x))/sin(v0*PI) stable
+            {
+                // Variables for computing K(v0,x).
+                double gamma_1_plus_v0 = my_gamma(1.0+v0),
+                    gamma_1_minus_v0 = my_gamma(1.0-v0);
+                double factor_neg = 1.0/(gamma_1_minus_v0*pow(0.5*x, v0));
+                double factor_pos = 1.0/(factor_neg*gamma_1_minus_v0*gamma_1_plus_v0);
+                double neg_term_i = 1.0, pos_term_i = 1.0;
+                double sum = factor_neg - factor_pos; // i=0 terms
+                double prev_sum = DBL_BIG;
+                // Variables for computing K(v0+1,x).
+                double factor_neg_2 = -v0*factor_neg/(0.5*x);
+                double factor_pos_2 = factor_pos*0.5*x/(1.0+v0);
+                double neg_term_i_2 = 1.0, pos_term_i_2 = 1.0;
+                double sum_2 = factor_neg_2 - factor_pos_2; // i=0 terms
+                double prev_sum_2 = DBL_BIG;
+
+                for (unsigned long int i = 1; i < 2.4*MAX_ITERATIONS; i++) // Zhang & Jin's heuristic cutoff
+                {
+                    // Compute the sum for K(v0,x).
+                    neg_term_i *= half_x_squared/(i*(i - v0));
+                    pos_term_i *= half_x_squared/(i*(i + v0));
+                    sum += factor_neg*neg_term_i - factor_pos*pos_term_i;
+
+                    // Compute the sum for K(v0+1,x).
+                    neg_term_i_2 *= half_x_squared/(i*(i - (v0+1.0)));
+                    pos_term_i_2 *= half_x_squared/(i*(i + (v0+1.0)));
+                    sum_2 += factor_neg_2*neg_term_i_2 - factor_pos_2*pos_term_i_2;
+
+                    if (fabs((prev_sum - sum)/sum) < EPSILON_ZJ &&
+                        fabs((prev_sum_2 - sum_2)/sum_2) < EPSILON_ZJ)
+                        break; // convergence achieved
+                    prev_sum = sum;
+                    prev_sum_2 = sum_2;
+                }
+
+                Kv0 = (0.5*PI)*sum/sin(v0*PI);
+                Kv0plus1 = (0.5*PI)*sum_2/sin((v0+1.0)*PI);
+            }
+        }
+    }
+    else // x > 9.
+    {
+        // Large argument:  Use the asymptotic expansion.
+        //
+        // K(v0,x) = sqrt(PI/(2x))*exp(-x)*(1 + sum(product[(u - (2k-1)^2)/(8k*x),
+        //                                                  from k=1 to k=m],
+        //                                          from m=1 to m=kmax)),
+        // where u = 4*v0*v0
+        // and kmax is an integer determined heuristically by Zhang & Jin (1996).
+        //
+
+        double sum(0.0), product(1.0), sum_2(0.0), product_2(1.0);
+        double four_v0_squared = 4.0*v0*v0; // avoid repeated recalculations
+        double four_v0plus1_squared = 4.0*(v0+1.0)*(v0+1.0); // ditto
+        unsigned long int kmax = 14; // kmax is Zhang & Jin's heuristic cutoff
+
+        if (x >= 50.0)
+            kmax = 8;  // Zhang & Jin heuristics
+        else if (x >= 35.0)
+            kmax = 10; // Zhang & Jin heuristics
+
+        for (unsigned long int k = 1; k <= kmax; k++)
+        {
+            product *= (four_v0_squared - (2.0*k - 1.0)*(2.0*k - 1.0))/(8.0*k*x);
+            sum += product;
+            product_2 *= (four_v0plus1_squared - (2.0*k - 1.0)*(2.0*k - 1.0))/(8.0*k*x);
+            sum_2 += product_2;
+        }
+        Kv0 = sqrt(PI/(2.0*x))*exp(-x)*(1.0 + sum);
+        Kv0plus1 = sqrt(PI/(2.0*x))*exp(-x)*(1.0 + sum_2);
+    }
+
+    if (mustUseTwo_v0s)
+    {
+        if (first_v0_calculation)
+        {
+            result = Kv0;
+            first_v0_calculation = false;
+            v0 = -v + 1.0;
+            goto Calculate_Kv0;
+        }
+        else
+            resultFor_vPlusOne = Kv0;
+        if (result >= DBL_BIG)
+        {
+            // Try to avoid returning "inf" in either variable.
+            result = resultFor_vPlusOne = DBL_BIG;
+        }
+        else if (resultFor_vPlusOne >= DBL_BIG)
+            resultFor_vPlusOne = DBL_BIG;
+        return result;
+    }
+
+    // Now, use K(v0,x) and K(v0+1,x) to calculate the desired K(v,x) == K(v0+n,x),
+    // using the recurrence relationship:
+    //
+    // K(v+1,x) = K(v-1,x) + (2v/x)K(v,x).
+
+    double A = 0.0, B = Kv0, C = Kv0plus1;
+    for (unsigned long int i = 1; i <= n; i++)
+    {
+        A = B;
+        B = C;
+        C = A + (2.0*(v0 + i)/x)*B;
+    }
+
+    if (orderIsNegative)
+        swap(B,C);
+    if (B >= DBL_BIG)
+        B = C = DBL_BIG;
+    else if (C >= DBL_BIG)
+        C = DBL_BIG;
+    resultFor_vPlusOne = C;
+    return B;
+}
+
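+// A minimal, disabled sanity-check sketch for BesselK().  The quoted values
+// are rounded; K(1/2,x) has the closed form sqrt(PI/(2x))*exp(-x), which
+// makes half-integer orders easy to check by hand.
+#if 0
+static void SketchBesselK()
+{
+    double k_vplus1 = 0.0;
+
+    // Integer order, series branch (x <= 9):
+    // K(0,1) is roughly 0.4210 and the by-reference K(1,1) roughly 0.6019.
+    double k0 = BesselK(0.0, 1.0, k_vplus1);
+
+    // Half-integer order:  K(0.5,2) = sqrt(PI/4)*exp(-2), roughly 0.1199,
+    // with the by-reference K(1.5,2) roughly 0.1799.
+    double k_half = BesselK(0.5, 2.0, k_vplus1);
+
+    // Large argument (x > 9) exercises the asymptotic expansion.
+    double k_big_x = BesselK(2.0, 20.0, k_vplus1);
+
+    (void)k0; (void)k_half; (void)k_big_x;
+}
+#endif
+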
+// Function to compute a numerical estimate of the analytically
+// intractable partial derivative of K(v,x) with respect to v,
+// where K(v,x) is the modified Bessel function of the second kind
+// of order v evaluated at x, sometimes called the MacDonald function.
+// [Note:  The partial derivative of K(v,x) with respect to x
+// is simply (v/x)*K(v,x) - K(v+1,x).]
+//
+// Arguments:  v is the order, x is the argument,
+// Kvx is the already-computed value of K(v,x).
+//
+// WARNING:  This simple algorithm may be insufficiently precise!
+//
+// Contains an original approximation derived by erynes for when
+// v is close to an integer.
+//
+double DvBesselK(double v, double x, double Kvx)
+{
+    if (x <= 0.0)
+        throw implementation_error("DvBesselK() received illegal argument:  "
+                                   + ToString(x) + ".");
+    if (0.0 == v)
+        return 0.0; // this is independent of x, for nonzero x
+
+    // K(-v,x) == K(v,x), so the sign of the value we return
+    // must equal the sign of v.  Work with v > 0 for convenience.
+    bool vIsNegative(false);
+    if (v < 0.0)
+    {
+        vIsNegative = true;
+        v = -v;
+    }
+
+    // For tiny x, K(v,x) = 0.5*gamma(v)*pow(0.5*x, -v).
+    double gamma_v__DividedBy__DBL_BIG = my_gamma(v)/DBL_BIG;
+    if (x <= 2.0*pow(0.5*gamma_v__DividedBy__DBL_BIG, 1.0/v))
+        return (vIsNegative ? -DBL_BIG : DBL_BIG);
+    else if (DBL_BIG == Kvx)
+        return (vIsNegative ? -DBL_BIG : DBL_BIG);
+
+    unsigned long int n = static_cast<unsigned long int>(floor(v));
+    double v0 = v - 1.0*n;
+    const double h = 1.0e-06; // used to compute the derivative from its definition
+    double dummy; // dummy variable needed for storage; value not used
+
+    if (v0 <= SMALL_v0 && v0+h >= SMALL_v0)
+    {
+        // need to nudge backwards
+        v0 = (SMALL_v0 - NUDGE_AMOUNT) - h;
+        v = n + v0;
+        Kvx = BesselK(v, x, dummy);
+    }
+    else if (v0 < 1.0 - SMALL_v0 && v0+h >= 1.0 - SMALL_v0)
+    {
+        // need to nudge backwards
+        v0 = (1.0 - SMALL_v0 - NUDGE_AMOUNT) - h;
+        v = n + v0;
+        Kvx = BesselK(v, x, dummy);
+    }
+    else if (v0 > 1.0 - SMALL_v0 && v0+h == 1.0)
+    {
+        // need to nudge backwards
+        v0 = (1.0 - NUDGE_AMOUNT) - h;
+        v = n + v0;
+        Kvx = BesselK(v, x, dummy);
+    }
+    // Note that v0 > 1.0 - SMALL_v0 && v0+h > 1.0 is safe,
+    // because h < SMALL_v0, hence v0+h < 1.0 + SMALL_v0.
+    // Also, v0 == 1.0 - SMALL_v0 is safe, because in that case
+    // K(v0,x) and K(v0+h,x) will both be calculated using the
+    // derived approximation.
+
+    double K_v_plus_h__x(0.0), result;
+
+    K_v_plus_h__x = BesselK(v+h, x, dummy);
+    result = (vIsNegative ? -(K_v_plus_h__x - Kvx)/h :
+              (K_v_plus_h__x - Kvx)/h);
+    if (result <= -DBL_BIG)
+        result = -DBL_BIG;
+    else if (result >= DBL_BIG)
+        result = DBL_BIG;
+    return result;
+
+#if 0
+    // This is old code by erynes that computes the derivative of the approximation
+    // that's conditionally employed in BesselK().  For at least the moment (early
+    // 2007), we're taking it out, and always computing the derivative using an
+    // approximation to the definition of the derivative (i.e., slope computed
+    // at points x and x+h where h is small).
+
+    if (v0 > 1.0 - SMALL_v0)
+    {
+        n += 1;
+        v0 = v - 1.0*n; // Thus, v0 < 0.0  Example:  2.0 + 0.95 --> 3.0 - 0.05.
+    }
+
+    double K1x(0.0);
+    double K0x = BesselK(0.0, x, K1x);
+    double A = 0.0;
+    // erynes determined that, for small "e", e > 0 or e < 0,
+    // K(e,x) = K(0,x)*(1 +  e^2/(2x + 1)),
+    // K(1+e,x) = K(1,x) + (K(0,x)/x)*e + ((2x-1)(4x+1)K(1,x)/(2*(x+1)^2))*e^2,
+    // where the "smallness" of e can vary with x, but is often near 0.01.
+    // "e" in these equations corresponds to "v0" in the code.
+    double B = (2.0 * K0x/(2.0 * x + 1.0)) * v0;
+    if (0 == n)
+        return (vIsNegative ? -B : B);
+    double C = K0x/x + ((2.0*x-1.0)*(4.0*x+1.0)*K1x/((x+1.0)*(x+1.0)))*v0; // recall x > 0
+    Kvx = K1x + (K0x/x)*v0 + ((2.0*x-1.0)*(4.0*x+1.0)*K1x/(2.0*(x+1.0)*(x+1.0)))*v0*v0;
+
+    double Kv1x = K0x*(v0*v0/(2.0*x + 1.0) + 1.0) + (2.0*(1.0+v0)/x)*Kvx; // == K(2+v0,x)
+
+    for (unsigned long int i = 1; i < n; i++)
+    {
+        A = B;
+        B = C;
+        if (0 == i % 2)
+        {
+            C = A + (2.0/x)*(Kv1x + (i + v0)*B);
+            if (i != n - 1) // avoid unnecessary computation
+                Kvx = BesselK(1.0+i+v0, x, Kv1x);
+        }
+        else
+            C = A + (2.0/x)*(Kvx + (i + v0)*B);
+    }
+
+    return (vIsNegative ? -C : C);
+#endif // 0
+}
+
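+// A minimal, disabled sketch comparing the forward-difference estimate
+// returned by DvBesselK() with a central-difference estimate built directly
+// from two BesselK() calls; the two should agree to several digits when the
+// order is not too close to an integer.
+#if 0
+static void SketchDvBesselK(double v, double x)
+{
+    double dummy = 0.0;
+    double kvx = BesselK(v, x, dummy);
+
+    // Forward difference, as implemented above.
+    double forward = DvBesselK(v, x, kvx);
+
+    // Central difference with the same nominal step size.
+    const double h = 1.0e-06;
+    double central = (BesselK(v + h, x, dummy) - BesselK(v - h, x, dummy)) / (2.0*h);
+
+    (void)forward; (void)central;
+}
+#endif
+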
+// Euler's psi function == d(log_gamma(x))/dx.
+// Implemented by erynes in August 2005, adapted from:
+// Shanjie Zhang and Jianming Jin, _Computation of Special Functions._
+// New York: Wiley-Interscience (1996).
+// (Their implementation is taken directly from Abramowitz and Stegun.)
+//
+double psi(double x)
+{
+    double result(0.0);
+    double abs_x = x >= 0.0 ? x : -x;
+    if (floor(x) == x)
+    {
+        // x is an integer.  Use the expansion for integers.
+        if (x <= 0.0)
+        {
+            throw implementation_error("psi() received illegal argument:  "
+                                       + ToString(x) + ".");
+            return DBL_BIG; // psi(x) is undefined for 0 and negative integers
+        }
+        for (unsigned long int k = 1; k < static_cast<unsigned long int>(floor(x)); k++)
+            result += 1.0/k;
+        return -EULERS_CONSTANT + result;
+    }
+
+    if (floor(abs_x + 0.5) == abs_x + 0.5)
+    {
+        // x is a half-integer.  Use the expansion for half-integers.
+        for (unsigned long int k = 1;
+             k <= static_cast<unsigned long int>(floor(abs_x - 0.5)); k++)
+            result += 2.0/(2.0*k - 1.0);
+        result += -EULERS_CONSTANT - 2.0*LOG2;
+    }
+    else
+    {
+        // Not an integer or half-integer.
+        if (abs_x < 10.0)
+        {
+            // Add an integer to abs_x to make abs_x + n > 10,
+            // then use the asymptotic expansion for large x,
+            // then use the recurrence relationship between psi(x+n) and psi(x).
+            // First, compute sum[1/(x+k)], then later, subtract this
+            // from psi(x+n) to obtain psi(x).
+            unsigned long int n = static_cast<unsigned long int>(10.0 - floor(abs_x));
+            for (unsigned long int k = 0; k < n; k++)
+                result -= 1.0/(abs_x + k);
+            abs_x += n;
+        }
+        // The asymptotic expansion.  These numbers are Bernoulli numbers;
+        // the expansion actually comes from Abramowitz and Stegun's book.
+        double x2 = 1.0/(abs_x*abs_x),
+            a1 = -1.0/12.0,
+            a2 = +1.0/120.0,
+            a3 = -1.0/252.0,
+            a4 = +1.0/240.0,
+            a5 = -1.0/132.0,
+            a6 = +691.0/32760.0,
+            a7 = a1,
+            a8 = +3617.0/8160.0;
+        result += log(abs_x) - 0.5/abs_x +
+            x2*(((((((a8*x2+a7)*x2+a6)*x2+a5)*x2+a4)*x2+a3)*x2+a2)*x2+a1);
+    }
+
+    if (x < 0.0)
+        result += 1.0/abs_x + PI/tan(PI*abs_x); // psi(-x) = psi(x) + 1/x + PI*cot(PI*x)
+    return result;
+}
+
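+// A minimal, disabled sanity-check sketch for psi().  Reference values:
+// psi(1) = -EULERS_CONSTANT (about -0.5772), psi(0.5) = -EULERS_CONSTANT - 2*LOG2
+// (about -1.9635), and psi(2.5) is about 0.7032.
+#if 0
+static void SketchPsi()
+{
+    double at_one      = psi(1.0);  // integer branch
+    double at_half     = psi(0.5);  // half-integer branch
+    double at_two_half = psi(2.5);  // half-integer branch with recurrence terms
+    double at_pi_ish   = psi(3.14); // asymptotic branch, reached via the shift to x > 10
+
+    (void)at_one; (void)at_half; (void)at_two_half; (void)at_pi_ish;
+}
+#endif
+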
+//____________________________________________________________________________________
diff --git a/src/tools/mathx.h b/src/tools/mathx.h
new file mode 100644
index 0000000..28b3039
--- /dev/null
+++ b/src/tools/mathx.h
@@ -0,0 +1,151 @@
+// $Id: mathx.h,v 1.33 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef MATHX_H
+#define MATHX_H
+
+#include <cmath>
+#include <vector>
+
+#include "vectorx.h"
+
+// if we're on an alpha...  does this catch that?
+#ifdef alphaev56
+#define MYEXP UnderFlowExp
+#else
+#define MYEXP exp
+#endif
+
+//------------------------------------------------------------------------------------
+// The proper version of isnan and the right way  to get access to
+// it depends on the system and compiler. Here we try to get that
+// all straightened out.
+
+// Macosx doesn't currently include isnan properly without this
+// G-d only knows why it needs iostream to compile properly
+#ifdef LAMARC_COMPILE_MACOSX
+#if (__GNUC__ < 4)
+#include <iostream>
+extern "C" int isnan(double);
+#endif
+#endif
+
+// the default isnan
+#define systemSpecificIsnan isnan
+
+// Microsoft VC++ uses a different isnan function. Unfortunately,
+// Metrowerks defines _MSC_VER sometimes, but we don't want to
+// do this in that case
+#ifdef __MWERKS__
+#elif defined _MSC_VER
+#include <float.h>
+#define systemSpecificIsnan _isnan
+#endif
+
+const double LOG2 = 0.69314718055994530942; // log(2.0).
+const double LOG10 = 2.30258509299404568401799145468; // log(10.0)
+const double PI = 3.1415926535897932384626433832795028841972;
+const double SQRT_PI = 1.7724538509055160272981674833411451827975; // sqrt(PI)
+const double EULERS_CONSTANT = 0.5772156649015328606065120900824024310422;
+
+using std::vector;
+double log_gamma(double x);
+double my_gamma(double x);
+double erfc(double x); // the "complementary error function"
+double incompletegamma(double alpha, double x);
+double probchi(long df, double chi);
+double find_chi(long df, double prob);
+DoubleVec1d gamma_rates(double alpha, long ratenum);
+double alnorm (double x, int up);
+double SafeDivide(double num, double denom);
+double logfac(long n);
+//long   factorial(long n);
+double factorial(double n);
+double UnderFlowExp(double pow);
+bool IsEven(long n);
+DoubleVec2d Invert(const DoubleVec2d& src);  // invert square matrix
+DoubleVec2d Transpose(const DoubleVec2d& src);  // transpose square matrix
+bool CloseEnough(double a, double b);  // are values within epsilon?
+DoubleVec1d SafeExp(const DoubleVec1d& src);
+double SafeExpAndSum(const DoubleVec1d& src);
+double SafeExp(double src);
+double SafeProductWithExp(double a, double x);
+double OverflowOfProductWithExp(double a, double x);
+double UnderflowOfProductWithExp(double a, double x);
+double SafeExpDiff(double x1, double x2);
+double OverflowOfSafeExpDiff(double x1, double x2);
+double UnderflowOfSafeExpDiff(double x1, double x2);
+double LogZero(double x);
+double SafeLog(double x);
+DoubleVec1d SafeLog(DoubleVec1d src);
+long ChooseRandomFromWeights(DoubleVec1d& weights);
+double ExpE1(const double& x);
+double BesselK(double v, double x, double& resultFor_vPlusOne); // == K(v,x)
+double DvBesselK(double v, double x, double Kvx);
+double psi(double x); // Euler's psi function == d(log_gamma(x))/dx.
+void ScaleLargestToZero(DoubleVec1d& unscaled);
+void ScaleToSumToOne(DoubleVec1d& vec);
+DoubleVec1d AddValsOfLogs(DoubleVec1d vec1, DoubleVec1d vec2);
+
+#if 0
+class Gamma
+{
+    double alpha;
+    double logAlpha;
+    double logAlpha1;
+    double invAlpha9;
+    double sqrAlpha3;
+
+  public:
+    Gamma();
+    Gamma(double);
+    void           SetAlpha(double);
+    DoubleVec1d    Calculate(long);         // number of divisions
+    double         IncompleteGamma(double alpha, double x);
+    double         LogGamma(double);
+    double         Tail(double);
+    double ProbChi2(long df , double chi);
+    double FindChi2(long df, double prob);
+};
+#endif
+
+class EigenCalculator
+{
+  private:
+
+    static const double accuracy;
+
+    // Cosine and sine
+    std::pair<double, double> Coeffs(double x, double y);
+
+    // Givens matrix reduction
+    void Givens(DoubleVec2d& a, long x, long y, long size,
+                const std::pair<double, double> cs, bool userow);
+
+    // Matrix tridiagonalization
+    void Tridiag(DoubleVec2d& a, DoubleVec2d& eigvecs);
+
+    // Intermediate work of eigenvalues/vectors
+    void Shiftqr(DoubleVec2d& a, DoubleVec2d& eigvecs);
+
+    // Modified dot-product
+    void DotProduct(const DoubleVec2d& first, const DoubleVec2d& second,
+                    DoubleVec2d& answer);
+
+  public:
+
+    // Driver for eigenvalues/vectors
+    std::pair<DoubleVec1d, DoubleVec2d> Eigen(DoubleVec2d a);
+
+};
+
+#endif // MATHX_H
+
+//____________________________________________________________________________________
diff --git a/src/tools/random.cpp b/src/tools/random.cpp
new file mode 100644
index 0000000..d6d0bea
--- /dev/null
+++ b/src/tools/random.cpp
@@ -0,0 +1,160 @@
+// $Id: random.cpp,v 1.20 2011/04/23 02:02:49 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// Mary 9/1/2010 Changed to use Boost random number generator (Mersenne Twister)
+// due to suspicions that the homebrew was showing too much pattern.
+
+#include <cassert>
+#include <cmath>
+#include <ctime>
+#include <fstream>
+#include <iostream>
+
+#include "random.h"
+#include "stringx.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+Random::Random()
+{
+    m_rng.seed(static_cast<unsigned int>(time(0)));
+}
+
+//------------------------------------------------------------------------------------
+
+Random::Random(long seed)
+{
+    m_rng.seed(static_cast<unsigned int>(seed));
+}
+
+//------------------------------------------------------------------------------------
+
+void Random::Seed(long seed)
+{
+    m_rng.seed(static_cast<unsigned int>(seed));
+} // Seed
+
+//------------------------------------------------------------------------------------
+
+void Random::Read(char *filename)
+{
+    unsigned int seed;
+
+    ifstream file;
+    file.open(filename);
+    file >> seed;
+    file.close();
+
+    m_rng.seed(seed);
+}
+
+//------------------------------------------------------------------------------------
+
+void Random::Write(char *filename)
+{
+    ofstream file;
+    file.open(filename,ios::app);
+    file << Float() << endl;
+    file.close();
+}
+
+//------------------------------------------------------------------------------------
+
+long Random::Long(long m)
+{
+    assert(m>0);
+    // DON'T make these two structures static -- you cannot since
+    // the initial one takes an argument from the enclosing method
+    boost::uniform_int<> dist(0,m-1);
+    boost::variate_generator<boost::mt19937&, boost::uniform_int<> >
+        vg(m_rng, dist);
+    return vg();
+}
+
+//------------------------------------------------------------------------------------
+
+double Random::Float()
+{
+    // Making these two structures static saves us from having to
+    // re-create them each time we use this method.
+    static boost::uniform_real<> fng;
+    static boost::variate_generator<boost::mt19937&, boost::uniform_real<> > vg(m_rng, fng);
+
+    // the generator can return 0.0 (we've seen it 2010-10-22), so
+    // we need to repeat until we get a good value. We don't want
+    // exactly 0.0 or 1.0
+    while(true)
+    {
+        double val =  vg();
+        if (val > 0.0 && val < 1.0)
+        {
+            return val;
+        }
+    }
+    assert(false);
+    return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+
+char Random::Base()
+{
+    long whichbase = Long(4);
+    if (whichbase == 0) return 'A';
+    if (whichbase == 1) return 'C';
+    if (whichbase == 2) return 'G';
+    if (whichbase == 3) return 'T';
+
+    assert(false); // unknown base in Random::Base()
+    return 'X';
+
+}
+
+//------------------------------------------------------------------------------------
+
+bool Random::Bool()
+{
+    return (Long(2) == 1L);
+}
+
+//------------------------------------------------------------------------------------
+
+string Random::Name()
+{
+    string name;
+
+    name = ToString(Base()) + ToString(Base()) + ToString(Long(1000));
+
+    return name;
+
+}
+
+//------------------------------------------------------------------------------------
+
+double Random::Normal()
+// Sample from a normal distribution with mean 0 and variance 1
+// using Box-Muller algorithm (see Wikipedia "Normal Distribution"
+// article).
+{
+    // Making these two structures static saves us from having to
+    // re-create them each time we use this method.
+    static boost::normal_distribution<> norm(0.0, 1.0);
+    static boost::variate_generator<boost::mt19937&, boost::normal_distribution<> > vg(m_rng, norm);
+
+    return vg();
+} // Normal
+
+//____________________________________________________________________________________
diff --git a/src/tools/random.h b/src/tools/random.h
new file mode 100644
index 0000000..3739b8b
--- /dev/null
+++ b/src/tools/random.h
@@ -0,0 +1,50 @@
+// $Id: random.h,v 1.17 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef RANDOM_H
+#define RANDOM_H
+
+#include <string>
+#include <boost/random.hpp>
+#include <boost/random/mersenne_twister.hpp>
+
+#define MAX_RANDOM  4294967296.0        // 2^32
+
+class Random
+{
+    Random(const Random&);          // undefined
+    Random operator=(Random);       // undefined
+
+    long num0, num1, num2, num3;
+    long n0, n1, n2, n3;
+    long m0, m1, m2, m3;
+
+    boost::mt19937 m_rng;
+
+  public:
+    // The default ctor seeds the RNG from the system clock.  Warning:  if you
+    // call it twice in quick succession the answer will probably be the same!
+    // It is using *seconds*, not milliseconds.
+    Random();
+    Random(long seed);
+    void Seed(long seed);
+    void Read(char*);
+    void Write(char*);
+    long Long(long range);
+    double Float();
+    char Base();
+    bool Bool();
+    std::string Name();
+    double Normal();
+};
+
+#endif // RANDOM_H
+
+//____________________________________________________________________________________
diff --git a/src/tools/rangex.cpp b/src/tools/rangex.cpp
new file mode 100644
index 0000000..999eba5
--- /dev/null
+++ b/src/tools/rangex.cpp
@@ -0,0 +1,734 @@
+// $Id: rangex.cpp,v 1.31 2013/11/07 22:56:31 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>
+
+#if 0                                   // RSGNOTE: Not currently used.
+#include <cstdlib>                      // For std::atol() in ToRangePair
+#endif
+
+#include "local_build.h"
+
+#include "rangex.h"
+#include "stringx.h"
+#include "registry.h"
+
+using std::string;
+using std::make_pair;
+
+//------------------------------------------------------------------------------------
+// Print intervals as half-open (ie, lower limit included, upper limit excluded).
+// Prints interval as "[n1,n2)".
+
+string ToString(rangepair rpair)
+{
+    // Note!  This prints rangepairs in "internal units", not "user units".
+    // For "user units", call ToStringUserUnits() instead.
+
+    return "[" + ToString(rpair.first) + "," + ToString(rpair.second) + ")";
+}
+
+//------------------------------------------------------------------------------------
+// Print intervals as half-open (ie, lower limit included, upper limit excluded).
+// Prints interval as "[n1,n2)" and several as "[n1,n2) , [n3,n4)".
+
+string ToString(rangeset rset)
+{
+    // Note!  This prints rangesets in "internal units", not "user units".
+    // For "user units", call ToStringUserUnits() instead.
+
+    rangeset::iterator rpair = rset.begin();
+
+    if (rpair == rset.end())
+    {
+        return "(none)";
+    }
+
+    string retval = ToString(*rpair);
+    ++rpair;
+
+    for ( ; rpair != rset.end() ; ++rpair)
+    {
+        retval += " , " + ToString(*rpair);
+    }
+
+    return retval;
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+// This is needed only because RecRange::PrintLinks calls it in the Littlelinks version.
+// Print intervals as half-open (ie, lower limit included, upper limit excluded).
+// Prints interval as "[n1,n2)" and several as "[n1,n2) , [n3,n4)".
+
+#ifndef RUN_BIGLINKS
+
+string ToString(linkrangeset rset)
+{
+    // Note!  This prints rangesets in "internal units", not "user units".
+    // For "user units", call ToStringUserUnits() instead.
+
+    linkrangeset::iterator rpair = rset.begin();
+
+    if (rpair == rset.end())
+    {
+        return "(none)";
+    }
+
+    string retval = ToString(*rpair);
+    ++rpair;
+
+    for ( ; rpair != rset.end() ; ++rpair)
+    {
+        retval += " , " + ToString(*rpair);
+    }
+
+    return retval;
+}
+
+#endif // RUN_BIGLINKS
+
+//------------------------------------------------------------------------------------
+// Print intervals as half-open (ie, lower limit included, upper limit excluded) 
+// incremented by 1 to make the intervals consistent with GraphML conventions
+// Prints interval as "[n1,n2)".
+
+string ToGraphMLString(rangepair rpair)
+{
+    // Note!  This prints rangepairs in "internal units", not "user units".
+    // For "user units", call ToStringUserUnits() instead.
+    // Note also: it is NOT (rpair.second + 1) because these are indexes, not live sites.
+    return "[" + ToString(rpair.first+1) + "," + ToString(rpair.second) + ")";
+}
+
+//------------------------------------------------------------------------------------
+// Print intervals as half-open (ie, lower limit included, upper limit excluded).
+// incremented by 1 to make the intervals consistent with GraphML conventions
+// Prints interval as "[n1,n2)" and several as "[n1,n2) , [n3,n4)".
+
+string ToGraphMLString(rangeset rset)
+{
+    // Note!  This prints rangesets in "internal units", not "user units".
+    // For "user units", call ToStringUserUnits() instead.
+
+    rangeset::iterator rpair = rset.begin();
+
+    if (rpair == rset.end())
+    {
+        return "(none)";
+    }
+
+    string retval = ToGraphMLString(*rpair);
+    ++rpair;
+
+    for ( ; rpair != rset.end() ; ++rpair)
+    {
+        retval += " , " + ToGraphMLString(*rpair);
+    }
+
+    return retval;
+}
+
+//------------------------------------------------------------------------------------
+// Print intervals as fully closed (ie, both lower and upper limits included).
+// Prints interval as "n1:n2" and several as "n1:n2, n3:n4".
+
+string ToStringUserUnits(rangepair rpair)
+{
+    // Note!  This prints rangepairs in "user units" instead of "internal units".
+    // In other words, if the range is <0,34>, this is an "open-upper-end" interval,
+    // and the actual active sites are 0:33.
+    //
+    // Furthermore, if the user is expecting to have no zeroes in the output, we
+    // need to convert the 0 to -1, and display "-1:33" instead.
+
+    rpair.second--;                     // Print last included value, not first excluded value.
+    rpair = ToNoZeroesIfNeeded(rpair);
+
+    string retval = ToString(rpair.first);
+    if (rpair.second > rpair.first)
+    {
+        retval += ":" + ToString(rpair.second);
+    }
+
+    return retval;
+}
+
+//------------------------------------------------------------------------------------
+// Print intervals as fully closed (ie, both lower and upper limits included).
+// Prints interval as "n1:n2" and several as "n1:n2, n3:n4".
+
+string ToStringUserUnits(rangeset rset)
+{
+    // Note!  This prints rangesets in "user units" instead of "internal units".
+    // In other words, if the range is <0,34>, this is an "open-upper-end" interval,
+    // and the actual active sites are 0:33.
+    //
+    // Furthermore, if the user is expecting to have no zeroes in the output, we
+    // need to convert the 0 to -1, and display "-1:33" instead.
+
+    rangeset::iterator rpair = rset.begin();
+
+    if (rpair == rset.end())
+    {
+        return "(none)";
+    }
+
+    string retval = ToStringUserUnits(*rpair);
+    ++rpair;
+
+    for ( ; rpair != rset.end() ; ++rpair)
+    {
+        retval += ", " + ToStringUserUnits(*rpair);
+    }
+
+    return retval;
+}
+
+rangepair ToRangePair(string & instr)
+{
+    rangepair retpair = make_pair(0, 0);
+    string::size_type cdx = instr.find(',');
+    long stval;
+    long ndval;
+
+    if (cdx == string::npos)
+    {
+        // Throw bad rangepair error - JMFIX
+        std::cerr << "ToRangePair: instr \"" << instr << "\" has no \",\" so it is not a valid rangepair." << std::endl;
+    }
+    else
+    {
+        // Extract range (reads as half-open interval, closed at lower end and open at upper end).
+        stval = std::atol(instr.substr(0, cdx).c_str());
+        ndval = std::atol(instr.substr(cdx+1, instr.length()).c_str()); // Convert to open upper end.
+        // Fix start index if the zero had been eliminated.
+        if (registry.GetConvertOutputToEliminateZeroes() && (stval < 0))
+        {
+            ++stval;
+        }
+        retpair = make_pair(stval, ndval);
+    }
+    return retpair;
+}
+
+rangeset ToRangeSet(string & instr)
+{
+    rangeset retset;
+    unsigned int stidx = 1;
+    unsigned int sdx = instr.find('[');
+    unsigned int rdx = instr.find(')');
+    string internal;
+
+    while (rdx < instr.length())
+    {
+        // Internal pairs.
+        internal = instr.substr(stidx, rdx - 1);
+        retset = AddPairToRange(ToRangePair(internal), retset);
+        sdx = instr.find('[', rdx); 
+        rdx = instr.find(')', sdx); 
+        stidx = sdx + 1;
+    }
+    return retset;
+}
+
+
+//------------------------------------------------------------------------------------
+// Returns a RANGESET as a set containing a single RANGEPAIR, that defined by its two arguments.
+// Note that the arguments define a half-open interval, INCLUDING the lower and EXCLUDING the upper argument.
+
+rangeset MakeRangeset(long int low, long int high)
+{
+    rangeset retset;
+    retset.insert(make_pair(low, high));
+
+    return retset;
+}
+
+//------------------------------------------------------------------------------------
+// Returns a LINKRANGESET as a set containing a single LINKRANGEPAIR, that defined by its two arguments.
+// Note that the arguments define a half-open interval, INCLUDING the lower and EXCLUDING the upper argument.
+
+linkrangeset MakeRangeset(unsigned long int low, unsigned long int high)
+{
+    linkrangeset retset;
+    retset.insert(make_pair(low, high));
+
+    return retset;
+}
+
+//------------------------------------------------------------------------------------
+// Returns a RANGESET as the SET UNION of the first argument (a RANGEPAIR) and the second argument (a RANGESET).
+
+rangeset AddPairToRange(const rangepair & addpart, const rangeset & rset)
+{
+    rangeset retset = rset;
+
+    rangepair low  = make_pair(addpart.first, addpart.first);
+    rangepair high = make_pair(addpart.second, addpart.second);
+
+    long int newlow = low.first;
+    long int newhigh = high.second;
+
+    rangesetiter early = retset.lower_bound(low);
+    if (early != retset.begin())
+    {
+        --early;
+        if (early->second >= low.first)
+        {
+            //'low' falls within early's interval
+            newlow = early->first;
+        }
+    }
+
+    rangesetiter late = retset.upper_bound(high);
+    // We need to increment this iterator if late->first == high.first.
+    if (late != retset.end())
+    {
+        if (late->first == high.first)
+        {
+            ++late;
+        }
+    }
+    if (late != retset.begin())
+    {
+        --late;
+        if (late->second > high.first)
+        {
+            //'high' falls within late's interval
+            newhigh = late->second;
+        }
+    }
+
+    early = retset.lower_bound(make_pair(newlow, newlow + 1));
+    late  = retset.upper_bound(make_pair(newhigh - 1, newhigh));
+
+    retset.erase(early, late);
+    retset.insert(make_pair(newlow, newhigh));
+
+    return retset;
+}
+
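+// A minimal, disabled sketch of the half-open merge semantics:  adding an
+// overlapping pair grows an existing interval, while a disjoint pair becomes
+// a second interval.
+#if 0
+static void SketchAddPairToRange()
+{
+    rangeset sites = MakeRangeset(0L, 10L);                   // { [0,10) }
+    sites = AddPairToRange(std::make_pair(5L, 20L), sites);   // { [0,20) }
+    sites = AddPairToRange(std::make_pair(30L, 40L), sites);  // { [0,20) , [30,40) }
+    // ToString(sites) would yield "[0,20) , [30,40)".
+}
+#endif
+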
+//------------------------------------------------------------------------------------
+// Returns a LINKRANGESET as the SET UNION of the first argument (a LINKRANGEPAIR)
+// and the second argument (a LINKRANGESET).
+
+linkrangeset AddPairToRange(const linkrangepair & addpart, const linkrangeset & rset)
+{
+    linkrangeset retset = rset;
+
+    linkrangepair low  = make_pair(addpart.first, addpart.first);
+    linkrangepair high = make_pair(addpart.second, addpart.second);
+
+    unsigned long int newlow = low.first;
+    unsigned long int newhigh = high.second;
+
+    linkrangesetiter early = retset.lower_bound(low);
+    if (early != retset.begin())
+    {
+        --early;
+        if (early->second >= low.first)
+        {
+            //'low' falls within early's interval
+            newlow = early->first;
+        }
+    }
+
+    linkrangesetiter late = retset.upper_bound(high);
+    // We need to increment this iterator if late->first == high.first.
+    if (late != retset.end())
+    {
+        if (late->first == high.first)
+        {
+            ++late;
+        }
+    }
+    if (late != retset.begin())
+    {
+        --late;
+        if (late->second > high.first)
+        {
+            //'high' falls within late's interval
+            newhigh = late->second;
+        }
+    }
+
+    early = retset.lower_bound(make_pair(newlow, newlow + 1));
+    late  = retset.upper_bound(make_pair(newhigh - 1, newhigh));
+
+    retset.erase(early, late);
+    retset.insert(make_pair(newlow, newhigh));
+
+    return retset;
+}
+
+//------------------------------------------------------------------------------------
+// Returns a RANGESET as the SET DIFFERENCE of the second argument (a RANGESET)
+// and the first argument (a RANGEPAIR).
+
+rangeset RemovePairFromRange(const rangepair & removepart, const rangeset & rset)
+{
+    rangeset retset;
+    for (rangeset::iterator range = rset.begin(); range != rset.end(); ++range)
+    {
+        if ((range->first >= removepart.first) && (range->second <= removepart.second))
+        {
+            //Don't add it.
+        }
+        else
+        {
+            if ((range->first < removepart.first) && (range->second > removepart.second))
+            {
+                //Add two outside halves.
+                retset.insert(make_pair(range->first, removepart.first));
+                retset.insert(make_pair(removepart.second, range->second));
+            }
+            else if ((range->second > removepart.first) && (range->second <= removepart.second))
+            {
+                //Add only the first half.
+                retset.insert(make_pair(range->first, removepart.first));
+            }
+            else if ((range->first >= removepart.first) && (range->first <= removepart.second))
+            {
+                //Add only the second half.
+                retset.insert(make_pair(removepart.second, range->second));
+            }
+            else
+            {
+                //Add the whole thing.
+                retset.insert(*range);
+            }
+        }
+    }
+    return retset;
+}
+
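+// A minimal, disabled sketch:  removing a half-open pair splits any interval
+// that strictly contains it.
+#if 0
+static void SketchRemovePairFromRange()
+{
+    rangeset sites = MakeRangeset(0L, 20L);                       // { [0,20) }
+    sites = RemovePairFromRange(std::make_pair(5L, 10L), sites);  // { [0,5) , [10,20) }
+}
+#endif
+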
+//------------------------------------------------------------------------------------
+// Returns a LINKRANGESET as the SET DIFFERENCE of the second argument (a LINKRANGESET)
+// and the first argument (a LINKRANGEPAIR).
+
+linkrangeset RemovePairFromRange(const linkrangepair & removepart, const linkrangeset & rset)
+{
+    linkrangeset retset;
+    for (linkrangeset::iterator range = rset.begin(); range != rset.end(); ++range)
+    {
+        if ((range->first >= removepart.first) && (range->second <= removepart.second))
+        {
+            //Don't add it.
+        }
+        else
+        {
+            if ((range->first < removepart.first) && (range->second > removepart.second))
+            {
+                //Add two outside halves.
+                retset.insert(make_pair(range->first, removepart.first));
+                retset.insert(make_pair(removepart.second, range->second));
+            }
+            else if ((range->second > removepart.first) && (range->second <= removepart.second))
+            {
+                //Add only the first half.
+                retset.insert(make_pair(range->first, removepart.first));
+            }
+            else if ((range->first >= removepart.first) && (range->first <= removepart.second))
+            {
+                //Add only the second half.
+                retset.insert(make_pair(removepart.second, range->second));
+            }
+            else
+            {
+                //Add the whole thing.
+                retset.insert(*range);
+            }
+        }
+    }
+    return retset;
+}
+
+//------------------------------------------------------------------------------------
+// Returns a RANGESET as the SET DIFFERENCE of the second argument (a RANGESET)
+// and the first argument (a RANGESET).
+
+rangeset RemoveRangeFromRange(const rangeset & removerange, const rangeset & rset)
+{
+    rangeset retset = rset;
+
+    for (rangeset::iterator removepair = removerange.begin(); removepair != removerange.end(); removepair++)
+    {
+        retset = RemovePairFromRange(*removepair, retset);
+    }
+
+    return retset;
+}
+
+//------------------------------------------------------------------------------------
+// Returns a LINKRANGESET as the SET DIFFERENCE of the second argument (a LINKRANGESET)
+// and the first argument (a LINKRANGESET).
+
+linkrangeset RemoveRangeFromRange(const linkrangeset & removerange, const linkrangeset & rset)
+{
+    linkrangeset retset = rset;
+
+    for (linkrangeset::iterator removepair = removerange.begin(); removepair != removerange.end(); removepair++)
+    {
+        retset = RemovePairFromRange(*removepair, retset);
+    }
+
+    return retset;
+}
+
+//------------------------------------------------------------------------------------
+// LS NOTE:
+// These functions originally written for range.cpp, but ended up being needed elsewhere.
+//
+// Note that 'Union' (used to be 'OR' but I got confused too often) actually does the exact same thing that
+// 'AddRangeToRange' used to, but much faster, so I took out AddRangeToRange entirely.  Perhaps there's a
+// similar way to speed up RemoveRangeFromRange?  We'll see if it shows up in the profiler.
+
+rangeset Union(const rangeset & set1, const rangeset & set2)
+{
+    if (set1.empty()) return set2;
+    if (set2.empty()) return set1;
+
+    rangeset mergeset;
+    merge(set1.begin(), set1.end(), set2.begin(), set2.end(), inserter(mergeset, mergeset.begin()));
+
+    rangeset newset;
+    rangeset::iterator rit;
+
+    for(rit = mergeset.begin(); rit != mergeset.end(); ++rit)
+    {
+        ORAppend(*rit, newset);
+    }
+
+    return newset;
+} // Union
+
+//------------------------------------------------------------------------------------
+// Helper function for 'Union'
+
+void ORAppend(rangepair newrange, rangeset & oldranges)
+{
+    if (oldranges.empty())
+    {
+        oldranges.insert(newrange);
+        return;
+    }
+
+    rangeset::iterator last = --oldranges.end();
+
+    assert(newrange.first >= last->first); // Expect sequential, sorted input.
+
+    if (newrange.second <= last->second) return; // New is contained in old.
+
+    if (newrange.first > last->second)  // New is after old.
+    {
+        oldranges.insert(oldranges.end(), newrange);
+        return;
+    }
+
+    newrange.first = last->first;       // New starts within old and extends past it.
+    oldranges.erase(last);
+    oldranges.insert(oldranges.end(), newrange);
+
+    return;
+} // ORAppend
+
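+// A minimal, disabled sketch of Union() applied to overlapping rangesets.
+#if 0
+static void SketchUnion()
+{
+    rangeset a = MakeRangeset(0L, 10L);                  // { [0,10) }
+    a = AddPairToRange(std::make_pair(20L, 30L), a);     // { [0,10) , [20,30) }
+    rangeset b = MakeRangeset(5L, 25L);                  // { [5,25) }
+
+    rangeset joined = Union(a, b);                       // { [0,30) }
+    (void)joined;
+}
+#endif
+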
+//------------------------------------------------------------------------------------
+// Used to be 'AND' until I got confused too often.
+
+rangeset Intersection(const rangeset & mother, const rangeset & father)
+{
+    rangeset::const_iterator m = mother.begin();
+    rangeset::const_iterator f = father.begin();
+
+    rangeset result;
+    rangepair newpair;
+
+    if (mother.empty() || father.empty()) return result;
+
+    while (true)
+    {
+        newpair.first = std::max((*m).first, (*f).first);
+        newpair.second = std::min((*m).second, (*f).second);
+
+        if (newpair.first < newpair.second)
+            result.insert(result.end(),newpair);
+
+        if ((*m).second < (*f).second) ++m;
+        else ++f;
+
+        if (m == mother.end() || f == father.end()) return result;
+    }
+} // Intersection
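+
+//-----------------------------------------
+// Example (illustrative sketch): Intersection keeps only the overlap of two rangesets,
+// again assuming half-open [low, high) pairs from MakeRangeset.
+
+#if 0 // Illustrative example only.
+
+static void ExampleIntersection()
+{
+    rangeset a = Union(MakeRangeset(0, 10), MakeRangeset(20, 30));  // {[0,10), [20,30)}
+    rangeset b = MakeRangeset(5, 25);                               // {[5, 25)}
+    rangeset i = Intersection(a, b);
+    // 'i' should hold {[5, 10), [20, 25)}.
+}
+
+#endif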
+
+//------------------------------------------------------------------------------------
+// Counts the sites (or any other type of object represented as a PAIR of INTs) in a RANGESET.
+
+long int CountSites(const rangeset & rset)
+{
+    if (rset.empty()) return 0L;
+
+    long int count(0L);
+
+    for (rangeset::const_iterator rpair = rset.begin(); rpair != rset.end(); ++rpair)
+    {
+        count += rpair->second - rpair->first;
+    }
+
+    return count;
+}
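+
+//-----------------------------------------
+// Example (illustrative sketch): because the pairs are half-open, CountSites sums
+// (second - first) over each pair.
+
+#if 0 // Illustrative example only.
+
+static void ExampleCountSites()
+{
+    rangeset sites = Union(MakeRangeset(0, 10), MakeRangeset(20, 25));
+    long int n = CountSites(sites);   // should be 10 + 5 = 15
+}
+
+#endif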
+
+//------------------------------------------------------------------------------------
+
+#if 0 // Apparently unused.
+
+rangeset SubtractFromRangeset(const rangeset & rset, long int offset)
+{
+    long int invOffset = 0 - offset;
+    return AddToRangeset(rset, invOffset);
+}
+
+#endif
+
+//-----------------------------------------
+
+#if 0 // Apparently unused.
+
+rangeset AddToRangeset(const rangeset & rset, long int offset)
+{
+    rangeset::const_iterator range;
+    rangeset newRangeSet;
+
+    for(range = rset.begin(); range != rset.end(); ++range)
+    {
+        long int newFirst = range->first + offset;
+        long int newSecond = range->second + offset;
+
+        newRangeSet.insert(make_pair(newFirst, newSecond));
+    }
+
+    return newRangeSet;
+}
+
+#endif
+
+//------------------------------------------------------------------------------------
+// Tests whether second argument (an INDEX) is within any of the intervals indicated by the pairs
+// in the rangeset which is the first argument.
+// This function is applied only to SITE rangesets, not to LINK (Big or Little) rangesets.
+
+bool IsInRangeset(const rangeset & targetset, long int target)
+{
+    if (targetset.empty())
+        return false;
+
+    rangeset::const_iterator range;
+
+    for(range = targetset.begin(); range != targetset.end(); ++range)
+    {
+        if (range->second <= target) continue;
+        if (range->first <= target) return true;
+        break;
+    }
+
+    return false;
+} // IsInRangeset
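+
+//-----------------------------------------
+// Example (illustrative sketch): membership tests against a half-open SITE rangeset.
+
+#if 0 // Illustrative example only.
+
+static void ExampleIsInRangeset()
+{
+    rangeset sites = MakeRangeset(10, 20);        // {[10, 20)}
+    bool low  = IsInRangeset(sites, 10);          // should be true:  10 is included
+    bool high = IsInRangeset(sites, 20);          // should be false: 20 is the open end
+}
+
+#endif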
+
+//------------------------------------------------------------------------------------
+// Tests whether second argument (an INDEX) is within any of the intervals indicated by the pairs
+// in the rangeset which is the first argument.
+// This function is applied only to SITE rangesets, not to LINK (Big or Little) rangesets.
+
+#if 0 // RSGDEBUG: Enabled for debugging, but only called (on RecRange objects) from a commented-out section of a debugging function.
+#ifndef RUN_BIGLINKS
+
+bool IsInRangeset(const linkrangeset & targetset, signed long int target)
+{
+    if (targetset.empty())
+        return false;
+
+    linkrangeset::const_iterator range;
+
+    for(range = targetset.begin(); range != targetset.end(); ++range)
+    {
+        if (static_cast<signed long int>(range->second) <= target) continue;
+        if (static_cast<signed long int>(range->first) <= target) return true;
+        break;
+    }
+
+    return false;
+} // IsInRangeset
+
+#endif // RUN_BIGLINKS
+#endif // RSGDEBUG
+
+//------------------------------------------------------------------------------------
+// These functions are to be used when converting site labels for display to
+// the user, and from *menu* input from the user.  It is assumed that in the
+// XML, the 'user input' has already been converted to the 'sequential' version.
+
+long int ToSequentialIfNeeded(long int input)
+{
+    //Note:  input might equal zero if it was the upper end of a rangepair,
+    // since rangepairs are open-ended by default.  In other words, if the user
+    // types in "-20:-1" in the menu, the first thing that happens is that these
+    // numbers are converted to the pair <-20, 0>.  To convert these values to
+    // the 'sequential' numbering system, this needs to be converted to the
+    // pair <-19, 1>.
+
+    if (registry.GetConvertOutputToEliminateZeroes() && (input <= 0))
+    {
+        input++;
+    }
+
+    return input;
+}
+
+//------------------------------------------------------------------------------------
+
+long int ToNoZeroesIfNeeded(long int input)
+{
+    if (registry.GetConvertOutputToEliminateZeroes() && (input <= 0))
+    {
+        input--;
+    }
+
+    return input;
+}
+
+//------------------------------------------------------------------------------------
+
+rangepair ToSequentialIfNeeded(rangepair input)
+{
+    if (registry.GetConvertOutputToEliminateZeroes())
+    {
+        input.first = ToSequentialIfNeeded(input.first);
+        input.second = ToSequentialIfNeeded(input.second);
+    }
+
+    return input;
+}
+
+//------------------------------------------------------------------------------------
+
+rangepair ToNoZeroesIfNeeded(rangepair input)
+{
+    if (registry.GetConvertOutputToEliminateZeroes())
+    {
+        input.first = ToNoZeroesIfNeeded(input.first);
+        input.second = ToNoZeroesIfNeeded(input.second);
+    }
+
+    return input;
+}
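+
+//-----------------------------------------
+// Example (illustrative sketch): how the conversions behave when the registry option
+// to eliminate zero site labels is turned on (values pass through unchanged otherwise).
+
+#if 0 // Illustrative example only.
+
+static void ExampleSiteLabelConversion()
+{
+    long int a = ToSequentialIfNeeded(-20L);                  // should be -19
+    long int b = ToSequentialIfNeeded(0L);                    // should be 1
+    long int c = ToNoZeroesIfNeeded(0L);                      // should be -1
+    rangepair p = ToSequentialIfNeeded(make_pair(-20L, 0L));  // should be <-19, 1>, as above
+}
+
+#endif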
+
+//____________________________________________________________________________________
+
diff --git a/src/tools/rangex.h b/src/tools/rangex.h
new file mode 100644
index 0000000..a130c5a
--- /dev/null
+++ b/src/tools/rangex.h
@@ -0,0 +1,124 @@
+// $Id: rangex.h,v 1.23 2013/08/22 18:03:12 jmcgill Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef RANGEX_H
+#define RANGEX_H
+
+#include <set>
+#include <string>
+#include <vector>
+
+#include "local_build.h"
+
+//------------------------------------------------------------------------------------
+
+struct rangecmp
+{
+    bool operator()(const std::pair<long int, long int> & p1, const std::pair<long int, long int> & p2)
+    {
+        if (p1.first == p2.first)
+        {
+            return (p1.second < p2.second);
+        }
+        return (p1.first < p2.first);
+    }
+};
+
+//-----------------------------------------
+
+struct linkrangecmp
+{
+    bool operator()(const std::pair<unsigned long int, unsigned long int> & p1, const std::pair<unsigned long int, unsigned long int> & p2)
+    {
+        if (p1.first == p2.first)
+        {
+            return (p1.second < p2.second);
+        }
+        return (p1.first < p2.first);
+    }
+};
+
+//------------------------------------------------------------------------------------
+
+typedef std::pair<long int, long int>                         rangepair;
+typedef std::vector<rangepair>                                rangevector;
+typedef std::set<rangepair, rangecmp>                         rangeset;
+typedef std::set<rangepair, rangecmp>::iterator               rangesetiter;
+typedef std::set<rangepair, rangecmp>::const_iterator         rangesetconstiter;
+
+typedef std::pair<unsigned long int, unsigned long int>       linkrangepair;
+typedef std::vector<linkrangepair>                            linkrangevector;
+typedef std::set<linkrangepair, linkrangecmp>                 linkrangeset;
+typedef std::set<linkrangepair, linkrangecmp>::iterator       linkrangesetiter;
+typedef std::set<linkrangepair, linkrangecmp>::const_iterator linkrangesetconstiter;
+
+//------------------------------------------------------------------------------------
+
+std::string ToString(rangepair rpair);
+std::string ToString(rangeset rset);
+
+std::string ToGraphMLString(rangepair rpair);
+std::string ToGraphMLString(rangeset rset);
+
+#ifndef RUN_BIGLINKS
+std::string ToString(linkrangeset rset);
+#endif // RUN_BIGLINKS
+
+std::string ToStringUserUnits(rangepair rpair);
+std::string ToStringUserUnits(rangeset rset);
+
+//------------------------------------------------------------------------------------
+
+rangepair     ToRangePair(std::string & instr);
+rangeset      ToRangeSet(std::string & instr);
+
+rangeset      MakeRangeset(signed long int low, signed long int high);
+linkrangeset  MakeRangeset(unsigned long int low, unsigned long int high);
+
+rangeset      AddPairToRange(const rangepair & addpart, const rangeset & rset);
+linkrangeset  AddPairToRange(const linkrangepair & addpart, const linkrangeset & rset);
+
+rangeset      RemovePairFromRange(const rangepair & removepart, const rangeset & rset);
+linkrangeset  RemovePairFromRange(const linkrangepair & removepart, const linkrangeset & rset);
+
+rangeset      RemoveRangeFromRange(const rangeset & removerange, const rangeset & rset);
+linkrangeset  RemoveRangeFromRange(const linkrangeset & removerange, const linkrangeset & rset);
+
+rangeset      Union(const rangeset & set1, const rangeset & set2);
+void          ORAppend(rangepair newrange, rangeset & oldranges);
+
+rangeset      Intersection(const rangeset & set1, const rangeset & set2);
+long int      CountSites(const rangeset & rset);
+
+#if 0 // Apparently unused.
+rangeset      SubtractFromRangeset(const rangeset & rset, long int offset);
+rangeset      AddToRangeset(const rangeset & rset, long int offset);
+#endif
+
+//------------------------------------------------------------------------------------
+
+bool      IsInRangeset(const rangeset & targetrange, long int target);
+
+#ifndef RUN_BIGLINKS
+#if 0 // RSGDEBUG: Enabled for debugging, but only called (on RecRange objects) from a commented-out section of a debugging function.
+bool      IsInRangeset(const linkrangeset & targetrange, long int target);
+#endif // RSGDEBUG
+#endif // RUN_BIGLINKS
+
+long int  ToSequentialIfNeeded(long int input);
+long int  ToNoZeroesIfNeeded(long int input);
+rangepair ToSequentialIfNeeded(rangepair input);
+rangepair ToNoZeroesIfNeeded(rangepair input);
+
+//------------------------------------------------------------------------------------
+
+#endif // RANGEX_H
+
+//____________________________________________________________________________________
diff --git a/src/tools/stringx.cpp b/src/tools/stringx.cpp
new file mode 100644
index 0000000..e7ae68e
--- /dev/null
+++ b/src/tools/stringx.cpp
@@ -0,0 +1,2206 @@
+// $Id: stringx.cpp,v 1.130 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <cctype>
+#include <functional>
+#include <stdexcept>
+#include <string>
+#include <cstring>
+#include <cstdio>
+
+// NOTE: older versions required including <direct.h> here and
+// using _getcwd() below for MSWINDOWS compiles
+#include <unistd.h>       // unistd.h -- provides getcwd()
+
+#include "arranger_types.h"
+#include "errhandling.h"
+#include "mathx.h"
+#include "stringx.h"
+#include "xml_strings.h"  // for xmlstr::XML_ATTRTYPE_NAME in MakeTagWithName()
+#include "ui_strings.h"
+#include "paramstat.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+void UpperCase(string &s)
+{
+    long int length, i;
+    length = s.size();
+    for (i = 0; i < length; i++)
+        s[i] = toupper(s[i]);
+}
+
+//------------------------------------------------------------------------------------
+
+void LowerCase(string &s)
+{
+    long int length, i;
+    length = s.size();
+    for (i = 0; i < length; i++)
+        s[i] = tolower(s[i]);
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+string ToString(char character)
+{
+    ostringstream ostr;
+    ostr << character;
+    string s(ostr.str());
+    return s;
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(int number)
+{
+    ostringstream ostr;
+    ostr << number;
+    string s(ostr.str());
+    return s;
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(bool tag)
+{
+    string s;
+    if(tag)
+        s="Yes";
+    else
+        s="No";
+    return s;
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(noval val)
+{
+    return "";
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(unsigned int number)
+{
+    ostringstream ostr;
+    ostr << number;
+    string s(ostr.str());
+    return s;
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(unsigned long int number)
+{
+    ostringstream ostr;
+    ostr << number;
+    string s(ostr.str());
+    return s;
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(unsigned long long number)
+{
+    ostringstream ostr;
+    ostr << number;
+    string s(ostr.str());
+    return s;
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(long int number)
+{
+    ostringstream ostr;
+    ostr << number;
+    string s(ostr.str());
+    return s;
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(long long number)
+{
+    ostringstream ostr;
+    ostr << number;
+    string s(ostr.str());
+    return s;
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(double number)
+{
+    if (numeric_limits<double>::has_infinity)
+    {
+        if (number == numeric_limits<double>::infinity())
+        {
+            return "inf";
+        }
+        if (number == -numeric_limits<double>::infinity())
+        {
+            return "-inf";
+        }
+    }
+    if (systemSpecificIsnan(number))
+    {
+        return "nan";
+    }
+    ostringstream ostr;
+    ostr << number;
+    string s(ostr.str());
+    return s;
+}
+
+//------------------------------------------------------------------------------------
+
+string ToDecimalString(double number)
+{
+    ostringstream ostr;
+    ostr.setf(ios_base::fixed);
+    ostr << number;
+    string s(ostr.str());
+    return s;
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(force_type ftype)
+{
+    switch(ftype)
+    {
+        case force_COAL:
+            return uistr::coalescence;
+            break;
+        case force_MIG:
+            return uistr::migration;
+            break;
+        case force_DIVMIG:
+            return uistr::divmigration;
+            break;
+        case force_DISEASE:
+            return uistr::disease;
+            break;
+        case force_REC:
+            return uistr::recombination;
+            break;
+        case force_GROW:
+            return uistr::growth;
+            break;
+        case force_REGION_GAMMA:
+            return uistr::regionGamma;
+            break;
+        case force_EXPGROWSTICK:
+            return uistr::expGrowStick;
+            break;
+        case force_LOGISTICSELECTION:
+            return uistr::logisticSelection;
+            break;
+        case force_LOGSELECTSTICK:
+            return uistr::logSelectStick;
+            break;
+        case force_DIVERGENCE:
+            return uistr::divergence;
+            break;
+        case force_NONE:
+            return "NO_force";
+            break;
+    }
+    assert(false);
+    return "";
+}
+
+//------------------------------------------------------------------------------------
+
+string ToShortString(force_type ftype)
+{
+    switch(ftype)
+    {
+        case force_COAL:
+            return lamarcstrings::COAL;
+            break;
+        case force_MIG:
+            return lamarcstrings::MIG;
+            break;
+        case force_DIVMIG:
+            return lamarcstrings::DIVMIG;
+            break;
+        case force_DISEASE:
+            return lamarcstrings::DISEASE;
+            break;
+        case force_REC:
+            return lamarcstrings::REC;
+            break;
+        case force_GROW:
+            return lamarcstrings::GROW;
+            break;
+        case force_REGION_GAMMA:
+            //GAMMA DEBUG
+            assert(false);
+            throw implementation_error("The 'Gamma' force is not fully supported.");
+            break;
+        case force_EXPGROWSTICK:
+            return lamarcstrings::EXPGROWSTICK;
+            break;
+        case force_LOGISTICSELECTION:
+            return lamarcstrings::LOGISTICSELECTION;
+            break;
+        case force_LOGSELECTSTICK:
+            return lamarcstrings::LOGSELECTSTICK;
+            break;
+        case force_DIVERGENCE:
+            return lamarcstrings::DIVERGENCE;
+            break;
+        case force_NONE:
+            return "NO_force";
+            break;
+    }
+    assert(false);
+    return "";
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(method_type method, bool getLongName)
+{
+    switch(method)
+    {
+        case method_FST:
+            if(getLongName) return lamarcstrings::longNameFST;
+            return lamarcstrings::shortNameFST;
+            break;
+        case method_PROGRAMDEFAULT:
+            if(getLongName) return lamarcstrings::longNamePROGRAMDEFAULT;
+            return lamarcstrings::shortNamePROGRAMDEFAULT;
+            break;
+        case method_USER:
+            if(getLongName) return lamarcstrings::longNameUSER;
+            return lamarcstrings::shortNameUSER;
+            break;
+        case method_WATTERSON:
+            if(getLongName) return lamarcstrings::longNameWATTERSON;
+            return lamarcstrings::shortNameWATTERSON;
+            break;
+    }
+    assert(false);
+    return lamarcstrings::longNameUSER;
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(growth_type gtype, bool getLongName )
+{
+    switch(gtype)
+    {
+        case growth_CURVE:
+            if(getLongName) return lamarcstrings::longCurveName;
+            return lamarcstrings::shortCurveName;
+            break;
+        case growth_STICK:
+            if(getLongName) return lamarcstrings::longStickName;
+            return lamarcstrings::shortStickName;
+            break;
+        case growth_STICKEXP:
+            if(getLongName) return lamarcstrings::longStickExpName;
+            return lamarcstrings::shortStickExpName;
+            break;
+        default:
+            assert(false);
+            return "";
+            break;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(growth_scheme gscheme, bool getLongName )
+{
+    switch(gscheme)
+    {
+        case growth_EXP:
+            if(getLongName) return lamarcstrings::longExpName;
+            return lamarcstrings::shortExpName;
+            break;
+        case growth_STAIRSTEP:
+            if(getLongName) return lamarcstrings::longStairStepName;
+            return lamarcstrings::shortStairStepName;
+            break;
+        default:
+            assert(false);
+            return "";
+            break;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+string ToString(model_type model, bool getLongName )
+{
+    switch(model)
+    {
+        case F84:
+            if(getLongName) return lamarcstrings::longF84Name;
+            return lamarcstrings::shortF84Name;
+            break;
+        case Brownian:
+            if(getLongName) return lamarcstrings::longBrownianName;
+            return lamarcstrings::shortBrownianName;
+            break;
+        case Stepwise:
+            if(getLongName) return lamarcstrings::longStepwiseName;
+            return lamarcstrings::shortStepwiseName;
+            break;
+        case KAllele:
+            if(getLongName) return lamarcstrings::longKAlleleName;
+            return lamarcstrings::shortKAlleleName;
+            break;
+        case GTR:
+            if(getLongName) return lamarcstrings::longGTRName;
+            return lamarcstrings::shortGTRName;
+            break;
+        case MixedKS:
+            if(getLongName) return lamarcstrings::longMixedKSName;
+            return lamarcstrings::shortMixedKSName;
+            break;
+        default:
+            assert(false);
+            return "";
+            break;
+    }
+}
+
+string ToString(paramlistcondition par)
+{
+    switch (par)
+    {
+        case paramlist_YES: return "all "; break; // extra space for alignment
+        case paramlist_NO: return "none"; break;
+        case paramlist_MIX: return "some"; break;
+        default: assert(false); return ""; break;
+    }
+}
+
+string ToString(proftype prof)
+{
+    switch(prof)
+    {
+        case profile_PERCENTILE: return "percentile"; break;
+        case profile_FIX: return "fixed"; break;
+        case profile_NONE: return "none"; break;
+    }
+    assert(false);
+    return "";
+}
+
+string ToString(priortype ptype)
+{
+    switch(ptype)
+    {
+        case LINEAR: return "linear"; break;
+        case LOGARITHMIC: return "log"; break;
+    }
+    assert(false);
+    return "";
+}
+
+string ToString(pstatus pstat)
+{
+    switch(pstat)
+    {
+        case pstat_unconstrained: return "unconstrained"; break;
+        case pstat_invalid: return "invalid"; break;
+        case pstat_constant: return "constant"; break;
+        case pstat_identical: return "identical"; break;
+        case pstat_identical_head: return "identical_head"; break;
+        case pstat_multiplicative: return "multiplicative"; break;
+        case pstat_multiplicative_head: return "multiplicative_head"; break;
+    }
+    assert(false);
+    return "";
+}
+
+string ToString(const ParamStatus& pstat)
+{
+    return ToString(pstat.Status());
+}
+
+string ToString(selection_type stype, bool getLongName )
+{
+    switch(stype)
+    {
+        case selection_DETERMINISTIC:
+            if(getLongName) return lamarcstrings::longDSelectionName;
+            return lamarcstrings::shortDSelectionName;
+            break;
+        case selection_STOCHASTIC:
+            if(getLongName) return lamarcstrings::longSSelectionName;
+            return lamarcstrings::shortSSelectionName;
+            break;
+    }
+    assert(false);
+    return "";
+}
+
+// because we have some template code that might want this
+string ToString(string str)
+{
+    return str;
+}
+
+string ToString(UIId id)
+{
+    string toReturn = "";
+    if(id.HasForce())
+    {
+        force_type ft = id.GetForceType();
+        toReturn += ToString(ft);
+    }
+    if(id.HasIndex1())
+    {
+        toReturn += ":";
+        toReturn += ToString(id.GetIndex1());
+    }
+    if(id.HasIndex2())
+    {
+        toReturn += ":";
+        toReturn += ToString(id.GetIndex2());
+    }
+    if(id.HasIndex3())
+    {
+        toReturn += ":";
+        toReturn += ToString(id.GetIndex3());
+    }
+    return toReturn;
+}
+
+string ToString (verbosity_type verbosity)
+{
+    switch (verbosity)
+    {
+        case CONCISE:
+            return string ("concise");
+            break;
+        case NORMAL:
+            return string ("normal");
+            break;
+        case NONE:
+            return string ("none");
+            break;
+        case VERBOSE:
+            return string("verbose");
+            break;
+        default:
+            throw implementation_error("Unknown verbosity type");
+            return string ("");
+            break;
+    }
+}
+
+string ToString (vector<method_type> meths)
+{
+    string methstring("");
+    vector<method_type>::iterator meth;
+    for (meth = meths.begin(); meth != meths.end(); ++meth)
+    {
+        methstring += " " + ToString(*meth,true);   // true => longer name
+    }
+    return methstring;
+}
+
+string ToString (vector<proftype> profs)
+{
+    string profstring("");
+    vector<proftype>::iterator prof;
+    for (prof = profs.begin(); prof != profs.end(); ++prof)
+    {
+        profstring += ToString(*prof) + " ";
+    }
+    return profstring;
+}
+
+string ToString (vector<ParamStatus> pstats)
+{
+    string pstatstring("");
+    vector<ParamStatus>::iterator pstat;
+    for (pstat = pstats.begin(); pstat != pstats.end(); ++pstat)
+    {
+        pstatstring += ToString((*pstat).Status()) + " ";
+    }
+    return pstatstring;
+}
+
+string ToString (ParamVector & pvec)
+{
+    throw implementation_error("No ToString(ParamVector&) exists");
+    return "";
+}
+
+string ToString (vector<UIId> ids)
+{
+    string idstring("");
+    vector<UIId>::iterator iditer;
+    for (iditer = ids.begin(); iditer != ids.end(); ++iditer)
+    {
+        idstring += " " + ToString(*iditer);
+    }
+    return idstring;
+}
+
+string ToString (vector<vector<UIId> > ids)
+{
+    string idstring("");
+    vector<vector<UIId> >::iterator iditer;
+    for (iditer = ids.begin(); iditer != ids.end(); ++iditer)
+    {
+        idstring += ", " + ToString(*iditer);
+    }
+    return idstring;
+}
+
+string ToStringTF(bool tag)
+{
+    string s;
+    if(tag)
+        s="true";
+    else
+        s="false";
+    return s;
+}
+
+//------------------------------------------------------------------------------------
+
+bool CaselessStrCmp(const string& lhs, const string& rhs)
+{
+
+    if (lhs.size() != rhs.size()) return false;
+
+    size_t i;
+    for (i = 0; i < lhs.size(); ++i)
+    {
+        if (toupper(lhs[i]) != toupper(rhs[i])) return false;
+    }
+    return true;
+
+} // CaselessStrCmp
+
+// char* overloads of the preceding
+
+bool CaselessStrCmp(const string& lhs, const char* rhs)
+{
+    return CaselessStrCmp(lhs, string(rhs));
+} // CaselessStrCmp
+
+bool CaselessStrCmp(const char* lhs, const string& rhs)
+{
+    return CaselessStrCmp(string(lhs), rhs);
+} // CaselessStrCmp
+
+bool CaselessStrCmp(const char* lhs, const char* rhs)
+{
+    return CaselessStrCmp(string(lhs), string(rhs));
+} // CaselessStrCmp
+
+//------------------------------------------------------------------------------------
+// Case insensitive string comparison taken from Scott Meyers'
+// _Effective STL_ items 19 and 35.
+
+long int ciCharCompare(char c1, char c2)
+{
+    long int lc1 = toupper(static_cast<unsigned char>(c1));
+    long int lc2 = toupper(static_cast<unsigned char>(c2));
+    if (lc1 < lc2) return -1;
+    if (lc1 > lc2) return 1;
+    return 0;
+} // ciCharCompare
+
+long int ciStringCompareImpl(const string& s1, const string& s2)
+{
+    typedef pair<string::const_iterator, string::const_iterator> PSCI;
+
+    PSCI p = mismatch(s1.begin(),s1.end(), s2.begin(),
+                      not2(ptr_fun(ciCharCompare)));
+    if (p.first == s1.end())
+    {
+        if (p.second == s2.end()) return 0;
+        else return -1;
+    }
+    return ciCharCompare(*p.first, *p.second);
+
+} // ciStringCompareImpl
+
+long int ciStringCompare(const string& s1, const string& s2)
+{
+    if (s1.size() <= s2.size()) return ciStringCompareImpl(s1,s2);
+    else return -ciStringCompareImpl(s2,s1);
+} // ciStringCompare
+
+bool ciCharLess(char c1, char c2)
+{
+    return
+        toupper(static_cast<unsigned char>(c1)) <
+        toupper(static_cast<unsigned char>(c2));
+} // ciCharLess
+
+bool ciStringLess(const string& s1, const string& s2)
+{
+    return lexicographical_compare(s1.begin(), s1.end(),
+                                   s2.begin(), s2.end(),
+                                   ciCharLess);
+} // ciStringLess
+
+bool ciStringEqual(const string& s1, const string& s2)
+{
+    return !ciStringCompare(s1, s2);
+} // ciStringEqual
+
+//------------------------------------------------------------------------------------
+// Careful: was not able to link this function
+// when its name was DoubleVecFromString(..)
+
+bool FromString(const string & in, DoubleVec1d & out)
+{
+    if (in.empty()) return false;
+
+    string whitespace(" \t"), input = in;
+    while (!input.empty())
+    {
+        double value;
+        string::size_type start = input.find_first_not_of(whitespace);
+        if (start == string::npos) break;
+
+        string::size_type end = input.find_first_of(whitespace,start);
+        if (end != string::npos)
+        {
+            FromString(input.substr(start,end-start),value);
+            input = input.substr(end,input.size()-end);
+        }
+        else
+        {
+            FromString(input,value);
+            input.erase();
+        }
+
+        out.push_back(value);
+    }
+
+    return true;
+}
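+
+//------------------------------------------------------------------------------------
+// Example (illustrative sketch): the whitespace-splitting behavior of the
+// vector-producing FromString overloads.
+
+#if 0 // Illustrative example only.
+
+static void ExampleFromStringDoubleVec()
+{
+    DoubleVec1d values;
+    bool ok = FromString(" 1.5\t2.0 3 ", values);
+    // 'ok' should be true and 'values' should hold {1.5, 2.0, 3.0};
+    // tokens are separated by any mix of spaces and tabs.
+}
+
+#endif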
+
+bool FromString(const string& input, ProftypeVec1d & out)
+{
+    StringVec1d vectorize;
+    bool parsed = FromString(input,vectorize);
+    if(parsed)
+    {
+        StringVec1d::iterator i;
+        for(i=vectorize.begin(); i != vectorize.end(); i++)
+        {
+            out.push_back(ProduceProftypeOrBarf(*i));
+        }
+    }
+    return parsed;
+}
+
+bool FromString(const string & in, MethodTypeVec1d & out)
+{
+    if (in.empty()) return false;
+
+    string whitespace(" \t"), input = in;
+    while (!input.empty())
+    {
+        method_type value;
+        string::size_type start = input.find_first_not_of(whitespace);
+        if (start == string::npos) break;
+
+        string::size_type end = input.find_first_of(whitespace,start);
+        if (end != string::npos)
+        {
+            FromString(input.substr(start,end-start),value);
+            input = input.substr(end,input.size()-end);
+        }
+        else
+        {
+            FromString(input,value);
+            input.erase();
+        }
+
+        out.push_back(value);
+    }
+
+    return true;
+}
+
+bool FromString(const string & in, ParamVector & out)
+{
+    throw implementation_error("FromString should never be called for ParamVector");
+    return false;
+}
+
+bool FromString(const string & in, StringVec1d & out)
+{
+    if (in.empty()) return false;
+
+    string whitespace(" \t"), input = in;
+    while (!input.empty())
+    {
+
+        string::size_type start = input.find_first_not_of(whitespace);
+        if (start == string::npos) break;
+
+        string::size_type end = input.find_first_of(whitespace,start);
+        if (end != string::npos)
+        {
+            string thisTerm = input.substr(start,end-start);
+            out.push_back(thisTerm);
+            input = input.substr(end,input.size()-end);
+        }
+        else
+        {
+            string thisTerm = input.substr(start,input.size()-start);
+            out.push_back(thisTerm);
+            input.erase();
+        }
+
+    }
+
+    return true;
+}
+
+bool FromString(const string & in, LongVec1d & out)
+{
+    if (in.empty()) return false;
+
+    string whitespace(" \t"), input = in;
+    while (!input.empty())
+    {
+        long int value;
+        string::size_type start = input.find_first_not_of(whitespace);
+        if (start == string::npos) break;
+
+        string::size_type end = input.find_first_of(whitespace,start);
+        if (end != string::npos)
+        {
+            FromString(input.substr(start,end-start),value);
+            input = input.substr(end,input.size()-end);
+        }
+        else
+        {
+            FromString(input,value);
+            input.erase();
+        }
+
+        out.push_back(value);
+    }
+
+    return true;
+
+}
+
+bool FromString(const string & in, long int & out)
+{
+    if(in.empty())
+        return false;
+    else
+    {
+        istringstream mystream(in);
+        mystream >> out;
+    }
+    return true;
+}
+
+bool FromString(const string & in, double& out)
+{
+    if(in.empty())
+        return false;
+    else
+    {
+        istringstream mystream(in);
+        mystream >> out;
+    }
+    return true;
+}
+
+bool FromString(const string & in, method_type& out)
+{
+    try
+    {
+        out = ProduceMethodTypeOrBarf(in);
+    }
+    catch(const data_error& e)
+    {
+        return false;
+    }
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d StringToDoubleVecOrBarf(const string & in)
+{
+    DoubleVec1d out;
+    if (in.empty()) return out;
+
+    string whitespace(" \t"), input = in;
+    while (!input.empty())
+    {
+        double value;
+        string::size_type start = input.find_first_not_of(whitespace);
+        if (start == string::npos) break;
+
+        string::size_type end = input.find_first_of(whitespace,start);
+        if (end != string::npos)
+        {
+            value = ProduceDoubleOrBarf(input.substr(start,end-start));
+            input = input.substr(end,input.size()-end);
+        }
+        else
+        {
+            value = ProduceDoubleOrBarf(input);
+            input.erase();
+        }
+
+        out.push_back(value);
+    }
+
+    return out;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+string MakeTag(const string& tagname)
+{
+    return (string("<") + tagname + string(">"));
+} // MakeTag
+
+//------------------------------------------------------------------------------------
+
+string MakeCloseTag(const string& tagname)
+{
+    if (tagname[0] == '<') // we're already some form of xml tag
+        if (tagname[1] == '/') // we're already a "closing" tag
+            return tagname;
+        else
+        {
+            string nutag(tagname);
+            return nutag.insert(1L, "/");
+        }
+    else
+        return (string("</") + tagname + string(">"));
+} // MakeCloseTag
+
+//------------------------------------------------------------------------------------
+
+string MakeTagWithName(const string& tag, const string& name)
+{
+    //Replace quote marks with '&quot;'
+    string safename = name;
+    string::size_type quotpos = safename.find("\"");
+    while (quotpos != string::npos)
+    {
+        safename.replace(quotpos, 1, "&quot;");
+        quotpos = safename.find("\"");
+    }
+    return MakeTag(tag + " " + xmlstr::XML_ATTRTYPE_NAME + "=" + "\"" + safename + "\"");
+} // MakeTagWithName
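+
+//------------------------------------------------------------------------------------
+// Example (illustrative sketch): assuming xmlstr::XML_ATTRTYPE_NAME is the literal
+// string "name", MakeTagWithName escapes embedded quote marks before building the tag.
+
+#if 0 // Illustrative example only.
+
+static void ExampleMakeTagWithName()
+{
+    string tag = MakeTagWithName("region", "chr\"1\"");
+    // 'tag' should be: <region name="chr&quot;1&quot;">
+}
+
+#endif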
+
+//------------------------------------------------------------------------------------
+
+string MakeTagWithType(const string& tag, const string& type)
+{
+    return MakeTag(tag + " " + xmlstr::XML_ATTRTYPE_TYPE + "=" + "\"" + type + "\"");
+} // MakeTagWithType
+
+//------------------------------------------------------------------------------------
+
+string MakeTagWithTypePlusPanel(const string& tag, const string& type)
+{
+    return MakeTag(tag + " " + xmlstr::XML_ATTRTYPE_TYPE + "=" + "\"" + type + "\""+" source=\"panel\"");
+} // MakeTagWithTypePlusPanel
+
+//------------------------------------------------------------------------------------
+
+string MakeTagWithConstraint(const string& tag, const string& constraint)
+{
+    return MakeTag(tag + " " + xmlstr::XML_ATTRTYPE_CONSTRAINT + "=" + "\"" + constraint + "\"");
+} // MakeTagWithConstraint
+
+//------------------------------------------------------------------------------------
+// MakeJustified will fill out a string with spaces to a width of |width|.
+//  Positive values of width are used to make strings right-justified,
+//  and negative values are used to make strings left-justified.
+
+string MakeJustified(const string &instr, long int width)
+{
+    string stuff = instr;
+    string::size_type tabpos = stuff.find("\t");
+    while (tabpos != string::npos)
+    {
+        stuff.replace(tabpos, 1, "    ");
+        tabpos = stuff.find("\t");
+    }
+    long int xtraspc = static_cast<long int>(abs(width)) - stuff.size();
+
+    if (xtraspc <= 0)
+        return(stuff.substr(0,abs(width)));
+
+    string str;
+    if (width < 0)
+    {
+        str = stuff + str.assign(xtraspc,' ');
+    }
+    else
+    {
+        str = str.assign(xtraspc,' ') + stuff;
+    }
+
+    return(str);
+
+} // MakeJustified
+
+//------------------------------------------------------------------------------------
+
+string MakeJustified(const char *stuff, long int width)
+{
+    string str(stuff);
+
+    return(MakeJustified(str,width));
+
+} // MakeJustified
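+
+//------------------------------------------------------------------------------------
+// Example (illustrative sketch): the sign of 'width' selects right- or left-justification,
+// and over-long strings are truncated to the field.
+
+#if 0 // Illustrative example only.
+
+static void ExampleMakeJustified()
+{
+    string right = MakeJustified("abc", 6);      // should be "   abc"
+    string left  = MakeJustified("abc", -6);     // should be "abc   "
+    string cut   = MakeJustified("abcdefgh", 5); // should be "abcde"
+}
+
+#endif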
+
+//------------------------------------------------------------------------------------
+// MakeCentered used to push stuff to the right, but now it pushes stuff
+//  off to the left instead.
+
+string MakeCentered(const string &instr, long int width, long int indent, bool trunc)
+{
+    string str = instr;
+    string::size_type tabpos = str.find("\t");
+    while (tabpos != string::npos)
+    {
+        str.replace(tabpos, 1, "    ");
+        tabpos = str.find("\t");
+    }
+
+    string line;
+    long int strsize = str.size(), numchar = 0, truewidth = width-indent;
+    long int halfsize = strsize/2;
+
+    if (trunc && (strsize > truewidth))
+    {
+        line.assign(indent,' ');
+        numchar += indent;
+        if (width - indent >= 0)
+        {
+            line.append(str.substr(0,truewidth));
+            numchar += (str.substr(0,truewidth)).size();
+        }
+        return(line);
+    }
+
+    char lastchar = str[strsize-1];
+    if (halfsize < truewidth/2 && static_cast<long int>(lastchar) != LINEFEED)
+    {
+        line.assign(truewidth/2-halfsize,' ');
+    }
+    numchar += truewidth/2-halfsize;
+
+    line.insert(0,str);
+    numchar += strsize;
+
+    string indstr;
+    indstr.assign(width-numchar, ' ');
+    line.insert(0, indstr);
+
+    indstr.assign(indent,' ');
+    line.insert(0, indstr);
+    numchar += indent;
+
+    return(line);
+} // MakeCentered
+
+//------------------------------------------------------------------------------------
+
+string MakeCentered(const char *str, long int width, long int indent, bool trunc)
+{
+    const string pstr(str);
+
+    return (MakeCentered(pstr,width,indent,trunc));
+} // MakeCentered
+
+//------------------------------------------------------------------------------------
+
+string MakeIndent(const string& str, unsigned long int indent)
+{
+    string line;
+    line.assign(indent,' ');
+    line += str;
+
+    return line;
+
+} // MakeIndent
+
+//------------------------------------------------------------------------------------
+
+string MakeIndent(const char* str, unsigned long int indent)
+{
+    const string pstr(str);
+
+    return (MakeIndent(pstr,indent));
+
+} // MakeIndent
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+/******************************************
+ * function to format a double-precision  *
+ * number into a fixed-width field, going *
+ * to scientific notation as necessary.   *
+ * The width must be greater than 5.  If  *
+ * the number is greater than e+99 the    *
+ * returned string may be longer than     *
+ * the given field width.  Also, if the   *
+ * number is negative, it will always be  *
+ * one character greater than the given   *
+ * width.                                 *
+ ******************************************/
+
+string Pretty(double p, int w)
+{
+    ostringstream ost;
+    if (w < 6)
+    {
+        ost << "*";
+        return(string(ost.str()));
+    }
+    ost.width(w);
+    ost.setf(ios::showpoint);
+
+    if (fabs(p) >= 1.0 || p == 0.0)     //greater than one, or zero
+    {
+        ost.setf(ios::fixed);
+        ost.setf(ios::scientific);
+        if (fabs(p) >= pow(10.0,w)) ost.precision(w-5);
+        else if (fabs(p) >= pow(10.0,w-1))
+        {
+            /* The following would print numbers exactly the width we want with no
+               decimal point.  Uncomment this and comment the next line if you want
+               to change this behavior. */
+            //ost.unsetf(ios::showpoint);
+            //ost.precision(0);
+            ost.precision(w-5);
+        }
+        else ost.precision(w-1);
+    }
+    else
+    {              //less than one
+        if (fabs(p) <= pow(0.1,w-4))
+        {
+            ost.setf(ios::scientific);
+            ost.unsetf(ios::fixed);
+            ost.precision(w-6);
+        }
+        else
+        {
+            ost.setf(ios::fixed);
+            ost.unsetf(ios::scientific);
+            ost.precision(w-2);
+        }
+    }
+    ost << p;
+    return(string(ost.str()));
+} // Pretty(double)
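+
+//------------------------------------------------------------------------------------
+// Example (illustrative sketch): Pretty keeps roughly 'w' characters; per the header
+// comment above, very large or very small magnitudes come out in scientific notation.
+// The exact digits depend on the stream flags set in the function.
+
+#if 0 // Illustrative example only.
+
+static void ExamplePretty()
+{
+    string a = Pretty(3.14159, 10);         // roughly 10 characters wide
+    string b = Pretty(0.0000001234, 10);    // very small: scientific notation
+    string c = Pretty(123456789012.0, 10);  // very large: see the width/precision logic above
+    string d = Pretty(3.5, 4);              // width below 6 always returns "*"
+}
+
+#endif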
+
+//  Overload of Pretty for long int (much simpler!)
+
+string Pretty(long int p, int w)
+{
+    ostringstream ost;
+    ost.width(w);
+    ost << p;
+    return(string(ost.str()));
+} // Pretty(long int)
+
+string Pretty(unsigned long int p, int w)
+{
+    ostringstream ost;
+    ost.width(w);
+    ost << p;
+    return(string(ost.str()));
+} // Pretty(unsigned long int)
+
+//  Overload of Pretty for string (much simpler!)
+
+string Pretty(string str, int w)
+{
+    if ((long int)str.size() == w) return(str);
+    if ((long int)str.size() > w) return(str.substr(0,w));
+
+#if 0 // WARNING warning -- doesn't do padding anymore
+    long int i;
+    for (i = (long int)str.size(); i < w; ++i)
+    {
+        str += " ";
+    }
+#endif
+
+    return(str);
+} // Pretty(string)
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+bool StringCompare(const string &s1, const char *s2, long int pos, long int n)
+{
+    string temp = s1.substr(pos, n);
+    return (temp == s2);
+}
+
+//------------------------------------------------------------------------------------
+
+bool StringCompare(const string &s1, const string &s2, long int pos, long int n)
+{
+    string temp = s1.substr(pos, n);
+    return (temp == s2);
+}
+
+//------------------------------------------------------------------------------------
+
+bool CompareWOCase(const string& s1, const string& s2)
+{
+    string str1(s1), str2(s2);  // both must be pre-sized: transform writes through begin()
+
+    //transform(s1.begin(),s1.end(),str1.begin(),tolower);
+    //transform(s2.begin(),s2.end(),str2.begin(),tolower);
+    // To conform to gcc3.1's C++ standard we do:
+    transform(s1.begin(),s1.end(),str1.begin(),
+              static_cast<int(*)(int)>(tolower));
+    transform(s2.begin(),s2.end(),str2.begin(),
+              static_cast<int(*)(int)>(tolower));
+
+    return (str1 == str2);
+} // CompareWOCase(string,string)
+
+//------------------------------------------------------------------------------------
+
+bool CompareWOCase(const char* s1, const string& s2)
+{
+    return CompareWOCase(string(s1),s2);
+} // CompareWOCase(char*,string)
+
+//------------------------------------------------------------------------------------
+
+bool CompareWOCase(const string& s1, const char* s2)
+{
+    return CompareWOCase(s1,string(s2));
+} // CompareWOCase(string,char*)
+
+//------------------------------------------------------------------------------------
+
+bool CompareWOCase(const char* s1, const char* s2)
+{
+    return CompareWOCase(string(s1),string(s2));
+} // CompareWOCase(char*,char*)
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+int StringType(const string &s)
+{
+    long int i = 0;
+    while(isspace(s[i]))
+        i++;
+
+    // Alpha type _______________________________________________
+
+    if (isalpha(s[i]))
+        return 1;                                  // Alpha type
+
+    if (s[i] == '_')
+    {
+        i++;
+        while (s[i] == ' ')
+            i++;
+
+        if (isalnum(s[i]))
+            return 1;                                // Alphanumeric type
+
+        return 3;                                  // Punctuation type
+    }
+
+    // Numeric type _____________________________________________
+
+    if (isdigit(s[i]))
+        return 2;                                  // Numeric type
+
+    if (s[i] == '.')
+    {
+        i++;
+        if (isdigit(s[i]))
+            return 2;                                // Numeric type
+
+        return 3;                                  // Punctuation
+    }
+
+    if (s[i] == '-' || s[i] == '+')
+    {
+        i++;
+        if (isdigit(s[i]))
+            return 2;                                // Numeric type
+
+        if (s[i] == '.')
+        {
+            i++;
+            if (isdigit(s[i]))
+                return 2;                              // Numeric type
+        }
+    }
+
+    return 3;                                    // Punctuation
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+string::size_type GetCharacter(const string &s, char &ch, long int posn)
+{
+    long int len = s.size();
+    if (posn < 0 || len <= posn)
+        return string::npos;
+
+    while(isspace(s[posn]))
+        posn++;
+
+    ch = s[posn];
+    posn++;
+
+    return posn;
+}
+
+//------------------------------------------------------------------------------------
+
+/**********************************************************************
+ Linewrap wraps a table (contained in a vector of strings) so that
+ it is no longer than the given linelength.  It uses the following
+ rules:
+ (1) Break at a column in which all entries have spaces or nothing.
+ (2) Lines which consist only of dash and space may be broken anywhere.
+ (3) If no acceptable break can be found, the original vector will be
+ returned.
+
+ It uses the function IsSeparator(const string) as a helper function.
+***********************************************************************/
+
+vector<string> Linewrap(vector<string> invec, long int linelength)
+{
+
+    // find a suitable location to wrap
+    string::size_type site, line;
+    string::size_type oldsize = invec.size();
+    bool found = false;
+    string::size_type breakpoint = string::npos;
+
+    for (line = 0; line < oldsize; ++line)
+    {
+        if (static_cast<long int>(invec[line].size()) > linelength)
+        {
+            found = true;
+        }
+    }
+    if (!found) return(invec);          // didn't need wrapping at all
+
+    for (site = linelength; true; site--)
+    {
+        found = true;
+        for (line = 0; line < oldsize; line++)
+        {
+            if (static_cast<string::size_type>(invec[line].size()) <= site)
+            {
+                continue; // skip short lines
+            }
+            if (IsSeparator(invec[line])) continue;         // skip separators
+            if (invec[line][site] != ' ')
+            {
+                found = false;
+                break;
+            }
+        }
+        if (found)
+        {
+            breakpoint = site;
+            break;
+        }
+
+        // since we're counting down using an unsigned index,
+        // we need to make sure we stop.
+        // WARNING -- you probably don't want to add any code
+        // after this statement in this for loop
+        if (site == 0) break;
+    }
+
+    if (breakpoint == string::npos)    // never found a breakpoint
+        breakpoint = static_cast<string::size_type>(linelength);
+    // Give up and break at the line length anyway.
+
+    // cut each line at that location
+    vector<string> firstpart;
+    vector<string> secondpart;
+    for (line = 0; line < oldsize; ++line)
+    {
+        if (static_cast<string::size_type>(invec[line].size()) <= breakpoint)
+        {
+            // line does not need cutting
+            firstpart.push_back(invec[line]);
+            secondpart.push_back(string(""));
+        }
+        else
+        {                            // line does need cutting
+            firstpart.push_back(invec[line].substr(0, breakpoint));
+            secondpart.push_back(invec[line].substr(breakpoint+1,
+                                                    invec[line].size() - breakpoint));
+        }
+    }
+
+    //wrap the second part recursively, in case it's very long
+    secondpart = Linewrap(secondpart, linelength);
+
+    //catenate first and second parts
+    for (line = 0; line < static_cast<string::size_type>(secondpart.size()); ++line)
+    {
+        firstpart.push_back(secondpart[line]);
+    }
+
+    return(firstpart);
+
+} // Linewrap
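+
+//------------------------------------------------------------------------------------
+// Example (illustrative sketch): wrapping a two-row "table" whose rows share a space
+// in the same column.
+
+#if 0 // Illustrative example only.
+
+static void ExampleLinewrap()
+{
+    vector<string> table;
+    table.push_back("aaa bbb");
+    table.push_back("ccc ddd");
+    vector<string> wrapped = Linewrap(table, 5);
+    // Both rows can break at the shared space (column 3), so 'wrapped' should be
+    // {"aaa", "ccc", "bbb", "ddd"}: the first halves of every row, then the second halves.
+}
+
+#endif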
+
+//------------------------------------------------------------------------------------
+
+vector<string> Linewrap(string instring, long int linelength, long int indent)
+{
+    StringVec1d returnVec;
+    StringVec1d firstLine;
+    firstLine.push_back(instring);
+    firstLine = Linewrap(firstLine, linelength);
+    if (firstLine.size() == 1)
+    {
+        return firstLine;
+    }
+    returnVec.push_back(firstLine[0]);
+    instring.erase(0,firstLine[0].size());
+    while (instring.find(" ") == 0)
+    {
+        instring.erase(0,1);
+    }
+    StringVec1d otherLines;
+    otherLines.push_back(instring);
+    otherLines = Linewrap(otherLines, linelength - indent);
+    for (unsigned long int i = 0; i<otherLines.size(); i++)
+    {
+        returnVec.push_back(MakeJustified("",indent) + otherLines[i]);
+    }
+    return returnVec;
+}
+
+//------------------------------------------------------------------------------------
+
+//LinewrapCopy takes a vector of strings, cuts out the beginning of each
+// according to the repeat_length, calls Linewrap on the rest, then prepends
+// the cut-out bit back to the beginning of each set of strings.
+//
+// The vector of strings returned therefore has the dimensionality of n times
+// the dimensionality of the original vector, where n is how many times it had
+// to be wrapped.
+//
+// If you want a blank line between wrapped lines, include one at the bottom
+// of the original vector of strings.
+
+vector<string> LinewrapCopy(vector<string> original, long int repeat_length, long int total_length)
+{
+    assert (total_length > repeat_length);
+
+    vector<string> repeated;
+    vector<string> wrapped;
+
+    for (unsigned long int iter = 0; iter < original.size(); iter++)
+    {
+        string begin;
+        begin.assign(original[iter], 0, repeat_length);
+        repeated.push_back(begin);
+
+        string end("");
+        if (static_cast<long int>(original[iter].size()) > repeat_length)
+        {
+            end.assign(original[iter], repeat_length, original[iter].size());
+        }
+        wrapped.push_back(end);
+    }
+    wrapped = Linewrap(wrapped, (total_length - repeat_length));
+
+    for (unsigned long int wrap_iter = 0, repeat_iter = 0; wrap_iter < wrapped.size(); wrap_iter++, repeat_iter++)
+    {
+        if (repeat_iter >= repeated.size())
+            repeat_iter = repeat_iter - repeated.size();
+        wrapped[wrap_iter].insert(0, repeated[repeat_iter]);
+    }
+    return wrapped;
+} // LinewrapCopy
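+
+//------------------------------------------------------------------------------------
+// Example (illustrative sketch): the leading 'repeat_length' characters of each row act
+// as a label that is repeated on every wrapped piece of that row.
+
+#if 0 // Illustrative example only.
+
+static void ExampleLinewrapCopy()
+{
+    vector<string> table;
+    table.push_back("row1: aaa bbb");
+    table.push_back("row2: ccc ddd");
+    vector<string> wrapped = LinewrapCopy(table, 6, 11);
+    // The remainders wrap at length 11 - 6 = 5 and each piece gets its label back, so
+    // 'wrapped' should be {"row1: aaa", "row2: ccc", "row1: bbb", "row2: ddd"}.
+}
+
+#endif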
+
+//------------------------------------------------------------------------------------
+// Is a string composed solely of dash, underscore and/or space?
+
+bool IsSeparator(const string s)
+{
+    long int i;
+    for (i = 0; i < (long int)s.size(); ++i)
+    {
+        if (s[i] != ' ' && s[i] != '-' && s[i] != '_') return (false);
+    }
+    return(true);
+} // IsSeparator
+
+//------------------------------------------------------------------------------------
+
+bool ProduceBoolOrBarf(const string& src)
+{
+    string st = src;
+    LowerCase(st);
+    if (st == "true" || st == "yes" || st == "on" )
+    {
+        return true;
+    }
+    if (st == "false" || st == "no"|| st == "off" )
+    {
+        return false;
+    }
+
+    invalid_argument e("Invalid argument to ProduceBoolOrBarf:"+src);
+    throw e;
+} // ProduceBoolOrBarf
+
+//------------------------------------------------------------------------------------
+
+bool IsInteger(const string& src)
+{
+    if (src.empty()) return false;
+
+    long int i;
+    long int end = src.size();
+    bool minusfound = false;
+    for (i = 0; i < end; ++i)
+    {
+        if (isspace(src[i])) continue;  // whitespace is okay
+        if (src[i] == '-')
+        {
+            if (minusfound) return false;
+            else
+            {
+                minusfound = true;
+                continue;
+            }
+        }
+        if (!isdigit(src[i])) return false;
+    }
+    return true;
+} // IsInteger
+
+//------------------------------------------------------------------------------------
+
+bool IsReal(const string& src)
+{
+    if (src.empty()) return false;
+
+    long int i;
+    long int end = src.size();
+    bool pointfound = false;
+    for (i = 0; i < end; ++i)
+    {
+        if (!isdigit(src[i]))
+        {
+            if (isspace(src[i])) continue; // whitespace is okay
+            if (src[i] == '-') continue; // minus is okay
+            if (src[i] == '+') continue; // plus is okay
+            if (src[i] == 'e') continue; // e is okay
+            if (src[i] != '.') return false;  // neither digit nor point
+            if (pointfound) return false;   // a second decimal point?!
+            pointfound = true;              // okay, first decimal point
+        }
+    }
+    return true;
+} // IsReal
+
+//------------------------------------------------------------------------------------
+
+long int ProduceLongOrBarf(const string& in)
+{
+    //  if (in == "") return 0;
+    long int myLong;
+    myLong = atol(in.c_str());
+    if (! IsInteger(in))
+    {
+        //Don't assert here--the menu will catch it.
+        throw data_error("Expected an integer, but got \""+in+"\"");
+    }
+    if (myLong == LONG_MAX)
+    {
+        throw data_error("The value \""+in+"\" is greater than LONG_MAX (" + Pretty(LONG_MAX) + ").");
+    }
+    if (myLong == LONG_MIN)
+    {
+        throw data_error("The value \""+in+"\" is greater than LONG_MIN (" + Pretty(LONG_MIN) + ").");
+    }
+
+    return myLong;
+} // ProduceLongOrBarf
+
+//------------------------------------------------------------------------------------
+
+double ProduceDoubleOrBarf(const string& in)
+{
+    double myDouble;
+    myDouble = atof(in.c_str());
+    if (! IsReal(in))
+    {
+        if (in == "inf")
+        {
+            if (numeric_limits<double>::has_infinity)
+            {
+                return numeric_limits<double>::infinity();
+            }
+            else
+            {
+                return DBL_BIG;
+            }
+        }
+        if (in == "-inf")
+        {
+            if (numeric_limits<double>::has_infinity)
+            {
+                return -numeric_limits<double>::infinity();
+            }
+            else
+            {
+                return -DBL_BIG;
+            }
+        }
+        if (in == "nan")
+        {
+            if (numeric_limits<double>::has_quiet_NaN)
+            {
+                return numeric_limits<double>::quiet_NaN();
+            }
+            else
+            {
+                return 0.0;
+            }
+        }
+        data_error e("Expected a real number, but got \""+in+"\"");
+        throw e;
+    }
+    if (myDouble == DBL_MAX)
+    {
+        data_error e("The value \""+in+"\" is greater than DBL_MAX ("+Pretty(DBL_MAX) + ").");
+        throw e;
+    }
+    if (myDouble == NEGMAX)
+    {
+        data_error e("The value \""+in+"\" is less than NEGMAX  (" + Pretty(NEGMAX) + ").");
+        throw e;
+    }
+    return myDouble;
+} // ProduceDoubleOrBarf
+
+//------------------------------------------------------------------------------------
+
+verbosity_type ProduceVerbosityTypeOrBarf(const string& s)
+{
+    string st = s;
+    LowerCase(st);
+    if (st == "concise") { return CONCISE; };
+    if (st == "normal") { return NORMAL; };
+    if (st == "verbose") { return VERBOSE; };
+    if (st == "none") { return NONE; };
+    data_error e("Illegal verbosity setting \""+s+"\"");
+    throw e;
+    return NONE;
+}
+
+//------------------------------------------------------------------------------------
+
+bool StringMatchesGrowthType(const string& input, growth_type type)
+{
+    string lowerCaseInput = input;
+    LowerCase(lowerCaseInput);
+
+    string lowerCaseGrowthType = ToString(type,false);
+    LowerCase(lowerCaseGrowthType);
+
+    if(lowerCaseGrowthType == lowerCaseInput)
+    {
+        return true;
+    }
+    else
+    {
+        lowerCaseGrowthType = ToString(type,true);
+        LowerCase(lowerCaseGrowthType);
+        if(lowerCaseGrowthType == lowerCaseInput)
+        {
+            return true;
+        }
+    }
+
+    return false;
+
+}
+
+growth_type ProduceGrowthTypeOrBarf(const string& input)
+{
+
+    if(StringMatchesGrowthType(input,growth_CURVE)) return growth_CURVE;
+    if(StringMatchesGrowthType(input,growth_STICK)) return growth_STICK;
+    if(StringMatchesGrowthType(input,growth_STICKEXP)) return growth_STICKEXP;
+
+    data_error e("Illegal growth-type setting \""+input+"\"");
+    throw e;
+    return growth_CURVE;
+}
+
+//------------------------------------------------------------------------------------
+
+bool StringMatchesGrowthScheme(const string& input, growth_scheme scheme)
+{
+    string lowerCaseInput = input;
+    LowerCase(lowerCaseInput);
+
+    string lowerCaseGrowthScheme = ToString(scheme,false);
+    LowerCase(lowerCaseGrowthScheme);
+
+    if(lowerCaseGrowthScheme == lowerCaseInput)
+    {
+        return true;
+    }
+    else
+    {
+        lowerCaseGrowthScheme = ToString(scheme,true);
+        LowerCase(lowerCaseGrowthScheme);
+        if(lowerCaseGrowthScheme == lowerCaseInput)
+        {
+            return true;
+        }
+    }
+
+    return false;
+
+}
+
+growth_scheme ProduceGrowthSchemeOrBarf(const string& input)
+{
+
+    if(StringMatchesGrowthScheme(input,growth_EXP)) return growth_EXP;
+    if(StringMatchesGrowthScheme(input,growth_STAIRSTEP)) return growth_STAIRSTEP;
+
+    data_error e("Illegal growth-scheme setting \""+input+"\"");
+    throw e;
+    return growth_EXP;
+}
+
+//------------------------------------------------------------------------------------
+
+bool StringMatchesModelType(const string& input, model_type type)
+{
+    string lowerCaseInput = input;
+    LowerCase(lowerCaseInput);
+
+    string lowerCaseModel = ToString(type,false);
+    LowerCase(lowerCaseModel);
+
+    if(lowerCaseModel == lowerCaseInput)
+    {
+        return true;
+    }
+    else
+    {
+        lowerCaseModel = ToString(type,true);
+        LowerCase(lowerCaseModel);
+        if(lowerCaseModel == lowerCaseInput)
+        {
+            return true;
+        }
+    }
+
+    return false;
+
+}
+
+model_type ProduceModelTypeOrBarf(const string& input)
+{
+
+    if(StringMatchesModelType(input,Brownian)) return Brownian;
+    if(StringMatchesModelType(input,F84)) return F84;
+    if(StringMatchesModelType(input,GTR)) return GTR;
+    if(StringMatchesModelType(input,KAllele)) return KAllele;
+    if(StringMatchesModelType(input,Stepwise)) return Stepwise;
+    if(StringMatchesModelType(input,MixedKS)) return MixedKS;
+
+    data_error e("Illegal model-type setting \""+input+"\"");
+    throw e;
+    return F84;
+}
+
+//------------------------------------------------------------------------------------
+
+bool StringMatchesForceType(const string& input, force_type type)
+{
+    string lowerCaseInput = input;
+    LowerCase(lowerCaseInput);
+
+    string lowerCaseForce = ToString(type);
+    LowerCase(lowerCaseForce);
+
+    string lowerCaseShortForce = ToShortString(type);
+    LowerCase(lowerCaseShortForce);
+
+    if((lowerCaseForce == lowerCaseInput) ||
+       (lowerCaseShortForce == lowerCaseInput))
+    {
+        return true;
+    }
+    return false;
+
+}
+
+force_type ProduceForceTypeOrBarf(const string& input)
+{
+    if(StringMatchesForceType(input,force_COAL)) return force_COAL;
+    if(StringMatchesForceType(input,force_DISEASE)) return force_DISEASE;
+    if(StringMatchesForceType(input,force_GROW)) return force_GROW;
+    if(StringMatchesForceType(input,force_MIG)) return force_MIG;
+    if(StringMatchesForceType(input,force_DIVMIG)) return force_DIVMIG;
+    if(StringMatchesForceType(input,force_REC)) return force_REC;
+    if(StringMatchesForceType(input,force_REGION_GAMMA)) return force_REGION_GAMMA;
+    if(StringMatchesForceType(input,force_DIVERGENCE)) return force_DIVERGENCE;
+
+    data_error e("Illegal force-type setting \""+input+"\"");
+    throw e;
+    return force_COAL;
+}
+
+bool StringMatchesMethodType(const string& input, method_type type)
+{
+    if (input == "-" && type == method_PROGRAMDEFAULT)
+    {
+        return true;
+    }
+
+    string lowerCaseInput = input;
+    LowerCase(lowerCaseInput);
+
+    string lowerCaseMethod = ToString(type,false);
+    LowerCase(lowerCaseMethod);
+    if(lowerCaseMethod == lowerCaseInput)
+    {
+        return true;
+    }
+
+    lowerCaseMethod = ToString(type,true);
+    LowerCase(lowerCaseMethod);
+    if(lowerCaseMethod == lowerCaseInput)
+    {
+        return true;
+    }
+
+    return false;
+}
+
+method_type ProduceMethodTypeOrBarf(const string & input)
+{
+    if(StringMatchesMethodType(input,method_FST)) return method_FST;
+    if(StringMatchesMethodType(input,method_PROGRAMDEFAULT)) return method_PROGRAMDEFAULT;
+    if(StringMatchesMethodType(input,method_USER)) return method_USER;
+    if(StringMatchesMethodType(input,method_WATTERSON)) return method_WATTERSON;
+
+    data_error e("Illegal method-type setting \""+input+"\"");
+    throw e;
+    return method_USER;
+}
+
+//------------------------------------------------------------------------------------
+
+bool StringMatchesProftype(const string& input, proftype type)
+{
+    if(input == "-" && type == profile_NONE) return true;
+
+    string lowerCaseInput = input;
+    LowerCase(lowerCaseInput);
+
+    string lowerCaseMethod = ToString(type);
+    LowerCase(lowerCaseMethod);
+
+    if(lowerCaseMethod == lowerCaseInput)
+    {
+        return true;
+    }
+    return false;
+}
+
+proftype ProduceProftypeOrBarf(const string& input)
+{
+    if(StringMatchesProftype(input,profile_PERCENTILE)) return profile_PERCENTILE;
+    if(StringMatchesProftype(input,profile_FIX)) return profile_FIX;
+    if(StringMatchesProftype(input,profile_NONE)) return profile_NONE;
+
+    data_error e("Illegal profile type setting \""+input+"\"");
+    throw e;
+    return profile_NONE;
+}
+
+ProftypeVec1d ProduceProftypeVec1dOrBarf(const string& input)
+{
+    StringVec1d stringVec;
+    ProftypeVec1d returnVal;
+    bool divided = FromString(input,stringVec);
+    if(divided)
+    {
+        StringVec1d::iterator i;
+        for(i=stringVec.begin(); i != stringVec.end(); i++)
+        {
+            proftype p = ProduceProftypeOrBarf(*i);
+            returnVal.push_back(p);
+        }
+
+        return returnVal;
+    }
+
+    data_error e("Error:  empty vector for the '<profiles>' tag.");
+    throw e;
+}
+
+//------------------------------------------------------------------------------------
+
+bool StringMatchesParamstatus(const string& input, pstatus type)
+{
+    if(input == "-" && type == pstat_invalid) return true;
+
+    string lowerCaseInput = input;
+    LowerCase(lowerCaseInput);
+
+    string lowerCaseMethod = ToString(type);
+    LowerCase(lowerCaseMethod);
+
+    if(lowerCaseMethod == lowerCaseInput)
+    {
+        return true;
+    }
+    return false;
+}
+
+ParamStatus ProduceParamstatusOrBarf(const string& input)
+{
+    if(StringMatchesParamstatus(input,pstat_invalid)) return ParamStatus(pstat_invalid);
+    if(StringMatchesParamstatus(input,pstat_unconstrained)) return ParamStatus(pstat_unconstrained);
+    if(StringMatchesParamstatus(input,pstat_constant)) return ParamStatus(pstat_constant);
+    if(StringMatchesParamstatus(input,pstat_identical)) return ParamStatus(pstat_identical);
+    if(StringMatchesParamstatus(input,pstat_identical_head)) return ParamStatus(pstat_identical_head);
+    if(StringMatchesParamstatus(input,pstat_multiplicative)) return ParamStatus(pstat_multiplicative);
+    if(StringMatchesParamstatus(input,pstat_multiplicative_head)) return ParamStatus(pstat_multiplicative_head);
+
+    data_error e("Illegal parameter constraint type \""+input+"\"");
+    throw e;
+}
+
+vector <ParamStatus> ProduceParamstatusVec1dOrBarf(const string& input)
+{
+    StringVec1d stringVec;
+    vector <ParamStatus> returnVal;
+    bool divided = FromString(input,stringVec);
+    if(divided)
+    {
+        StringVec1d::iterator i;
+        for(i=stringVec.begin(); i != stringVec.end(); i++)
+        {
+            ParamStatus p = ProduceParamstatusOrBarf(*i);
+            returnVal.push_back(p);
+        }
+
+        return returnVal;
+    }
+
+    throw data_error("Error:  empty vector for the '<constraints>' tag.");
+}
+
+MethodTypeVec1d ProduceMethodTypeVec1dOrBarf(const string& input)
+{
+    StringVec1d stringVec;
+    MethodTypeVec1d returnVal;
+    bool divided = FromString(input,stringVec);
+    if(divided)
+    {
+        StringVec1d::iterator i;
+        for(i=stringVec.begin(); i != stringVec.end(); i++)
+        {
+            method_type p = ProduceMethodTypeOrBarf(*i);
+            returnVal.push_back(p);
+        }
+
+        return returnVal;
+    }
+
+    throw data_error("Error:  empty vector for the '<method>' tag.");
+    return returnVal;
+}
+
+DoubleVec1d ProduceDoubleVec1dOrBarf(const string& input)
+{
+    StringVec1d stringVec;
+    DoubleVec1d returnVal;
+    bool divided = FromString(input,stringVec);
+    if(divided)
+    {
+        StringVec1d::iterator i;
+        for(i=stringVec.begin(); i != stringVec.end(); i++)
+        {
+            double p = ProduceDoubleOrBarf(*i);
+            returnVal.push_back(p);
+        }
+
+        return returnVal;
+    }
+
+    data_error e("Error:  empty vector for input that required numbers.");
+    throw e;
+}
+
+LongVec1d ProduceLongVec1dOrBarf(const string& input)
+{
+    StringVec1d stringVec;
+    LongVec1d returnVal;
+    bool divided = FromString(input,stringVec);
+    if(divided)
+    {
+        StringVec1d::iterator i;
+        for(i=stringVec.begin(); i != stringVec.end(); i++)
+        {
+            long int p = ProduceLongOrBarf(*i);
+            returnVal.push_back(p);
+        }
+
+        return returnVal;
+    }
+
+    //An empty vector here is fine.
+    return returnVal;
+
+}
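+
+// Illustrative usage sketch (not part of the library): the Produce*Vec1dOrBarf
+// helpers split the input with FromString() and run the scalar parser on each
+// token, so a whitespace-separated list is the expected form (assumption:
+// FromString tokenizes on whitespace; its definition lives elsewhere in stringx).
+//
+//     DoubleVec1d thetas = ProduceDoubleVec1dOrBarf("0.01 0.02 0.05");
+//     LongVec1d   counts = ProduceLongVec1dOrBarf("10 20 40");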
+
+//------------------------------------------------------------------------------------
+
+bool StringMatchesPriorType(const string& input, priortype type)
+{
+    string lowerCaseInput = input;
+    LowerCase(lowerCaseInput);
+
+    string lowerCaseMethod = ToString(type);
+    LowerCase(lowerCaseMethod);
+
+    if(lowerCaseMethod == lowerCaseInput)
+    {
+        return true;
+    }
+    return false;
+}
+
+priortype ProducePriorTypeOrBarf(const string& input)
+{
+    if(StringMatchesPriorType(input,LINEAR)) return LINEAR;
+    if(StringMatchesPriorType(input,LOGARITHMIC)) return LOGARITHMIC;
+    data_error e("Illegal parameter constraint type \""+input+"\"");
+    throw e;
+} // ProducePriorTypeOrBarf
+
+//------------------------------------------------------------------------------------
+
+bool StringMatchesSelectionType(const string& input, selection_type type)
+{
+    string lowerCaseInput = input;
+    LowerCase(lowerCaseInput);
+
+    string lowerCaseSelectionType = ToString(type,false);
+    LowerCase(lowerCaseSelectionType);
+
+    if(lowerCaseSelectionType == lowerCaseInput)
+    {
+        return true;
+    }
+    else
+    {
+        lowerCaseSelectionType = ToString(type,true);
+        LowerCase(lowerCaseSelectionType);
+        if(lowerCaseSelectionType == lowerCaseInput)
+        {
+            return true;
+        }
+    }
+
+    return false;
+
+}
+
+selection_type ProduceSelectionTypeOrBarf(const string& input)
+{
+
+    if(StringMatchesSelectionType(input,selection_DETERMINISTIC))
+    {
+        return selection_DETERMINISTIC;
+    }
+    if(StringMatchesSelectionType(input,selection_STOCHASTIC))
+    {
+        return selection_STOCHASTIC;
+    }
+
+    data_error e("Illegal selection-type setting \""+input+"\"");
+    throw e;
+    return selection_DETERMINISTIC;
+
+}
+
+//------------------------------------------------------------------------------------
+
+// this replaces the cin.getline() function, which is broken
+// in macosx gcc 3.1 and other 3.1 versions and which is
+// fixed in gcc >= 3.1.1, but macosx might not fix this for
+// quite a while
+// PB Sep 2002
+void MyCinGetline(string & sline)
+{
+    char ch=cin.get();
+    // this while loop should be capable of working with
+    // windows, mac, and unix EOL style characters
+    while(!strchr("\r\n",ch))
+    {
+        sline += ch;
+        ch = cin.get();
+
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+char getFirstInterestingChar(const string & input)
+{
+    const char * str = input.c_str();
+    int length = input.length();
+    for(int i = 0; i < length; i++)
+    {
+        if (isalnum(str[i]))
+        {
+            return toupper(str[i]);
+        }
+    }
+    return '\0';
+}
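+
+// Example: getFirstInterestingChar("  --help") returns 'H', the first
+// alphanumeric character uppercased; '\0' means no such character was found.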
+
+//------------------------------------------------------------------------------------
+
+string cwdString()
+{
+    for(int bufSize=100;;bufSize*=2)
+    {
+#ifdef LAMARC_COMPILE_MSWINDOWS
+        // NOTE: used to be _getcwd for older mingw cross-compiles
+        char * shouldNotBeNull = getcwd(NULL,bufSize);
+        if(shouldNotBeNull != NULL)
+        {
+            string thepath(shouldNotBeNull);
+            free(shouldNotBeNull);
+            return thepath;
+        }
+#else
+        char * cwdBuf = new char[bufSize];
+        char * shouldNotBeNull = getcwd(cwdBuf,bufSize);
+        if(shouldNotBeNull != NULL)
+        {
+            string thepath(cwdBuf);
+            delete [] cwdBuf;   // new[]'d buffer: delete[], not free(), and only after copying
+            return thepath;
+        }
+        delete [] cwdBuf;
+#endif
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void StripLeadingSpaces(string & st)
+{
+    while(isspace(st[0]))
+    {
+        st.erase(0,1);
+    }
+}
+void StripTrailingSpaces(string & st)
+{
+    while(isspace(st[st.length()-1]))
+    {
+        st.erase(st.length()-1,1);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+string SpacesToUnderscores(const string& st)
+{
+    string newstring(st);
+    char underscore('_');
+    replace_if(newstring.begin(),newstring.end(),ptr_fun<int,int>(isspace),underscore);
+    return newstring;
+}
+
+//------------------------------------------------------------------------------------
+
+string indexToKey(long int index)
+{
+    return ToString(index+1);
+}
+
+long int keyToIndex(string key)
+{
+    return -1+ProduceLongOrBarf(key);
+}
+
+//------------------------------------------------------------------------------------
+
+string buildName(const string& prefix, const string & delim, size_t digits, size_t value)
+{
+    assert(digits < 10);        // EWFIX.P3.CONSTANTS
+    assert((10^digits) > value);  // EWFIX.P3
+    char fmtString[8];          // EWFIX.P3.CONSTANTS
+    sprintf (fmtString, "%%s%%s%%0%dd", (int)digits);
+
+    size_t sizeNeeded = prefix.length()+delim.length()+digits+1;
+    char * spaceToBuild = new char[sizeNeeded];
+
+    sprintf(spaceToBuild,fmtString,prefix.c_str(),delim.c_str(),value);
+
+    string retVal(spaceToBuild);
+    free(spaceToBuild);
+    return retVal;
+}
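+
+// Illustrative example (not called anywhere here): buildName("region", "_", 3, 7)
+// yields "region_007", i.e. <prefix><delim><value zero-padded to 'digits' places>.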
+
+//____________________________________________________________________________________
diff --git a/src/tools/stringx.h b/src/tools/stringx.h
new file mode 100644
index 0000000..35ece72
--- /dev/null
+++ b/src/tools/stringx.h
@@ -0,0 +1,293 @@
+// $Id: stringx.h,v 1.78 2014/08/29 19:29:20 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef STRINGX_H
+#define STRINGX_H
+
+#include <cmath>
+#include <functional>
+#include <iomanip>
+#include <iostream>
+#include <set>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "arranger_types.h"
+#include "constants.h"
+#include "defaults.h"
+#include "types.h"
+#include "ui_id.h"
+#include "vectorx.h"
+
+// needed in .h because of template functions
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+class ParamVector;
+class ParamStatus;
+
+const long DEFLINELENGTH=75;
+const long DEFWIDTH=8;
+const long DEFINDENT=0;
+const long LINEFEED=10;
+
+void   UpperCase(std::string&);
+void   LowerCase(std::string&);
+std::string ToString(char);
+std::string ToString(int);
+std::string ToString(long);
+std::string ToString(long long);
+std::string ToString(double);
+std::string ToDecimalString(double);
+std::string ToString(unsigned int);
+std::string ToString(unsigned long);
+std::string ToString(unsigned long long);
+std::string ToString(bool);
+std::string ToString(data_source);
+std::string ToString(force_type);
+std::string ToShortString(force_type);
+std::string ToString(growth_type gType, bool getLongName = false);
+std::string ToString(growth_scheme gScheme, bool getLongName = false);
+std::string ToString(method_type method, bool getLongName = false);
+std::string ToString(model_type model, bool getLongName = false);
+std::string ToString(noval val);
+std::string ToString(paramlistcondition par);
+std::string ToString(proftype prof);
+std::string ToString(priortype ptype);
+std::string ToString(pstatus pstat);
+std::string ToString(const ParamStatus& pstat);
+std::string ToString(selection_type sType, bool getLongName = false);
+std::string ToString(std::string);
+std::string ToString(verbosity_type);
+// specialization necessary since these three things are enums!
+std::string ToString(std::vector<method_type> meths);
+std::string ToString(std::vector<proftype> profs);
+std::string ToString(std::vector<ParamStatus> pstats);
+
+std::string ToString(ParamVector&); // bogus -- throws implementation_error
+
+std::string ToStringTF(bool);  // ToString variant that returns
+// "true" or "false" rather than
+// "yes" or "no"
+std::string ToString(UIId);
+std::string ToString(std::vector<UIId> uiids);
+std::string ToString(std::vector<std::vector<UIId> > uiids);
+
+bool CaselessStrCmp(const std::string& lhs, const std::string& rhs);
+bool CaselessStrCmp(const std::string& lhs, const char* rhs);
+bool CaselessStrCmp(const char* lhs, const std::string& rhs);
+bool CaselessStrCmp(const char* lhs, const char* rhs);
+
+//------------------------------------------------------------------------------------
+
+// Scott Meyers' case-insensitive string comparison code
+
+long ciCharCompare(char c1, char c2);
+long ciStringCompareImpl(const std::string& s1, const std::string& s2);
+long ciStringCompare(const std::string& s1, const std::string& s2);
+bool ciCharLess(char c1, char c2);
+// Scott called the following ciStringCompare but then there are
+// two functions of that name....
+bool ciStringLess(const std::string& s1, const std::string& s2);
+
+struct CIStringCompare : public std::binary_function<std::string, std::string, bool>
+{
+    bool operator()(const std::string& lhs, const std::string& rhs) const
+    { return ciStringLess(lhs, rhs); }
+}; // CIStringCompare struct
+
+bool ciStringEqual(const std::string& s1, const std::string& s2);
+
+//------------------------------------------------------------------------------------
+
+template <class T>
+std::string ToString(double number, int decimals)
+{
+    std::ostringstream ostr;
+    ostr.precision(decimals);
+    ostr << " " << number;
+    std::string s(ostr.str());
+    return s;
+}
+
+template <class T>
+std::string ToString(vector <T> number, int decimals)
+{
+    std::ostringstream ostr;
+    ostr.precision(decimals);
+    ostr.unsetf(std::ios::scientific);
+    typename vector <T> :: const_iterator nit;
+    for(nit=number.begin(); nit!=number.end(); nit++)
+    {
+        ostr << " " << *nit;
+    }
+#if 0
+    ostr << std::ends;
+#endif
+    std::string s(ostr.str());
+    return s;
+}
+
+template <class T>
+std::string ToString(vector <T> number)
+{
+    int decimals = 2;
+    return ToString(number, decimals);
+}
+
+template <class T>
+std::vector < string >  VecElemToString(vector <T> number)
+{
+    std::vector < string > svec;
+    typename std::vector <T> :: iterator i;
+    for(i=number.begin(); i != number.end(); i++)
+        svec.push_back(ToString(*i));
+    return svec;
+}
+
+template <class T>
+std::vector < string >  MultisetElemToString(std::multiset <T> inputs)
+{
+    std::vector < string > svec;
+    typename std::multiset <T> :: iterator i;
+    for(i=inputs.begin(); i != inputs.end(); i++)
+        svec.push_back(ToString(*i));
+    return svec;
+}
+
+std::string Pretty(double number, int width=DEFWIDTH);
+std::string Pretty(long number, int width=DEFWIDTH);
+std::string Pretty(unsigned long number, int width=DEFWIDTH);
+std::string Pretty(std::string str, int width=DEFWIDTH);
+
+template <class T>
+std::vector < string >  VecElemToString(vector <T> number, int width)
+{
+    std::vector < string > svec;
+    typename std::vector <T> :: iterator i;
+    for(i=number.begin(); i != number.end(); i++)
+        svec.push_back(Pretty(*i, width));
+    return svec;
+}
+
+// careful: was not able to link this function
+// when its name was DoubleVecFromString(..)
+bool FromString(const std::string & in, DoubleVec1d & out);
+bool FromString(const std::string & in, LongVec1d & out);
+bool FromString(const std::string & in, MethodTypeVec1d & out);
+bool FromString(const std::string & in, ProftypeVec1d & out);
+bool FromString(const std::string & in, StringVec1d & out);
+bool FromString(const std::string & in, ParamVector & out); // bogus, throws
+bool FromString(const std::string & in, long& out);
+bool FromString(const std::string & in, double& out);
+bool FromString(const std::string & in, method_type& out);
+
+DoubleVec1d StringToDoubleVecOrBarf(const std::string& in);
+
+// functions for making xmltags and lines of xml
+std::string MakeTag(const std::string& str);
+std::string MakeCloseTag(const std::string& str);
+std::string MakeTagWithName(const std::string& tag, const std::string& name);
+std::string MakeTagWithType(const std::string& tag, const std::string& type);
+std::string MakeTagWithTypePlusPanel(const std::string& tag, const std::string& type);
+std::string MakeTagWithConstraint(const std::string& tag, const std::string& constraint);
+std::string MakeJustified(const std::string & str, long width=DEFLINELENGTH);
+std::string MakeJustified(const char* str, long width=DEFLINELENGTH);
+std::string MakeCentered(const std::string& str, long width=DEFLINELENGTH,
+                         long indent=DEFINDENT, bool trunc=true);
+std::string MakeCentered(const char* str, long width=DEFLINELENGTH,
+                         long indent=DEFINDENT, bool trunc=true);
+std::string MakeIndent(const std::string & str, unsigned long indent);
+std::string MakeIndent(const char* str, unsigned long indent);
+
+bool   StringCompare(const std::string&, const char*, long, long);
+bool   StringCompare(const std::string&, const std::string&, long, long);
+
+bool   CompareWOCase(const std::string& str1, const std::string& str2);
+bool   CompareWOCase(const char* str1, const std::string& str2);
+bool   CompareWOCase(const std::string& str1, const char* str2);
+bool   CompareWOCase(const char* str1, const char* str2);
+
+int    StringType(const std::string&);
+std::string::size_type   GetCharacter(const std::string&, char&, long);
+
+// free functions used to wrap tables
+vector<std::string> Linewrap(vector<std::string> invec, long linelength = DEFLINELENGTH);
+vector<std::string> Linewrap(std::string instring,long linelength, long indent);
+vector<std::string> LinewrapCopy(vector<std::string> original, long repeat_length, long total_length = DEFLINELENGTH);
+bool           IsSeparator(const std::string s);
+
+template<class T> bool InBounds(T val, T lower, T upper);
+
+bool IsInteger(const std::string& src);
+bool IsReal(const std::string& src);
+
+template<class T>
+bool InBounds(T val, T lower, T upper)
+{
+    return (lower <= val && val <= upper);
+} // InBounds
+
+bool ProduceBoolOrBarf(const std::string&);
+double ProduceDoubleOrBarf(const std::string&);
+long ProduceLongOrBarf(const std::string&);
+verbosity_type ProduceVerbosityTypeOrBarf(const std::string&);
+bool StringMatchesGrowthType(const std::string&, growth_type);
+growth_type ProduceGrowthTypeOrBarf(const std::string&);
+growth_scheme ProduceGrowthSchemeOrBarf(const std::string&);
+bool StringMatchesModelType(const std::string&, model_type);
+model_type ProduceModelTypeOrBarf(const std::string&);
+bool StringMatchesMethodType(const std::string&, method_type);
+method_type ProduceMethodTypeOrBarf(const std::string&);
+bool StringMatchesForceType(const std::string&, force_type);
+force_type ProduceForceTypeOrBarf(const std::string&);
+bool StringMatchesProftype(const std::string&, method_type);
+proftype ProduceProftypeOrBarf(const std::string&);
+ProftypeVec1d ProduceProftypeVec1dOrBarf(const std::string&);
+bool StringMatchesParamstatus(const std::string&, pstatus type);
+ParamStatus ProduceParamstatusOrBarf(const std::string&);
+vector < ParamStatus > ProduceParamstatusVec1dOrBarf(const std::string&);
+MethodTypeVec1d ProduceMethodTypeVec1dOrBarf(const std::string&);
+bool StringMatchesSelectionType(const std::string&, selection_type);
+selection_type ProduceSelectionTypeOrBarf(const std::string&);
+DoubleVec1d ProduceDoubleVec1dOrBarf(const std::string&);
+LongVec1d ProduceLongVec1dOrBarf(const std::string&);
+bool StringMatchesPriortype(const std::string&, priortype type);
+priortype ProducePriorTypeOrBarf(const std::string&);
+
+// hack to replace a problem with cin.getline() [see cpp for more detail]
+void MyCinGetline(std::string & sline);
+
+char getFirstInterestingChar(const string & input);
+
+// get a string containing the current working directory
+std::string cwdString();
+
+void StripLeadingSpaces(std::string &);
+void StripTrailingSpaces(std::string &);
+
+std::string SpacesToUnderscores(const std::string &);
+
+// this does ToString(index+1), and you should use this
+// version instead, in case we ever get users clamoring
+// to have printed indices start with 0
+string indexToKey(long index);
+
+// the reverse of indexToKey
+long   keyToIndex(string key);
+
+// build a name of the form <name>_#####
+std::string buildName(const std::string & prefix, const std::string & delim, size_t digits, size_t value);
+
+#endif // STRINGX_H
+
+//____________________________________________________________________________________
diff --git a/src/tools/timex.cpp b/src/tools/timex.cpp
new file mode 100644
index 0000000..0871133
--- /dev/null
+++ b/src/tools/timex.cpp
@@ -0,0 +1,69 @@
+// $Id: timex.cpp,v 1.6 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "timex.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+/*===============================================
+  timer utilities <Peter Beerli>
+
+  ts = "%c" -> time + full date (see man strftime)
+  = "%H:%M:%S" -> time hours:minutes:seconds
+
+  This is C masquerading as C++, since it has to interface
+  with C utilities.  --Mary
+
+  WARNING!  Make sure that time.h (and its time_t type) is available before calling these!
+*/
+
+string PrintTime (const time_t mytime, const string format)
+{
+    const int arraylength = 80;
+
+#ifdef NOTIME_FUNC
+    // The system does not provide a clock so we return blanks.
+    string tempstring;
+    tempstring.assign(format.size(),' ');
+    return(tempstring);
+#endif
+
+    struct tm *nowstruct;
+    char temparray[arraylength];
+    if (mytime != (time_t) - 1)  // invalid time marked as -1
+    {
+        nowstruct = localtime (&mytime);
+        strftime (temparray, arraylength, format.c_str(), nowstruct);
+        return string(temparray);
+    }
+    else
+    {                                   // time returned is invalid
+        string tempstring;              // so we return blanks
+        tempstring.assign(format.size(),' ');
+        return(tempstring);
+    }
+} // PrintTime
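+
+// Illustrative usage sketch: PrintTime(GetTime()) formats the current
+// wall-clock time as "HH:MM:SS" via the default format declared in timex.h;
+// any strftime() format may be passed instead, e.g. PrintTime(GetTime(), "%c")
+// for time plus full date.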
+
+//------------------------------------------------------------------------------------
+
+time_t GetTime ()
+{
+#ifdef NOTIME_FUNC
+    // The system does not provide a clock.
+    return((time_t)-1);
+#else
+    // Return the "real" time.
+    return(time(NULL));
+#endif
+} // GetTime
+
+//____________________________________________________________________________________
diff --git a/src/tools/timex.h b/src/tools/timex.h
new file mode 100644
index 0000000..8ed1606
--- /dev/null
+++ b/src/tools/timex.h
@@ -0,0 +1,37 @@
+// $Id: timex.h,v 1.7 2011/04/23 02:02:49 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef TIMEX_H
+#define TIMEX_H
+
+#include <cmath>
+#include <ctime>
+#include <iostream>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "vectorx.h"
+#include "stringx.h"
+
+/*****************************************************************
+ These functions handle time information.  GetTime retrieves the
+ current time as a time_t (seconds since the epoch).  PrintTime
+ turns a time_t into a formatted string.
+ Mary Kuhner (based on code of Peter Beerli) October 2000
+*****************************************************************/
+
+string PrintTime(const time_t mytime, const string format = "%H:%M:%S");
+
+time_t GetTime();
+
+#endif // TIMEX_H
+
+//____________________________________________________________________________________
diff --git a/src/tools/tools.h b/src/tools/tools.h
new file mode 100644
index 0000000..d473a7e
--- /dev/null
+++ b/src/tools/tools.h
@@ -0,0 +1,22 @@
+// $Id: tools.h,v 1.4 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// This is just a catch-all include file for all of the program
+//    specific extensions to the standard library
+
+#ifndef TOOLS_H
+#define TOOLS_H
+
+#include "stringx.h"
+#include "vectorx.h"
+
+#endif // TOOLS_H
+
+//____________________________________________________________________________________
diff --git a/src/tools/vector_constants.cpp b/src/tools/vector_constants.cpp
new file mode 100644
index 0000000..2720771
--- /dev/null
+++ b/src/tools/vector_constants.cpp
@@ -0,0 +1,68 @@
+// $Id: vector_constants.cpp,v 1.17 2010/03/17 17:25:59 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// vector constants, used for profile tables
+// Peter Beerli December 2002 modelled after Elizabeth Walkup's ui_string.[h][cpp] class
+
+#include "vector_constants.h"
+
+using std::vector;
+
+// vectors assigned here should be defined in vector_consts.h as
+// public static const DoubleVec1d (etc.)  members of class Vecconst
+// data file menu
+
+const long P = 10;
+const long GF = 8;
+const long LSF = 7; // logistic selection
+const long SHORT = 2;
+
+//PERC must be in order, and must go from lowest to highest.  It should also
+// not include 0.5, since that means nothing to profiling--it assumes a
+// symmetric likelihood surface.
+const double PERC[P] = {0.005, 0.025, 0.05, 0.125, 0.25, 0.75, 0.875, 0.95, 0.975, 0.995};
+const double SHORTPERC[SHORT] = {0.025, 0.975};
+const double SHORTMULT[SHORT] = {0.1, 10.0};
+const double GFIXED[GF] = {-100.0, -10.0, 0.0, 10.0, 25.0, 100.0, 250.0, 1000.0};
+const double LSFIXED[LSF] = {-0.05, -0.005, 0.0, 0.005, 0.01, 0.1, 1.0};
+
+#ifdef NDEBUG
+const long M = 8;
+const long G = 4;
+const long LS = 4;
+const double MULT[M] = {0.1, 0.2, 0.5, 0.75, 2.0, 5.0, 7.5, 10.0};
+const double GMULT[G] = {0.5, 0.66666666667, 1.5, 2.0};
+const double LSMULT[LS] = {0.5, 0.66666666667, 1.5, 2.0};
+#else //add '1.0' to the multiplier list.
+const long M = 9;
+const long G = 5;
+const long LS = 5;
+const double MULT[M] = {0.1, 0.2, 0.5, 0.75, 1.0, 2.0, 5.0, 7.5, 10.0};
+const double GMULT[G] = {0.5, 0.66666666667, 1.0, 1.5, 2.0};
+const double LSMULT[LS] = {0.5, 0.66666666667, 1.0, 1.5, 2.0};
+#endif
+
+const vector <double> vecconst::percentiles(PERC, PERC + P);
+vector <double> vecconst::multipliers(MULT, MULT + M);
+vector <double> vecconst::growthmultipliers(GMULT, GMULT + G);
+const vector <double> vecconst::growthfixed(GFIXED, GFIXED + GF);
+
+vector <double> vecconst::logisticselectionmultipliers(LSMULT, LSMULT + LS);
+const vector <double> vecconst::logisticselectionfixed(LSFIXED, LSFIXED + LSF);
+
+const vector <double> vecconst::percentiles_short(SHORTPERC, SHORTPERC + SHORT);
+const vector <double> vecconst::multipliers_short(SHORTMULT, SHORTMULT + SHORT);
+
+// The various _short vectors are used instead of the long ones in
+// force/forcesummary.cpp if CONCISE output file report is indicated by the
+// user.  This prevents excess calculations by the profiler, and ensures that
+// the reporter works, too.
+
+//____________________________________________________________________________________
diff --git a/src/tools/vector_constants.h b/src/tools/vector_constants.h
new file mode 100644
index 0000000..1cec9dc
--- /dev/null
+++ b/src/tools/vector_constants.h
@@ -0,0 +1,38 @@
+// $Id: vector_constants.h,v 1.11 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+// modelled after ui_interface/ui_strings.h
+
+#ifndef VECTOR_CONSTS_H
+#define VECTOR_CONSTS_H
+
+#include <vector>
+#include "vectorx.h"
+
+using std::vector;
+
+// values for the static const vectors below are set in vector_constants.cpp
+class vecconst
+{
+  public:
+    static const DoubleVec1d percentiles;
+    static  DoubleVec1d multipliers;
+    static  DoubleVec1d growthmultipliers;
+    static const DoubleVec1d growthfixed;
+    static  DoubleVec1d logisticselectionmultipliers;
+    static const DoubleVec1d logisticselectionfixed;
+    static const DoubleVec1d percentiles_short;
+    static const DoubleVec1d multipliers_short;
+};
+
+#endif // VECTOR_CONSTS_H
+
+//____________________________________________________________________________________
diff --git a/src/tools/vectorx.cpp b/src/tools/vectorx.cpp
new file mode 100644
index 0000000..a7c94fd
--- /dev/null
+++ b/src/tools/vectorx.cpp
@@ -0,0 +1,62 @@
+// $Id: vectorx.cpp,v 1.14 2011/04/23 02:02:49 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <cmath>
+
+#include "vectorx.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+// maintenance functions
+
+// forward declarations of helper functions for the maintenance functions
+double logsave(const double value);
+double log0(const double value);
+
+void LogVec0(const vector<double> &in, vector<double> &out)
+{
+    assert(in.size() == out.size());
+    transform(in.begin(),in.end(),out.begin(),log0);
+}
+
+// helper function that keeps the structural zeros intact
+// used in LogVec0() in  PostLike::Setup0()
+double log0(const double value)
+{
+    return ((value > 0.0) ? log(value) : 0.0);
+}
+
+long LongSquareRootOfLong(long shouldBeASquare)
+{
+    // this is a funny-looking way to take a square root,
+    // necessitated by the fact that the C library only provides
+    // floating-point square root, and we don't want to deal with
+    // the consequences of rounding error.
+
+    long i;
+
+    for (i = 1; i <= long(shouldBeASquare/2); ++i)
+    {
+        if (i * i == shouldBeASquare)
+        {
+            return i;
+        }
+    }
+    throw ("Attempt to take long square root of non-square");
+}
+
+//____________________________________________________________________________________
+
diff --git a/src/tools/vectorx.h b/src/tools/vectorx.h
new file mode 100644
index 0000000..39c2480
--- /dev/null
+++ b/src/tools/vectorx.h
@@ -0,0 +1,217 @@
+// $Id: vectorx.h,v 1.22 2011/03/07 06:08:51 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// Vector utilities
+//
+// VECTOR SETTERS
+// - Sets n-dimensional vector to an initial value,
+//   assumes that the vector has allocated elements
+// VECTOR CREATION
+// - Creates 1-D and 2-D vectors with an initial value
+// VECTOR TYPES
+// - Typedefs for n-dimensional vectors
+// VECTOR MATH UTILITIES
+// - vector = Log(vector) in save and unsave (sic!) versions of it.
+// VECTOR TRANSMOGRIFIER
+// - turn a 1D into a 2D vector (must be square!)
+
+#ifndef VECTORX_H
+#define VECTORX_H
+
+#include <vector>
+#include <algorithm>
+#include <string>
+#include "constants.h"
+#include "definitions.h"
+#include "ui_id.h"
+
+using std::vector;
+using std::string;
+
+// This should be in mathx.h, except that mathx.h includes
+// this file and we use this function below, so we can't put it there.
+long LongSquareRootOfLong(long x);
+
+class Force;
+
+// VECTOR CREATION ------------------------------------------------------
+//    a set of vector-of-vectors creators:
+//    there is no error checking; these *return* a new vector.
+
+template <class T>
+vector <T>  CreateVec1d(long n, T initial)
+{
+    vector<T> one(static_cast<typename vector<T>::size_type> (n),initial);
+    return one;
+}
+
+template <class T>
+vector < vector <T> > CreateVec2d(long n, unsigned long m, T initial)
+{
+    vector<T> one(static_cast<typename vector<T>::size_type> (m),initial);
+    vector < vector <T> > two(n,one);
+    return two;
+}
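+
+// Illustrative usage (hypothetical variable name): CreateVec2d(3, 4UL, 0.0)
+// returns a 3 x 4 vector of vectors with every element set to 0.0, e.g.
+//
+//     DoubleVec2d migRates = CreateVec2d(3, 4UL, 0.0);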
+
+// VECTOR DEFINITIONS ---------------------------------------------------
+// Typedefs for commonly used multidimensional vectors
+// WARNING:  Some versions of g++ refuse to accept a 4+
+// dimensional vector of vectors unless they have already, in
+// compiling that source file, found a smaller vector.  To work
+// around this bug it may be necessary to declare a dummy vector.
+
+typedef vector<string>        StringVec1d;
+typedef vector<StringVec1d>   StringVec2d;
+typedef vector<StringVec2d>   StringVec3d;
+typedef vector<StringVec3d>   StringVec4d;
+typedef vector<StringVec4d>   StringVec5d;
+
+typedef vector<double>        DoubleVec1d;
+typedef vector<DoubleVec1d>   DoubleVec2d;
+typedef vector<DoubleVec2d>   DoubleVec3d;
+typedef vector<DoubleVec3d>   DoubleVec4d;
+typedef vector<DoubleVec4d>   DoubleVec5d;
+
+typedef vector<Force*>        ForceVec;
+
+typedef vector<long>          LongVec1d;
+typedef vector<LongVec1d>     LongVec2d;
+typedef vector<LongVec2d>     LongVec3d;
+typedef vector<LongVec3d>     LongVec4d;
+typedef vector<LongVec4d>     LongVec5d;
+
+typedef vector<unsigned long> ULongVec1d; //like the tea.
+typedef vector<ULongVec1d>    ULongVec2d;
+typedef vector<ULongVec2d>    ULongVec3d;
+
+typedef vector<int>           IntVec1d;
+typedef vector<IntVec1d>      IntVec2d;
+typedef vector<IntVec2d>      IntVec3d;
+typedef vector<IntVec3d>      IntVec4d;
+typedef vector<IntVec4d>      IntVec5d;
+
+typedef vector<model_type>          ModelTypeVec1d;
+
+typedef vector<method_type>         MethodTypeVec1d;
+typedef vector<MethodTypeVec1d>     MethodTypeVec2d;
+
+typedef vector<proftype>            ProftypeVec1d;
+typedef vector<force_type>          ForceTypeVec1d;
+
+typedef vector<UIId>                UIIdVec1d;
+typedef vector<UIIdVec1d>           UIIdVec2d;
+
+typedef vector<data_source>         DataSourceVec1d;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// VectorAppend() puts "vec2" onto the end of "vec1"
+template <class T>
+vector<T> VectorAppend(const vector<T>& vec1, const vector<T>& vec2)
+{
+    vector<T> vec = vec1;
+    typename vector<T>::const_iterator vit;
+    for(vit = vec2.begin(); vit != vec2.end(); ++vit)
+        vec.push_back(*vit);
+
+    return(vec);
+} // VectorAppend
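+
+// Illustrative example: appending {3, 4} onto {1, 2} yields {1, 2, 3, 4};
+// both arguments are taken by const reference and copied, so neither is modified.
+//
+//     LongVec1d combined = VectorAppend(headSizes, tailSizes);  // hypothetical vectors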
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// VECTOR MATH UTILITIES ------------------------------------------------------
+//
+void LogVec0(const vector<double> &in, vector<double> &out);
+
+//------------------------------------------------------------------------------------
+// This function takes a linear vector and turns it into
+// a square two-dimensional vector.  It will throw an exception
+// if the size of the linear vector is not a perfect square.
+// It assumes that diagonal entries ARE PRESENT.
+//
+// It is templated on the type contained in the vector.
+
+template<class T>
+vector<vector<T> > SquareOffVector(const vector<T>& src)
+{
+    long dim = LongSquareRootOfLong(src.size());
+
+    // convert linear matrix into square matrix
+
+    vector<T> vec1D;
+    vector<vector<T> > vec2D;
+    typename vector<T>::const_iterator it = src.begin();
+
+    vec1D.reserve(dim);                   // for speed
+    vec2D.reserve(dim);
+
+    long i;
+    long j;
+
+    for (i = 0; i < dim; i++)
+    {
+        for (j = 0; j < dim; ++j, ++it)
+        {
+            vec1D.push_back(*it);
+        }
+
+        vec2D.push_back(vec1D);
+        vec1D.clear();
+    }
+
+    return vec2D;
+
+} // SquareOffVector
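+
+// Illustrative example: a 4-element input {a, b, c, d} becomes
+//
+//     { {a, b},
+//       {c, d} }
+//
+// with rows filled left to right, top to bottom; an input whose size is not a
+// perfect square makes LongSquareRootOfLong() throw.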
+
+//------------------------------------------------------------------------------------
+// vector comparison with scalar
+//
+template < class T >
+bool vec_leq(vector < T > v, T comparison)
+{
+    typename vector< T > :: iterator vecit;
+    for(vecit = v.begin(); vecit != v.end(); vecit++)
+    {
+        if(*vecit > comparison)
+            return false;
+    }
+    return true;
+}
+
+template <class T>
+bool vec_greater(vector < T > v, T comparison)
+{
+    typename vector< T > :: iterator vecit;
+    for(vecit = v.begin(); vecit != v.end(); vecit++)
+    {
+        if(*vecit <= comparison)
+            return false;
+    }
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+// convenience wrapper for find
+//
+
+template < class T >
+bool Contains(const std::vector<T>& collection, const T& item)
+{
+    typename std::vector<T>::const_iterator it =
+        std::find(collection.begin(), collection.end(), item);
+    return (it != collection.end());
+}
+
+
+#endif // VECTORX_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/argtree.cpp b/src/tree/argtree.cpp
new file mode 100644
index 0000000..63315bc
--- /dev/null
+++ b/src/tree/argtree.cpp
@@ -0,0 +1,528 @@
+// $Id: argtree.cpp,v 1.6 2012/07/07 02:29:42 bobgian Exp $
+
+/*
+  Copyright 2012  Jim McGill, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>                     // for debug cerr
+#include <map>
+#include <stdio.h>                      // for use of printf in assorted functions
+#include <utility>                      // for make_pair in recombinant
+
+#include "local_build.h"
+
+#include "argtree.h"
+#include "constants.h"                  // for FLAGDOUBLE
+#include "rangex.h"
+#include "registry.h"
+#include "stringx.h"
+#include "tinyxml.h"
+#include "tree.h"
+#include "ui_strings.h"
+#include "xml_strings.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+ARGEdge::ARGEdge(long target, long source)
+    : m_target(target),
+      m_source(source),
+      m_targetptr(),
+      m_partitions(""),
+      m_livesites(""),
+      m_transmittedsites(""),
+      m_targetid(FLAGLONG),
+      m_type(""),
+      m_time(FLAGDOUBLE),
+      m_label(""),
+      m_recloc(FLAGLONG)
+{
+    // intentionally blank
+} // ARGEdge constructor
+
+//------------------------------------------------------------------------------------
+
+FILE* TiXmlFOpen( const char* filename, const char* mode );  // pick this up from tinyxml
+
+void ARGTree::ToLamarcTree(Tree& stump, vector<ARGEdge> argedges)
+{
+    // This is an ugly piece of code, that takes the edge centric translation
+    // of an input ARG tree and grafts it onto an existing Lamarc tree. This is something
+    // Lamarc was never designed to expect, so the process is arcane and convoluted.
+
+#ifndef NDEBUG
+    // debug print of edge list
+    printf("\n****Initial edge list passed into ARGTree::ToLamarcTree****\n");
+    for(size_t edge=0; edge<argedges.size(); edge++)
+    {
+        printf("\nedge: %li  target: %li source: %li\n", edge, (argedges[edge].GetTarget()), argedges[edge].GetSource());
+        printf("Partitions: %s\n", argedges[edge].GetPartitions().c_str());
+        printf("Live sites: %s\n",  argedges[edge].GetLiveSites().c_str());
+        printf("Transmitted sites: %s\n",  argedges[edge].GetTransmittedSites().c_str());
+        printf("Label: %s\n", argedges[edge].GetLabel().c_str());
+        printf("Type: %s  Time: %f\n", argedges[edge].GetType().c_str(), argedges[edge].GetTime());
+        printf("Rec Loc: %li\n", (argedges[edge].GetRecLoc()));
+    }
+#endif
+
+    // target/source connections
+    typedef pair<Branch_ptr, Branch_ptr> targetpair;
+    map<long, targetpair> targetmap;
+    branchpair recbranches;
+
+    // find out what kind of tree is being built
+    bool isRecTree = true;
+    //bool firstRecFound = true;  //JRM Debug
+    Tree* testTree = &stump;
+    RecTree* recTree = dynamic_cast<RecTree*>(testTree);
+    if (recTree == NULL)
+    {
+        isRecTree = false;
+    }
+    m_totalSites = 0;
+
+    // The following is tricky because it depends on the argedge vector being time-ordered.
+    // It will blow up if the ARG tree reading in parsetreetodata is not done right.
+    // All the tips come first because they are at time 0.
+    // The tree is then built rootward in time order, so that at every
+    // step the targetmap will contain the branch being made.
+    // It's elegant but obscure.
+    Branch_ptr newbranch;
+    bool forcefound;
+    bool recfound;
+    size_t recpar1 = 0;
+    size_t recpar2 = 0;
+    FC_Status fc_status;
+    for(size_t edge=0; edge<argedges.size(); edge++)
+    {
+        forcefound = false;
+        recfound   = false;
+        if (argedges[edge].GetType() == xmlstr::XML_BRANCHTYPE_TIP)
+        {
+            forcefound = true;
+            // find the tip branch pointer in the current tree using the name
+            string tipname = argedges[edge].GetLabel();
+            newbranch = stump.GetTip(tipname);
+            fc_status.Increment_FC_Counts(newbranch->GetLiveSites());
+
+            //cerr << "tip: " << newbranch->GetID() << endl;
+            argedges[edge].SetTargetPtr(newbranch);
+            TransferEdgeData(newbranch, argedges[edge]);
+
+#if 0
+            if (m_totalSites == 0)
+            {
+                // set this the first time through, from the tips which have the total number of sites
+                m_totalSites = newbranch->GetRangePtr()->NumRegionSites();
+            }
+#endif
+        }
+
+        else if (argedges[edge].GetType() == xmlstr::XML_BRANCHTYPE_COAL)
+        {
+            forcefound = true;
+            map<long, targetpair>::iterator pos = targetmap.find(argedges[edge].GetTarget());
+            if (pos != targetmap.end())
+            {
+                targetpair p = pos->second;
+                if ((p.first != NULL) && (p.second != NULL))
+                {
+                    // make coalescence
+                    rangeset fcsites;
+                    fcsites = Intersection(p.first->GetLiveSites(), p.second->GetLiveSites());
+                    fc_status.Decrement_FC_Counts(fcsites);
+                    fcsites = fc_status.Coalesced_Sites();
+
+                    newbranch = stump.CoalesceActive(argedges[edge].GetTime(), p.first, p.second, fcsites);
+#if 0
+                    if (firstRecFound)
+                    {
+                        cerr  << endl << "coalescence: " << newbranch->GetID() << endl;
+                        cerr << "     side 1: " << endl;
+                        p.first->GetRangePtr()->PrintInfo();
+                        cerr << "     side 2: " << endl;
+                        p.second->GetRangePtr()->PrintInfo();
+                        cerr << "     result: " << endl;
+                        newbranch->GetRangePtr()->PrintInfo();
+                    }
+#endif
+                    TransferEdgeData(newbranch, argedges[edge]);
+                }
+                else
+                {
+                    // error - child missing
+                    if (p.first == NULL)
+                    {
+                        printf("edge: %li coalesence: %li  child 1 not defined\n", edge, argedges[edge].GetTarget());
+                    }
+                    if (p.second == NULL)
+                    {
+                        printf("edge: %li coalesence: %li  child 2 not defined\n", edge, argedges[edge].GetTarget());
+                    }
+                }
+            }
+            else
+            {
+                // error - coalescence missing
+                printf("edge: %li coalesence: %li not found in target map\n", edge, argedges[edge].GetTarget());
+            }
+        }
+
+        else if (argedges[edge].GetType() == xmlstr::XML_BRANCHTYPE_DISEASE)
+        {
+            forcefound = false;
+        }
+
+        else if (argedges[edge].GetType() == xmlstr::XML_BRANCHTYPE_EPOCH)
+        {
+            forcefound = true;
+
+            map<long, targetpair>::iterator pos = targetmap.find(argedges[edge].GetTarget());
+            if (pos != targetmap.end())
+            {
+                targetpair p = pos->second;
+                if (p.first != NULL)
+                {
+                    // make migration
+                    long maxEvents = registry.GetForceSummary().GetMaxEvents(force_DIVMIG);
+                    long topop = p.first->GetPartition(force_DIVMIG);
+                    newbranch = stump.Migrate(argedges[edge].GetTime(), topop, maxEvents, p.first);
+                    TransferEdgeData(newbranch, argedges[edge]);
+                }
+                else
+                {
+                    // error - child missing
+                    printf("edge: %li div-mig: %li child not defined\n", edge, argedges[edge].GetTarget());
+                }
+            }
+            else
+            {
+                // error - div-mig missing
+                printf("edge: %li div-mig: %li not found in target map\n",
+                       edge, argedges[edge].GetTarget());
+            }
+        }
+
+        else if (argedges[edge].GetType() == xmlstr::XML_BRANCHTYPE_MIG)
+        {
+            forcefound = true;
+
+            map<long, targetpair>::iterator pos = targetmap.find(argedges[edge].GetTarget());
+            if (pos != targetmap.end())
+            {
+                targetpair p = pos->second;
+                if (p.first != NULL)
+                {
+                    // make migration
+                    long maxEvents = registry.GetForceSummary().GetMaxEvents(force_MIG);
+                    long topop = p.first->GetPartition(force_MIG);
+                    newbranch = stump.Migrate(argedges[edge].GetTime(), topop, maxEvents, p.first);
+                    TransferEdgeData(newbranch, argedges[edge]);
+                }
+                else
+                {
+                    // error - child missing
+                    printf("edge: %li migration: %li child not defined\n", edge, argedges[edge].GetTarget());
+                }
+            }
+            else
+            {
+                // error - migration missing
+                printf("edge: %li migration: %li not found in target map\n",
+                       edge, argedges[edge].GetTarget());
+            }
+        }
+
+        else if (argedges[edge].GetType() == xmlstr::XML_BRANCHTYPE_REC)
+        {
+            if (isRecTree)
+            {
+                forcefound = true;
+                recfound   = true;
+                //firstRecFound = true; //JRM debug
+
+                map<long, targetpair>::iterator pos = targetmap.find(argedges[edge].GetTarget());
+                if (pos != targetmap.end())
+                {
+                    targetpair p = pos->second;
+                    if (p.first != NULL)
+                    {
+                        if (argedges[edge+1].GetType() == xmlstr::XML_BRANCHTYPE_REC)
+                        {
+                            // make sure next edge is associated with same recombination
+                            if (argedges[edge].GetTarget() == argedges[edge+1].GetTarget())
+                            {
+                                // find parents
+                                for(size_t paredge=edge+2; paredge<argedges.size(); paredge++)
+                                {
+                                    if(argedges[paredge].GetTarget() == argedges[edge].GetSource())
+                                    {
+                                        recpar1 = paredge;
+                                    }
+                                    if(argedges[paredge].GetTarget() == argedges[edge+1].GetSource())
+                                    {
+                                        recpar2 = paredge;
+                                    }
+                                }
+
+                                if ((recpar1 > 0) && (recpar2 > 0))
+                                {
+                                    // make recombination
+                                    long maxEvents = registry.GetForceSummary().GetMaxEvents(force_REC);
+                                    FPartMap fparts; // not used, we'll override whatever RecombineActive
+                                                     // does with what is in the parent branches
+
+                                    // decide which way the lower sites go from parent A
+                                    bool lowleft = true;
+                                    string transmittedsites =  argedges[edge].GetTransmittedSites();
+
+                                    // Old format: "MIN:MAX" where both endpoints are INCLUDED.
+                                    //unsigned int idx = transmittedsites.find_last_of(':');
+                                    //string endsite = transmittedsites.substr(idx+1, transmittedsites.length());
+                                    //long endval = atol(endsite.c_str());
+
+                                    // New format: "[MIN,MAX)" where MIN is closed lower endpoint and MAX is open upper endpoint.
+                                    unsigned int idx = transmittedsites.find_last_of(',');
+                                    string endsite = transmittedsites.substr(idx+1, transmittedsites.length()-1);
+                                    long endval = atol(endsite.c_str()) - 1; // correct for open interval
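+                                    // Illustrative parse of the new format (hypothetical values): if
+                                    // transmittedsites == "[100,250)", find_last_of(',') returns 4,
+                                    // endsite == "250)" (atol ignores the trailing ')'), and the open-
+                                    // interval correction gives endval == 249, the last transmitted site.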
+
+                                    if (endval > argedges[edge].GetRecLoc())
+                                    {
+                                        lowleft = false;
+                                    }
+
+                                    // build the rec branches
+                                    rangeset fcsites;
+                                    fcsites = fc_status.Coalesced_Sites();
+                                    recbranches = recTree->RecombineActive(argedges[edge].GetTime(), maxEvents, fparts,
+                                                                           p.first,argedges[edge].GetRecLoc(), fcsites, lowleft);
+                                    TransferEdgeData(recbranches.first, argedges[edge]);
+                                    TransferEdgeData(recbranches.second, argedges[edge+1]);
+
+                                    //cerr  << endl << "recbranch.first : " << recbranches.first->GetID() << endl;
+                                    //recbranches.first->GetRangePtr()->PrintInfo();
+
+                                    //cerr << endl << "recbranch.second : " << recbranches.second->GetID() << endl;
+                                    //recbranches.second->GetRangePtr()->PrintInfo();
+
+                                    //recTree->GetTimeList().PrintTreeList();           // JRM debug
+
+                                    // override the generated partitions with the parent partitions from the ARG tree
+                                    string par1part = argedges[recpar1].GetPartitions();
+                                    if (!par1part.empty())
+                                    {
+                                        ProcessForce(recbranches.first, par1part);
+                                    }
+
+                                    string par2part = argedges[recpar2].GetPartitions();
+                                    if (!par2part.empty())
+                                    {
+                                        ProcessForce(recbranches.second, par2part);
+                                    }
+                                }
+                                else if (recpar1 == 0)
+                                {
+                                    printf("edge: %li source: %li not defined\n", edge, argedges[edge].GetSource());
+                                }
+                                else if (recpar2 == 0)
+                                {
+                                    printf("edge: %li source: %li not defined\n", edge+1, argedges[edge+1].GetSource());
+                                }
+                            }
+                            else
+                            {
+                                // error - rec numbers don't match
+                                printf("edge: %li rec number: %li does not match edge: %li rec number: %li\n",
+                                       edge, argedges[edge].GetTarget(), edge+1, argedges[edge+1].GetTarget());
+                            }
+                        }
+                        else
+                        {
+                            // error - recombinations not paired
+                            printf("edge: %li is not a recombination, recombinations are not properly paired\n", edge+1);
+                        }
+                    }
+                    else
+                    {
+                        // error - child missing
+                        printf("edge: %li rec: %li child not defined\n", edge, argedges[edge].GetTarget());
+                    }
+                }
+                else
+                {
+                    // error - rec missing
+                    printf("edge: %li rec: %li not found in target map\n",
+                           edge, argedges[edge].GetTarget());
+                }
+            }
+            else
+            {
+                // error - not a rec tree
+                printf("edge: %li contains a recombination and this is not a recombinant tree\n", edge);
+            }
+        }
+
+        if (forcefound)
+        {
+            // add to target map array
+            Branch_ptr nullBranch;
+            if (recfound)
+            {
+                // recombination case - returns a pair of branches
+
+                // branch 1
+                map<long, targetpair>::iterator pos1 =  targetmap.find(argedges[edge].GetSource());
+                if (pos1 != targetmap.end())
+                {
+                    // second branch in coalescence case
+                    targetpair p = pos1->second;
+                    p.second = recbranches.first;
+                    pos1->second =  p;
+                }
+                else
+                {
+                    // everything else
+                    targetpair p = make_pair(recbranches.first, nullBranch);
+                    targetmap.insert(make_pair(argedges[edge].GetSource(), p));
+                }
+
+                // branch 2
+                map<long, targetpair>::iterator pos2 =  targetmap.find(argedges[edge+1].GetSource());
+                if (pos2 != targetmap.end())
+                {
+                    // second branch in coalescence case
+                    targetpair p = pos2->second;
+                    p.second = recbranches.second;
+                    pos2->second =  p;
+                }
+                else
+                {
+                    // everything else
+                    targetpair q = make_pair(recbranches.second, nullBranch);
+                    targetmap.insert(make_pair(argedges[edge+1].GetSource(), q));
+                }
+            }
+            else
+            {
+                map<long, targetpair>::iterator pos =  targetmap.find(argedges[edge].GetSource());
+                if (pos != targetmap.end())
+                {
+                    // second branch in coalescence case
+                    targetpair p = pos->second;
+                    p.second = newbranch;
+                    pos->second =  p;
+                }
+                else
+                {
+                    // everything else
+                    targetpair p = make_pair(newbranch, nullBranch);
+                    targetmap.insert(make_pair(argedges[edge].GetSource(), p));
+                }
+            }
+
+            if (recfound)
+            {
+                edge += 1; // move forward one extra because recombination uses two edges up
+            }
+        }
+        else
+        {
+            // unknown force
+            printf("edge: %li specifies unknown force: %s\n", edge, argedges[edge].GetType().c_str());
+        }
+    }
+
+#ifndef NDEBUG
+    recTree->GetTimeList().PrintTreeList();           // JRM debug
+    TiXmlDocument * docP = recTree->GetTimeList().AssembleGraphML();
+    FILE * argOutput = TiXmlFOpen("checktree.xml","a");
+    docP->Print(argOutput,0);
+    fclose(argOutput);
+#endif
+    // add root
+    stump.AttachBase(newbranch);
+
+    assert(stump.IsValidTree());
+
+} // ARGTree::ToLamarcTree
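+
+// Note on the targetmap bookkeeping above (node ids below are hypothetical):
+// each source node id maps to a targetpair of the branches built so far that
+// lead to it.  The first branch reaching source node 17 stores
+// targetmap[17] = (branchA, null); when a second branch with the same source
+// is built (the other child of a coalescence), the pair becomes (branchA, branchB).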
+
+//------------------------------------------------------------------------------------
+
+void ARGTree::TransferEdgeData(Branch_ptr bptr, ARGEdge edge)
+{
+    // transfer the other data to the new branch
+
+    //  partition forces
+    string partitions =  edge.GetPartitions();
+    if (partitions.length() > 0)
+    {
+        string::size_type stidx = 0;
+        string::size_type idx = partitions.find(',');
+        string psubstr;
+        while (idx != string::npos)
+        {
+            // pull off individual forces
+            psubstr = partitions.substr(stidx, idx - stidx);
+            ProcessForce(bptr, psubstr);
+            stidx = idx + 1;
+            idx = partitions.find(',', stidx);
+        }
+        psubstr = partitions.substr(stidx, partitions.length());
+        ProcessForce(bptr, psubstr);
+    }
+
+} // ARGTree::TransferEdgeData
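+
+// Illustrative layout of the partitions attribute handled above (labels are
+// hypothetical): a comma-separated list of "force:value" items, e.g.
+//     "migration:Pop1,divmigration:Anc1"
+// Each item is split off here and handed to ProcessForce(), which splits it
+// again at the ':' into the force kind and the partition name.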
+
+//------------------------------------------------------------------------------------
+
+bool ARGTree::ProcessForce(Branch_ptr bptr, string forcestr)
+{
+    // process force
+    string::size_type idx = forcestr.find(':');
+    if (idx == string::npos)
+    {
+        printf("ARGTree::ProcessForce: forcestr: %s has no : so it is not a valid definition\n",forcestr.c_str());
+        return false;
+    }
+    else
+    {
+        string forcekind = forcestr.substr(0, idx);
+        string forcevalue = forcestr.substr(idx+1, forcestr.length());
+
+        if (forcekind == uistr::disease)
+        {
+            string error_msg = "ERROR found in ARGTree::ProcessForce: force: ";
+            error_msg += forcekind;
+            error_msg +=" not implemented.\n";
+            printf("%s", error_msg.c_str());
+            return false;
+        }
+        else if (forcekind == uistr::divmigration)
+        {
+            long partNum = registry.GetDataPack().GetPartitionNumber(force_DIVMIG, forcevalue);
+            bptr->SetPartition(force_DIVMIG, partNum);
+        }
+        else if(forcekind == uistr::migration)
+        {
+            long partNum = registry.GetDataPack().GetPartitionNumber(force_MIG, forcevalue);
+            bptr->SetPartition(force_MIG, partNum);
+        }
+        else
+        {
+            string error_msg = "ERROR found in ARGTree::ProcessForce: force: ";
+            error_msg += forcekind;
+            error_msg +=" not a known force.\n";
+            printf("%s", error_msg.c_str());
+            return false;
+        }
+    }
+    return true;
+}
+
+//____________________________________________________________________________________
diff --git a/src/tree/argtree.h b/src/tree/argtree.h
new file mode 100644
index 0000000..2934e26
--- /dev/null
+++ b/src/tree/argtree.h
@@ -0,0 +1,100 @@
+// $Id: argtree.h,v 1.3 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2012  Jim McGill, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef ARGTREE_H
+#define ARGTREE_H
+
+//#include <cassert>  // May be needed for inline definitions.
+#include <string>
+#include <vector>
+
+#include "branch.h" // for Branch_ptr declaration
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+class Tree;
+
+//------------------------------------------------------------------------------------
+// This is used to collect the ARG information from the input XML
+// so it is available in phase 2 after the tree tips are created.
+
+class ARGEdge
+{
+  private:
+    // edge information
+    long   m_target;
+    long   m_source;
+    Branch_ptr m_targetptr;             // for transfer of tip to new tree
+    string m_partitions;
+    string m_livesites;
+    string m_transmittedsites;
+
+    // target information
+    long   m_targetid;                  // used internally in ParseTreeToData::DoARGtree()
+                                        // when hooking the edges to the nodes
+                                        // equals m_target when done
+    string m_type;
+    double m_time;
+    string m_label;
+    long   m_recloc;
+
+    // we accept the default dtor, copy-ctor, and operator=
+
+  public:
+
+    ARGEdge(long target, long source);
+
+    // Setter functions
+    void SetTargetId(long targetid)                     {m_targetid = targetid;};
+    void SetTarget(long target)                         {m_target = target;};
+    void SetTargetPtr(Branch_ptr targetptr)             {m_targetptr = targetptr;};
+    void SetType(string type)                           {m_type = type;};
+    void SetTime(double time)                           {m_time = time;};
+    void SetLabel(string label)                         {m_label = label;};
+    void SetRecLoc(long recloc)                         {m_recloc = recloc;};
+    void SetPartitions(string partitions)               {m_partitions = partitions;};
+    void SetLiveSites(string livesites)                 {m_livesites = livesites;};
+    void SetTransmittedSites(string transmittedsites)   {m_transmittedsites = transmittedsites;};
+
+    //Getter functions
+    long        GetTargetId()           const {return m_targetid;};
+    string      GetType()               const {return m_type;};
+    double      GetTime()               const {return m_time;};
+    string      GetLabel()              const {return m_label;};
+    long        GetRecLoc()             const {return m_recloc;};
+    long        GetTarget()             const {return m_target;};
+    Branch_ptr  GetTargetPtr()          const {return m_targetptr;};
+    long        GetSource()             const {return m_source;};
+    string      GetPartitions()         const {return m_partitions;};
+    string      GetLiveSites()          const {return m_livesites;};
+    string      GetTransmittedSites()   const {return m_transmittedsites;};
+
+};
+
+//------------------------------------------------------------------------------------
+
+class ARGTree
+{
+  private:
+    long m_totalSites;  // total sites, same for all nodes
+    void TransferEdgeData(Branch_ptr bptr, ARGEdge edge);
+    bool ProcessForce(Branch_ptr bptr, string forcestr);
+
+  public:
+    void ToLamarcTree(Tree& stump, vector<ARGEdge> argedges);
+};
+
+//------------------------------------------------------------------------------------
+
+#endif // ARGTREE_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/arranger.cpp b/src/tree/arranger.cpp
new file mode 100644
index 0000000..033e0f0
--- /dev/null
+++ b/src/tree/arranger.cpp
@@ -0,0 +1,1440 @@
+// $Id: arranger.cpp,v 1.122 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "local_build.h"
+
+#include "arranger.h"
+#include "event.h"
+#include "tree.h"
+#include "force.h"
+#include "forceparam.h"
+#include "forcesummary.h"
+#include "random.h"
+#include "errhandling.h"                // Arranger can throw data_error
+#include "mathx.h"
+#include "registry.h"
+#include "likelihood.h"                 // for Bayesian arranger functions
+#include "chainstate.h"
+#include "timemanager.h"                // used to handle all rearrangements of of stick/stair
+#include "plforces.h"                   // used in the Brownian-Bridge for stick/stair rearrangement
+#include "fc_status.h"                  // for tracking final coalescence in the ResimArranger's
+                                        // functions, Resimulate() and DropAll()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Arranger::Arranger(double timing)
+    :  m_timing(timing),
+       m_savetiming(timing),
+       m_tree(NULL),
+       randomSource(&(registry.GetRandom()))
+{
+    // intentionally blank
+} // Arranger::Arranger
+
+//------------------------------------------------------------------------------------
+
+Arranger::Arranger(const Arranger& src)
+    : m_timing(src.m_timing),
+      m_savetiming(src.m_savetiming),
+      randomSource(src.randomSource)
+{
+    m_tree = NULL;
+} // Arranger::Arranger
+
+//------------------------------------------------------------------------------------
+
+void Arranger::SetTiming(double t)
+{
+    if(t < 0.0)
+    {
+        data_error e("arranger timing cannot be < 0.0");
+        throw e;
+    }
+
+    // We used to barf for t > 1.0, but now we normalize the timings later, so we don't do that.
+
+    m_timing = t;
+} // Arranger::SetTiming
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+ResimArranger::ResimArranger(const ForceSummary & fs, double timing)
+    : Arranger(timing),
+      m_eventvec(fs.CreateEventVec()),
+      m_hasLogisticSelection(false),    // set in SetParameters
+      m_logsel_cutoff(FLAGDOUBLE),      // ditto
+      m_activelist(registry.GetDataPack()),
+      m_inactivelist(registry.GetDataPack())
+{
+    // inform each Event of the Arranger for callbacks
+    vector<Event*>::iterator event = m_eventvec.begin();
+    vector<Event*>::iterator eventend = m_eventvec.end();
+    for ( ; event != eventend; ++event)
+    {
+        (*event)->SetArranger(*this);
+    }
+} // ResimArranger ctor
+
+//------------------------------------------------------------------------------------
+
+ResimArranger::ResimArranger(const ResimArranger& src)
+    : Arranger(src),
+      m_eventvec(),
+      m_hasLogisticSelection(false),    // set in SetParameters
+      m_logsel_cutoff(FLAGDOUBLE),      // ditto
+      m_activelist(registry.GetDataPack()),
+      m_inactivelist(registry.GetDataPack())
+{
+    vector<Event*>::const_iterator event = src.m_eventvec.begin();
+    vector<Event*>::const_iterator end = src.m_eventvec.end();
+    Event* newevent;
+    for( ; event != end; ++event)
+    {
+        newevent = (*event)->Clone();
+        newevent->SetArranger(*this);
+        m_eventvec.push_back(newevent);
+    }
+
+    // We do not copy m_xactives etc. because they are only scratchpads for Event use.
+} // ResimArranger::ResimArranger
+
+//------------------------------------------------------------------------------------
+
+ResimArranger::~ResimArranger()
+{
+    ClearEventVec();
+} // ResimArranger::~ResimArranger
+
+//------------------------------------------------------------------------------------
+
+void ResimArranger::ClearEventVec()
+{
+    vector<Event*>::iterator it = m_eventvec.begin();
+    vector<Event*>::iterator end = m_eventvec.end();
+    for ( ; it != end; ++it)
+    {
+        delete *it;
+    }
+    m_eventvec.clear();
+
+} // ClearEventVec
+
+//------------------------------------------------------------------------------------
+
+void ResimArranger::SetParameters(ChainState & chainstate)
+{
+    assert(m_eventvec.size() != 0);     // *Some* events must be possible!
+
+    vector<Event*>::iterator it = m_eventvec.begin();
+    vector<Event*>::iterator end = m_eventvec.end();
+
+    m_hasLogisticSelection = false;
+
+    ForceParameters starts(chainstate.GetParameters());
+    // MDEBUG not happy with presence of force-specific overflow code here in a general use function.
+    for ( ; it != end; ++it)
+    {
+        (*it)->InstallParameters(starts);
+        if (registry.GetForceSummary().HasLogisticSelection())
+        {
+            // Compute and store the maximum starttime, beyond which over/underflow will ensue.
+            double theta_A0 = starts.GetRegionalThetas()[0];
+            double theta_a0 = starts.GetRegionalThetas()[1];
+            double s = starts.GetLogisticSelectionCoefficient()[0];
+            if (theta_A0 <= 0.0 || theta_a0 <= 0.0)
+                throw impossible_error("Invalid Theta received by ResimArranger::SetParameters().");
+            m_hasLogisticSelection = true;
+            if (s > 0.0)
+                m_logsel_cutoff = (EXPMAX - log(theta_a0) - 4.0 * LOG10) / s;
+            else if (s < 0.0)
+                m_logsel_cutoff = (EXPMAX - log(theta_A0) - 4.0 * LOG10) / (-s);
+            else
+                m_logsel_cutoff = DBL_MAX; // no cutoff
+        }
+    }
+
+} // SetParameters
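+
+// Illustrative cutoff computation (hypothetical numbers; assumes EXPMAX is about
+// ln(DBL_MAX) ~ 709 and LOG10 == ln(10)): with theta_a0 = 0.01 and s = 0.5,
+//     m_logsel_cutoff = (709 - ln(0.01) - 4*ln(10)) / 0.5
+//                     = (709 + 4.6 - 9.2) / 0.5 ~ 1409,
+// so EventTime() will shrink any proposed event time beyond ~1409 to avoid
+// exp() overflow in the logistic-selection calculations.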
+
+//------------------------------------------------------------------------------------
+
+double ResimArranger::EventTime(Event*& returnevent, double eventT, double maxtime)
+{
+    // NB "maxtime" is the time at the start of the next interval.  Each Event is responsible
+    // for returning a time that respects that boundary; the Event itself knows whether it must
+    // avoid tying the boundary or only passing it.
+    m_xactives = m_activelist.GetBranchXParts();
+    m_xinactives = m_inactivelist.GetBranchXParts();
+
+    m_pactives = m_activelist.GetBranchParts();
+    m_pinactives = m_inactivelist.GetBranchParts();
+
+    double time = 0.0;
+
+    vector<Event*>::iterator event;
+    map<double, Event *> times;
+
+    for (event = m_eventvec.begin(); event != m_eventvec.end(); ++event)
+    {
+        time = (*event)->PickTime(eventT, maxtime);
+        if (time != FLAGDOUBLE)
+        {
+            assert(time >= 0.0);        // Should not be negative, ever!
+            times.insert(make_pair(time, *event));
+        }
+    }
+
+    // The first element in the map is now the smallest, since maps are intrinsically sorted.
+
+    returnevent = NULL;
+    // No winning horse exists.
+    if (times.empty()) return FLAGLONG;
+
+    map<double, Event*>::const_iterator mapit = times.begin();
+    double newT(mapit->first);
+
+    // Winning horse is at the interval end and is not a ties-allowed horse.
+    Event* winner = (*mapit).second;
+#if 0
+    if (newT == eventT && !(winner->TiesAllowed())) return FLAGLONG;
+#endif
+
+    // Winning horse returned DBL_BIG.
+    if (newT >= DBL_BIG)
+    {
+        string msg("maximal length branch won the horse race in");
+        msg += " ResimArranger::EventTime";
+        stretched_error e(msg);
+        throw e;
+    }
+
+    // Okay, we have a valid winner!
+    if (m_hasLogisticSelection && eventT + newT > m_logsel_cutoff)
+    {
+        // Shrink this new interval a bit, to prevent over/underflow.  The cutoff is computed in SetParameters().
+        // It's a simple function of the current (driving) values of theta_A0, theta_a0, and s.
+        newT = m_logsel_cutoff - eventT;
+    }
+    returnevent = winner;
+    return newT;
+} // ResimArranger::EventTime
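+
+// Illustration of the "horse race" above (hypothetical times): if coalescence
+// proposes t = 0.7, migration proposes t = 0.3, and recombination returns
+// FLAGDOUBLE (no event possible), the map holds {0.3 -> migration, 0.7 -> coalescence}
+// and migration wins with newT = 0.3.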
+
+//------------------------------------------------------------------------------------
+
+void ResimArranger::DropAll(double eventT)
+{
+    double time;
+    Event* eventptr = NULL;
+
+    m_inactivelist.Clear();
+
+    FC_Status fcstatus;
+    m_activelist.IncreaseFCCount(fcstatus);
+
+    while (m_activelist.Size() > 1)
+    {
+        time = EventTime(eventptr, eventT, DBL_MAX);
+        assert (time != FLAGLONG);
+        assert (eventptr != NULL);
+        double nextT = time;
+        eventptr->DoEvent(nextT, fcstatus);
+        eventT = nextT;
+    }
+
+    // Remove the root from the m_activelist then attach it to the tree.
+    m_tree->AttachBase(m_activelist.RemoveFirst());
+} // DropAll
+
+//------------------------------------------------------------------------------------
+
+double ResimArranger::Activate(Tree * oldtree)
+{
+    // cerr << endl << "in ResimArranger::Activate calling RevalidateRange" << endl;
+    assert(m_tree->GetTimeList().RevalidateAllRanges());
+    Branch_ptr activebranch = m_tree->ActivateBranch(oldtree);
+    m_activelist.Append(activebranch);
+    return activebranch->m_eventTime;
+} // Activate
+
+//------------------------------------------------------------------------------------
+
+void ResimArranger::Resimulate(double eventT, ChainState & chainstate)
+{
+    double nextT, time;
+    Event* eventptr;
+
+    double rootT = m_tree->RootTime();
+
+    vector<Branch_ptr> newinactives = m_tree->FirstInterval(eventT);
+    unsigned long i;
+    m_inactivelist.Clear();
+
+    for (i = 0; i < newinactives.size(); ++i)
+    {
+        m_inactivelist.Collate(newinactives[i]);
+    }
+    nextT = m_inactivelist.IntervalBottom();
+
+    FC_Status fcstatus;
+    m_activelist.IncreaseFCCount(fcstatus);
+    m_inactivelist.IncreaseFCCount(fcstatus);
+
+    // The following loop resimulates lineages down the tree.  It will terminate when the
+    // Event objects agree that no more events are possible, or when the root is reached,
+    // at which point DropAll is invoked.
+    while (true)
+    {
+        // Poll the Event objects to see if we're done yet.
+
+        if (StopNow()) return;
+
+        time = EventTime(eventptr, eventT, nextT);
+
+        // If an event is possible we carry it out, then return to the top of the loop while
+        // remaining in the same interval, since further events may occur.
+        if (time != FLAGLONG)
+        {
+            eventT = time;
+            eventptr->DoEvent(eventT, fcstatus);
+            continue;
+        }
+
+        // if we are at the root, finish up using DropAll()
+        if (nextT == rootT)
+        {
+            m_activelist.Append(m_tree->ActivateRoot(fcstatus));
+            DropAll(rootT);
+            return;
+        }
+
+        // otherwise go on to the next interval
+        eventT = nextT;
+        nextT = NextInterval(eventT, fcstatus);
+    }
+} // Resimulate
+
+//------------------------------------------------------------------------------------
+
+double ResimArranger::NextInterval(double lastT, FC_Status & fcstatus)
+{
+    // Remove first branch from inactive list.
+    Branch_ptr pBranch = m_inactivelist.RemoveFirst();
+
+    // Insert its parent into inactive list.
+    Branch_ptr pParent = pBranch->Parent(0);
+    m_inactivelist.Collate(pParent);
+
+    if (pParent->Child(1))              // If the branch has a sibling,
+    {
+        m_inactivelist.RemoveFirst();   // remove it from the inactive list also.
+        rangeset coalesced_sites = Intersection(pParent->Child(0)->GetLiveSites(), pParent->Child(1)->GetLiveSites());
+#if FINAL_COALESCENCE_ON
+        fcstatus.Decrement_FC_Counts(coalesced_sites);
+#endif
+    }
+    else if (pBranch->Parent(1))
+    {
+        // If branch has a second parent, insert it.
+        m_inactivelist.Collate(pBranch->Parent(1));
+#if FINAL_COALESCENCE_ON
+        pBranch->Parent(1)->UpdateBranchRange(fcstatus.Coalesced_Sites(), true);
+#else
+        rangeset emptyset;
+        pBranch->Parent(1)->UpdateBranchRange(emptyset, false);
+#endif
+    }
+#if FINAL_COALESCENCE_ON
+    pParent->UpdateBranchRange(fcstatus.Coalesced_Sites(), true);
+#else
+    rangeset emptyset;
+    pParent->UpdateBranchRange(emptyset, false);
+#endif
+
+    // MARYDEBUG--kludge till we think of something better; used to handle rectree updating --
+    // should probably move rectree updating into arranger?
+    m_tree->NextInterval(pBranch);
+
+    // return the time at the bottom of the current interval.
+    return m_inactivelist.IntervalBottom();
+} // NextInterval
+
+//------------------------------------------------------------------------------------
+
+void ResimArranger::CleanupAfterResimulate()
+{
+    m_tree->Prune();
+} // CleanupAfterResimulate
+
+//------------------------------------------------------------------------------------
+
+bool ResimArranger::StopNow() const
+{
+    // Poll the Events to see if they are Done().  If any are not, return false.
+    vector<Event*>::const_iterator event = m_eventvec.begin();
+    vector<Event*>::const_iterator end = m_eventvec.end();
+
+    for ( ; event != end; ++event)
+    {
+        if (!(*event)->Done()) return false;
+    }
+
+    return true;
+} // StopNow
+
+//------------------------------------------------------------------------------------
+
+void ResimArranger::Rearrange(ChainState & chstate)
+{
+    // This is a "template method" giving the steps of rearrangement.  It may throw
+    // a "rejecttree_error" to signal a newtree that should be discarded.
+    m_activelist.Clear();               // m_activelist "clear" happens here instead of
+    // in Activate in case we want to "Activate" more than one branch for a given "drop".
+    m_tree = chstate.GetTree();
+    Tree * oldtree = chstate.GetOldTree();
+    double eventT = Activate(oldtree);
+    Resimulate(eventT, chstate);
+    CleanupAfterResimulate();
+    assert(m_tree->IsValidTree());
+} // Rearrange
+
+//------------------------------------------------------------------------------------
+
+void ResimArranger::ScoreRearrangement(ChainState & chstate)
+{
+    // Calculate data likelihood of new tree (stores it in the Tree object).
+    chstate.GetTree()->CalculateDataLikes();
+} // ScoreRearrangement
+
+//------------------------------------------------------------------------------------
+
+bool ResimArranger::AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree)
+{
+    // NB:  We assume that the tips of the tree were not changed, and thus we only accept/reject the body of the tree.
+    if (badtree)                        // reject this tree immediately
+    {
+        chstate.OverwriteTree();
+        return false;
+    }
+
+    Tree * tree = chstate.GetTree();
+    Tree * oldtree = chstate.GetOldTree();
+
+    double test = (tree->GetDLValue() - oldtree->GetDLValue()) / temperature;
+    test += Hastings(chstate);
+
+#ifndef LAMARC_QA_SINGLE_DENOVOS        // Make LAMARC_QA_SINGLE_DENOVOS always reject.
+    if (test < 0.0)
+    {
+        if (test < log(randomSource->Float())) // rejection
+        {
+#endif
+            tree->CopyPartialBody(oldtree);
+            tree->CopyStick(oldtree);
+            return false;
+#ifndef LAMARC_QA_SINGLE_DENOVOS        // Make LAMARC_QA_SINGLE_DENOVOS always reject.
+        }
+    }
+    // acceptance
+    oldtree->CopyPartialBody(tree);
+    oldtree->CopyStick(tree);
+    chstate.TreeChanged();
+    return true;
+#endif
+} // AcceptAndSynchronize
+
+//------------------------------------------------------------------------------------
+
+double ResimArranger::Hastings(ChainState & chstate)
+{
+    Tree * tree = chstate.GetTree();
+    Tree * oldtree = chstate.GetOldTree();
+
+    return log(static_cast<double>(oldtree->GetTimeList().GetNCuttable() - 1)
+               / (tree->GetTimeList().GetNCuttable() - 1));
+}
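+
+// Illustrative value (hypothetical counts): if the old tree has 21 cuttable
+// branches and the new tree has 23, the returned ln(Hastings ratio) is
+// log(20.0 / 22.0) ~ -0.095.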
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Arranger* DropArranger::Clone() const
+{
+    Arranger* arr = new DropArranger(*this);
+    return arr;
+} // DropArranger::Clone
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Arranger* DenovoArranger::Clone() const
+{
+    Arranger* arr = new DenovoArranger(*this);
+    return arr;
+} // DenovoArranger::Clone
+
+//------------------------------------------------------------------------------------
+
+void DenovoArranger::Rearrange(ChainState & chstate)
+// routine formerly known as ResimArranger::DenovoTree
+{
+    m_tree = chstate.GetTree();
+    vector<Branch_ptr> tips = m_tree->ActivateTips(chstate.GetOldTree());
+
+    // set up the active-lineage list
+    m_activelist.Clear();
+    m_inactivelist.Clear();
+    unsigned long i;
+
+    for (i = 0; i < tips.size(); ++i)
+    {
+        m_activelist.Append(tips[i]);
+    }
+
+    DropAll(0.0);
+} // DenovoArranger::Rearrange
+
+//------------------------------------------------------------------------------------
+
+bool DenovoArranger::AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree)
+{
+    // Denovo trees are always accepted unless the 'badtree' flag forbids it
+    if (badtree)
+    {
+        chstate.OverwriteTree();
+        return false;
+    }
+    else
+    {
+        chstate.OverwriteOldTree();
+        chstate.TreeChanged();
+        return true;
+    }
+} // DenovoArranger::AcceptAndSynchronize
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+void BaseHapArranger::SetParameters(ChainState & chainstate)
+{
+    // Deliberately blank.
+}
+
+//------------------------------------------------------------------------------------
+
+void BaseHapArranger::ScoreRearrangement(ChainState & chstate)
+{
+    chstate.GetTree()->CalculateDataLikes();
+} // ScoreRearrangement
+
+//------------------------------------------------------------------------------------
+// We copy the full tree, as we may have changed the tips as well
+// as the state of internal nodes (i.e. by changing DLCells).
+
+bool BaseHapArranger::AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree)
+{
+    Tree * tree = chstate.GetTree();
+    Tree * oldtree = chstate.GetOldTree();
+
+    if (badtree)     // reject this tree immediately
+    {
+        tree->CopyTips(oldtree);
+        tree->CopyBody(oldtree);
+        return false;
+    }
+
+    double test = (tree->GetDLValue() - oldtree->GetDLValue()) / temperature;
+
+    if (test < 0.0)
+    {
+        if (test < log(randomSource->Float()))
+        {
+            tree->CopyTips(oldtree);
+            tree->CopyBody(oldtree);
+            return false;
+        }
+    }
+
+    oldtree->CopyTips(tree);
+    oldtree->CopyBody(tree);
+    chstate.TreeChanged();
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Arranger* HapArranger::Clone() const
+{
+    Arranger* arr = new HapArranger(*this);
+    return arr;
+} // HapArranger::Clone
+
+//------------------------------------------------------------------------------------
+
+void HapArranger::Rearrange(ChainState & chstate)
+{
+    m_tree = chstate.GetTree();
+    m_tree->SwapSiteDLs();
+    assert(m_tree->IsValidTree());
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Arranger* ProbHapArranger::Clone() const
+{
+    Arranger* arr = new ProbHapArranger(*this);
+    return arr;
+} // ProbHapArranger::Clone
+
+//------------------------------------------------------------------------------------
+
+void ProbHapArranger::Rearrange(ChainState & chstate)
+{
+    m_tree = chstate.GetTree();
+    m_tree->PickNewSiteDLs();
+    assert(m_tree->IsValidTree());
+}
+
+//------------------------------------------------------------------------------------
+
+void ProbHapArranger::ScoreRearrangement(ChainState & chstate)
+{
+    BaseHapArranger::ScoreRearrangement(chstate);
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+TreeSizeArranger::TreeSizeArranger(const ForceSummary & fs, double timing)
+    : ResimArranger(fs, timing)
+{
+    //We only want Active events, so we re-set m_eventvec.
+    vector<Event*>::iterator evit;
+
+    // Epoch times are handled in a different way, in Resimulate()
+    for (evit = m_eventvec.begin(); evit != m_eventvec.end(); )
+    {
+        if ((*evit)->IsInactive() || (*evit)->IsEpoch())
+        {
+            delete *evit;
+            evit = m_eventvec.erase(evit);
+        }
+        else
+        {
+            evit++;
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+Arranger* TreeSizeArranger::Clone() const
+{
+    Arranger* arr = new TreeSizeArranger(*this);
+    return arr;
+} // TreeSizeArranger::Clone
+
+//------------------------------------------------------------------------------------
+
+double TreeSizeArranger::Activate(Tree * oldtree)
+{
+    Branch_ptr branch(m_tree->ChoosePreferentiallyTowardsRoot(oldtree));
+    return branch->m_eventTime;
+} // Activate
+
+//------------------------------------------------------------------------------------
+
+void TreeSizeArranger::Resimulate(double eventT, ChainState & chainstate)
+{
+    // The following loop draws new times for each event in the tree via resimulating a new event,
+    // discarding everything but its time, and storing the new time.  We will push all the new times
+    // into the tree in one fell swoop.  This minimizes the amount of work done while the tree is in
+    // a time-inconsistent state.
+
+    TimeList & newtreelist = m_tree->GetTimeList();
+
+    // Find the start of the area to be modified.
+    Branchiter startpoint;
+
+    for(startpoint = newtreelist.FirstBody();
+        startpoint != newtreelist.EndBranch(); startpoint = newtreelist.NextBody(startpoint))
+    {
+        if ((*startpoint)->m_eventTime >= eventT) break;
+    }
+    assert(startpoint != newtreelist.EndBranch());
+    assert((*startpoint)->m_eventTime == eventT);
+
+    // Set eventT so it defines the start of the interval in which the branch it belongs to resides.
+    Branchiter intervalstart(startpoint);
+    while ((*intervalstart)->m_eventTime == eventT)
+        intervalstart = newtreelist.PrevBodyOrTip(intervalstart);
+
+    eventT = (*intervalstart)->m_eventTime;
+    assert((*startpoint)->m_eventTime > eventT);
+
+    m_activelist.Clear();
+    m_activelist.Append(m_tree->FindBranchesImmediatelyTipwardOf(startpoint));
+
+    DoubleVec1d epochtimes(chainstate.GetParameters().GetEpochTimes());
+
+    Branchiter newbranch;
+    DoubleVec1d newtimes;
+    for(newbranch = startpoint; newbranch != newtreelist.EndBranch(); newbranch = newtreelist.NextBody(newbranch))
+    {
+        double nextT;
+
+        m_tree->SetCurTargetLinkweightFrom(m_activelist);
+
+        if ((*newbranch)->Event() == btypeEpoch)
+        {
+            // Epoch branches retain their old times; we will throw if that leads to inconsistency.
+            nextT = (*newbranch)->GetTime();
+        }
+        else
+        {
+            Event* eventptr;
+            nextT = EventTime(eventptr, eventT, DBL_MAX);
+            assert(eventptr != NULL);
+            // did we illegally pass an epoch boundary
+            if (NodeChangesEpoch(*newbranch, nextT, epochtimes))
+            {
+                m_tree->ClearCurTargetLinkweight();
+                string wh("crossed the epoch boundary at time ");
+                wh += ToString(eventT);
+                epoch_error e(wh);
+                throw e;
+            }
+        }
+        newtimes.push_back(nextT);
+
+        // Now to fix up the activelist--remove all the children (if any).
+        for(long i = 0; i < NELEM; ++i)
+        {
+            Branch_ptr br((*newbranch)->Child(i));
+            if (br) m_activelist.Remove(br);
+        }
+        // Then add the newbranch.
+        m_activelist.Append(*newbranch);
+
+        if ((*newbranch)->Event() == btypeRec)
+        {
+            newtimes.push_back(nextT);
+            newbranch = newtreelist.NextBody(newbranch);
+            assert((*newbranch)->Event() == btypeRec);
+            m_activelist.Append(*newbranch);
+        }
+
+        eventT = nextT;
+    }
+
+    assert(m_tree->IsValidTree());
+
+#ifndef NDEBUG
+    vector<double> sorttimes(newtimes);
+    stable_sort(sorttimes.begin(), sorttimes.end());
+    assert(newtimes == sorttimes); // times should have been in order!
+#endif
+
+    m_tree->SetNewTimesFrom(startpoint, newtimes);
+    assert(m_tree->IsValidTree());
+
+    // clean up Tree scratchpads
+    m_tree->ClearCurTargetLinkweight();
+
+} // Resimulate
+
+//------------------------------------------------------------------------------------
+
+void TreeSizeArranger::CleanupAfterResimulate()
+{
+    m_tree->TrimStickToRoot();
+} // CleanupAfterResimulate
+
+//------------------------------------------------------------------------------------
+
+bool TreeSizeArranger::NodeChangesEpoch(Branch_ptr pBranch, double newtime, const DoubleVec1d & epochtimes) const
+{
+    double tipwardtime(min(pBranch->m_eventTime, newtime)), rootwardtime(max(pBranch->m_eventTime, newtime));
+
+    DoubleVec1d::size_type epoch;
+    for(epoch = 0; epoch < epochtimes.size(); ++epoch)
+    {
+        // Does an epoch boundary lie between our two times?
+        if (tipwardtime < epochtimes[epoch] && rootwardtime > epochtimes[epoch])
+            return true;
+
+    }
+
+    return false;
+
+} // NodeChangesEpoch
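+
+// Illustrative check (hypothetical times): for a branch whose current time is 0.4,
+// a proposed new time of 0.9, and an epoch boundary at 0.6, the boundary lies
+// strictly between tipwardtime and rootwardtime, so this returns true.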
+
+//------------------------------------------------------------------------------------
+
+double TreeSizeArranger::Hastings(ChainState & chstate)
+{
+    //The tree size arranger does not include a hastings term for the number of cuttable branches because
+    // both should be identical.  Also, since we are choosing a random branch instead of a random time,
+    // there need be no compensation for that, either.  The return value is ln(Hastings ratio), hence the 0.
+    return 0.0;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+EpochSizeArranger::EpochSizeArranger(const ForceSummary & fs, double timing)
+    : ResimArranger(fs, timing),
+      m_tree_changed(false)
+{
+    // If there is no DivForce, do nothing--this Arranger is unused.
+    if (!fs.CheckForce(force_DIVERGENCE)) return;
+
+    // Cache priors on epoch times for later use.
+    m_epochs = fs.GetEpochs();
+    ForceVec::const_iterator myforce = registry.GetForceSummary().GetForceByTag(force_DIVERGENCE);
+    vector<Parameter> myparams = (*myforce)->GetParameters();
+    unsigned long i;
+    m_priors.clear();
+
+    for (i = 0; i < myparams.size(); ++i)
+    {
+        m_priors.push_back(myparams[i].GetPrior());
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+EpochSizeArranger::EpochSizeArranger(const EpochSizeArranger& src)
+    : ResimArranger(src),
+      m_tree_changed(src.m_tree_changed),
+      m_epochs(src.m_epochs),
+      m_priors(src.m_priors)
+{
+    // Deliberately blank.
+} // EpochSizeArranger copy constructor
+
+//------------------------------------------------------------------------------------
+
+Arranger* EpochSizeArranger::Clone() const
+{
+    Arranger* arr = new EpochSizeArranger(*this);
+    return arr;
+}
+
+//------------------------------------------------------------------------------------
+
+void EpochSizeArranger::Rearrange(ChainState & chstate)
+{
+    // Rearrange may throw a "rejecttree_error" to signal a newtree that should be discarded.
+
+    m_tree = chstate.GetTree();
+    m_tree_changed = false;
+    assert(m_tree->ConsistentWithParameters(chstate.GetParameters()));
+
+    // Time 0 is not present in the result of GetEpochTimes, so we put it in!
+    DoubleVec1d epochtimes(chstate.GetParameters().GetEpochTimes());
+    epochtimes.insert(epochtimes.begin(), 0.0);
+
+    // Pick among the n internal boundary times.  The first epoch has no
+    // pickable boundary time, but the last one DOES; there is no trailing sentinel.
+    DoubleVec1d::size_type startepoch(registry.GetRandom().Long(epochtimes.size() - 1) + 1);
+
+    bool islastepoch = (startepoch == epochtimes.size() - 1);
+    double tipwardtime(epochtimes[startepoch - 1]);
+    double oldtime(epochtimes[startepoch]);
+    assert(oldtime != 0.0);  // zero is a fake epoch
+    double rootwardtime;
+    if (!islastepoch) rootwardtime = epochtimes[startepoch + 1];
+    else rootwardtime = FLAGDOUBLE;     // We shouldn't be using this if we're moving the last epoch.
+
+    // Obtain the prior for the boundary to be moved (note that the priors start with epoch 1).
+    const Prior& prior = m_priors[startepoch - 1];
+    // Draw from it.
+    pair<double, double> draw = prior.RandomDraw();
+    double newtime = draw.first;
+
+    // If setting the epoch to the drawn time causes a conflict, throw.
+    if (newtime <= tipwardtime || (rootwardtime != FLAGDOUBLE && newtime >= rootwardtime))
+    {
+        epoch_error e("Epoch boundary conflict in EpochSizeArranger");
+        throw e;
+    }
+
+    epochtimes[startepoch] = newtime;
+
+    // If the affected area is past the bottom of the tree, don't change the tree, just reset the time and continue.
+    if (tipwardtime >= m_tree->RootTime())
+    {
+        epochtimes.erase(epochtimes.begin());
+        chstate.GetParameters().SetEpochTimes(epochtimes);
+        return;
+    }
+
+    // Else change the tree.
+    m_tree_changed = true;
+
+    // Set m_firstInvalid correctly.
+    m_tree->ChooseFirstBranchInEpoch(tipwardtime, chstate.GetOldTree());
+
+    double tipwardchange = (newtime - tipwardtime) / (oldtime - tipwardtime);
+    double rootwardchange;
+    if (islastepoch) rootwardchange = 1.0;
+    else rootwardchange = (rootwardtime - newtime) / (rootwardtime - oldtime);
+
+    bool movingtipward(newtime < oldtime);
+
+    // First generate all the new branchtimes, storing for later use.
+    DoubleVec1d newtimes;
+
+    vector<Branch_ptr> tipwbranches(m_tree->FindBranchesStartingOnOpenInterval(tipwardtime, oldtime));
+    vector<Branch_ptr>::iterator br;
+    for(br = tipwbranches.begin(); br != tipwbranches.end(); ++br)
+    {
+        assert((*br)->Event() != btypeEpoch);
+        double newlength((*br)->m_eventTime - tipwardtime);
+        newlength *= tipwardchange;
+        newtimes.push_back(tipwardtime + newlength);
+    }
+    vector<Branch_ptr> epochbranches(m_tree->FindEpochBranchesAt(oldtime));
+    for(br = epochbranches.begin(); br != epochbranches.end(); ++br)
+    {
+        assert((*br)->Event() == btypeEpoch);
+        newtimes.push_back(newtime);
+    }
+
+    vector<Branch_ptr> rootwbranches;
+    if (islastepoch)
+        rootwbranches = m_tree->FindBranchesStartingRootwardOf(oldtime);
+    else
+        rootwbranches = m_tree->FindBranchesStartingOnOpenInterval(oldtime, rootwardtime);
+
+    for(br = rootwbranches.begin(); br != rootwbranches.end(); ++br)
+    {
+        assert((*br)->Event() != btypeEpoch);
+        if (islastepoch) {
+           newtimes.push_back((*br)->m_eventTime + (newtime - oldtime));
+        } else {
+           double newlength(rootwardtime - (*br)->m_eventTime);
+           newlength *= rootwardchange;
+           newtimes.push_back(rootwardtime - newlength);
+        }
+    }
+
+    // Now put the new times into the tree.
+    if (!newtimes.empty())
+    {
+        Branchiter startpoint;
+
+        if (!tipwbranches.empty())
+        {
+            startpoint = m_tree->GetTimeList().FindIter(*(tipwbranches.begin()));
+        }
+        else if (!epochbranches.empty())
+        {
+            startpoint = m_tree->GetTimeList().FindIter(*(epochbranches.begin()));
+        }
+        else
+            startpoint = m_tree->GetTimeList().FindIter(*(rootwbranches.begin()));
+
+        m_tree->SetNewTimesFrom(startpoint, newtimes);
+        m_tree->ClearCurTargetLinkweight();
+    }
+
+    assert(m_tree->IsValidTree());
+
+    // Set the new epoch times; time 0 is not wanted, so we take it out!
+    epochtimes.erase(epochtimes.begin());
+    chstate.GetParameters().SetEpochTimes(epochtimes);
+    assert(m_tree->ConsistentWithParameters(chstate.GetParameters()));
+} // Rearrange
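+
+// Illustrative rescaling (hypothetical times): moving a boundary from oldtime = 3.0
+// to newtime = 2.0, with tipwardtime = 1.0 and rootwardtime = 5.0, gives
+// tipwardchange = (2-1)/(3-1) = 0.5 and rootwardchange = (5-2)/(5-3) = 1.5;
+// a node at time 2.2 is rescaled to 1.0 + (2.2-1.0)*0.5 = 1.6, and a node at
+// time 4.0 is rescaled to 5.0 - (5.0-4.0)*1.5 = 3.5.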
+
+//------------------------------------------------------------------------------------
+
+void EpochSizeArranger::ScoreRearrangement(ChainState & chstate)
+{
+    // Calculate data likelihood of new tree (stores it in the Tree object).
+    if (m_tree_changed) chstate.GetTree()->CalculateDataLikes();
+
+} // ScoreRearrangement
+
+//------------------------------------------------------------------------------------
+
+double EpochSizeArranger::Hastings(ChainState & chstate)
+{
+    Tree *tree = chstate.GetTree();
+    DoubleVec1d newepochs(chstate.GetParameters().GetEpochTimes());
+    DoubleVec1d oldepochs(chstate.GetOldParameters().GetEpochTimes());
+
+    assert(newepochs.size() == oldepochs.size());
+    DoubleVec1d::size_type index;
+    for(index = 0; index < newepochs.size(); ++index)
+    {
+       if (newepochs[index] != oldepochs[index]) break;
+    }
+    assert(index < newepochs.size()); // we should always find a changed epoch boundary
+
+    if (index == newepochs.size()) return 0.0;
+
+//  The Hastings ratio is of the form Ratio#1 x Ratio#2, where Ratio#2
+//  is simply 1 if the new epoch boundary is the most rootward
+//  epoch boundary in the tree.
+//
+//  Ratio#1, used in all cases, starts as the quantity:
+//     (newtau - tautipward) / (oldtau - tautipward),
+//     where tautipward is either the time of the epoch boundary tipwards
+//     of the changed epoch boundary, or zero if such a boundary does
+//     not exist.  This ratio is then raised to the power of the number
+//     of nodes in the tree between tautipward and newtau.
+//  Ratio#2, used conditionally (see above), starts as the quantity:
+//     (taurootward - newtau) / (taurootward - oldtau),
+//     which is then raised to the power of the number of nodes in the
+//     tree between newtau and taurootward.
+
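+//  Worked example (hypothetical numbers): with tautipward = 1.0, oldtau = 2.0,
+//  newtau = 3.0, taurootward = 5.0, 4 nodes between tautipward and newtau, and
+//  2 nodes between newtau and taurootward:
+//     Ratio#1 = (3-1)/(2-1) = 2.0   -> contributes 4 * ln(2.0)  ~  2.77
+//     Ratio#2 = (5-3)/(5-2) = 2/3   -> contributes 2 * ln(2/3)  ~ -0.81
+//  so the returned ln(Hastings ratio) is about 1.96.
+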
+    double tauratio(0.0),tiptau(0.0);
+    long nodecount(0L);
+    if (index != 0) tiptau = newepochs[index-1];
+    tauratio = (newepochs[index] - tiptau) / (oldepochs[index] - tiptau);
+    nodecount = tree->CountNodesBetween(tiptau,newepochs[index]);
+    double loghratio(nodecount*log(tauratio));
+
+    if (index != newepochs.size()-1)
+    {
+       assert(newepochs[index+1] == oldepochs[index+1]);
+       tauratio = (newepochs[index+1] - newepochs[index]) /
+                  (oldepochs[index+1] - oldepochs[index]);
+       nodecount = tree->CountNodesBetween(newepochs[index],
+                                            newepochs[index+1]);
+       loghratio += nodecount*log(tauratio);
+    }
+
+    return loghratio;
+
+} // Hastings
+
+//------------------------------------------------------------------------------------
+
+bool EpochSizeArranger::AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree)
+{
+    // NB:  We assume that the tips of the tree were not changed, and thus we only accept/reject the body of the tree.
+
+    if (badtree)    // reject this tree immediately
+    {
+        chstate.OverwriteTree();
+        chstate.OverwriteParameters();
+        return false;
+    }
+
+    // Sometimes an Epoch size change is past the bottom of the current tree and does nothing.  In this case,
+    // we accept but without changing the tree or anything to do with it.  NB:  if this check is taken out
+    // the program will DIE by trying to do data likelihood on an unchanged tree!
+    if (!m_tree_changed)
+    {
+        chstate.OverwriteOldParameters();
+        chstate.ParametersChanged();
+        return true;
+    }
+
+    Tree * tree = chstate.GetTree();
+    Tree * oldtree = chstate.GetOldTree();
+
+    ForceParameters & newparameters = chstate.GetParameters();
+    ForceParameters & oldparameters = chstate.GetOldParameters();
+    SinglePostLike & postlike = registry.GetSinglePostLike();
+
+    // Summarize tree; we end up owning this summary.
+    // WARNING:  leaks memory if intervening code throws.
+    TreeSummary * newtrsum = tree->SummarizeTree();
+    TreeSummary * oldtrsum = oldtree->SummarizeTree();
+
+    // Pass tree summary with old and new parameters to postlike routines; this returns ln likelihood!
+    double oldprior = postlike.Calc_lnProbGP(oldparameters.GetRegionalParameters(),
+                                             oldparameters.GetRegionalLogParameters(),
+                                             oldtrsum);
+    double newprior = postlike.Calc_lnProbGP(newparameters.GetRegionalParameters(),
+                                             newparameters.GetRegionalLogParameters(),
+                                             newtrsum);
+    //Postlike operates in Regional parameter space (unlike the priors, in Rearrange),
+    // so we have to send them that version.
+
+    // Delete the summary.
+    delete newtrsum;
+    delete oldtrsum;
+
+#ifndef STATIONARIES
+    double newlike(tree->GetDLValue() + newprior);
+    double oldlike(oldtree->GetDLValue() + oldprior);
+#else // STATIONARIES
+    double newlike(newprior);
+    double oldlike(oldprior);
+#endif
+
+    double test = (newlike - oldlike) / temperature;
+    test += Hastings(chstate);
+
+#if 0 // Always reject, JREMOVE/JRESTORE
+    tree->CopyPartialBody(oldtree);
+    tree->CopyStick(oldtree);
+    chstate.OverwriteParameters();
+    return false;
+#endif
+
+#if 0 // Always accept, JREMOVE/JRESTORE
+
+#else
+    if (test < 0.0)
+    {
+        if (test < log(randomSource->Float())) // rejection
+        {
+            tree->CopyPartialBody(oldtree);
+            tree->CopyStick(oldtree);
+            chstate.OverwriteParameters();
+            return false;
+        }
+    }
+#endif
+
+    oldtree->CopyPartialBody(tree);     // acceptance
+    oldtree->CopyStick(tree);
+    chstate.OverwriteOldParameters();
+    chstate.TreeChanged();
+    chstate.ParametersChanged();
+    return true;
+} // AcceptAndSynchronize
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+BayesArranger::BayesArranger(const BayesArranger& src)
+    : Arranger(src),
+      m_oldlogs(src.m_oldlogs),
+      m_newlogs(src.m_newlogs)
+{
+    // Deliberately blank.
+} // copy ctor
+
+//------------------------------------------------------------------------------------
+
+Arranger* BayesArranger::Clone() const
+{
+    return new BayesArranger(*this);
+} // Clone
+
+//------------------------------------------------------------------------------------
+
+void BayesArranger::SetParameters(ChainState & chainstate)
+{
+    // We make sure that oldlogs are ready for use, which means they start out equal to newlogs.
+    m_newlogs = chainstate.GetParameters().GetRegionalLogParameters();
+    m_oldlogs = m_newlogs;
+}  // BayesArranger::SetParameters
+
+//------------------------------------------------------------------------------------
+
+void BayesArranger::Rearrange(ChainState & chstate)
+{
+    DoubleVec1d newparameters = chstate.GetParameters().GetGlobalParameters();
+    //We must operate in global parameter space, since that is the space the prior operates in.
+
+    // Choose a parameter.
+    const ParamVector pv(true);
+    long chosen = pv.ChooseSampleParameterIndex(randomSource);
+
+    // Draw from appropriate prior.
+    pair<double, double> newp = pv[chosen].DrawFromPrior();
+
+    bool islog = false;
+    registry.GetForceSummary().SetParamWithConstraints(chosen, newp.first, newparameters, islog);
+
+    chstate.GetParameters().SetGlobalParameters(newparameters);
+    chstate.UpdateNewStickParams(); // make sure stick reflects this new parameter
+    double newlogregparam = chstate.GetParameters().GetRegionalLogParameter(chosen);
+
+    // This may not be a log, if it's growth, but we let the ForceParameter worry about that.
+    // We do need to make sure it's the regional parameter, not the global parameter.
+    islog = true;
+    registry.GetForceSummary().SetParamWithConstraints(chosen, newlogregparam, m_newlogs, islog);
+
+}  // BayesArranger::Rearrange
+
+//------------------------------------------------------------------------------------
+
+void BayesArranger::ScoreRearrangement(ChainState & chstate)
+{
+    // It would be logical to compute the prior here, but currently
+    // we do that in AcceptAndSynchronize.  OPTIMIZATION possible here.
+} // ScoreRearrangement
+
+//------------------------------------------------------------------------------------
+
+bool BayesArranger::AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree)
+{
+#ifdef STATIONARIES
+
+    chstate.OverwriteOldParameters();
+    chstate.UpdateOldStickParams();
+    m_oldlogs = m_newlogs;
+    chstate.ParametersChanged();
+    return true;
+
+#else // STATIONARIES
+
+    // MDEBUG OPTIMIZATION we reset the stick much more often than we need to, as it
+    // is only invalidated by changes to Theta, but we reset it on any change.
+
+    assert(!badtree);  // Bayesian arrangers never make bad trees, since they do not make trees at all!
+
+    ForceParameters & newparameters = chstate.GetParameters();
+    ForceParameters & oldparameters = chstate.GetOldParameters();
+    SinglePostLike & postlike = registry.GetSinglePostLike();
+
+    // Summarize tree; we end up owning this summary.
+    // WARNING:  leaks memory if intervening code throws
+    TreeSummary * trsum = chstate.GetTree()->SummarizeTree();
+
+    // Pass tree summary with old and new parameters to postlike routines; this returns ln likelihood!
+    double oldprior = postlike.Calc_lnProbGP(oldparameters.GetRegionalParameters(), m_oldlogs, trsum);
+    double newprior = postlike.Calc_lnProbGP(newparameters.GetRegionalParameters(), m_newlogs, trsum);
+
+    // Postlike operates in Regional parameter space (unlike the priors, in Rearrange),
+    // so we have to send them that version.
+
+    // Delete the summary.
+    delete trsum;
+
+    // choose a winner
+    double test = (newprior - oldprior) / temperature;
+
+    if (test < log(randomSource->Float()))                                              // reject
+    {
+        chstate.OverwriteParameters();
+        chstate.UpdateNewStickParams();
+        m_newlogs = m_oldlogs;
+        return false;
+    }
+
+    chstate.OverwriteOldParameters();                                                   // else accept
+    chstate.UpdateOldStickParams();
+    m_oldlogs = m_newlogs;
+    chstate.ParametersChanged();
+
+    return true;
+
+#endif // STATIONARIES
+} // BayesArranger::AcceptAndSynchronize
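
The acceptance step above is a tempered Metropolis test done in log space: the move is kept whenever (newprior - oldprior)/temperature is at least the log of a uniform draw. A minimal, self-contained sketch of that rule follows; AcceptMove and the use of std::mt19937 are illustrative stand-ins, not LAMARC's Random or posterior classes.

    // Tempered, log-space Metropolis acceptance test (illustrative sketch).
    #include <cmath>
    #include <random>

    bool AcceptMove(double newLnPost, double oldLnPost, double temperature,
                    std::mt19937 & rng)
    {
        std::uniform_real_distribution<double> unif(0.0, 1.0);
        double test = (newLnPost - oldLnPost) / temperature;
        // Comparing in log space avoids overflow/underflow of exp(lnPost).
        return test >= std::log(unif(rng));   // reject when test < log(U)
    }

With temperature equal to 1 this is the standard Metropolis rule; larger temperatures flatten the target, which is how heated chains accept more freely.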
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+void LocusArranger::SetParameters(ChainState & chainstate)
+{
+    // Do nothing--the parameters don't change in a moving locus rearrangement.
+}
+
+//------------------------------------------------------------------------------------
+// Our Locus arranger is a Gibbs arranger--it always accepts.
+
+void LocusArranger::Rearrange(ChainState & chstate)
+{
+    RecTree * rtree = dynamic_cast<RecTree *>(chstate.GetTree());
+    for (unsigned long mloc = 0; mloc < rtree->GetNumMovingLoci(); ++mloc)
+    {
+        //We might have a mix of jumping and floating loci--only do this for the jumping ones.
+        if (rtree->DoesThisLocusJump(mloc))
+        {
+            DoubleVec1d likelihoods = rtree->CalculateDataLikesForFloatingLocus(mloc);
+            ScaleLargestToZero(likelihoods);
+            DoubleVec1d likesums;
+            double total = 0;
+            for (unsigned long site = 0; site < likelihoods.size(); ++site)
+            {
+                total += exp(likelihoods[site]);
+                likesums.push_back(total);
+            }
+            double choice = randomSource->Float() * total;
+            for (unsigned long site = 0; site < likesums.size(); ++site)
+            {
+                //LS DEBUG:  inefficient, but earlier attempts were wrong, so we keep this simple approach.
+                rtree->SetMovingMapPosition(mloc, site);
+                if (likesums[site] > choice)
+                {
+                    break;
+                }
+            }
+        }
+    }
+}
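
Rearrange() above draws a new map position with probability proportional to exp(log-likelihood), after shifting the largest log value to zero so the exponentials stay in range. A self-contained sketch of that draw, with invented names (DrawProportionalToExp is not a LAMARC routine):

    // Draw an index i with probability proportional to exp(loglikes[i]).
    #include <algorithm>
    #include <cassert>
    #include <cmath>
    #include <random>
    #include <vector>

    std::size_t DrawProportionalToExp(const std::vector<double> & loglikes,
                                      std::mt19937 & rng)
    {
        assert(!loglikes.empty());
        double biggest = *std::max_element(loglikes.begin(), loglikes.end());
        std::vector<double> cumulative;
        double total = 0.0;
        for (std::size_t i = 0; i < loglikes.size(); ++i)
        {
            total += std::exp(loglikes[i] - biggest);   // largest term becomes exp(0) = 1
            cumulative.push_back(total);
        }
        std::uniform_real_distribution<double> unif(0.0, 1.0);
        double choice = unif(rng) * total;
        for (std::size_t i = 0; i < cumulative.size(); ++i)
            if (cumulative[i] > choice) return i;
        return cumulative.size() - 1;                   // guard against rounding error
    }

The loop in Rearrange() differs only in that it calls SetMovingMapPosition() for each candidate site on the way to the chosen one.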
+
+//------------------------------------------------------------------------------------
+
+void LocusArranger::ScoreRearrangement(ChainState & chstate)
+{
+    // Calculate data likelihood of new tree (stores it in the Tree object).
+    RecTree * rtree = dynamic_cast<RecTree *>(chstate.GetTree());
+    rtree->GetTimeList().SetAllUpdateDLs();
+    for (unsigned long mloc = 0; mloc < rtree->GetNumMovingLoci(); ++mloc)
+    {
+        rtree->CalculateDataLikesForMovingLocus(mloc);
+    }
+
+    //We need to fill the DLCells with appropriate values at the new location for the next arranger.
+    // If we choose later to only rearrange a single randomly-chosen moving locus instead of all of them,
+    // we'll need to store the number of the chosen locus and only CalculateDataLikesFor... for
+    // that locus.
+}
+
+//------------------------------------------------------------------------------------
+
+bool LocusArranger::AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree)
+{
+    assert(!badtree);  // The locus arranger never builds trees, so badtree should never be set.
+    chstate.MapChanged();
+    return true;
+    //Note:  We don't do any tree copying, since the tree itself doesn't own what we changed.
+    // So it's a good thing we always accept!
+}
+
+//------------------------------------------------------------------------------------
+
+Arranger* LocusArranger::Clone() const
+{
+    Arranger* arr = new LocusArranger(*this);
+    return arr;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Arranger* ZilchArranger::Clone() const
+{
+    Arranger* arr = new ZilchArranger(*this);
+    return arr;
+
+} // ZilchArranger::Clone
+
+//------------------------------------------------------------------------------------
+
+bool ZilchArranger::AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree)
+{
+    assert(!badtree);                   //What?  We did nothing!
+    //We assume the trees are still identical.
+    return false;                       //No sense fooling the user into thinking we did something.
+
+} // ZilchArranger::AcceptAndSynchronize
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Arranger* StairArranger::Clone() const
+{
+    Arranger* arr = new StairArranger(*this);
+    return arr;
+} // Clone
+
+//------------------------------------------------------------------------------------
+
+//  double StickMean(double freqA, double s, double toA, double toa) const;
+//  double StickVar(double freqA, double tipfreqA, double thetaA) const;
+
+double StairArranger::LnPBrownStickParams(const DoubleVec1d & param, const TreeSummary * partialtreedata) const
+{
+    const StickSelectPL& plf(registry.GetSinglePostLike().GetStickSelectPL());
+    double answ(0.0);
+
+    (void)plf;                          // Silence compiler warning about unused variable.
+
+    return answ;
+
+} // LnPBrownStickParams
+
+//------------------------------------------------------------------------------------
+
+void StairArranger::SetParameters(ChainState & chainstate)
+{
+    // Get the parameters we need, whatever those are.
+} // SetParameters
+
+//------------------------------------------------------------------------------------
+
+void StairArranger::Rearrange(ChainState & chstate)
+{
+    ForceParameters fp(chstate.GetParameters());
+    m_tree = chstate.GetTree();
+
+    m_tree->GetTimeManager()->MakeStickTilTime(fp, m_tree->RootTime());
+
+} // Rearrange
+
+//------------------------------------------------------------------------------------
+
+void StairArranger::ScoreRearrangement(ChainState & chstate)
+{
+    // In ResimulatingArrangers this routine scores P(D|G).  The analogous computation in StairArranger,
+    // like BayesArranger, is done in AcceptAndSynchronize, so that even when stationaries are being run,
+    // it will still happen.  Therefore this routine is empty.
+} // ScoreRearrangement
+
+//------------------------------------------------------------------------------------
+// Much of this code is cut and paste from BayesArranger and should probably be combined,
+// but not all of it, so this function can't simply be shared or inherited.
+
+bool StairArranger::AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree)
+{
+    assert(!badtree);                   // We don't make trees at all, how could it be bad?!
+
+    // Since we Gibbs sample, it's always accept...
+
+#ifndef STATIONARIES
+    // Summarize trees; we end up owning these summaries.
+    // WARNING:  leaks memory if intervening code throws.
+
+    // NB:  while it looks like we're comparing trees here, the trees differ only in their
+    // stairs.  We pass a whole TreeSummary only because there is no separate StairSummary.
+
+    TreeSummary * newtrsum = chstate.GetTree()->SummarizeTree();
+    TreeSummary * oldtrsum = chstate.GetOldTree()->SummarizeTree();
+
+    SinglePostLike & postlike = registry.GetSinglePostLike();
+    DoubleVec1d param = chstate.GetParameters().GetRegionalParameters();
+    DoubleVec1d logparam = chstate.GetParameters().GetRegionalLogParameters();
+    double oldprior = postlike.Calc_lnProbGS(param, logparam, oldtrsum);
+    double newprior = postlike.Calc_lnProbGS(param, logparam, newtrsum);
+    //  double oldprior = postlike.Calc_lnProbGP(param, logparam, oldtrsum);
+    //  double newprior = postlike.Calc_lnProbGP(param, logparam, newtrsum);
+
+    // Delete the summaries.
+    delete newtrsum;
+    delete oldtrsum;
+
+    // Choose the winner.
+    double test = (newprior - oldprior) / temperature;
+
+    if (test < log(randomSource->Float())) // reject
+    {
+        chstate.GetTree()->CopyStick(chstate.GetOldTree());
+        return false;
+    }
+#endif // STATIONARIES
+
+    chstate.GetOldTree()->CopyStick(chstate.GetTree()); // else accept
+    chstate.StickChanged();
+    return true;
+
+} // StairArranger::AcceptAndSynchronize
+
+//____________________________________________________________________________________
diff --git a/src/tree/arranger.h b/src/tree/arranger.h
new file mode 100644
index 0000000..4515341
--- /dev/null
+++ b/src/tree/arranger.h
@@ -0,0 +1,413 @@
+// $Id: arranger.h,v 1.62 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef ARRANGER_H
+#define ARRANGER_H
+
+#include <cassert>
+#include <cmath>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "vectorx.h"
+#include "branchbuffer.h"
+#include "arranger_types.h"
+#include "prior.h"                      // EpochSizeArranger contains a vector<Prior>
+
+// #include "event.h"    uses Event methods in .cpp
+// #include "tree.h"
+// #include "forceparam.h"
+// #include "forcesummary.h"
+// #include "random.h"
+
+class Event;
+class Tree;
+class ForceParameters;
+class Random;
+class ForceSummary;
+class ChainState;
+class TimeList;
+class Epoch;
+
+/*******************************************************************
+ The Arranger class transforms a tree and decides whether the new tree should be accepted or rejected.
+ It provides a routine DenovoTree() to make an independent tree, and a routine Rearrange to
+ make a tree based on a previous tree.
+
+ Subclasses of Arranger carry out different types of transformation.
+
+ For all Arrangers that modify the tree, as a postcondition the Range information in all Branches
+ remaining in the tree must be correct before Tree::Prune is called.  The ResimulatingArranger as
+ currently written achieves this by making the Ranges correct at the "active face" or current point
+ of rearrangement.  This would be problematic in a recombinant tree if rearrangement stopped before
+ the root was reached, as lower Branches would not be made correct.  However, the InactiveRecEvent
+ will not allow rearrangement to terminate until the root is reached.
+
+ Written by Jim Sloan, revised by Mary Kuhner
+
+ BayesArranger added 11/26/03 Mary.  This Arranger changes the parameter values rather than the tree.
+
+ DenovoArranger added 11/26/03 Mary.  This Arranger makes trees from scratch ("de novo") and replaces
+ the Denovo member function which could not be implemented on most Arrangers.
+
+ RecSiteArranger removed 3/29/06 Mary as it was a deteriorating pseudogene.
+
+ StairArranger added 4/4/07 Mary.  This Arranger changes the stairway of doom (AKA wiggling stick).
+
+********************************************************************/
+
+class Arranger
+{
+  private:
+    Arranger();                           // undefined
+    Arranger& operator=(const Arranger&); // undefined
+
+  protected:
+    Arranger(const Arranger& src);
+
+    double  m_timing, m_savetiming;
+
+  public:
+    Tree   *m_tree;             // public for speedy access by Event subclasses; also set by Chain::StartRegion()
+    Random *randomSource;
+
+    Arranger(double timing);
+    virtual ~Arranger() {};
+
+    // does not copy the tree pointer!
+    virtual Arranger* Clone() const = 0;
+
+    // Arrangement functions
+    virtual void    SetParameters(ChainState & chainstate) = 0;
+    virtual void    Rearrange(ChainState & chstate) = 0;
+    virtual void    ScoreRearrangement(ChainState & chstate) = 0;
+    virtual bool    AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree) = 0;
+
+    // Getter/Setters
+    Tree *  GetTree() const          { return m_tree; };
+    double  GetTiming() const        { return m_timing; };
+    virtual std::string  GetName() const     { return arrangerstrings::BASE; };
+
+    void    SetTiming(double t);
+    void    SetSaveTiming()             { m_savetiming = m_timing; };
+    void    RestoreTiming()             { m_timing = m_savetiming; };
+};
+
+//------------------------------------------------------------------------------------
+
+/*********************************************************************
+ ResimArranger is an abstract base class for Arrangers which do any form of resimulation of lineages
+ downward through the tree.  It is tightly coupled with the Event class, which provides expertise needed
+ in resimulation.  The eventvec is a vector of all available types of Events (depending on forces and
+ other strategy details) which ResimArranger polls to conduct its resimulation.
+***********************************************************************/
+
+class ResimArranger : public Arranger
+{
+  protected:
+    std::vector<Event*> m_eventvec;    // code for various event types
+
+    // helper functions
+    ResimArranger(const ResimArranger& src);
+    void           ClearEventVec();
+    virtual void   DropAll(double eventT);
+    virtual double EventTime(Event*& returnevent, double eventT, double nextT);
+    virtual double Activate(Tree * oldtree);
+    virtual void   Resimulate(double eventT, ChainState & chainstate);
+    virtual bool   StopNow() const;
+    double         NextInterval(double lastT, FC_Status & fcstatus);
+    virtual void   CleanupAfterResimulate();
+
+    bool m_hasLogisticSelection;
+    double m_logsel_cutoff;
+
+  public:
+    // The following variables are helpers for use by the subclasses
+    // of Event.  They are public because otherwise Event would have to be a
+    // friend, and this would tie Arranger to the specific subclasses of Event.
+    LongVec1d m_xactives;               // dim: cross partitions
+    LongVec2d m_pactives;               // dim: part-force X partitions
+    LongVec1d m_xinactives;             // dim: cross partitions
+    LongVec2d m_pinactives;             // dim: part-force X partitions
+
+    BranchBuffer m_activelist;          // lineages currently active
+    BranchBuffer m_inactivelist;        // inactive lineages in current interval
+
+    ResimArranger(const ForceSummary & fs, double timing);
+    virtual ~ResimArranger() = 0;       // "implemented pure virtual"
+
+    // Arrangement functions
+    virtual void   SetParameters(ChainState & chainstate);
+    virtual void   Rearrange(ChainState & chstate);
+    virtual void   ScoreRearrangement(ChainState & chstate);
+    virtual bool   AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree);
+    virtual double Hastings(ChainState & chstate);
+
+    long   ActiveSize() const { return m_activelist.Size(); };
+
+    // Getters/Setters
+    virtual std::string GetName() const { return arrangerstrings::RESIM; };
+};
+
+//------------------------------------------------------------------------------------
+
+class DropArranger: public ResimArranger
+{
+  protected:
+    DropArranger(const DropArranger& src) :
+        ResimArranger(src) {};
+
+  public:
+    DropArranger(const ForceSummary & fs, double timing)
+        : ResimArranger(fs, timing) {};
+    virtual ~DropArranger() {};
+    virtual Arranger*  Clone() const;
+    virtual std::string     GetName() const { return arrangerstrings::DROP; };
+
+};
+
+//------------------------------------------------------------------------------------
+
+class DenovoArranger: public ResimArranger
+{
+  protected:
+    DenovoArranger(const DenovoArranger& src) :
+        ResimArranger(src) {};
+
+  public:
+    DenovoArranger(const ForceSummary & fs, double timing)
+        : ResimArranger(fs, timing) {};
+    virtual  ~DenovoArranger() {};
+    virtual Arranger*  Clone() const;
+    virtual void       Rearrange(ChainState & chstate);
+    virtual bool       AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree);
+    virtual std::string     GetName() const { return arrangerstrings::DENO; };
+
+};
+
+//------------------------------------------------------------------------------------
+
+class BaseHapArranger : public Arranger
+{
+  protected:
+    BaseHapArranger(const BaseHapArranger& src)
+        : Arranger(src) {};
+
+  public:
+    BaseHapArranger(double timing) : Arranger(timing)   {};
+    virtual void      SetParameters(ChainState & chainstate);
+    virtual void      Rearrange(ChainState & chstate) = 0;
+    virtual void      ScoreRearrangement(ChainState & chstate);
+    virtual bool      AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree);
+    virtual Arranger* Clone() const = 0;
+    virtual std::string    GetName() const = 0;
+};
+
+//------------------------------------------------------------------------------------
+
+class HapArranger : public BaseHapArranger
+{
+  protected:
+    HapArranger(const HapArranger& src)
+        : BaseHapArranger(src) {};
+
+  public:
+    HapArranger(double timing) : BaseHapArranger(timing) {};
+    virtual void      Rearrange(ChainState & chstate);
+    virtual Arranger* Clone() const;
+    virtual std::string    GetName() const { return arrangerstrings::HAP; };
+};
+
+//------------------------------------------------------------------------------------
+// The 'Probability Haplotype Arranger' (which is what I'm calling it here in the code
+//  because that's what it is; we'll probably call it the 'Trait Haplotype
+//  Arranger' for the user) is the arranger to use when you have haplotypes
+//  that need to be swapped among a set instead of among just two alternating
+//  options.
+//  It's being added here for use for trait data with sets of possible
+//  haplotype resolutions (i.e. HH, Hh, and hH).
+
+class ProbHapArranger : public BaseHapArranger
+{
+  protected:
+    ProbHapArranger(const ProbHapArranger& src)
+        : BaseHapArranger(src) {};
+
+  public:
+    ProbHapArranger(double timing):BaseHapArranger(timing) {};
+    virtual void      Rearrange(ChainState & chstate);
+    virtual void      ScoreRearrangement(ChainState & chstate);
+    virtual Arranger* Clone() const;
+    virtual std::string    GetName() const { return arrangerstrings::PROBHAP; };
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class TreeSizeArranger : public ResimArranger
+{
+  protected:
+    TreeSizeArranger(const TreeSizeArranger& src) : ResimArranger(src) {};
+    virtual double Activate(Tree * oldtree);
+    virtual void   Resimulate(double eventT, ChainState & chainstate);
+    virtual void   CleanupAfterResimulate();
+    bool NodeChangesEpoch(Branch_ptr pBranch, double newtime, const DoubleVec1d & epochtimes) const;
+
+  public:
+    TreeSizeArranger(const ForceSummary & fs, double timing);
+    virtual ~TreeSizeArranger() {};
+    virtual double     Hastings(ChainState & chstate);
+    virtual Arranger*  Clone() const;
+    virtual std::string GetName() const { return arrangerstrings::TREESIZE; };
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class EpochSizeArranger : public ResimArranger
+{
+  private:
+    bool m_tree_changed; // did our rearrangement touch the tree?
+
+  protected:
+    const std::vector<Epoch> * m_epochs;  // non-owning pointer
+    vector<Prior> m_priors;
+
+    EpochSizeArranger(const EpochSizeArranger& src);
+    // we disavow the following functions, ugh, but it's necessary
+    virtual double Activate(Tree *) { assert(false); return 0.0; };
+    virtual void Resimulate(double, ChainState &) { assert(false); };
+    virtual void CleanupAfterResimulate() { assert(false); };
+
+  public:
+    EpochSizeArranger(const ForceSummary & fs, double timing);
+    virtual ~EpochSizeArranger() {};
+    virtual double     Hastings(ChainState & chstate);
+    virtual Arranger*  Clone() const;
+    virtual std::string GetName() const { return arrangerstrings::EPOCHSIZE; };
+
+    // Arrangement functions.
+    // We accept ResimArranger::SetParameters().
+    virtual void    Rearrange(ChainState & chstate);
+    virtual void    ScoreRearrangement(ChainState & chstate);
+    virtual bool    AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree);
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class EpochNudgeArranger : public ResimArranger
+{
+  private:
+    bool m_tree_changed; // did our rearrangement touch the tree?
+
+  protected:
+    const std::vector<Epoch> * m_epochs;  // non-owning pointer
+    vector<Prior> m_priors;
+
+    EpochNudgeArranger(const EpochNudgeArranger& src);
+    // we disavow the following functions, ugh, but it's necessary
+    virtual double Activate(Tree *) { assert(false); return 0.0; };
+    virtual void Resimulate(double, ChainState &) { assert(false); };
+    virtual void CleanupAfterResimulate() { assert(false); };
+
+  public:
+    EpochNudgeArranger(const ForceSummary & fs, double timing);
+    virtual ~EpochNudgeArranger() {};
+    virtual double     Hastings(ChainState & chstate);
+    virtual Arranger*  Clone() const;
+    virtual std::string GetName() const { return arrangerstrings::EPOCHNUDGE; };
+
+    // Arrangement functions.
+    // We accept ResimArranger::SetParameters().
+    virtual void    Rearrange(ChainState & chstate);
+    virtual void    ScoreRearrangement(ChainState & chstate);
+    virtual bool    AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree);
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class BayesArranger : public Arranger
+{
+  private:
+    DoubleVec1d m_oldlogs;
+    DoubleVec1d m_newlogs;
+
+  protected:
+    BayesArranger(const BayesArranger& src);
+
+  public:
+    BayesArranger(double timing) : Arranger(timing) {};
+    virtual void      SetParameters(ChainState & chainstate);
+    virtual void      Rearrange(ChainState & chstate);
+    virtual void      ScoreRearrangement(ChainState & chstate);
+    virtual bool      AcceptAndSynchronize(ChainState &, double temperature, bool badtree);
+    virtual Arranger* Clone() const;
+    virtual std::string GetName() const { return arrangerstrings::BAYES; };
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class LocusArranger : public Arranger
+{
+  protected:
+    // LocusArranger(const LocusArranger& src);  //we accept the default.
+
+  public:
+    LocusArranger(double timing) : Arranger(timing) {};
+    virtual void      SetParameters(ChainState & chainstate);
+    virtual void      Rearrange(ChainState & chstate);
+    virtual void      ScoreRearrangement(ChainState & chstate);
+    virtual bool      AcceptAndSynchronize(ChainState &, double temperature, bool badtree);
+    virtual Arranger* Clone() const;
+    virtual std::string GetName() const { return arrangerstrings::LOCUS; };
+};
+
+//------------------------------------------------------------------------------------
+
+class ZilchArranger: public Arranger
+{
+  public:
+    ZilchArranger(double timing) : Arranger(timing) {};
+    virtual      ~ZilchArranger() {};
+    virtual void SetParameters(ChainState & chainstate) {};
+    virtual void Rearrange(ChainState & chstate) {};
+    virtual void ScoreRearrangement(ChainState & chstate) {};
+    virtual bool AcceptAndSynchronize(ChainState &, double temperature, bool badtree);
+    virtual Arranger* Clone() const;
+    virtual std::string GetName() const { return arrangerstrings::ZILCH; };
+};
+
+//------------------------------------------------------------------------------------
+
+class StairArranger: public Arranger
+{
+  protected:
+    double LnPBrownStickParams(const DoubleVec1d & param, const TreeSummary * partialtreedata) const;
+
+  public:
+    StairArranger(double timing) : Arranger(timing) {};
+    virtual ~StairArranger() {};
+    virtual Arranger* Clone() const;
+    virtual std::string GetName() const { return arrangerstrings::STICK; };
+
+    // Arrangement functions
+    virtual void SetParameters(ChainState & chainstate);
+    virtual void Rearrange(ChainState & chstate);
+    virtual void ScoreRearrangement(ChainState & chstate);
+    virtual bool AcceptAndSynchronize(ChainState & chstate, double temperature, bool badtree);
+};
+
+#endif // ARRANGER_H
+
+//____________________________________________________________________________________
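
The interface above fixes the protocol a chain driver follows for one Markov step: SetParameters(), Rearrange(), ScoreRearrangement(), then AcceptAndSynchronize(). The sketch below shows that call order with invented stand-in types; MockState and NullArranger are not LAMARC classes, and the real driver lives elsewhere in LAMARC's chain code.

    // Illustrative one-step driver for the Arranger protocol.
    #include <iostream>
    #include <string>

    struct MockState {};                      // stands in for ChainState

    class SketchArranger
    {
      public:
        virtual ~SketchArranger() {}
        virtual void SetParameters(MockState &) = 0;
        virtual void Rearrange(MockState &) = 0;
        virtual void ScoreRearrangement(MockState &) = 0;
        virtual bool AcceptAndSynchronize(MockState &, double temperature, bool badtree) = 0;
        virtual std::string GetName() const = 0;
    };

    class NullArranger : public SketchArranger   // a do-nothing arranger, like ZilchArranger
    {
      public:
        virtual void SetParameters(MockState &) {}
        virtual void Rearrange(MockState &) {}
        virtual void ScoreRearrangement(MockState &) {}
        virtual bool AcceptAndSynchronize(MockState &, double, bool) { return false; }
        virtual std::string GetName() const { return "Null-Arranger"; }
    };

    int main()
    {
        MockState state;
        NullArranger arr;
        arr.SetParameters(state);             // cache whatever the proposal needs
        arr.Rearrange(state);                 // propose a change
        arr.ScoreRearrangement(state);        // score it (sometimes deferred)
        bool accepted = arr.AcceptAndSynchronize(state, 1.0, false);
        std::cout << arr.GetName() << (accepted ? ": accepted" : ": rejected") << std::endl;
        return 0;
    }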
diff --git a/src/tree/arranger_types.cpp b/src/tree/arranger_types.cpp
new file mode 100644
index 0000000..192a584
--- /dev/null
+++ b/src/tree/arranger_types.cpp
@@ -0,0 +1,31 @@
+// $Id: arranger_types.cpp,v 1.13 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "arranger_types.h"
+
+//------------------------------------------------------------------------------------
+
+const std::string arrangerstrings::BASE     = "Base-Arranger";
+const std::string arrangerstrings::RESIM    = "Resim-Arranger";
+const std::string arrangerstrings::DROP     = "Tree-Arranger";
+const std::string arrangerstrings::HAP      = "Haplotype-Arranger";
+const std::string arrangerstrings::BAYES    = "Bayes-Arranger";
+const std::string arrangerstrings::DENO     = "Denovo-Arranger";
+const std::string arrangerstrings::PROBHAP  = "Trait-Haplotype-Arranger";
+const std::string arrangerstrings::RECSITE  = "RecSite-Arranger";
+const std::string arrangerstrings::TREESIZE = "Tree-Size-Arranger";
+const std::string arrangerstrings::LOCUS    = "Map-Arranger";
+const std::string arrangerstrings::ZILCH    = "Do-Nothing-Arranger";
+const std::string arrangerstrings::STICK    = "Stair-Arranger";
+const std::string arrangerstrings::EPOCHSIZE = "Epoch-Size-Arranger";
+const std::string arrangerstrings::EPOCHNUDGE = "Epoch-Nudge-Arranger";
+
+//____________________________________________________________________________________
diff --git a/src/tree/arranger_types.h b/src/tree/arranger_types.h
new file mode 100644
index 0000000..67d3b01
--- /dev/null
+++ b/src/tree/arranger_types.h
@@ -0,0 +1,38 @@
+// $Id: arranger_types.h,v 1.15 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef ARRANGER_TYPES_H
+#define ARRANGER_TYPES_H
+
+#include <string>
+
+class arrangerstrings
+{
+  public:
+    static const std::string BASE   ;
+    static const std::string RESIM  ;
+    static const std::string DROP   ;
+    static const std::string HAP    ;
+    static const std::string BAYES  ;
+    static const std::string DENO   ;
+    static const std::string RECSITE;
+    static const std::string PROBHAP;
+    static const std::string TREESIZE;
+    static const std::string LOCUS  ;
+    static const std::string ZILCH  ;
+    static const std::string STICK  ;
+    static const std::string EPOCHSIZE;
+    static const std::string EPOCHNUDGE;
+
+};
+
+#endif  // ARRANGER_TYPES_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/arrangervec.cpp b/src/tree/arrangervec.cpp
new file mode 100644
index 0000000..2113ae1
--- /dev/null
+++ b/src/tree/arrangervec.cpp
@@ -0,0 +1,313 @@
+// $Id: arrangervec.cpp,v 1.34 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>
+#include <map>
+#include <vector>
+
+#include "local_build.h"
+
+#include "arranger.h"
+#include "arranger_types.h"
+#include "arrangervec.h"
+#include "constants.h"
+#include "registry.h"
+#include "stringx.h"
+
+//------------------------------------------------------------------------------------
+
+ArrangerVec::ArrangerVec(double dropTiming, double sizeTiming,
+                         double hapTiming, double probhapTiming,
+                         double bayesTiming, double locusTiming,
+                         double zilchTiming, double stairTiming,
+                         double epochsizeTiming, double epochnudgeTiming)
+{
+    assert(dropTiming >= 0.0);
+    assert(sizeTiming >= 0.0);
+    assert(hapTiming >= 0.0);
+    assert(probhapTiming >= 0.0);
+    assert(bayesTiming >= 0.0);
+    assert(locusTiming >= 0.0);
+    assert(zilchTiming >= 0.0);
+    assert(stairTiming >= 0.0);
+    assert(epochsizeTiming >= 0.0);
+    assert(epochnudgeTiming >= 0.0);
+    double denovoTiming = 0.0;
+
+#ifdef STATIONARIES
+#ifdef ALL_ARRANGERS_DENOVO // Stationaries with denovo arranger in ALL cases.
+    denovoTiming = dropTiming;
+    dropTiming = 0.0;
+#else // Stationaries with denovo arranger only in Bayesian case.
+    if (bayesTiming > 0)
+    {
+        denovoTiming = dropTiming;
+        dropTiming = 0.0;
+    }
+#endif // Stationaries: Bayesian versus all cases.
+#endif // STATIONARIES
+
+    const ForceSummary & fs(registry.GetForceSummary());
+
+    arrangers[arrangerstrings::DENO]     = new DenovoArranger(fs, denovoTiming);
+    arrangers[arrangerstrings::DROP]     = new DropArranger(fs, dropTiming);
+    arrangers[arrangerstrings::TREESIZE] = new TreeSizeArranger(fs, sizeTiming);
+    arrangers[arrangerstrings::HAP]      = new HapArranger(hapTiming);
+    arrangers[arrangerstrings::PROBHAP]  = new ProbHapArranger(probhapTiming);
+    arrangers[arrangerstrings::BAYES]    = new BayesArranger(bayesTiming);
+    arrangers[arrangerstrings::LOCUS]    = new LocusArranger(locusTiming);
+    arrangers[arrangerstrings::ZILCH]    = new ZilchArranger(zilchTiming);
+    arrangers[arrangerstrings::STICK]    = new StairArranger(stairTiming);
+    arrangers[arrangerstrings::EPOCHSIZE] = new EpochSizeArranger(fs, epochsizeTiming);
+//    commented out because implementation is incomplete  JY 2012/11/06
+//    arrangers[arrangerstrings::EPOCHNUDGE] = new EpochNudgeArranger(fs, epochnudgeTiming);
+
+    Normalize();
+}
+
+//------------------------------------------------------------------------------------
+
+ArrangerVec::~ArrangerVec()
+{
+    ClearAll();
+}
+
+//------------------------------------------------------------------------------------
+
+ArrangerVec::ArrangerVec(const ArrangerVec& src)
+{
+    CopyAllMembers(src);
+} // copy ctor
+
+//------------------------------------------------------------------------------------
+
+ArrangerVec& ArrangerVec::operator=(const ArrangerVec& src)
+{
+    CopyAllMembers(src);
+    return *this;
+} // op=
+
+//------------------------------------------------------------------------------------
+
+double ArrangerVec::GetArrangerTiming(const string & atype) const
+{
+    if(arrangers.find(atype) != arrangers.end())
+    {
+        const_arrangerit it = arrangers.find(atype);
+        return it->second->GetTiming();
+    }
+    else
+    {
+        throw implementation_error("Missing arranger of type " + atype);
+        return -1.0;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ArrangerVec::CopyAllMembers(const ArrangerVec & cp)
+{
+    if (this != &cp)
+    {
+        ClearAll();
+        const_arrangerit it;
+        for(it = cp.arrangers.begin(); it != cp.arrangers.end(); it++)
+        {
+            Arranger* arr = it->second->Clone();
+            arrangers.insert(std::make_pair<string, Arranger*>(arr->GetName(), arr));
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ArrangerVec::Normalize()
+{
+    const_arrangerit ait;
+    double totalTime = 0.0;
+    for(ait = arrangers.begin(); ait != arrangers.end(); ++ait)
+    {
+        double thisTime = ((*ait).second)->GetTiming();
+        assert(thisTime >= 0.0);
+        totalTime += thisTime;
+    }
+    assert(totalTime > 0.0);
+    for(ait = arrangers.begin(); ait != arrangers.end(); ++ait)
+    {
+        double thisTime = ((*ait).second)->GetTiming();
+        ((*ait).second)->SetTiming(thisTime / totalTime);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+unsigned long ArrangerVec::size() const
+{
+    return arrangers.size();
+}
+
+//------------------------------------------------------------------------------------
+
+bool ArrangerVec::empty() const
+{
+    return arrangers.empty();
+}
+
+//------------------------------------------------------------------------------------
+
+Arranger* ArrangerVec::GetDenovoArranger() const
+{
+    const_arrangerit it = arrangers.find(arrangerstrings::DENO);
+    if (it == arrangers.end())
+    {
+        impossible_error e("Arranger needed to make Denovo tree is missing");
+        throw e;
+    }
+    return it->second;
+} // GetDenovoArranger
+
+//------------------------------------------------------------------------------------
+
+Arranger* ArrangerVec::GetRandomArranger(double rand) const
+{
+#if 0 // start JREMOVE
+    static long int denovo;
+    if (denovo == 32) denovo = 33;
+    else denovo = 32;
+
+    const_arrangerit it;
+    if (denovo == 32) it = arrangers.find(arrangerstrings::DENO);
+    else it = arrangers.find(arrangerstrings::EPOCHSIZE);
+
+    return it->second;
+
+#endif // end JREMOVE
+
+#if 1 // JRESTORE
+    const_arrangerit it;
+    double timing;
+
+    assert(arrangers.size() > 0);
+
+    for (it = arrangers.begin(); it != arrangers.end(); ++it)
+    {
+        timing = it->second->GetTiming();
+        if (timing > rand) return it->second;
+        rand -= timing;
+    }
+
+    // in case of rounding error, we might flow through to here
+    for (it = arrangers.begin(); it != arrangers.end(); ++it)
+    {
+        if (it->second->GetTiming() > 0.0) return it->second;
+    }
+#endif
+
+    // no arrangers found!
+    assert(false);
+    return NULL;
+} // GetRandomArranger
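
GetRandomArranger() relies on Normalize() having scaled the timings to sum to one; it then walks the map subtracting each timing from the uniform draw until one exceeds what remains, with a fallback pass in case rounding leaves a sliver. A compact sketch of the same weighted pick over a plain vector (NormalizeWeights and PickWeighted are invented names, not LAMARC routines):

    // Weighted selection in the style of Normalize() + GetRandomArranger().
    #include <cassert>
    #include <cstddef>
    #include <numeric>
    #include <vector>

    void NormalizeWeights(std::vector<double> & w)
    {
        double total = std::accumulate(w.begin(), w.end(), 0.0);
        assert(total > 0.0);
        for (std::size_t i = 0; i < w.size(); ++i) w[i] /= total;
    }

    std::size_t PickWeighted(const std::vector<double> & w, double rand01)
    {
        for (std::size_t i = 0; i < w.size(); ++i)
        {
            if (w[i] > rand01) return i;      // this entry wins
            rand01 -= w[i];                   // otherwise spend its share and move on
        }
        // Rounding error can leave a tiny remainder; fall back to the first
        // entry with nonzero weight, as the original code does.
        for (std::size_t i = 0; i < w.size(); ++i)
            if (w[i] > 0.0) return i;
        return 0;
    }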
+
+//------------------------------------------------------------------------------------
+
+StringVec1d ArrangerVec::GetAllStringsForActiveArrangers() const
+{
+    StringVec1d retvec;
+    for (const_arrangerit arr=arrangers.begin(); arr != arrangers.end(); arr++)
+    {
+        if ((*arr).second->GetTiming() > 0)
+        {
+            retvec.push_back((*arr).first);
+        }
+    }
+    return retvec;
+}
+
+//------------------------------------------------------------------------------------
+
+void ArrangerVec::ClearAll()
+{
+    arrangerit it;
+
+    for (it = arrangers.begin(); it != arrangers.end(); ++it)
+    {
+        delete it->second;
+    }
+    arrangers.clear();
+} // ClearAll
+
+//------------------------------------------------------------------------------------
+
+void ArrangerVec::ZeroHapArranger()
+{
+    if (arrangers[arrangerstrings::HAP]->GetTiming() != 0.0)
+    {
+        for (arrangerit arr = arrangers.begin(); arr != arrangers.end(); ++arr)
+        {
+            (*arr).second->SetSaveTiming();
+        }
+        arrangers[arrangerstrings::HAP]->SetTiming(0.0);
+        Normalize();
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ArrangerVec::ZeroProbHapArranger()
+{
+    if (arrangers[arrangerstrings::PROBHAP]->GetTiming() != 0.0)
+    {
+        for (arrangerit arr = arrangers.begin(); arr != arrangers.end(); ++arr)
+        {
+            (*arr).second->SetSaveTiming();
+        }
+        arrangers[arrangerstrings::PROBHAP]->SetTiming(0.0);
+        Normalize();
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ArrangerVec::ZeroLocusArranger()
+{
+    if (arrangers[arrangerstrings::LOCUS]->GetTiming() != 0.0)
+    {
+        for (arrangerit arr = arrangers.begin(); arr != arrangers.end(); ++arr)
+        {
+            (*arr).second->SetSaveTiming();
+        }
+        arrangers[arrangerstrings::LOCUS]->SetTiming(0.0);
+        Normalize();
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ArrangerVec::RestoreTiming()
+{
+    for (arrangerit arr = arrangers.begin(); arr != arrangers.end(); ++arr)
+    {
+        (*arr).second->RestoreTiming();
+    }
+    Normalize();
+}
+
+//------------------------------------------------------------------------------------
+
+void ArrangerVec::PrintArrangers() const
+{
+    for (arrangermap::const_iterator arr = arrangers.begin(); arr != arrangers.end(); ++arr)
+    {
+        std::cerr << (*arr).first << ": " << ToString((*arr).second->GetTiming()) << std::endl;
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/tree/arrangervec.h b/src/tree/arrangervec.h
new file mode 100644
index 0000000..604004b
--- /dev/null
+++ b/src/tree/arrangervec.h
@@ -0,0 +1,89 @@
+// $Id: arrangervec.h,v 1.28 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef ARRANGERVEC_H
+#define ARRANGERVEC_H
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "arranger_types.h"
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+// This class is a managed map of Arranger objects (objects which
+// can carry out a single Markov Chain step).  Mapping is between
+// the Arranger's type and the object itself.
+
+// Written by Mary, Jon, and/or Elizabeth, probably around 11/22/2002
+
+// Added ability to manage ForceParameters, needed by BayesArranger.
+// Mary 2003/12/01
+
+// NOTE:  If you run Bayesian Stationaries, the code here will silently
+// transform all mention of the DropArranger to the DenovoArranger.
+// As of May 17, 2010, it will do so for ALL stationaries or for just the
+// Bayesian case, compile-time-selectable by defining ALL_ARRANGERS_DENOVO.
+
+//------------------------------------------------------------------------------------
+
+class Arranger;
+
+typedef std::map<std::string, Arranger*> arrangermap;
+typedef arrangermap::iterator arrangerit;
+typedef arrangermap::const_iterator const_arrangerit;
+
+class ArrangerVec
+{
+  private:
+    ArrangerVec();          // undefined
+
+    arrangermap arrangers;
+    void                    CopyAllMembers(const ArrangerVec& cp);
+    void                    Normalize();
+
+  public:
+    ArrangerVec(double dropTiming, double sizeTiming,
+                double hapTiming, double probhapTiming,
+                double bayesTiming,
+                double locusTiming, double zilchTiming,
+                double stairTiming, double epochsizeTiming,
+                double epochnudgeTiming);
+    ~ArrangerVec();
+    ArrangerVec(const ArrangerVec& src);
+    ArrangerVec& operator=(const ArrangerVec& src);
+
+    double                 GetArrangerTiming(const std::string & atype) const;
+    Arranger*              GetDenovoArranger() const;
+    Arranger*              GetRandomArranger(double) const;
+    StringVec1d            GetAllStringsForActiveArrangers() const;
+
+    void                   ClearAll();
+
+    void                   ZeroHapArranger();
+    void                   ZeroProbHapArranger();
+    void                   ZeroLocusArranger();
+    void                   RestoreTiming();
+
+    arrangerit             begin() { return arrangers.begin(); };
+    arrangerit             end() { return arrangers.end(); };
+    const_arrangerit       begin() const { return arrangers.begin(); };
+    const_arrangerit       end() const { return arrangers.end(); };
+    unsigned long          size() const;
+    bool                   empty() const;
+
+    void      PrintArrangers() const;
+};
+
+#endif  // ARRANGERVEC_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/branch.cpp b/src/tree/branch.cpp
new file mode 100644
index 0000000..ca94619
--- /dev/null
+++ b/src/tree/branch.cpp
@@ -0,0 +1,2295 @@
+// $Id: branch.cpp,v 1.109 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <algorithm>
+#include <cassert>
+#include <iostream>                     // for debug cerr
+#include <numeric>                      // for std::accumulate
+
+#ifdef DMALLOC_FUNC_CHECK
+#include <dmalloc.h>
+#endif
+
+#include "local_build.h"
+
+#include "branch.h"
+#include "branchbuffer.h"
+#include "datapack.h"
+#include "range.h"                      // For Link-related typedefs and constants.
+#include "summary.h"
+#include "treesum.h"
+
+#include "force.h"                      // for RBranch::RecCopyPartitionsFrom, Branch::IsAMember
+#include "locus.h"                      // for TipData stuff used in constructor
+
+#include "tinyxml.h"
+
+// This turns on the detailed print out of the creation and analysis of each coalescence tree.
+// JRM 4/10
+//#define PRINT_TREE_DETAILS
+
+// Print details of what was used in the summary scoring.
+// JRM 4/10
+//#define PRINT_SUMMARY_DETAILS
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+// Initialization of static variable; needs to be in .cpp file.  It is being initialized to the result
+// of calling the implicit default constructor for Branch_ptr (ie, boost::shared_ptr<Branch>).
+Branch_ptr Branch::NONBRANCH;
+
+//------------------------------------------------------------------------------------
+
+string ToString(branch_type btype)
+{
+    switch(btype)
+    {
+        case btypeBase:   return "Base";
+        case btypeTip:    return "Tip";
+        case btypeCoal:   return "Coalescence";
+        case btypeMig:    return "Migration";
+        case btypeDivMig: return "DivMigration";
+        case btypeDisease:return "Disease";
+        case btypeRec:    return "Recombination";
+        case btypeEpoch:  return "Epochboundary";
+    }
+
+    assert(false);
+    return "";
+}
+
+//------------------------------------------------------------------------------------
+// Initializes Branch's Range pointer object to pointer value of argument, not to pointer to copy of that object.
+// Thus, be sure that caller of this constructor either constructs a new Range or constructs a new copy of one.
+// THIS CTOR does a SHALLOW COPY of its RANGE-OBJECT POINTER.
+//
+// This constructor is the only one which behaves this way.  For all classes derived from Branch, constructors which
+// take as a single argument or as one or more of several arguments a const Range * const value must be given a
+// pointer to a freshly-allocated or copied (by a deep-copying constructor) object.  ALL OTHER CTORS (for classes
+// derived from Branch) do a DEEP COPY of their RANGE-OBJECT POINTER-valued argument(s).  They usually do so by
+// allocating (using CreateRange) or copying (using a deep-copying copy constructor) and passing the result
+// to THIS constructor.
+
+Branch::Branch(Range * newrangeptr)
+    : boost::enable_shared_from_this<Branch>(),
+      // Initialize m_rangePtr to pointer to original object (having been allocated by caller), not to pointer
+      // to COPY of pointee.  Ie, this is a shallow copy.  All callers of this constructor must freshly-allocate
+      // or deep-copy the Range object whose pointer they pass to this function.
+      // "newrangeptr" may be NULL (if the BBranch ctor is called).  This is OK; do not ASSERT here.
+      // Only ASSERT if somebody tries to DEREFERENCE that NULL pointer.
+      m_rangePtr(newrangeptr),
+      m_parents(NELEM, Branch::NONBRANCH),
+      m_children(NELEM, Branch::NONBRANCH),
+      m_ID(),
+      m_equivBranch(Branch::NONBRANCH),
+      m_partitions(registry.GetDataPack().GetNPartitionForces(), FLAGLONG)
+{
+    m_eventTime     = 0.0;
+    m_updateDL      = false;
+    m_marked        = false;
+    m_wasCoalCalced = false;
+    m_isSample      = 1;
+
+} // Branch constructor
+
+//------------------------------------------------------------------------------------
+// Must delete the Range object held by pointer and always allocated on the heap.
+// All classes derived from Branch have default (ie, empty) destructors, but the
+// runtime system calls this one for the base class (since all Branch classes
+// derive from Branch), and this single destructor deallocates the Range object.
+
+Branch::~Branch()
+{
+    // We need to delete the contained Range member, since we own it.
+    delete m_rangePtr;
+
+} // Branch destructor
+
+//------------------------------------------------------------------------------------
+// DEEP-COPYING copy constructor.
+
+Branch::Branch(const Branch & src)
+    : boost::enable_shared_from_this<Branch>(),
+      m_parents(NELEM, Branch::NONBRANCH),
+      m_children(NELEM, Branch::NONBRANCH),
+      m_equivBranch(Branch::NONBRANCH)
+{
+    CopyAllMembers(src);                // Does a deep copy of objects (namely Range) held by pointer.
+} // Branch copy constructor
+
+//------------------------------------------------------------------------------------
+
+void Branch::CopyAllMembers(const Branch & src)
+{
+    fill(m_parents.begin(), m_parents.end(), Branch::NONBRANCH);
+    fill(m_children.begin(), m_children.end(), Branch::NONBRANCH);
+
+    m_ID               = src.m_ID;
+    m_eventTime        = src.m_eventTime;
+    m_partitions       = src.m_partitions;
+    m_updateDL         = src.m_updateDL;
+    m_marked           = src.m_marked;
+    // Here's the deep copy mentioned above.  OK to copy a NULL Range pointer here.
+    m_rangePtr         = src.m_rangePtr->Clone();
+    m_DLcells          = src.m_DLcells;
+    m_movingDLcells    = src.m_movingDLcells;
+    m_isSample         = src.m_isSample;
+    m_wasCoalCalced    = src.m_wasCoalCalced;
+
+} // Branch::CopyAllMembers
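
Together, the constructor, destructor, and CopyAllMembers() above spell out the ownership rule for m_rangePtr: the Branch adopts the raw pointer it is handed, deep-copies it via Clone() when the Branch itself is copied, and deletes it exactly once in ~Branch(). A compact sketch of that idiom with invented names (Span and Holder are not LAMARC classes; unlike Branch, this sketch does not allow a NULL pointer):

    // Owning-raw-pointer idiom: adopt on construction, Clone() on copy, delete on destruction.
    class Span
    {
      public:
        virtual ~Span() {}
        virtual Span * Clone() const { return new Span(*this); }
    };

    class Holder
    {
      public:
        explicit Holder(Span * adopted) : m_span(adopted) {}            // shallow: takes ownership
        Holder(const Holder & src) : m_span(src.m_span->Clone()) {}     // deep copy
        ~Holder() { delete m_span; }
        Holder & operator=(const Holder & src)
        {
            if (this != &src)
            {
                delete m_span;
                m_span = src.m_span->Clone();
            }
            return *this;
        }
      private:
        Span * m_span;                                                  // owned
    };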
+
+//------------------------------------------------------------------------------------
+
+bool Branch::IsAMember(const Force & force, const LongVec1d & membership) const
+{
+    return force.IsAMember(m_partitions, membership);
+} // Branch::IsAMember
+
+//------------------------------------------------------------------------------------
+// Checks if two branches are functionally the same.
+// If you want full equivalency, use operator==().
+
+bool Branch::IsEquivalentTo(const Branch_ptr twin)   const
+{
+    return m_ID == twin->m_ID;
+
+#if 0  // Useful for debugging, perhaps.
+    if (Event() != twin->Event())
+    {
+        // cerr << "Different Events." << endl;
+        return false;
+    }
+
+    if (m_partitions != twin->m_partitions)
+    {
+        // cerr << "Different Partitions." << endl;
+        return false;
+    }
+
+    if (GetRangePtr()->GetLiveSites() != twin->GetRangePtr()->GetLiveSites())
+    {
+        // cerr << "Different Active Sites." << endl;
+        return false;
+    }
+
+    if (m_eventTime != twin->m_eventTime)
+    {
+        // cerr << "Different Event Times." << endl;
+        return false;
+    }
+
+    bool my_sites_all_live(GetRangePtr()->LiveSitesOnly());
+    bool his_sites_all_live(twin->GetRangePtr()->LiveSitesOnly());
+    if (my_sites_all_live && his_sites_all_live) return true;  // Both branches NON-RECOMBINANT.
+    if (my_sites_all_live != his_sites_all_live) return false; // One recombinant, other not.
+    //
+    // From here to end, we require that both branches be recombinant, containing RecRange objects
+    // rather than Range objects.  That is guaranteed, because of LiveSitesOnly() test above.
+    //
+    RecRange * my_recrange_ptr(dynamic_cast<RecRange *>(GetRangePtr()));
+    assert(my_recrange_ptr);
+
+    RecRange * twin_recrange_ptr(dynamic_cast<RecRange *>(twin->GetRangePtr()));
+    assert(twin_recrange_ptr);
+
+    // RecRange::GetRecpoint() returns Littlelink (Biglink midpoint, if Biglinks enabled).
+    if (my_recrange_ptr->GetRecpoint() != twin_recrange_ptr->GetRecpoint())
+    {
+        // cerr << "Different Recombination Point." << endl;
+        return false;
+    }
+
+    return true;
+#endif
+
+} // Branch::IsEquivalentTo
+
+//------------------------------------------------------------------------------------
+
+bool Branch::HasSamePartitionsAs(const Branch_ptr twin)   const
+{
+    if (twin)
+    {
+        if (m_partitions != twin->m_partitions)
+        {
+            return false;
+        }
+    }
+
+    return true;
+
+} //HasSamePartitionsAs
+
+//------------------------------------------------------------------------------------
+
+bool Branch::PartitionsConsistentWith(const Branch_ptr child)   const
+{
+    assert(Event() == btypeRec);
+
+    if (child)
+    {
+        const ForceSummary & fs = registry.GetForceSummary();
+        if (fs.CheckForce(force_DISEASE))
+        {
+            if (GetRangePtr()->AreDiseaseSitesTransmitted())
+            {
+                long int diseaseIndex = fs.GetPartIndex(force_DISEASE);
+                long int diseaseState = child->GetPartition(force_DISEASE);
+                if (diseaseState != m_partitions[diseaseIndex])
+                {
+                    return false;
+                }
+            }
+        }
+    }
+
+    return true;
+
+} // PartitionsConsistentWith
+
+//------------------------------------------------------------------------------------
+
+void Branch::ResetBuffersForNextRearrangement()
+{
+    GetRangePtr()->SetOldInfoToCurrent();
+    GetRangePtr()->ClearNewTargetLinks();
+
+} // ResetBuffersForNextRearrangement
+
+//------------------------------------------------------------------------------------
+
+LongVec1d Branch::GetLocalPartitions() const
+{
+    LongVec1d lppartitions;
+    LongVec1d lpindex(registry.GetForceSummary().GetLocalPartitionIndexes());
+    LongVec1d::iterator lpforce;
+
+    for (lpforce = lpindex.begin(); lpforce != lpindex.end(); ++lpforce)
+    {
+        lppartitions.push_back(m_partitions[*lpforce]);
+    }
+
+    return lppartitions;
+
+} // GetLocalPartitions
+
+//------------------------------------------------------------------------------------
+
+long int Branch::GetID() const
+{
+    return m_ID.ID();
+}
+
+//------------------------------------------------------------------------------------
+
+weakBranch_ptr Branch::GetEquivBranch() const
+{
+    return m_equivBranch;
+} // Branch::GetEquivBranch
+
+//------------------------------------------------------------------------------------
+
+void Branch::SetEquivBranch(Branch_ptr twin)
+{
+    m_equivBranch = twin;
+} // Branch::SetEquivBranch
+
+//------------------------------------------------------------------------------------
+
+long int Branch::GetPartition(force_type force) const
+{
+    return m_partitions[registry.GetForceSummary().GetPartIndex(force)];
+} // Branch::GetPartition
+
+//------------------------------------------------------------------------------------
+
+void Branch::SetPartition(force_type force, long int val)
+{
+    m_partitions[registry.GetForceSummary().GetPartIndex(force)] = val;
+} // Branch::SetPartition
+
+//------------------------------------------------------------------------------------
+
+void Branch::CopyPartitionsFrom(Branch_ptr src)
+{
+    m_partitions = src->m_partitions;
+} // Branch::CopyPartitionsFrom
+
+//------------------------------------------------------------------------------------
+
+void Branch::MarkParentsForDLCalc()
+{
+    // Evil kludge necessary because we force parents/children to always have two spaces even when empty!
+    if (!m_parents[0].expired())
+    {
+        Branch_ptr parent0(m_parents[0]);
+        if (!parent0->m_updateDL)
+        {
+            parent0->SetUpdateDL();
+            parent0->MarkParentsForDLCalc();
+        }
+    }
+
+    if (!m_parents[1].expired())
+    {
+        Branch_ptr parent1(m_parents[1]);
+        if (!parent1->m_updateDL)
+        {
+            parent1->SetUpdateDL();
+            parent1->MarkParentsForDLCalc();
+        }
+    }
+
+} // Branch::MarkParentsForDLCalc
+
+//------------------------------------------------------------------------------------
+
+void Branch::ReplaceChild(Branch_ptr, Branch_ptr newchild)
+{
+    // This version is used by MBranch and DBranch; there are overrides for other branches.
+    newchild->m_parents[0] = shared_from_this();
+    m_children[0]          = newchild;
+
+} // Branch::ReplaceChild
+
+//------------------------------------------------------------------------------------
+
+bool Branch::HasSameActive(const Branch & br)
+{
+    return GetRangePtr()->SameLiveSites(br.GetRangePtr()->GetLiveSites());
+} // Branch::HasSameActive
+
+//------------------------------------------------------------------------------------
+
+const Cell_ptr Branch::GetDLCell(long int loc, long int ind, bool moving) const
+{
+    if (moving)
+    {
+        assert(loc < static_cast<long int>(m_movingDLcells.size()));
+        assert(ind < static_cast<long int>(m_movingDLcells[loc].size()));
+        return m_movingDLcells[loc][ind];
+    }
+    else
+    {
+        assert(loc < static_cast<long int>(m_DLcells.size()));
+        assert(ind < static_cast<long int>(m_DLcells[loc].size()));
+        return m_DLcells[loc][ind];
+    }
+} // Branch::GetDLCell
+
+//------------------------------------------------------------------------------------
+
+Cell_ptr Branch::GetDLCell(long int loc, long int ind, bool moving)
+{
+    if (moving)
+    {
+        assert(loc < static_cast<long int>(m_movingDLcells.size()));
+        assert(ind < static_cast<long int>(m_movingDLcells[loc].size()));
+        return m_movingDLcells[loc][ind];
+    }
+    else
+    {
+        assert(loc < static_cast<long int>(m_DLcells.size()));
+        assert(ind < static_cast<long int>(m_DLcells[loc].size()));
+        return m_DLcells[loc][ind];
+    }
+} // Branch::GetDLCell
+
+//------------------------------------------------------------------------------------
+
+double Branch::HowFarTo(const Branch & br) const
+{
+    return fabs(br.m_eventTime - m_eventTime);
+} // Branch::HowFarTo
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Branch::GetValidChild(Branch_ptr br, long int whichpos)
+{
+#ifdef PRINT_TREE_DETAILS
+    cerr << " In GetValidChild whichpos: " << whichpos << "  br: " << br << endl;
+#endif
+
+    Branch_ptr pChild = br->GetActiveChild(whichpos);
+    if (pChild)
+        br = GetValidChild(pChild, whichpos);
+
+#ifdef PRINT_TREE_DETAILS
+    cerr << " return from GetValidChild br: " << br << endl;
+#endif
+
+    return br;
+
+} // Branch::GetValidChild
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Branch::GetValidPanelChild(Branch_ptr br, long int whichpos)
+{
+#ifdef PRINT_TREE_DETAILS
+    cerr << " In GetValidChild whichpos: " << whichpos << "  br: " << br << endl;
+#endif
+
+    Branch_ptr pChild = br->GetActivePanelChild(whichpos);
+    if (pChild)
+        br = GetValidPanelChild(pChild, whichpos);
+
+#ifdef PRINT_TREE_DETAILS
+    cerr << " return from GetValidChild br: " << br << endl;
+#endif
+
+    return br;
+
+} // Branch::GetValidPanelChild
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Branch::GetValidParent(long int whichpos)
+{
+    // We are looking for an ancestor of our branch at which the site of interest coalesces.
+    Branch_ptr pParent(Parent(0));
+    if (Parent(1))
+    {
+        // This branch has two parents; which one is needed?
+        if (Parent(1)->GetRangePtr()->IsSiteLive(whichpos)) pParent = Parent(1);
+    }
+
+    // If we have reached the bottom of the tree, we give up--there is no parent.
+    if (pParent->Event() == btypeBase) return Branch::NONBRANCH;
+
+    // Otherwise, check if this could be the parent we need.
+    if (pParent->Event() == btypeCoal)
+    {
+        // This could be the coalescence we're looking for, but does our site actually coalesce here?
+        if (pParent->Child(0)->GetRangePtr()->IsSiteLive(whichpos) &&
+            pParent->Child(1)->GetRangePtr()->IsSiteLive(whichpos))
+        {
+            return pParent;
+        }
+    }
+
+    // It's not a coalescence (or the right coalescence), so keep going downward.
+    return pParent->GetValidParent(whichpos);
+
+} // GetValidParent
+
+//------------------------------------------------------------------------------------
+
+bool Branch::DiffersInDLFrom(Branch_ptr branch, long int locus, long int marker) const
+{
+    return m_DLcells[locus].DiffersFrom(branch->m_DLcells[locus], marker);
+} // Branch::DiffersInDLFrom
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool Branch::CheckInvariant() const
+{
+    // Check correct time relationships among parents and offspring.
+    long int index;
+    for (index = 0; index < NELEM; ++index)
+    {
+        // If the child exists it must be earlier.
+        if (Child(index))
+        {
+            if (Child(index)->m_eventTime > m_eventTime)
+            {
+                return false;
+            }
+        }
+
+        // If the parent exists it must be later.
+        if (Parent(index))
+        {
+            if (Parent(index)->m_eventTime < m_eventTime)
+            {
+                return false;
+            }
+        }
+    }
+
+    return true;
+
+} // CheckInvariant
+
+//------------------------------------------------------------------------------------
+
+bool Branch::operator==(const Branch & src) const
+{
+    if (Event() != src.Event()) return false;
+    if (m_partitions != src.m_partitions) return false;
+    if (m_eventTime != src.m_eventTime) return false;
+
+    return true;
+
+} // operator==
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+string Branch::DLCheck(const Branch & other) const
+{
+    unsigned long int locus;
+    string problems;
+
+    if (m_DLcells.size() != other.m_DLcells.size())
+    {
+        return string("   The DLCells are different sizes--error\n");
+    }
+
+    for (locus = 0; locus < m_DLcells.size(); ++locus)
+    {
+        unsigned long int ncells = m_DLcells[locus].size();
+        if (ncells != other.m_DLcells[locus].size())
+        {
+            return string("   Bad branch comparison--error\n");
+        }
+
+        unsigned long int ind;
+        for (ind = 0; ind < ncells; ++ind)
+        {
+            long int badmarker = m_DLcells[locus][ind]->DiffersFrom(other.m_DLcells[locus][ind]);
+            if (badmarker == FLAGLONG) continue;
+
+            problems += "   Branch " + ToString(m_ID.ID());
+            problems += " differs from other branch " + ToString(other.m_ID.ID());
+            problems += " at marker " + ToString(badmarker);
+            problems += "\n";
+
+            return problems;
+        }
+    }
+
+    return problems;
+
+} // Branch::DLCheck
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Branch::PrintInfo() const
+{
+    cerr << "Branch::PrintInfo ..." << endl << endl;
+    cerr << "ID: " << m_ID.ID() << " (" << ToString(Event()) << ")" << endl;
+    if (Event() == btypeCoal)
+    {
+        cerr << "Parent: " << Parent(0)->GetID() << endl;
+        cerr << "Child(0): " << Child(0)->GetID() << " Child(1): " << Child(1)->GetID() << endl;
+    }
+    else if (Event() == btypeRec)
+    {
+        cerr << "Parent(0): " << Parent(0)->GetID() << " Parent(1): " << Parent(1)->GetID() << endl;
+        cerr << "Child: " << Child(0)->GetID() << endl;
+    }
+    else if (Event() == btypeBase)
+    {
+        cerr << "Child: " << Child(0)->GetID() << endl;
+    }
+    else
+    {
+        cerr << "Parent: " << Parent(0)->GetID() << endl;
+        cerr << "Child: " << Child(0)->GetID() << endl;
+    }
+
+    cerr << "Partitions:  ";
+    for (unsigned long int i = 0; i < m_partitions.size(); ++i)
+    {
+        cerr << m_partitions[i] << " ";
+    }
+
+    cerr << endl << "Event time:  " << m_eventTime << endl << endl;
+    GetRangePtr()->PrintInfo();
+
+} // Branch::PrintInfo
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+vector<Branch_ptr> Branch::GetBranchChildren()
+{
+    vector<Branch_ptr> newkids;
+    vector<weakBranch_ptr>::iterator kid;
+
+    for (kid = m_children.begin(); kid != m_children.end(); ++kid)
+    {
+        newkids.push_back(kid->lock());
+    }
+
+    return newkids;
+} // Branch::GetBranchChildren
+
+//------------------------------------------------------------------------------------
+// This function is non-const because we need boost::shared_from_this()!
+
+bool Branch::ConnectedTo(const Branch_ptr family)
+{
+    if (Child(0) && Child(0) == family) return true;
+    if (Child(1) && Child(1) == family) return true;
+    if (Parent(0) && Parent(0) == family) return true;
+    if (Parent(1) && Parent(1) == family) return true;
+
+    cerr << endl << "Branch " << family->m_ID.ID();
+    cerr << " is not connected to branch " << m_ID.ID() << " ";
+    cerr << "even though it thinks it is via it's ";
+
+    Branch_ptr me(shared_from_this());
+    if (family->Child(0) && family->Child(0) == me) cerr << "child0";
+    if (family->Child(1) && family->Child(1) == me) cerr << "child1";
+    if (family->Parent(0) && family->Parent(0) == me) cerr << "parent0";
+    if (family->Parent(1) && family->Parent(1) == me) cerr << "parent1";
+
+    cerr << endl;
+    return false;
+} // Branch::ConnectedTo
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool Branch::IsSameExceptForTimes(const Branch_ptr other) const
+{
+    if (Event() != other->Event()) return false;
+    if (m_partitions != other->m_partitions) return false;
+
+    return true;
+} // Branch::IsSameExceptForTimes
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool Branch::RevalidateRange(FC_Status &) const
+{
+    bool retval = GetRangePtr()->SameAsChild(Child(0)->GetRangePtr());
+    if (!retval)
+    {
+        cerr << "Failed in Branch::RevalidateRange; ID: " << m_ID.ID() << endl;
+    }
+
+    return retval;
+} // Branch::RevalidateRange
+
+//------------------------------------------------------------------------------------
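+// Sketch of the GraphML fragment emitted per branch (values are illustrative; the exact
+// text for times, partitions, and site ranges comes from ToString()/ToGraphMLString()):
+//
+//   <node id="12">
+//     <data key="node_type">Coal</data>
+//     <data key="node_time">0.0123</data>
+//   </node>
+//   <edge source="7" target="12">
+//     <data key="partitions">...</data>
+//     <data key="live_sites">...</data>
+//     <data key="transmitted_sites">...</data>   <!-- recombination branches only -->
+//   </edge>
+//
+// The <node> element is written only by the branch whose ID equals its canonical ID, so
+// the two RBranches of a recombination contribute a single node; tip nodes also get a
+// node_label entry via AddNodeInfo().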
+
+void Branch::AddGraphML(TiXmlElement * elem) const
+{
+    ////////////////////////////////////////////////////////////
+    // node itself
+    long int myID = GetID();
+    long int canonicalID = GetCanonicalID();
+    if (canonicalID == myID)
+    {
+        TiXmlElement * node = new TiXmlElement("node");
+        node->SetAttribute("id", ToString(canonicalID));
+        elem->LinkEndChild(node);
+
+        // type of node
+        TiXmlElement * typeInfo = new TiXmlElement("data");
+        node->LinkEndChild(typeInfo);
+        typeInfo->SetAttribute("key", "node_type");
+        TiXmlText * nTypeText = new TiXmlText(GetGraphMLNodeType());
+        typeInfo->LinkEndChild(nTypeText);
+
+        // time of node
+        TiXmlElement * timeInfo = new TiXmlElement("data");
+        node->LinkEndChild(timeInfo);
+        timeInfo->SetAttribute("key", "node_time");
+        TiXmlText * nTimeText = new TiXmlText(ToString(m_eventTime));
+        timeInfo->LinkEndChild(nTimeText);
+
+        AddNodeInfo(node);
+    }
+
+    ////////////////////////////////////////////////////////////
+    // branch(es) themselves
+    TiXmlElement * branchElem = new TiXmlElement("edge");
+    elem->LinkEndChild(branchElem);
+    branchElem->SetAttribute("source", ToString(GetCanonicalParentID()));
+    branchElem->SetAttribute("target", ToString(canonicalID));
+
+    // partition forces
+    TiXmlElement * partElem = new TiXmlElement("data");
+    branchElem->LinkEndChild(partElem);
+    partElem->SetAttribute("key","partitions");
+
+    string partstr ="";
+    if (registry.GetForceSummary().CheckForce(force_DISEASE))
+    {
+        partstr += ToString(force_DISEASE);
+        partstr += ":";
+        partstr += registry.GetDataPack().GetPartitionName(force_DISEASE, GetPartition(force_DISEASE));
+    }
+
+    if (registry.GetForceSummary().CheckForce(force_DIVMIG))
+    {
+        if (partstr.length() > 0)
+        {
+            partstr += ",";
+        }
+        partstr += ToString(force_DIVMIG);
+        partstr += ":";
+        partstr += registry.GetDataPack().GetPartitionName(force_DIVMIG, GetPartition(force_DIVMIG));
+    }
+
+    if (registry.GetForceSummary().CheckForce(force_MIG))
+    {
+        if (partstr.length() > 0)
+        {
+            partstr += ",";
+        }
+        partstr += ToString(force_MIG);
+        partstr += ":";
+        partstr += registry.GetDataPack().GetPartitionName(force_MIG, GetPartition(force_MIG));
+    }
+
+    TiXmlText * pTypeText = new TiXmlText(partstr);
+    partElem->LinkEndChild(pTypeText);
+
+    // range info
+    TiXmlElement * lrangeElem = new TiXmlElement("data");
+    branchElem->LinkEndChild(lrangeElem);
+    lrangeElem->SetAttribute("key","live_sites");
+    rangeset lset = GetRangePtr()->GetLiveSites();
+    TiXmlText * lTypeText = new TiXmlText(ToGraphMLString(lset));
+    lrangeElem->LinkEndChild(lTypeText);
+
+    if (Event() == btypeRec)
+    {
+        TiXmlElement * trangeElem = new TiXmlElement("data");
+        branchElem->LinkEndChild(trangeElem);
+        trangeElem->SetAttribute("key","transmitted_sites");
+        rangeset tset = GetRangePtr()->GetTransmittedSites();
+        TiXmlText * tTypeText = new TiXmlText(ToGraphMLString(tset));
+        trangeElem->LinkEndChild(tTypeText);
+    }
+
+    //AddBranchInfo(branchElem);
+} // Branch::AddGraphML
+
+//------------------------------------------------------------------------------------
+
+string Branch::GetParentIDs() const
+{
+    string outstr = "";
+    long int pCount = NParents();
+    for (long int count = 0 ; count < pCount; count++)
+    {
+        const Branch_ptr p = Parent(count);
+        if (p != Branch::NONBRANCH)
+        {
+            outstr = outstr + " " + ToString(p->GetID());
+        }
+    }
+    return outstr;
+} // Branch::GetParentIDs
+
+//------------------------------------------------------------------------------------
+
+string Branch::GetChildIDs() const
+{
+    string outstr = "";
+    long int pCount = NChildren();
+    for (long int count = 0 ; count < pCount; count++)
+    {
+        const Branch_ptr p = Child(count);
+        if (p != Branch::NONBRANCH)
+        {
+            outstr = outstr + " " + ToString(p->GetID());
+        }
+    }
+    return outstr;
+} // Branch::GetChildIDs
+
+//------------------------------------------------------------------------------------
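+// "Canonical" IDs exist for GraphML export: the two RBranch objects that represent one
+// recombination event share the smaller of their two branch IDs, so the pair appears as a
+// single node in the exported graph.  A non-recombinant branch is its own canonical ID.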
+
+long int Branch::GetCanonicalID() const
+{
+    long int myID = GetID();
+    const Branch_ptr p = GetRecPartner();
+    if (p != Branch::NONBRANCH)
+    {
+        long int otherID = p->GetID();
+        if (otherID < myID)
+        {
+            return otherID;
+        }
+    }
+    return myID;
+} // Branch::GetCanonicalID
+
+//------------------------------------------------------------------------------------
+
+long int Branch::GetCanonicalParentID() const
+{
+    long int downID = -1;
+    long int pCount = NParents();
+    long int trueCount = 0;
+    for (long int count = 0 ; count < pCount; count++)
+    {
+        const Branch_ptr p = Parent(count);
+        if (p != Branch::NONBRANCH)
+        {
+            long int canonicalParent = p->GetCanonicalID();
+            if (trueCount == 0)
+            {
+                downID = canonicalParent;
+            }
+            else
+            {
+                if (downID != canonicalParent)
+                {
+                    assert(false);
+                    return -2;
+                }
+            }
+            ++trueCount;        // Count parents actually seen, so the consistency check above can fire.
+        }
+    }
+    return downID;
+} // Branch::GetCanonicalParentID
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// This constructor should only be called by the TimeList constructor.
+// Passes a newly-allocated Range (by pointer) to the Branch constructor, which does a shallow copy.
+// Actually, the Range object in a BBranch should never be accessed.  BBranch::CreateRange() returns
+// a NULL pointer, which the accessor function GetRangePtr() tests (ASSERTs if it is NULL).
+
+BBranch::BBranch()
+    : Branch(CreateRange())
+{
+    // Deliberately blank.
+} // BBranch constructor
+
+//------------------------------------------------------------------------------------
+// Returns a pointer to a newly-allocated Branch, which contains a deep-copied Range
+// (thanks to the deep-copying copy constructor this class inherits from Branch).
+
+Branch_ptr BBranch::Clone() const
+{
+    return Branch_ptr(new BBranch(*this));
+} // BBranch::Clone
+
+//------------------------------------------------------------------------------------
+
+void BBranch::ScoreEvent(TreeSummary &, BranchBuffer &) const
+{
+    assert(false);                      // I don't think this should ever be called.
+} // BBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Third arg is a reference for consistency with the corresponding functions in other
+// classes, which return a recombination weight therein.
+
+void BBranch::ScoreEvent(TreeSummary &, BranchBuffer &, Linkweight &) const
+{
+    assert(false);                      // I don't think this should ever be called.
+} // BBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool BBranch::CheckInvariant() const
+{
+    // Base branches have no parents...
+    long int index;
+    for (index = 0; index < NELEM; ++index)
+    {
+        if (Parent(index)) return false;
+    }
+
+    // ...and one child
+    if (!Child(0)) return false;
+    if (Child(1)) return false;
+
+    if (!Branch::CheckInvariant()) return false;
+
+    return true;
+
+} // CheckInvariant
+
+//------------------------------------------------------------------------------------
+
+string BBranch::GetGraphMLNodeType() const
+{
+    assert(false);
+    string outstr = "Error BBranch";
+    return outstr;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// Creates a new TBranch object containing a newly-allocated Range object (held by pointer),
+// which was allocated by CreateRange and shallow-copied by the Branch constructor.
+
+TBranch::TBranch(const TipData & tipdata, long int nsites, const rangeset & diseasesites)
+    : Branch(CreateRange(nsites, diseasesites))
+{
+    m_partitions = tipdata.GetBranchPartitions();
+    m_label = tipdata.label;
+
+    // This is used for SNP panel corrections.
+    if (tipdata.m_source == dsource_panel)
+    {
+        m_isSample = 0;
+    }
+
+#if 0
+    cerr << "Tip: " << tipdata.label;
+    if (tipdata.m_source == dsource_panel)
+        cerr << " is a panel member ";
+    else
+        cerr << " is a study member ";
+    cerr << endl;
+#endif
+
+    // WARNING:  must be changed for sequential-sampling case
+    m_eventTime = 0.0;
+
+} // TBranch constructor
+
+//------------------------------------------------------------------------------------
+// Returns a pointer to a newly-heap-allocated Range or RecRange object.
+// Please see the note about the different meaning of "GetAllLinks()" in the Biglink versus Littlelink implementations.
+// The note ("NOTA BENE") is in file "range.h", in the definition of the RecRange class.
+
+Range * TBranch::CreateRange(long int nsites, const rangeset & diseasesites) const
+{
+    if (registry.GetForceSummary().CheckForce(force_REC)) // This force includes recombination.
+    {
+        // Called only when underlying data structures (trees, branches, ranges)
+        // are potentially recombinant (ie, contain RecRanges, not Ranges).
+        linkrangeset alllinks(RecRange::GetAllLinks());   // All Links were and are targetable.
+        rangeset allsites(MakeRangeset(0, nsites));       // All Sites are both live and transmitted.
+        return new RecRange(nsites, diseasesites, allsites, allsites, alllinks, alllinks, allsites, allsites);
+    }
+    else
+    {
+        return new Range(nsites);
+    }
+
+} // TBranch::CreateRange
+
+//------------------------------------------------------------------------------------
+// Returns a pointer to a newly-allocated Branch, which contains a deep-copied Range
+// (thanks to the deep-copying copy constructor this class inherits from Branch).
+
+Branch_ptr TBranch::Clone() const
+{
+    return Branch_ptr(new TBranch(*this));
+} // TBranch::Clone
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool TBranch::CheckInvariant() const
+{
+    // Tip branches have no children...
+    long int index;
+    for (index = 0; index < NELEM; ++index)
+    {
+        if (Child(index)) return false;
+    }
+
+    // ...and at least one parent.
+    if (!Parent(0)) return false;
+
+    if (!Branch::CheckInvariant()) return false;
+
+    return true;
+
+} // CheckInvariant
+
+//------------------------------------------------------------------------------------
+
+bool TBranch::operator==(const Branch & src) const
+{
+    return ((Branch::operator==(src)) && (m_label == dynamic_cast<const TBranch &>(src).m_label));
+} // operator==
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool TBranch::IsSameExceptForTimes(const Branch_ptr src) const
+{
+    return ((Branch::IsSameExceptForTimes(src)) && (m_label == boost::dynamic_pointer_cast<const TBranch>(src)->m_label));
+} // IsSameExceptForTimes
+
+//------------------------------------------------------------------------------------
+
+void TBranch::ScoreEvent(TreeSummary &, BranchBuffer &) const
+{
+    assert(false);                      // I don't think this should ever be called.
+} // TBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Third arg is a reference for consistency with the corresponding functions in other
+// classes, which return a recombination weight therein.
+
+void TBranch::ScoreEvent(TreeSummary &, BranchBuffer &, Linkweight &) const
+{
+    assert(false);                      // I don't think this should ever be called.
+} // TBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool TBranch::RevalidateRange(FC_Status &) const
+{
+    if (GetRangePtr()->LiveSitesOnly()) return true;
+    //
+    // From here to end, we require that this function is called only on recombinant branches,
+    // which contain RecRange objects rather than Range objects.  That is guaranteed, because
+    // LiveSitesOnly() returns TRUE for non-recombinant branches.  The issue is not what kind
+    // of event we are processing; it is what kind of Range object the branch contains.
+    //
+    long int nsites(GetRangePtr()->GetNumRegionSites());
+    rangeset diseasesites(GetRangePtr()->GetDiseaseSites());
+
+    // MDEBUG: the following is neither good form nor exception-safe; consider fixing.
+    Range * newrangeptr(CreateRange(nsites, diseasesites));
+    bool matches = (*newrangeptr == *GetRangePtr());
+    delete newrangeptr;
+    if (!matches)
+    {
+        cerr << "RevalidateRange failed in TBranch: " << m_ID.ID() << endl;
+    }
+    return matches;
+
+} // TBranch::RevalidateRange
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TBranch::PrintInfo() const
+{
+    cerr << "TBranch::PrintInfo ..." << endl << endl;
+    cerr << "Event type:  " << ToString(Event()) << endl;
+    cerr << "Label: " << m_label << endl;
+    cerr << "ID: " << m_ID.ID() << endl;
+    cerr << "Partitions:  ";
+
+    for (unsigned long int i = 0; i < m_partitions.size(); i++)
+    {
+        cerr << m_partitions[i] << " ";
+    }
+
+    cerr << endl << "Event time:  " << m_eventTime << endl << endl;
+    GetRangePtr()->PrintInfo();
+
+} // TBranch::PrintInfo
+
+//------------------------------------------------------------------------------------
+
+string TBranch::GetGraphMLNodeType() const
+{
+    return "Tip";
+}
+
+//------------------------------------------------------------------------------------
+
+void TBranch::AddNodeInfo(TiXmlElement * nodeElem) const
+{
+    // label of node
+    TiXmlElement * labelInfo = new TiXmlElement("data");
+    nodeElem->LinkEndChild(labelInfo);
+    labelInfo->SetAttribute("key", "node_label");
+    TiXmlText * labelText = new TiXmlText(m_label);
+    labelInfo->LinkEndChild(labelText);
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// Creates a new CBranch object containing a newly-allocated Range object (held by pointer),
+// which was allocated by CreateRange and shallow-copied by the Branch constructor.
+
+CBranch::CBranch(const Range * const child1rangeptr, const Range * const child2rangeptr,
+                 bool newbranchisinactive, const rangeset & fcsites)
+    : Branch(CreateRange(child1rangeptr, child2rangeptr, newbranchisinactive, fcsites))
+{
+    // Deliberately blank.
+} // CBranch constructor
+
+//------------------------------------------------------------------------------------
+// Returns a pointer to a newly-heap-allocated Range or RecRange object.
+
+Range * CBranch::CreateRange(const Range * const child1rangeptr, const Range * const child2rangeptr,
+                             bool newbranchisinactive, const rangeset & fcsites) const
+{
+    // These we can grab from either child as they should be identical in these fields.
+    long int nsites(child1rangeptr->GetNumRegionSites());
+
+    if (registry.GetForceSummary().CheckForce(force_REC)) // This force includes recombination.
+    {
+        // Called only when underlying data structures (trees, branches, ranges)
+        // are potentially recombinant (ie, contain RecRanges, not Ranges).
+        rangeset diseasesites(child1rangeptr->GetDiseaseSites());
+        rangeset allsites(MakeRangeset(0, nsites)); // All Sites are transmitted.
+        rangeset livesites(Union(child1rangeptr->GetLiveSites(), child2rangeptr->GetLiveSites()));
+        rangeset targetsites = Union(diseasesites, RemoveRangeFromRange(fcsites, livesites));
+        linkrangeset curtargetlinks(RecRange::LinksSpanningSites(targetsites));
+        linkrangeset oldtargetlinks;
+        rangeset oldtargetsites, oldlivesites;
+
+        if (newbranchisinactive)        // Copy the inactive child oldtargetlinks and oldtargetsites.
+        {
+            oldtargetlinks = child1rangeptr->GetOldTargetLinks();
+            oldtargetsites = child1rangeptr->GetOldTargetSites();
+            oldlivesites = child1rangeptr->GetOldLiveSites();
+        }
+        else
+        {
+            oldtargetlinks = curtargetlinks;
+            oldtargetsites = targetsites;
+            oldlivesites = livesites;
+        }
+
+        return new RecRange(nsites, diseasesites, allsites, livesites, curtargetlinks,
+                            oldtargetlinks, oldtargetsites, oldlivesites);
+    }
+    else
+    {
+        return new Range(nsites);
+    }
+
+} // CBranch::CreateRange
+
+//------------------------------------------------------------------------------------
+// Returns a pointer to a newly-allocated Branch, which contains a deep-copied Range
+// (thanks to the deep-copying copy constructor this class inherits from Branch).
+
+Branch_ptr CBranch::Clone() const
+{
+    return Branch_ptr(new CBranch(*this));
+} // CBranch::Clone
+
+//------------------------------------------------------------------------------------
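+// First call: marks the branch, drops the given child (the surviving child moves to
+// slot 0), and returns false.  A second call (m_marked already set) returns true,
+// signalling that this now one-legged branch can itself be removed.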
+
+bool CBranch::CanRemove(Branch_ptr checkchild)
+{
+    if (m_marked) return true;
+
+    m_marked = true;
+    if (Child(0) == checkchild) SetChild(0, Child(1));
+    SetChild(1, Branch::NONBRANCH);
+
+    return false;
+
+} // CBranch::CanRemove
+
+//------------------------------------------------------------------------------------
+
+void CBranch::UpdateBranchRange(const rangeset & fcsites, bool dofc)
+{
+    if (m_marked)
+    {
+        GetRangePtr()->UpdateOneLeggedCRange(Child(0)->GetRangePtr());
+    }
+    else
+    {
+        GetRangePtr()->UpdateCRange(Child(0)->GetRangePtr(), Child(1)->GetRangePtr(), fcsites, dofc);
+    }
+
+} // CBranch::UpdateBranchRange
+
+//------------------------------------------------------------------------------------
+
+void CBranch::UpdateRootBranchRange(const rangeset & fcsites, bool dofc)
+{
+    if (m_marked)
+    {
+        GetRangePtr()->UpdateOneLeggedRootRange(Child(0)->GetRangePtr());
+    }
+    else
+    {
+        GetRangePtr()->UpdateRootRange(Child(0)->GetRangePtr(), Child(1)->GetRangePtr(), fcsites, dofc);
+    }
+
+} // CBranch::UpdateRootBranchRange
+
+//------------------------------------------------------------------------------------
+
+void CBranch::ReplaceChild(Branch_ptr pOldChild, Branch_ptr pNewChild)
+{
+    if (Child(0) == pOldChild)
+        SetChild(0, pNewChild);
+    else
+        SetChild(1, pNewChild);
+
+    pNewChild->SetParent(0, shared_from_this());
+
+} // CBranch::ReplaceChild
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr CBranch::OtherChild(Branch_ptr badchild)
+{
+    if (Child(0) == badchild) return Child(1);
+    return Child(0);
+
+} // CBranch::OtherChild
+
+//------------------------------------------------------------------------------------
+// Both children must be active and included in the DL calculation for this to return true.
+
+bool CBranch::CanCalcDL(long int site) const
+{
+    if ((Child(0)->GetRangePtr()->IsSiteLive(site)) && (Child(1)->GetRangePtr()->IsSiteLive(site)))
+    {
+        return true;
+    }
+    else
+    {
+        return false;
+    }
+}
+
+//------------------------------------------------------------------------------------
+// If both children are active, return Branch::NONBRANCH to signal a stop; otherwise return the active child.
+
+const Branch_ptr CBranch::GetActiveChild(long int site)  const
+{
+    if (Child(0)->GetRangePtr()->IsSiteLive(site))
+    {
+        if (Child(1)->GetRangePtr()->IsSiteLive(site)) return Branch::NONBRANCH;
+        return Child(0);
+    }
+    else
+    {
+        return Child(1);
+    }
+
+} // CBranch::GetActiveChild
+
+//------------------------------------------------------------------------------------
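+// Records this coalescence interval in the TreeSummary, then adjusts the per-partition
+// branch counts: the two children are taken out of the active set and this branch is added.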
+
+void CBranch::ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const
+{
+    long int i;
+    Summary * csum = summary.GetSummary(force_COAL);
+
+    // Forces have become inconsistent!
+    assert(dynamic_cast<CoalSummary *>(csum));
+
+    long int myxpart = registry.GetDataPack().GetCrossPartitionIndex(m_partitions);
+    LongVec1d emptyvec;
+
+    // Score the event.
+    csum->AddInterval(m_eventTime, ks.GetBranchParts(), ks.GetBranchXParts(),
+                      FLAGFAULTY, myxpart, FLAGLONG, FLAGLONG, emptyvec, force_COAL);
+
+    // Adjust the branch counts.
+    for (i = 0; i < NELEM; ++i)
+        ks.UpdateBranchCounts(Child(i)->m_partitions, false);
+
+    ks.UpdateBranchCounts(m_partitions);
+
+} // CBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Third arg is a reference because this function returns a recombination weight therein.
+// This version should be called on recombinant branches only (not necessarily that we are
+// processing a recombination event, just that the branch contains a RecRange).
+
+void CBranch::ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const
+{
+    // "recweight" is a Link recombination weight (Biglink weight or number of Littlelinks).
+
+    long int i;
+    Summary * csum = summary.GetSummary(force_COAL);
+
+    // Forces have become inconsistent!
+    assert(dynamic_cast<CoalSummary *>(csum));
+
+    long int myxpart = registry.GetDataPack().GetCrossPartitionIndex(m_partitions);
+    LongVec1d emptyvec;
+
+    // Score the event.
+    csum->AddInterval(m_eventTime, ks.GetBranchParts(), ks.GetBranchXParts(),
+                      recweight, myxpart, FLAGLONG, FLAGLONG, emptyvec, force_COAL);
+
+    // Adjust the branch and Link weight.
+    // From here to end, we require that this function is called only on recombinant branches,
+    // which contain RecRange objects rather than Range objects.
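+    // Net effect on recweight: each child's current target-Link weight is subtracted in
+    // this loop, and this branch's own weight is added back at the end of the function.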
+    for (i = 0; i < NELEM; ++i)
+    {
+        ks.UpdateBranchCounts(Child(i)->m_partitions, false);
+        RecRange * child_recrange_ptr(dynamic_cast<RecRange *>(Child(i)->GetRangePtr()));
+        assert(child_recrange_ptr);     // FIRE if fcn called on non-recombinant object.
+        recweight -= child_recrange_ptr->GetCurTargetLinkweight();
+    }
+
+    ks.UpdateBranchCounts(m_partitions);
+
+    RecRange * my_recrange_ptr(dynamic_cast<RecRange *>(GetRangePtr()));
+    assert(my_recrange_ptr);
+    recweight += my_recrange_ptr->GetCurTargetLinkweight();
+
+} // CBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool CBranch::CheckInvariant() const
+{
+    // Coalescent branches have two children...
+    if (!Child(0)) return false;
+    if (!Child(1)) return false;
+
+    //...and at least one parent.
+    if (!Parent(0)) return false;
+
+    if (!Branch::CheckInvariant()) return false;
+
+    return true;
+
+} // CheckInvariant
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool CBranch::RevalidateRange(FC_Status & fcstatus) const
+{
+    rangeset livesites;
+    rangeset live0;
+    rangeset live1;
+
+    // Are we the same moiety and population as our children?
+    if (Child(0)->m_partitions != m_partitions)
+    {
+        cerr << "Branch changed color in CBranch: " << m_ID.ID() << endl;
+        return false;
+    }
+
+    if (Child(1) && Child(1)->m_partitions != m_partitions)
+    {
+        cerr << "Branch changed color in CBranch: " << m_ID.ID() << endl;
+        return false;
+    }
+
+    if (!Child(1))                      // We're in a one-legger.
+    {
+        const Range * const childrangeptr(Child(0)->GetRangePtr());
+        livesites = childrangeptr->GetLiveSites();
+        if (!(GetRangePtr()->SameLiveSites(livesites)))
+        {
+            cerr << "RevalidateRange found one legger in CBranch: " << m_ID.ID() << endl;
+            return false;
+        }
+        // We don't need to update fc stuff for a one-legger.
+    }
+    else
+    {
+        // We're in a normal coalescence.
+        const Range * const child0rangeptr(Child(0)->GetRangePtr());
+        const Range * const child1rangeptr(Child(1)->GetRangePtr());
+
+        live0 = child0rangeptr->GetLiveSites();
+        live1 = child1rangeptr->GetLiveSites();
+        livesites = Union(child0rangeptr->GetLiveSites(), child1rangeptr->GetLiveSites());
+        if (!(GetRangePtr()->SameLiveSites(livesites)))
+        {
+            cerr << "RevalidateRange found !SameLiveSites in CBranch: " << m_ID.ID() << endl;
+            return false;
+        }
+
+#if FINAL_COALESCENCE_ON
+        rangeset fcsites(Intersection(child0rangeptr->GetLiveSites(), child1rangeptr->GetLiveSites()));
+        fcstatus.Decrement_FC_Counts(fcsites);
+#endif
+    }
+
+    if (GetRangePtr()->LiveSitesOnly()) return true; // We're done checking.
+    //
+    // From here to end, we require that this function is called only on recombinant branches,
+    // which contain RecRange objects rather than Range objects.  That is guaranteed, because
+    // LiveSitesOnly() returns TRUE for non-recombinant branches.  The issue is not what kind
+    // of event we are processing; it is what kind of Range object the branch contains.
+    //
+    RecRange * my_recrange_ptr(dynamic_cast<RecRange *>(GetRangePtr()));
+    assert(my_recrange_ptr);
+
+#if FINAL_COALESCENCE_ON
+    linkrangeset curtargetlinks(RecRange::LinksSpanningSites
+                                (Union(my_recrange_ptr->GetDiseaseSites(),
+                                       RemoveRangeFromRange(fcstatus.Coalesced_Sites(), livesites))));
+#else
+    linkrangeset curtargetlinks(RecRange::LinksSpanningSites(Union(my_recrange_ptr->GetDiseaseSites(), livesites)));
+#endif
+
+    // Comparing Biglinks if in Biglink mode, Littlelinks if in Littlelink mode.
+    if (my_recrange_ptr->DifferentCurTargetLinks(curtargetlinks))
+    {
+        cerr << "CBranch::RevalidateRange (invalid CurTargetLinks) call to RecRange::PrintInfo()" << endl << endl;
+        my_recrange_ptr->PrintInfo();
+        //
+        cerr << "Disease Sites: " << ToString(my_recrange_ptr->GetDiseaseSites()) << endl;
+        cerr << "Live Sites: " << ToString(livesites) << endl;
+        cerr << "Live Sites Child 0: " << ToString(live0) << endl;
+        cerr << "Live Sites Child 1: " << ToString(live1) << endl;
+        cerr << "FCstatus: " << ToString(fcstatus.Coalesced_Sites()) << endl << endl;
+        //
+        cerr << "Target Links [my_recrange_ptr->GetCurTargetLinks()]:  ";
+        RecRange::PrintLinks(my_recrange_ptr->GetCurTargetLinks());
+        //
+        cerr << endl << "Target Links [curtargetlinks]:  ";
+        RecRange::PrintLinks(curtargetlinks);
+        cerr << endl;
+        //
+        return false;
+    }
+
+    // Is this a rec-only branch?  Otherwise, we may not want GetOldTargetLinks() to ASSERT.
+    if (my_recrange_ptr->DifferentNewTargetLinks(RemoveRangeFromRange(my_recrange_ptr->GetOldTargetLinks(), curtargetlinks)))
+    {
+        cerr << "RevalidateRange found DifferentNewTargetLinks(newtarget) in CBranch: " << m_ID.ID() << endl;
+        cerr << "CBranch::RevalidateRange (invalid NewTargetLinks) call to RecRange::PrintInfo()" << endl << endl;
+        //
+        my_recrange_ptr->PrintInfo();
+        //
+        cerr << "Target Links [RemoveRangeFromRange(my_recrange_ptr->GetOldTargetLinks(), curtargetlinks)]:  ";
+        RecRange::PrintLinks(RemoveRangeFromRange(my_recrange_ptr->GetOldTargetLinks(), curtargetlinks));
+        //
+        cerr << endl << "Target Links [curtargetlinks]:  ";
+        RecRange::PrintLinks(curtargetlinks);
+        cerr << endl;
+        //
+        return false;
+    }
+
+    return true;
+
+} // CBranch::RevalidateRange
+
+//------------------------------------------------------------------------------------
+
+string CBranch::GetGraphMLNodeType() const
+{
+    return "Coal";
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// Creates a new PartitionBranch object containing a newly-allocated Range object
+// (held by pointer), which was allocated by CreateRange and shallow-copied by the Branch constructor.
+
+PartitionBranch::PartitionBranch(const Range * const childrangeptr)
+    : Branch(CreateRange(childrangeptr))
+{
+    // Deliberately blank.
+} // PartitionBranch constructor
+
+//------------------------------------------------------------------------------------
+// Returns a pointer to a newly-heap-allocated Range or RecRange object.
+
+Range * PartitionBranch::CreateRange(const Range * const childrangeptr) const
+{
+    // NOT copy-constructed from the child, because a PartitionBranch always transmits all sites, and the child might not.
+    long int nsites(childrangeptr->GetNumRegionSites());
+
+    if (registry.GetForceSummary().CheckForce(force_REC)) // This force includes recombination.
+    {
+        // Called only when underlying data structures (trees, branches, ranges)
+        // are potentially recombinant (ie, contain RecRanges, not Ranges).
+        rangeset diseasesites(childrangeptr->GetDiseaseSites());
+        rangeset allsites(MakeRangeset(0, nsites)); // All Sites are transmitted.
+        rangeset livesites(childrangeptr->GetLiveSites());
+        linkrangeset curtargetlinks(childrangeptr->GetCurTargetLinks());
+        linkrangeset oldtargetlinks(childrangeptr->GetOldTargetLinks());
+        rangeset oldtargetsites(childrangeptr->GetOldTargetSites());
+        rangeset oldlivesites(childrangeptr->GetOldLiveSites());
+
+        return new RecRange(nsites, diseasesites, allsites, livesites, curtargetlinks,
+                            oldtargetlinks, oldtargetsites, oldlivesites);
+    }
+    else
+    {
+        return new Range(nsites);
+    }
+
+} // PartitionBranch::CreateRange
+
+//------------------------------------------------------------------------------------
+
+void PartitionBranch::UpdateBranchRange(const rangeset &, bool)
+{
+    GetRangePtr()->UpdateMRange(Child(0)->GetRangePtr());
+} // PartitionBranch::UpdateBranchRange
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool PartitionBranch::CheckInvariant() const
+{
+    // Partition branches have one child...
+    if (!Child(0)) return false;
+    if (Child(1)) return false;
+
+    //...and at least one parent.
+    if (!Parent(0)) return false;
+
+    if (!Branch::CheckInvariant()) return false;
+
+    return true;
+
+} // CheckInvariant
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// Creates a new MigLikeBranch object containing a newly-allocated Range object (held by pointer),
+// which was allocated by CreateRange in the PartitionBranch constructor.
+
+MigLikeBranch::MigLikeBranch(const Range * const protorangeptr)
+    : PartitionBranch(protorangeptr)
+{
+    // Deliberately blank.
+} // MigLikeBranch constructor
+
+//------------------------------------------------------------------------------------
+// Creates a new MigLikeBranch object containing a newly-allocated Range object (held by pointer), which
+// was allocated by CreateRange and shallow-copied by the Branch constructor that PartitionBranch inherits.
+
+MigLikeBranch::MigLikeBranch(const MigLikeBranch & src)
+    : PartitionBranch(src)
+{
+    // Deliberately blank.
+} // MigLikeBranch copy constructor
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// Creates a new MBranch object containing a newly-allocated Range object (held by pointer), which
+// was allocated by CreateRange and shallow-copied by the Branch constructor that MigLikeBranch inherits.
+
+MBranch::MBranch(const Range * const protorangeptr)
+    : MigLikeBranch(protorangeptr)
+{
+    // Deliberately blank.
+} // MBranch constructor
+
+//------------------------------------------------------------------------------------
+// Creates a new MBranch object containing a newly-allocated Range object (held by pointer), which
+// was allocated by CreateRange and shallow-copied by the Branch constructor that MigLikeBranch inherits.
+
+MBranch::MBranch(const MBranch & src)
+    : MigLikeBranch(src)
+{
+    // Deliberately blank.
+} // MBranch copy constructor
+
+//------------------------------------------------------------------------------------
+// Creates a new MBranch object containing a newly-allocated Range object (held by pointer), which
+// was deep-copied by the PartitionBranch copy constructor that MigLikeBranch inherits.
+
+Branch_ptr MBranch::Clone() const
+{
+    return Branch_ptr(new MBranch(*this));
+} // MBranch::Clone
+
+//------------------------------------------------------------------------------------
+
+void MBranch::ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const
+{
+    Summary * msum = summary.GetSummary(force_MIG);
+
+    assert(dynamic_cast<MigSummary *>(msum));
+
+    long int mypop = GetPartition(force_MIG);
+    long int chpop = Child(0)->GetPartition(force_MIG);
+    LongVec1d emptyvec;
+
+    // Score the event.
+    msum->AddInterval(m_eventTime, ks.GetBranchParts(), ks.GetBranchXParts(),
+                      FLAGFAULTY, mypop, chpop, FLAGLONG, emptyvec, force_MIG);
+
+    // Adjust the branch counts.
+    assert(!Child(1));                  // too many children??
+    ks.UpdateBranchCounts(m_partitions);
+    ks.UpdateBranchCounts(Child(0)->m_partitions, false);
+
+    // We do not adjust active sites; they cannot possibly change.
+
+} // MBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Third arg is a reference for consistency with the corresponding functions in other
+// classes, which return a recombination weight therein.
+
+void MBranch::ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const
+{
+    Summary * msum = summary.GetSummary(force_MIG);
+
+    // Forces have become inconsistent!
+    assert(dynamic_cast<MigSummary *>(msum));
+
+    long int mypop = GetPartition(force_MIG);
+    long int chpop = Child(0)->GetPartition(force_MIG);
+    LongVec1d emptyvec;
+
+    // Score the event.
+    msum->AddInterval(m_eventTime, ks.GetBranchParts(), ks.GetBranchXParts(),
+                      recweight, mypop, chpop, FLAGLONG, emptyvec, force_MIG);
+
+    // Adjust the branch counts.
+    assert(!Child(1));                  // too many children??
+    ks.UpdateBranchCounts(m_partitions);
+    ks.UpdateBranchCounts(Child(0)->m_partitions, false);
+
+    // We do not adjust active sites; they cannot possibly change.
+
+} // MBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+
+string MBranch::GetGraphMLNodeType() const
+{
+    string outstr = "Mig";
+    return outstr;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// Divergence migration branches.  Practically identical to MBranch.
+
+DivMigBranch::DivMigBranch(const Range * const protorangeptr)
+    : MigLikeBranch(protorangeptr)
+{
+    // Deliberately blank.
+} // DivMigBranch constructor
+
+//------------------------------------------------------------------------------------
+
+DivMigBranch::DivMigBranch(const DivMigBranch & src)
+    : MigLikeBranch(src)
+{
+    // Deliberately blank.
+} // DivMigBranch copy constructor
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr DivMigBranch::Clone() const
+{
+    return Branch_ptr(new DivMigBranch(*this));
+} // DivMigBranch::Clone
+
+//------------------------------------------------------------------------------------
+
+void DivMigBranch::ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const
+{
+    Summary * msum = summary.GetSummary(force_DIVMIG);
+
+    assert(dynamic_cast<DivMigSummary *>(msum));
+
+    long int mypop = GetPartition(force_DIVMIG);
+    long int chpop = Child(0)->GetPartition(force_DIVMIG);
+    LongVec1d emptyvec;
+
+    // Score the event.
+    msum->AddInterval(m_eventTime, ks.GetBranchParts(), ks.GetBranchXParts(),
+                      FLAGFAULTY, mypop, chpop, FLAGLONG, emptyvec, force_DIVMIG);
+
+    // Adjust the branch counts.
+    assert(!Child(1));                  // too many children??
+    ks.UpdateBranchCounts(m_partitions);
+    ks.UpdateBranchCounts(Child(0)->m_partitions, false);
+
+    // We do not adjust active sites; they cannot possibly change.
+
+} // DivMigBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Third arg is a reference for consistency with the corresponding functions in other
+// classes, which return a recombination weight therein.
+
+void DivMigBranch::ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const
+{
+    Summary * msum = summary.GetSummary(force_DIVMIG);
+
+    // Forces have become inconsistent!
+    assert(dynamic_cast<DivMigSummary *>(msum));
+
+    long int mypop = GetPartition(force_DIVMIG);
+    long int chpop = Child(0)->GetPartition(force_DIVMIG);
+    LongVec1d emptyvec;
+
+    // Score the event.
+    msum->AddInterval(m_eventTime, ks.GetBranchParts(), ks.GetBranchXParts(),
+                      recweight, mypop, chpop, FLAGLONG, emptyvec, force_DIVMIG);
+
+    // Adjust the branch counts.
+    assert(!Child(1));                  // too many children??
+    ks.UpdateBranchCounts(m_partitions);
+    ks.UpdateBranchCounts(Child(0)->m_partitions, false);
+
+    // We do not adjust active sites; they cannot possibly change.
+
+} // DivMigBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+
+string DivMigBranch::GetGraphMLNodeType() const
+{
+    string outstr = "Epoch";
+    return outstr;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// Disease mutation branches.  NOT related to Divergence!
+
+DBranch::DBranch(const Range * const protorangeptr)
+    : PartitionBranch(protorangeptr)
+{
+    // Deliberately blank.
+} // DBranch constructor
+
+//------------------------------------------------------------------------------------
+
+DBranch::DBranch(const DBranch & src)
+    : PartitionBranch(src)
+{
+    // Deliberately blank.
+} // DBranch copy constructor
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr DBranch::Clone() const
+{
+    return Branch_ptr(new DBranch(*this));
+} // DBranch::Clone
+
+//------------------------------------------------------------------------------------
+
+void DBranch::ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const
+{
+    Summary * dissum = summary.GetSummary(force_DISEASE);
+
+    // Forces have become inconsistent!
+    assert(dynamic_cast<DiseaseSummary *>(dissum));
+
+    long int mydis = GetPartition(force_DISEASE);
+    long int chdis = Child(0)->GetPartition(force_DISEASE);
+    LongVec1d emptyvec;
+
+    // Score the event.
+    dissum->AddInterval(m_eventTime, ks.GetBranchParts(), ks.GetBranchXParts(),
+                        FLAGFAULTY, mydis, chdis, FLAGLONG, emptyvec, force_DISEASE);
+
+    // Adjust the branch counts.
+    assert(!Child(1));                  // too many children??
+    ks.UpdateBranchCounts(m_partitions);
+    ks.UpdateBranchCounts(Child(0)->m_partitions, false);
+
+    // We do not adjust active sites; they cannot possibly change.
+
+} // DBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Third arg is a reference for consistency with the corresponding functions in other
+// classes, which return a recombination weight therein.
+
+void DBranch::ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const
+{
+    Summary * dissum = summary.GetSummary(force_DISEASE);
+
+    // Forces have become inconsistent!
+    assert(dynamic_cast<DiseaseSummary *>(dissum));
+
+    long int mydis = GetPartition(force_DISEASE);
+    long int chdis = Child(0)->GetPartition(force_DISEASE);
+    LongVec1d emptyvec;
+
+    // Score the event.
+    dissum->AddInterval(m_eventTime, ks.GetBranchParts(), ks.GetBranchXParts(),
+                        recweight, mydis, chdis, FLAGLONG, emptyvec, force_DISEASE);
+
+    // Adjust the branch counts.
+    assert(!Child(1));                  // too many children??
+    ks.UpdateBranchCounts(m_partitions);
+    ks.UpdateBranchCounts(Child(0)->m_partitions, false);
+
+    // We do not adjust active sites; they cannot possibly change.
+
+} // DBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+
+string DBranch::GetGraphMLNodeType() const
+{
+    string outstr = "Disease";
+    return outstr;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Branch_ptr EBranch::Clone() const
+{
+    return Branch_ptr(new EBranch(*this));
+} // EBranch::Clone
+
+//------------------------------------------------------------------------------------
+
+// NB: The code that calls this routine calls it ONLY on the
+// FIRST of a group of EBranches representing the same epoch
+// time.  It therefore does not record its child population, as
+// that would vary among the various EBranches.  This facilitates
+// handling in TreeSum->Summarize().
+void EBranch::ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const
+{
+    Summary * esum = summary.GetSummary(force_DIVERGENCE);
+
+    // Forces have become inconsistent!
+    assert(dynamic_cast<EpochSummary *>(esum));
+
+    // NB While this is not a partition force, it cooperates with DIVMIG here.
+    long int mypop = GetPartition(force_DIVMIG);
+    long int chpop = Child(0)->GetPartition(force_DIVMIG);
+    LongVec1d emptyvec;
+
+    // Score the event.
+    esum->AddInterval(m_eventTime, ks.GetBranchParts(), ks.GetBranchXParts(),
+                      FLAGFAULTY, mypop, FLAGLONG, FLAGLONG, emptyvec, force_DIVERGENCE);
+
+    // Adjust the branch counts.
+    assert(!Child(1));                  // too many children??
+    ks.UpdateBranchCounts(m_partitions);
+    ks.UpdateBranchCounts(Child(0)->m_partitions, false);
+
+    // We do not adjust active sites; they cannot possibly change.
+
+} // EBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Third arg is a reference for consistency with the corresponding functions in other
+// classes, which return a recombination weight therein.
+
+// NB: The code that calls this routine calls it ONLY on the
+// FIRST of a group of EBranches representing the same epoch
+// time.  It therefore does not record its child population, as
+// that would vary among the various EBranches.  This facilitates
+// handling in TreeSum->Summarize().
+void EBranch::ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const
+{
+    Summary * esum = summary.GetSummary(force_DIVERGENCE);
+
+    // Forces have become inconsistent!
+    assert(dynamic_cast<EpochSummary *>(esum));
+
+    // NB While this is not a partition force, it cooperates with DIVMIG here.
+    long int mypop = GetPartition(force_DIVMIG);
+    long int chpop = Child(0)->GetPartition(force_DIVMIG);
+    LongVec1d emptyvec;
+
+    // Score the event.
+    esum->AddInterval(m_eventTime, ks.GetBranchParts(), ks.GetBranchXParts(),
+                      recweight, mypop, FLAGLONG, FLAGLONG, emptyvec, force_DIVERGENCE);
+
+    // Adjust the branch counts.
+    assert(!Child(1));                  // too many children??
+    ks.UpdateBranchCounts(m_partitions);
+    ks.UpdateBranchCounts(Child(0)->m_partitions, false);
+
+    // We do not adjust active sites; they cannot possibly change.
+
+} // EBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+
+string EBranch::GetGraphMLNodeType() const
+{
+    string outstr = "Epoch";
+    return outstr;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// Creates a new RBranch object containing a newly-allocated Range object (held by pointer),
+// which was allocated by CreateRange and shallow-copied by the Branch constructor.
+
+RBranch::RBranch(const Range * const childrangeptr, bool newbranchisinactive,
+                 const rangeset & transmittedsites, const rangeset & fcsites)
+    : Branch(CreateRange(childrangeptr, newbranchisinactive, transmittedsites, fcsites))
+{
+    // Deliberately blank.
+} // RBranch constructor
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr RBranch::Clone() const
+{
+    return Branch_ptr(new RBranch(*this));
+} // RBranch::Clone
+
+//------------------------------------------------------------------------------------
+// Returns a pointer to a newly-heap-allocated RecRange object.
+// ASSERTs if attempt is made to create a Range object.
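+// The new branch's live sites are the child's live sites restricted to the sites this
+// parent actually transmits (the Intersection with "transmittedsites").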
+
+Range * RBranch::CreateRange(const Range * const childrangeptr, bool newbranchisinactive,
+                             const rangeset & transmittedsites, const rangeset & fcsites) const
+{
+    if (registry.GetForceSummary().CheckForce(force_REC)) // This force includes recombination.
+    {
+        // Called only when underlying data structures (trees, branches, ranges)
+        // are potentially recombinant (ie, contain RecRanges, not Ranges).
+        // The following are identical with our child.
+        long int nsites(childrangeptr->GetNumRegionSites());
+        rangeset diseasesites(childrangeptr->GetDiseaseSites());
+        rangeset livesites(Intersection(childrangeptr->GetLiveSites(), transmittedsites));
+
+        // MNEWCODE I am still not positive the following lines are correct!
+        // But I can't find the case that would prove them wrong.
+        rangeset targetsites = Union(diseasesites, RemoveRangeFromRange(fcsites, livesites));
+        linkrangeset curtargetlinks(RecRange::LinksSpanningSites(targetsites));
+        linkrangeset oldtargetlinks;
+        rangeset oldtargetsites, oldlivesites;
+
+        if (newbranchisinactive)        // Copy the inactive child oldtargetlinks and oldtargetsites.
+        {
+            oldtargetlinks = childrangeptr->GetOldTargetLinks();
+            oldtargetsites = childrangeptr->GetOldTargetSites();
+            oldlivesites = childrangeptr->GetOldLiveSites();
+        }
+        else
+        {
+            oldtargetlinks = curtargetlinks;
+            oldtargetsites = targetsites;
+            oldlivesites = livesites;
+        }
+
+        return new RecRange(nsites, diseasesites, transmittedsites, livesites, curtargetlinks,
+                            oldtargetlinks, oldtargetsites, oldlivesites);
+    }
+    else
+    {
+        assert(false);                  // An RBranch in a non-recombinant run??
+        return NULL;                    // To silence compiler warning.
+    }
+
+} // RBranch::CreateRange
+
+//------------------------------------------------------------------------------------
+
+void RBranch::CopyPartitionsFrom(Branch_ptr src)
+{
+    m_partitions = src->m_partitions;
+} // RBranch::CopyPartitionsFrom
+
+//------------------------------------------------------------------------------------
+// Called only when underlying data structures (trees, branches, ranges)
+// are potentially recombinant (ie, contain RecRanges, not Ranges).
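+// Starts from the source branch's partitions; then, for each partition force with an entry
+// in "fparts", asks that force to choose this branch's partition on its side of the
+// recombination (using the recombination point and the "islow" flag).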
+
+void RBranch::RecCopyPartitionsFrom(Branch_ptr src, FPartMap fparts, bool islow)
+{
+    m_partitions = src->m_partitions; //fparts may override.
+
+    ForceVec forces(registry.GetForceSummary().GetPartitionForces());
+
+    for (unsigned long int force = 0; force < forces.size(); force++)
+    {
+        FPartMapiter thisforcepart = fparts.find(forces[force]->GetTag());
+        if (thisforcepart != fparts.end())
+        {
+            long int chosenpart = thisforcepart->second;
+            // RBranch::GetRecpoint() returns Littlelink (Biglink midpoint, if Biglinks enabled).
+            long int partition = dynamic_cast<PartitionForce *>(forces[force])->
+                ChoosePartition(src->m_partitions[force], chosenpart, islow, GetRecpoint());
+            m_partitions[force] = partition;
+        }
+    }
+
+} // RBranch::RecCopyPartitionsFrom
+
+//------------------------------------------------------------------------------------
+
+void RBranch::UpdateBranchRange(const rangeset & fcsites, bool dofc)
+{
+    GetRangePtr()->UpdateRRange(Child(0)->GetRangePtr(), fcsites, dofc);
+} // RBranch::UpdateBranchRange
+
+//------------------------------------------------------------------------------------
+
+void RBranch::ReplaceChild(Branch_ptr oldchild, Branch_ptr newchild)
+{
+    SetChild(0, newchild);
+
+    Branch_ptr myself = shared_from_this();
+
+    if (oldchild->Parent(0) == myself)
+    {
+        newchild->SetParent(0, myself);
+    }
+    else
+    {
+        newchild->SetParent(1, myself);
+    }
+
+} // RBranch::ReplaceChild
+
+//------------------------------------------------------------------------------------
+
+bool RBranch::IsRemovableRecombinationLeg(const rangeset & fcsites) const
+{
+    return (!GetRangePtr()->AreChildTargetSitesTransmitted(Child(0)->GetRangePtr(), fcsites));
+} // RBranch::IsRemovableRecombinationLeg
+
+//------------------------------------------------------------------------------------
+
+bool RBranch::operator==(const Branch & other) const
+{
+    return (Branch::operator==(other) && (*GetRangePtr() == *(other.GetRangePtr())));
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool RBranch::IsSameExceptForTimes(const Branch_ptr other) const
+{
+    return (Branch::IsSameExceptForTimes(other) && (*GetRangePtr() == *(other->GetRangePtr())));
+}
+
+//------------------------------------------------------------------------------------
+// Recombinant branches only (returns Branch::NONBRANCH in non-recombinant case).
+
+Branch_ptr RBranch::GetRecPartner() const
+{
+    Branch_ptr partner((Child(0)->Parent(0) == shared_from_this()) ?
+                       Child(0)->Parent(1) : Child(0)->Parent(0));
+
+    return partner;
+
+} // GetRecPartner
+
+//------------------------------------------------------------------------------------
+
+void RBranch::ScoreEvent(TreeSummary &, BranchBuffer &) const
+{
+    assert(false);                      // This should never be called.
+} // RBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Third arg is a reference because this function returns a recombination weight therein.
+// Called only when underlying data structures (trees, branches, ranges)
+// are potentially recombinant (ie, contain RecRanges, not Ranges).
+
+void RBranch::ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const
+{
+    // "recweight" is a Link recombination weight (Biglink weight or number of Littlelinks).
+
+    // One interval with a recombination at the top involves *two* RBranches.  Only one
+    // will be summarized into the TreeSummary.  Thus, this peculiar-looking code only calls
+    // AddInterval() if the RBranch in question is its Child's *first* Parent, though
+    // it does clean-up bookkeeping in any case.
+    if (Child(0)->Parent(0) == shared_from_this())
+    {
+        RecSummary * rsum = dynamic_cast<RecSummary *>(summary.GetSummary(force_REC));
+        RecTreeSummary & rtreesum = dynamic_cast<RecTreeSummary &>(summary);
+
+        // Forces have become inconsistent!?
+        assert(rsum);
+
+        // Obtain the partnerpicks information for the branch which had it, which is not necessarily
+        // this one.  MDEBUG:  This code assumes there is at most one local partition force and one
+        // non-local partition force.  It will need to be more complex when multiples of either are allowed.
+
+        // I assume that either both parents match the child, and it doesn't matter which we use,
+        // or one parent does not match the child, in which case that parent must be used.
+
+        LongVec1d childpartitions = Child(0)->m_partitions;
+        LongVec1d otherpartitions = Child(0)->Parent(1)->m_partitions;  // other branch of recombination
+        LongVec1d partnerpicks;
+        LongVec1d lpindex(registry.GetForceSummary().GetLocalPartitionIndexes());
+        LongVec1d::iterator lpforce;
+        LongVec1d pickedmembership;
+
+        // MDEBUG vestigial code assuming multiple lpforces are possible here.
+        // This is not carried through correctly elsewhere in function!
+        for (lpforce = lpindex.begin(); lpforce != lpindex.end(); ++lpforce)
+        {
+            if (m_partitions[*lpforce] == childpartitions[*lpforce])
+            {
+                pickedmembership = otherpartitions;
+            }
+            else
+            {
+                pickedmembership = m_partitions;
+            }
+            partnerpicks.push_back(pickedmembership[*lpforce]);
+            rsum->AddToRecombinationCounts(pickedmembership);
+        }
+
+        // Score the event.  RBranch::GetRecpoint() returns Littlelink (Biglink midpoint, if Biglinks enabled).
+        rsum->AddInterval(m_eventTime, ks.GetBranchParts(), ks.GetBranchXParts(), recweight,
+                          FLAGLONG, FLAGLONG, GetRecpoint(), partnerpicks, force_REC);
+
+        // Update list of recombinations by partition.
+        rtreesum.AddRecToRecsByPart(pickedmembership, rsum->GetLastAdded());
+
+        // Adjust the branch counts and Link weight for removal of the child.
+        ks.UpdateBranchCounts(Child(0)->m_partitions, false);
+        RecRange * child_recrange_ptr(dynamic_cast<RecRange *>(Child(0)->GetRangePtr()));
+        assert(child_recrange_ptr);     // FIRE if fcn called on non-recombinant object.
+        recweight -= child_recrange_ptr->GetCurTargetLinkweight();
+    }
+
+    // Adjust the branch and Link weight for addition of this branch.
+    ks.UpdateBranchCounts(m_partitions);
+    RecRange * my_recrange_ptr(dynamic_cast<RecRange *>(GetRangePtr()));
+    assert(my_recrange_ptr);
+    recweight += my_recrange_ptr->GetCurTargetLinkweight();
+
+} // RBranch::ScoreEvent
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool RBranch::CheckInvariant() const
+{
+    // Recombinant branches have one child...
+    if (!Child(0))
+    {
+        return false;
+    }
+
+    if (Child(1))
+    {
+        return false;
+    }
+
+    //...and at least one parent.
+    if (!Parent(0))
+    {
+        return false;
+    }
+
+    if (!Branch::CheckInvariant()) return false;
+    return true;
+
+} // CheckInvariant
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+// Call only when underlying data structures (trees, branches, ranges)
+// are potentially recombinant (ie, contain RecRanges, not Ranges).
+
+bool RBranch::RevalidateRange(FC_Status & fcstatus) const
+{
+    RecRange * my_recrange_ptr(dynamic_cast<RecRange *>(GetRangePtr()));
+    assert(my_recrange_ptr);            // FIRE if fcn called on non-recombinant object.
+
+#if 0 // Include this branch if RevalidateRange is only being called after Prune().
+    long int recpoint(my_recrange_ptr->GetRecpoint()); // Littlelink (Biglink midpoint, if Biglinks enabled).
+
+    RecRange * child_recrange_ptr(dynamic_cast<RecRange *>(Child(0)->GetRangePtr()));
+    assert(child_recrange_ptr);
+
+    if (!(child_recrange_ptr->IsLinkTargetable(recpoint)))
+    {
+        cerr << "Non-targetable recombination site found in RBranch: " << m_ID.ID() << endl;
+        return false;
+    }
+#endif
+
+    rangeset livesites(Intersection(Child(0)->GetRangePtr()->GetLiveSites(), GetRangePtr()->GetTransmittedSites()));
+    if (!(GetRangePtr()->SameLiveSites(livesites)))
+    {
+        cerr << "RevalidateRange found !SameLiveSites in RBranch: " << m_ID.ID() << endl;
+        return false;
+    }
+
+    // Did we change color inappropriately?
+    if (GetRangePtr()->AreDiseaseSitesTransmitted())
+    {
+        if (Child(0)->m_partitions != m_partitions)
+        {
+            cerr << "Branch changed color in RBranch: " << m_ID.ID() << endl;
+            return false;
+        }
+    }
+
+    if (GetRangePtr()->LiveSitesOnly()) return true; // We're done.
+    //
+    // From here to end, we require that this function is called only on recombinant branches,
+    // which contain RecRange objects rather than Range objects.  That is guaranteed, because
+    // LiveSitesOnly() returns TRUE for non-recombinant branches.  The issue is not what kind
+    // of event we are processing; it is what kind of Range object the branch contains.
+    //
+#if FINAL_COALESCENCE_ON
+    linkrangeset curtargetlinks(RecRange::LinksSpanningSites
+                                (Union(my_recrange_ptr->GetDiseaseSites(),
+                                       RemoveRangeFromRange(fcstatus.Coalesced_Sites(), livesites))));
+#else
+    linkrangeset curtargetlinks(RecRange::LinksSpanningSites(Union(my_recrange_ptr->GetDiseaseSites(), livesites)));
+#endif
+
+    if (my_recrange_ptr->DifferentCurTargetLinks(curtargetlinks))
+    {
+        cerr << "RevalidateRange found DifferentTargetLinks in RBranch: " << m_ID.ID() << endl;
+        //
+        cerr << "RBranch::RevalidateRange (invalid CurTargetLinks) call to RecRange::PrintInfo()" << endl << endl;
+        my_recrange_ptr->PrintInfo();
+        //
+        cerr << "Target Links [my_recrange_ptr->GetCurTargetLinks()]:  ";
+        RecRange::PrintLinks(my_recrange_ptr->GetCurTargetLinks());
+        //
+        cerr << endl << "Target Links [curtargetlinks]:  ";
+        RecRange::PrintLinks(curtargetlinks);
+        cerr << endl;
+        //
+        return false;
+    }
+
+    if (my_recrange_ptr->DifferentNewTargetLinks(RemoveRangeFromRange(my_recrange_ptr->GetOldTargetLinks(), curtargetlinks)))
+    {
+        cerr << "RevalidateRange found DifferentNewTargetLinks in RBranch: " << m_ID.ID() << endl;
+        //
+        cerr << "RBranch::RevalidateRange (invalid NewTargetLinks) call to RecRange::PrintInfo()" << endl << endl;
+        my_recrange_ptr->PrintInfo();
+        //
+        cerr << "Target Links [RemoveRangeFromRange(my_recrange_ptr->GetOldTargetLinks(), curtargetlinks)]:  ";
+        RecRange::PrintLinks(RemoveRangeFromRange(my_recrange_ptr->GetOldTargetLinks(), curtargetlinks));
+        //
+        cerr << endl << "Target Links [curtargetlinks]:  ";
+        RecRange::PrintLinks(curtargetlinks);
+        cerr << endl;
+        //
+        return false;
+    }
+
+    return true;
+
+} // RBranch::RevalidateRange
+
+//------------------------------------------------------------------------------------
+
+string RBranch::GetGraphMLNodeType() const
+{
+    return "Rec";
+}
+
+//------------------------------------------------------------------------------------
+// Called only when underlying data structures (trees, branches, ranges)
+// are potentially recombinant (ie, contain RecRanges, not Ranges).
+
+void RBranch::AddNodeInfo(TiXmlElement * nodeElem) const
+{
+    // Testing recombination location information.
+    TiXmlElement * recElem = new TiXmlElement("data");
+    nodeElem->LinkEndChild(recElem);
+    recElem->SetAttribute("key", "rec_location");
+
+    RecRange * my_recrange_ptr(dynamic_cast<RecRange *>(GetRangePtr()));
+    assert(my_recrange_ptr);                            // FIRE if fcn called on non-recombinant object.
+    long int recpoint = my_recrange_ptr->GetRecpoint(); // Littlelink (Biglink midpoint, if Biglinks enabled).
+
+    recpoint += 2; // increment to make consistent with GraphML conventions
+    TiXmlText * rlTypeText = new TiXmlText(ToString(recpoint));
+    recElem->LinkEndChild(rlTypeText);
+}
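+
+// Illustrative sketch, not part of the original source: for a recombination
+// point of 40, the element built above would serialize roughly as
+//     <data key="rec_location">42</data>
+// i.e., the stored value is GetRecpoint() plus the offset applied just above.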
+
+//____________________________________________________________________________________
diff --git a/src/tree/branch.h b/src/tree/branch.h
new file mode 100644
index 0000000..259af40
--- /dev/null
+++ b/src/tree/branch.h
@@ -0,0 +1,682 @@
+// $Id: branch.h,v 1.69 2012/06/30 01:32:42 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*******************************************************************
+
+ Class Branch represents a branch in the tree, and is polymorphic on the type of event at its top
+ (a coalescence, migration, etc.).  It contains auxiliary objects (a Range * and a DLCell) to manage
+ likelihood and site information.
+
+ A Branch's parents and children are represented by vectors of Branch_ptr.
+ There may be 0, 1 or 2 parents and 0, 1 or 2 children; other values are illegal.
+
+ As Branches are polymorphic, they are only put in containers as boost::shared_ptrs.
+ To copy a Branch use the Clone() function, which will create a new Branch of appropriate subtype.
+
+ Branches are either Cuttable (can be cut during rearrangement) or not, and signal this by returning
+ their "count of cuttable branches" which is currently either 0 or 1.  Partition and stick branches
+ are currently non-cuttable.
+
+ Written by Jim Sloan, heavily revised by Jon Yamato
+ -- dlcell turned into a container Mary 2002/05/28
+ -- derivation hierarchy reworked, TreeBranch class inserted
+    Branch class narrowed Jon 2003/02/24
+ -- Adding semi-unique ID numbers for each branch (the "same" branch
+    in two different trees will share the same ID number).  First
+    implementation via reference counted pointer objects.  Jon 2007/01/09
+
+********************************************************************/
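+
+/*
+ Illustrative sketch (not from the original source) of typical Branch_ptr use,
+ assuming "br" is a Branch_ptr taken from an existing tree:
+
+     Branch_ptr copy = br->Clone();                 // polymorphic copy of the right subtype
+     if (br->Event() == btypeRec)                   // RTTI via Event()
+     {
+         Branch_ptr partner = br->GetRecPartner();  // the other leg of that recombination
+     }
+     Branch_ptr kid = br->Child(0);                 // children/parents are held as weak
+     Branch_ptr mom = br->Parent(0);                // pointers and locked on access
+*/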
+
+#ifndef BRANCH_H
+#define BRANCH_H
+
+#include <cassert>
+#include <cmath>
+#include <deque>
+#include <functional>
+#include <string>
+#include <vector>
+
+#include "vectorx.h"
+#include "constants.h"
+#include "defaults.h"
+#include "range.h"                      // For Link-related typedefs and constants.
+#include "dlcell.h"
+#include "locuscell.h"
+#include "shared_ptr.hpp"               // for Branch_ptr (boost::shared_ptr)
+#include "enable_shared_from_this.hpp"  // for shared_ptr support
+#include "branchtag.h"
+#include "rangex.h"                     // for class Range (and subclass) factory duties
+#include "fc_status.h"                  // for RevalidateRange debug function
+
+//------------------------------------------------------------------------------------
+
+class TreeSummary;
+class TipData;
+class BranchBuffer;
+class Force;
+class Branch;
+class TBranch;
+class TiXmlElement;
+
+enum branch_type {btypeBase, btypeTip, btypeCoal, btypeMig, btypeDivMig, btypeDisease, btypeRec, btypeEpoch};
+enum branch_group {bgroupTip, bgroupBody};
+
+std::string ToString(branch_type btype);
+
+typedef boost::shared_ptr<TBranch> TBranch_ptr;
+
+//------------------------------------------------------------------------------------
+
+class Branch : public boost::enable_shared_from_this<Branch>
+{
+  private:
+    Range * m_rangePtr;                 // Accessed via GetRangePtr(), which tests for NULLness (in DEBUG mode).
+
+    Branch();                               // Default ctor is undefined.
+    Branch & operator=(const Branch & src); // Assignment operator is undefined.
+    vector<weakBranch_ptr> m_parents;
+    vector<weakBranch_ptr> m_children;
+
+  protected:
+    bool m_updateDL;
+    BranchTag m_ID;
+    weakBranch_ptr m_equivBranch;
+
+    // We own what these point to.
+    vector<LocusCell> m_DLcells;
+    vector<LocusCell> m_movingDLcells;
+
+    virtual void CopyAllMembers(const Branch & src);
+    LongVec1d GetLocalPartitions() const; // Used by ScoreEvent.
+
+    // Each derived branch must implement some form of Range factory function that will be called
+    // in that branch's ctor to create the new Range for that branch.  We did not implement a pure
+    // virtual base function because of the different signature needs of each of the derived
+    // branch's Range.  The current factory is named CreateRange(), which returns a Range *,
+    // a pointer to a newly-heap-allocated Range.
+    //
+    // NB: the C++ standard does not currently (2010/03/31) allow you to call polymorphically
+    // from within a ctor of base.  You will get the base instantiation of the function; if one
+    // does not exist, the compiler will/should fail.
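+    //
+    // Generic C++ illustration of that rule (not LAMARC code): given
+    //
+    //     struct Base    { Base() { Setup(); }  virtual void Setup() {} };
+    //     struct Derived : public Base { virtual void Setup() {} };
+    //
+    // constructing a Derived runs Base::Setup(), because the Derived part of the
+    // object does not yet exist while the Base constructor body is executing.
+    // That is why each derived branch calls its own CreateRange() from its own ctor.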
+
+  public:
+    LongVec1d m_partitions;
+    double m_eventTime;
+    bool m_marked;
+
+    //************Panel Correction control parameters
+    // this exists so we don't have to reach all the way back into the xml
+    // 0 if a Panel member
+    // 1 if a Sample member
+    // 2 if a coalescence has only sample tips above it
+    int m_isSample;
+    bool m_wasCoalCalced; // coalescence DLCell has been calculated once
+    // used to prevent recalculation of type 2 coalescences
+    //************Panel Correction control parameters
+
+    static Branch_ptr NONBRANCH;
+
+    Branch(Range * newrangeptr);
+    Branch(const Branch & src);
+
+    // Destructor.  Defined to deallocate the Range object in every Branch object.
+    // The destructors for all the derived classes are default dtors which do nothing.
+    // Since this base class dtor gets called after the dtor for each derived class,
+    // this dtor will be the only one which deletes the Range object once and only once.
+    virtual ~Branch();
+
+    // Accessor for Range pointer also tests (in DEBUG mode) for NULLness.
+    // BBranch objects have a NULL value for "m_rangePtr", but it should never be dereferenced.
+    Range * GetRangePtr() const { assert(m_rangePtr) ; return m_rangePtr; };
+
+    // RTTI
+    virtual branch_type Event()                 const = 0;
+    virtual branch_group BranchGroup()          const = 0;
+
+    virtual Branch_ptr Clone() const { return Branch::NONBRANCH; };
+
+    // Convenience getters and setters.
+    long int GetID() const;
+    weakBranch_ptr GetEquivBranch() const;
+    void     SetEquivBranch(Branch_ptr twin);
+
+    long int GetPartition(force_type) const;
+    void     SetPartition(force_type force, long int val);
+    virtual void    CopyPartitionsFrom(Branch_ptr src);
+
+    Branch_ptr    Child(long int which) { return m_children[which].lock(); };
+    Branch_ptr    Parent(long int which) { return m_parents[which].lock(); };
+    const Branch_ptr Child(long int which) const { return m_children[which].lock(); };
+    const Branch_ptr Parent(long int which) const { return m_parents[which].lock(); };
+    void     SetChild(long int which, Branch_ptr val) { m_children[which] = val; };
+    void     SetParent(long int which, Branch_ptr val) { m_parents[which] = val; };
+    long int NParents() const { return m_parents.size(); };
+    long int NChildren() const { return m_children.size(); };
+
+    // Arrangement helpers.
+    bool    IsAMember(const Force & force, const LongVec1d & membership) const;
+    virtual long int Cuttable()                const { return 0; };
+    virtual bool     CanRemove(Branch_ptr)           { return true; };
+    virtual long int CountDown()               const { return 0; };
+    virtual void     UpdateBranchRange(const rangeset & fcsites, bool dofc) {};
+    virtual void     UpdateRootBranchRange(const rangeset & fcsites, bool dofc) {};
+    virtual bool     IsEquivalentTo(const Branch_ptr)   const; // Virtual, but there is only 1 definition.
+    virtual bool     HasSamePartitionsAs(const Branch_ptr) const;
+    virtual bool     PartitionsConsistentWith(const Branch_ptr) const;
+    long int         Nsites() const { return GetRangePtr()->GetNumRegionSites(); };
+
+    rangeset         GetLiveSites() const { return GetRangePtr()->GetLiveSites(); };
+
+    virtual bool     IsRemovableRecombinationLeg(const rangeset &) const { return false; };
+
+    virtual Branch_ptr GetRecPartner() const { return Branch::NONBRANCH; };
+    void ResetBuffersForNextRearrangement();
+    void ResetOldTargetSites(const rangeset & fcsites) { GetRangePtr()->ResetOldTargetSites(fcsites); };
+
+    // The following routine is a no-op except in a branch where updateDL is
+    // allowed to be true, in which case it will be overridden.
+    virtual void    SetUpdateDL()                   {};
+    void    ClearUpdateDL()                 { m_updateDL = false; };
+    bool    GetUpdateDL()             const { return m_updateDL; };
+    void    MarkParentsForDLCalc();
+    virtual void    ReplaceChild(Branch_ptr oldchild, Branch_ptr newchild);
+    virtual bool    HasSameActive(const Branch & br);
+    const Cell_ptr GetDLCell(long int loc, long int ind, bool moving) const;
+    Cell_ptr GetDLCell(long int loc, long int ind, bool moving);
+
+    void     SetDLCells(const std::vector<LocusCell> & src) { m_DLcells = src; };
+    long int GetNcells(long int locus)  const { return m_DLcells[locus].size(); };
+    void     SetMovingDLCells(const std::vector<LocusCell> & src) { m_movingDLcells = src; };
+
+    double GetTime() { return m_eventTime; };
+
+    // Subtree maker helper.
+    // Value returned is a Littlelink (well, code for a non-existent one here).
+    virtual long int GetRecpoint() const { return FLAGLONG; };
+
+    // Likelihood calculation helpers.
+    virtual double HowFarTo(const Branch & br) const;
+    virtual bool   CanCalcDL(long int)         const { return false; };
+    virtual bool   CanCalcPanelDL(long int)    const { return false; };
+    virtual bool   ShouldCalcDL(long int)      const { return false; };
+    Branch_ptr GetValidChild(Branch_ptr br, long int whichpos);
+    Branch_ptr GetValidPanelChild(Branch_ptr br, long int whichpos);
+    Branch_ptr GetValidParent(long int whichpos);
+
+    // Haplotyping helpers.
+    virtual bool    DiffersInDLFrom(Branch_ptr branch, long int locus, long int marker) const;
+
+    // Tree summarization helpers.
+    // Some subclass functions return results via the reference second and third arguments.
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const = 0;
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const = 0;
+
+    // Invariant checking.
+    virtual bool    operator==(const Branch & src) const;
+    bool    operator!=(const Branch & src) const { return !(*this == src); };
+
+    // Debugging functions.
+    virtual bool    CheckInvariant()              const;
+    virtual string  DLCheck(const Branch & other)  const;
+    void            InvertUpdateDL() { m_updateDL = !m_updateDL; };
+    virtual void    PrintInfo() const;
+    vector<Branch_ptr> GetBranchChildren(); // Used by TimeList::PrintTimeList()
+    virtual bool    IsSameExceptForTimes(const Branch_ptr other) const;
+
+    // Used by TimeList::IsValidTimeList(), non-const because of
+    // use of boost::shared_from_this()!
+    bool            ConnectedTo(const Branch_ptr family);
+
+    // Debugging function.
+    // Used by TimeList::RevalidateAllRanges(), must be able to handle "one-legged" forms
+    // of all branchtypes (eg. coalescence and recombination).
+    //
+    // The base class implementation will assume that there exists exactly one child and
+    // that there are no changes in the Range object between parent and child, and no
+    // changes needed to the passed argument.
+    virtual bool    RevalidateRange(FC_Status &) const;
+
+    // GetActiveChild() is a helper function for GetValidChild()
+    virtual const Branch_ptr GetActiveChild(long int) const { return Child(0); };
+
+    // GetActivePanelChild() is a helper function for GetValidPanelChild()
+    virtual const Branch_ptr GetActivePanelChild(long int) const { return Child(0); };
+
+    // for writing GraphML output
+    virtual void     AddGraphML(TiXmlElement *) const;
+    virtual void     AddNodeInfo(TiXmlElement *) const { return; };
+    virtual string   GetGraphMLNodeType() const = 0;
+    virtual string   GetParentIDs() const;
+    virtual string   GetChildIDs() const;
+    virtual long int GetCanonicalID() const;
+    virtual long int GetCanonicalParentID() const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// BASE branch - root end of tree - one child, no parent.
+
+class BBranch : public Branch
+{
+  private:
+    BBranch & operator=(const BBranch & src); // Assignment operator is undefined.
+
+  protected:
+    // Rather than returning a "fake" range for this bottom-most branch, we return a Null pointer.
+    // Callers use an accessor function which checks this pointer with ASSERT.
+    virtual Range * CreateRange() const { return NULL; };
+
+  public:
+    // Need this for TimeList constructor; will construct using CreateRange().  But a BBranch's Range
+    // should never be accessed.  Its Range pointer is NULL, and GetRangePtr() will ASSERT on it.
+    BBranch();
+
+    BBranch(const BBranch & src): Branch(src) {}; // Copies the NULL Range pointer.
+
+    // Base-class dtor deletes the Range object pointed to by "m_rangePtr" (assuming ptr is non-NULL).
+    virtual ~BBranch() {};
+
+    virtual Branch_ptr Clone()               const;
+    virtual branch_type Event()              const { return btypeBase; };
+    virtual branch_group BranchGroup()       const { return bgroupBody; };
+
+    // Tree summarization helpers.
+    // Some subclass functions (not this one) return results via the reference third argument.
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const;
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const;
+
+    // Debugging function.
+    virtual bool    CheckInvariant()          const;
+
+    // for writing GraphML output
+    virtual string GetGraphMLNodeType() const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// TIP branch - tip of tree - one parent, no children.
+
+class TBranch : public Branch
+{
+  private:
+    TBranch();                                // Default ctor is undefined.
+    TBranch & operator=(const TBranch & src); // Assignment operator is undefined.
+
+  protected:
+    virtual Range * CreateRange(long int nsites, const rangeset & diseasesites) const;
+
+  public:
+    string  m_label;
+
+    TBranch(const TipData & tipdata, long int nsites, const rangeset & diseasesites);
+    TBranch(const TBranch & src) : Branch(src), m_label(src.m_label) {};
+
+    // Base-class dtor deletes the Range object pointed to by "m_rangePtr" (assuming ptr is non-NULL).
+    virtual ~TBranch() {};
+
+    virtual Branch_ptr Clone()                        const;
+    virtual branch_type Event()                       const { return btypeTip; };
+    virtual branch_group BranchGroup()                const { return bgroupTip; };
+
+    virtual long int Cuttable()                       const { return 1; };
+    virtual const Branch_ptr GetActiveChild(long int) const { return Branch::NONBRANCH; };
+
+    // Tree summarization helpers.
+    // Some subclass functions (not this one) return results via the reference third argument.
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const;
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const;
+
+    // Debugging functions.
+    virtual bool    CheckInvariant()              const;
+    virtual bool    operator==(const Branch & src) const;
+    virtual bool    IsSameExceptForTimes(const Branch_ptr other) const;
+    virtual bool    RevalidateRange(FC_Status &) const;
+    virtual void    PrintInfo() const;
+
+    // for writing GraphML output
+    virtual void   AddNodeInfo(TiXmlElement *) const;
+    virtual string GetGraphMLNodeType() const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// COALESCENCE branch - one parent, two children.
+
+class CBranch : public Branch
+{
+  private:
+    CBranch();                                // Default ctor is undefined.
+    CBranch & operator=(const CBranch & src); // Assignment operator is undefined.
+
+  protected:
+    // If newbranchisinactive == true, then child1rangeptr is assumed to point to the inactive branch Range
+    // and child2rangeptr to point to the active branch Range.
+    virtual Range * CreateRange(const Range * const child1rangeptr, const Range * const child2rangeptr,
+                                bool  newbranchisinactive, const rangeset & fcsites) const;
+
+  public:
+    CBranch(const Range * const child1rangeptr, const Range * const child2rangeptr, bool newbranchisinactive, const rangeset & fcsites);
+    CBranch(const CBranch & src) : Branch(src) {};
+
+    // Base-class dtor deletes the Range object pointed to by "m_rangePtr" (assuming ptr is non-NULL).
+    virtual ~CBranch() {};
+
+    virtual Branch_ptr Clone()                  const;
+    virtual branch_type Event()                 const { return btypeCoal; };
+    virtual branch_group BranchGroup()          const { return bgroupBody; };
+
+    virtual long int Cuttable()                 const { return 1; };
+    virtual bool     CanRemove(Branch_ptr checkchild);
+    virtual long int CountDown()                const { return -2; };
+    virtual void     UpdateBranchRange(const rangeset & fcsites, bool dofc);
+    virtual void     UpdateRootBranchRange(const rangeset & fcsites, bool dofc);
+    virtual void     SetUpdateDL()                    { m_updateDL = true; };
+    virtual void     ReplaceChild(Branch_ptr oldchild, Branch_ptr newchild);
+    virtual Branch_ptr OtherChild(Branch_ptr badchild);
+    virtual bool CanCalcDL(long int site) const;
+    virtual bool ShouldCalcDL(long int site) const { return m_updateDL && CanCalcDL(site); };
+    virtual const Branch_ptr GetActiveChild(long int site) const;
+
+    // Tree summarization helpers.
+    // Some subclass functions (this one does) return results via the reference third argument.
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const;
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const;
+
+    // Debugging functions.
+    virtual bool    CheckInvariant()          const;
+    virtual bool    RevalidateRange(FC_Status & fcstatus) const;
+
+    // for writing GraphML output
+    virtual string GetGraphMLNodeType() const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// BASE CLASS for many branches like migration and disease - one parent, one child.
+
+class PartitionBranch : public Branch
+{
+  private:
+    PartitionBranch();                                        // Default ctor is undefined.
+    PartitionBranch & operator=(const PartitionBranch & src); // Assignment operator is undefined.
+
+  protected:
+    Range * CreateRange(const Range * const childrangeptr) const;
+
+  public:
+    PartitionBranch(const Range * const childrangeptr);
+    PartitionBranch(const PartitionBranch & src) : Branch(src) {};
+
+    // Base-class dtor deletes the Range object pointed to by "m_rangePtr" (assuming ptr is non-NULL).
+    virtual ~PartitionBranch() {};
+
+    virtual long int Cuttable()            const { return 0; };
+    virtual branch_group BranchGroup()     const { return bgroupBody; };
+    virtual void UpdateBranchRange(const rangeset & fcsites, bool dofc);
+    virtual void UpdateRootBranchRange(const rangeset & fcsites, bool dofc) { assert(false); };
+
+    // Debugging function.
+    virtual bool CheckInvariant()       const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// Abstract base class for migration-like branches (MIG, DIVMIG).
+
+class MigLikeBranch : public PartitionBranch
+{
+  private:
+    MigLikeBranch();                                      // Default ctor is undefined.
+    MigLikeBranch & operator=(const MigLikeBranch & src); // Assignment operator is undefined.
+
+  protected:
+    // We accept PartitionBranch::CreateRange().
+
+  public:
+    MigLikeBranch(const Range * const protorangeptr);
+    MigLikeBranch(const MigLikeBranch & src);
+
+    // Base-class dtor deletes the Range object pointed to by "m_rangePtr" (assuming ptr is non-NULL).
+    virtual ~MigLikeBranch() {};
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// MIGRATION branch - one parent, one child.
+
+class MBranch : public MigLikeBranch
+{
+  private:
+    MBranch();                                // Default ctor is undefined.
+    MBranch & operator=(const MBranch & src); // Assignment operator is undefined.
+
+  protected:
+    // We accept PartitionBranch::CreateRange().
+
+  public:
+    MBranch(const Range * const protorangeptr);
+    MBranch(const MBranch & src);
+
+    // Base-class dtor deletes the Range object pointed to by "m_rangePtr" (assuming ptr is non-NULL).
+    virtual ~MBranch() {};
+
+    virtual Branch_ptr Clone()             const;
+    virtual branch_type Event()            const { return btypeMig; };
+
+    // Tree summarization helpers.
+    // Some subclass functions (not this one) return results via the reference third argument.
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const;
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const;
+
+    // for writing GraphML output
+    virtual string GetGraphMLNodeType() const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// MIGRATION when modeling divergence branch - one parent, one child.
+// Migration nodes for Divergence Migration.
+
+class DivMigBranch : public MigLikeBranch
+{
+  private:
+    DivMigBranch();                                     // Default ctor is undefined.
+    DivMigBranch & operator=(const DivMigBranch & src); // Assignment operator is undefined.
+
+  protected:
+    // We accept PartitionBranch::CreateRange().
+
+  public:
+    DivMigBranch(const Range * const protorangeptr);
+    DivMigBranch(const DivMigBranch & src);
+
+    // Base-class dtor deletes the Range object pointed to by "m_rangePtr" (assuming ptr is non-NULL).
+    virtual ~DivMigBranch() {};
+
+    virtual Branch_ptr Clone()             const;
+    virtual branch_type Event()            const { return btypeDivMig; };
+
+    // Tree summarization helpers.
+    // Some subclass functions (not this one) return results via the reference third argument.
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const;
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const;
+
+    // for writing GraphML output
+    virtual string GetGraphMLNodeType() const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// DISEASE mutation branches.  NOT related to Divergence! - one parent, one child.
+
+class DBranch : public PartitionBranch
+{
+  private:
+    DBranch();                                // Default ctor is undefined.
+    DBranch & operator=(const DBranch & src); // Assignment operator is undefined.
+
+  protected:
+    // We accept PartitionBranch::CreateRange().
+
+  public:
+    DBranch(const Range * const protorangeptr);
+    DBranch(const DBranch & src);
+
+    // Base-class dtor deletes the Range object pointed to by "m_rangePtr" (assuming ptr is non-NULL).
+    virtual ~DBranch() {};
+
+    virtual Branch_ptr Clone()             const;
+    virtual branch_type Event()            const { return btypeDisease; };
+    virtual branch_group BranchGroup()     const { return bgroupBody; };
+
+    // Tree summarization helpers.
+    // Some subclass functions (not this one) return results via the reference third argument.
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const;
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const;
+
+    // for writing GraphML output
+    virtual string GetGraphMLNodeType() const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// EPOCH branch (used for divergence, but d was already taken by disease) - one parent, one child.
+
+class EBranch : public PartitionBranch
+{
+  private:
+    EBranch();                                // Default ctor is undefined.
+    EBranch & operator=(const EBranch & src); // Assignment operator is undefined.
+
+  protected:
+    // We accept PartitionBranch::CreateRange().
+
+  public:
+    EBranch(const Range * const protorangeptr) : PartitionBranch(protorangeptr) {};
+    EBranch(const EBranch & src) : PartitionBranch(src) {};
+
+    // Base-class dtor deletes the Range object pointed to by "m_rangePtr" (assuming ptr is non-NULL).
+    virtual ~EBranch() {};
+
+    virtual Branch_ptr Clone()         const;
+    virtual branch_type Event()        const { return btypeEpoch; };
+    virtual branch_group BranchGroup() const { return bgroupBody; };
+
+    // Tree summarization helpers.
+    // Some subclass functions (not this one) return results via the reference third argument.
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const;
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const;
+
+    // for writing GraphML output
+    virtual string GetGraphMLNodeType() const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// RECOMBINATION branch - one parent, one child
+// Shares the child with another recombination branch which has a different parent.
+// Use GetRecPartner() to find this partner.
+// The other RBranch has the same time but transmits a different range of sites.
+
+class RBranch : public Branch
+{
+  private:
+    RBranch();                                // Default ctor is undefined.
+    RBranch & operator=(const RBranch & src); // Assignment operator is undefined.
+
+  protected:
+    Range * CreateRange(const Range * const childrangeptr, bool isinactive,
+                        const rangeset & transmittedsites, const rangeset & fcsites) const;
+
+  public:
+    RBranch(const Range * const childrangeptr, bool isinactive,
+            const rangeset & transmittedsites, const rangeset & fcsites);
+    RBranch(const RBranch & src) : Branch(src) {};
+
+    // Base-class dtor deletes the Range object pointed to by "m_rangePtr" (assuming ptr is non-NULL).
+    virtual ~RBranch() {};
+
+    virtual Branch_ptr Clone()             const;
+    virtual branch_type Event()            const { return btypeRec; };
+    virtual branch_group BranchGroup()     const { return bgroupBody; };
+
+    virtual void    CopyPartitionsFrom(Branch_ptr src);
+    void    RecCopyPartitionsFrom(Branch_ptr src, FPartMap fparts, bool islow);
+
+    virtual long int   Cuttable()          const { return 1; };
+    virtual long int   CountDown()         const { return 1; };
+    virtual void       UpdateBranchRange(const rangeset & fcsites, bool dofc);
+    virtual void       UpdateRootBranchRange(const rangeset & fcsites, bool dofc) { assert(false); };
+    virtual void       ReplaceChild(Branch_ptr oldchild, Branch_ptr newchild);
+    virtual bool       IsRemovableRecombinationLeg(const rangeset &) const;
+    virtual Branch_ptr GetRecPartner() const;
+
+    // Can be called on recombinant branch only; returns Littlelink (Biglink midpoint, if Biglinks enabled).
+    virtual long int GetRecpoint() const { return GetRangePtr()->GetRecpoint(); };
+
+    // Tree summarization helpers.
+    // Some subclass functions (this one does) return results via the reference third argument.
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks) const;
+    virtual void    ScoreEvent(TreeSummary & summary, BranchBuffer & ks, Linkweight & recweight) const;
+
+    virtual bool    operator==(const Branch & src) const;
+
+    // Debugging functions.
+    virtual bool    CheckInvariant()          const;
+    virtual bool    IsSameExceptForTimes(const Branch_ptr other) const;
+    virtual bool    RevalidateRange(FC_Status & fcstatus) const;
+
+    // for writing GraphML output
+    virtual string GetGraphMLNodeType() const;
+    virtual void   AddNodeInfo(TiXmlElement *) const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+// Free functions for use as STL predicates
+
+class IsTipGroup : public std::unary_function<Branch_ptr, bool>
+{
+  public:
+    bool operator()(const Branch_ptr t) { return t->BranchGroup() == bgroupTip; };
+};
+
+//------------------------------------------------------------------------------------
+
+class IsBodyGroup : public std::unary_function<Branch_ptr, bool>
+{
+  public:
+    bool operator()(const Branch_ptr t) { return t->BranchGroup() == bgroupBody; };
+};
+
+//------------------------------------------------------------------------------------
+
+class IsCoalGroup : public std::unary_function<Branch_ptr, bool>
+{
+  public:
+    bool operator()(const Branch_ptr t) { return t->Event() == btypeCoal; };
+};
+
+#endif // BRANCH_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/branchbuffer.cpp b/src/tree/branchbuffer.cpp
new file mode 100644
index 0000000..1f799c3
--- /dev/null
+++ b/src/tree/branchbuffer.cpp
@@ -0,0 +1,413 @@
+// $Id: branchbuffer.cpp,v 1.27 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>                     // debugging
+
+#include "local_build.h"
+
+#include "branchbuffer.h"
+#include "datapack.h"                   // for BranchBuffer ctor
+#include "errhandling.h"                // for exceptions
+#include "force.h"                      // for force ctor in BranchBuffer::RemoveBranch()
+#include "fc_status.h"                  // for BranchBuffer::SetupFC()
+#include "range.h"                      // For Link-related typedefs and constants.
+#include "registry.h"                   // for access to global datapack for handling
+                                        // partition logic in BranchBuffer class
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+extern Registry registry;
+
+//------------------------------------------------------------------------------------
+
+BranchBuffer::BranchBuffer(const DataPack& dpack)
+    :
+    m_branchxparts(dpack.GetNCrossPartitions(), 0L),
+    m_branchparts(dpack.GetNPartitionForces())
+{
+    long force, nforces = dpack.GetNPartitionForces();
+    for(force = 0; force < nforces; ++force)
+    {
+        LongVec1d parts(dpack.GetNPartitions(force), 0L);
+        m_branchparts[force] = parts;
+    }
+} // BranchBuffer::BranchBuffer
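+
+// Illustrative example (not from the original source): with two partition forces
+// having 3 and 2 partitions respectively, m_branchparts is initialized to
+// { {0,0,0}, {0,0} }, and m_branchxparts gets dpack.GetNCrossPartitions() zeroes.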
+
+//------------------------------------------------------------------------------------
+
+void BranchBuffer::Clear()
+{
+    m_branches.clear();
+
+    m_branchxparts.assign(m_branchxparts.size(), 0L);
+    LongVec2d::iterator force;
+
+    for(force = m_branchparts.begin(); force != m_branchparts.end(); ++force)
+        force->assign(force->size(), 0L);
+} // BranchBuffer::Clear
+
+//------------------------------------------------------------------------------------
+
+long BranchBuffer::GetNBranches(force_type force, const LongVec1d & membership) const
+{
+    long count = m_branchxparts[registry.GetDataPack().GetCrossPartitionIndex(membership)];
+
+    return count;
+} // BranchBuffer::GetNBranches
+
+//------------------------------------------------------------------------------------
+
+long BranchBuffer::GetNPartBranches(force_type force, long part) const
+{
+    long count = m_branchparts[registry.GetForceSummary().GetPartIndex(force)][part];
+
+    return count;
+} // BranchBuffer::GetNPartBranches
+
+//------------------------------------------------------------------------------------
+
+void BranchBuffer::UpdateBranchCounts(const LongVec1d & newpartitions, bool addbranch)
+{
+    unsigned long xpartindex = registry.GetDataPack().GetCrossPartitionIndex(newpartitions);
+
+    assert(xpartindex < m_branchxparts.size());
+
+    if (addbranch)
+    {
+        ++m_branchxparts[xpartindex];
+        long force, nforces = newpartitions.size();
+        for(force = 0; force < nforces; ++force)
+            ++m_branchparts[force][newpartitions[force]];
+    }
+    else
+    {
+        --m_branchxparts[xpartindex];
+        long force, nforces = newpartitions.size();
+        for(force = 0; force < nforces; ++force)
+            --m_branchparts[force][newpartitions[force]];
+    }
+} // BranchBuffer::UpdateBranchCounts
+
+//------------------------------------------------------------------------------------
+
+LongVec1d BranchBuffer::GetBranchParts(force_type force) const
+{
+    return m_branchparts[registry.GetForceSummary().GetPartIndex(force)];
+} // BranchBuffer::GetBranchParts
+
+//------------------------------------------------------------------------------------
+
+LongVec1d BranchBuffer::GetBranchXParts() const
+{
+    return m_branchxparts;
+} // BranchBuffer::GetBranchXParts
+
+//------------------------------------------------------------------------------------
+
+void BranchBuffer::Append(Branch_ptr newbranch)
+{
+    // OPTIMIZE -- should we change to list push_back?
+    m_branches.push_back(newbranch);
+
+    UpdateBranchCounts(newbranch->m_partitions);
+} // BranchBuffer::Append
+
+//------------------------------------------------------------------------------------
+
+void BranchBuffer::Append(vector<Branch_ptr> newbranches)
+{
+    vector<Branch_ptr>::iterator branch;
+
+    for(branch = newbranches.begin(); branch != newbranches.end(); ++branch)
+        Append(*branch);
+} // BranchBuffer::Append
+
+//------------------------------------------------------------------------------------
+
+void BranchBuffer::AddAfter(Branchiter here, Branch_ptr newbranch)
+{
+    // debug DEBUG warning WARNING--assume that we can keep time sorted
+    //    ordering by simple append after here.
+    ++here;
+    m_branches.insert(here, newbranch);
+
+    UpdateBranchCounts(newbranch->m_partitions);
+} // BranchBuffer::AddAfter
+
+//------------------------------------------------------------------------------------
+
+void BranchBuffer::Collate(Branch_ptr newbranch)
+{
+    // Degenerate collate into an empty list
+    if (m_branches.empty())
+    {
+        m_branches.push_front(newbranch);
+        UpdateBranchCounts(newbranch->m_partitions);
+        return;
+    }
+
+    // Regular collate
+    Branchiter br = m_branches.begin();
+
+    // Catch insertion before the first entry
+    if ((*br)->Parent(0)->m_eventTime > newbranch->Parent(0)->m_eventTime)
+    {
+        m_branches.push_front(newbranch);
+        UpdateBranchCounts(newbranch->m_partitions);
+        return;
+    }
+
+    // Insertion after an entry
+    Branchiter end = m_branches.end();
+    Branchiter previous = br;
+    br++;                               // First entry has been taken care of already.
+
+    for ( ; br != end; ++br)
+    {
+        if ((*br)->Parent(0)->m_eventTime > newbranch->Parent(0)->m_eventTime) break;
+        previous = br;
+    }
+
+    // We rely on flowthrough for the case where this will be the new bottommost branch.
+    AddAfter(previous, newbranch);
+
+} // BranchBuffer::Collate
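+
+// Worked example (illustrative, not from the original source): if the buffer already
+// holds branches whose Parent(0) event times are 0.2, 0.5 and 0.9, and the new
+// branch's Parent(0) time is 0.6, the loop above stops at the 0.9 entry and
+// AddAfter() places the new branch between the 0.5 and 0.9 entries.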
+
+//------------------------------------------------------------------------------------
+
+void BranchBuffer::Remove(Branchiter here)
+{
+    UpdateBranchCounts((*here)->m_partitions, false);
+    m_branches.erase(here);
+
+} // BranchBuffer::Remove
+
+void BranchBuffer::Remove(Branch_ptr branch)
+{
+    UpdateBranchCounts(branch->m_partitions, false);
+    m_branches.remove(branch);
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr BranchBuffer::GetFirst()
+{
+    return m_branches.front();
+} // BranchBuffer::GetFirst
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr BranchBuffer::RemoveFirst()
+{
+    Branch_ptr pbranch = GetFirst();
+    UpdateBranchCounts(pbranch->m_partitions, false);
+    m_branches.pop_front();
+
+    return pbranch;
+} // BranchBuffer::RemoveFirst
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr BranchBuffer::RemovePartitionBranch(force_type force, long part, double rnd)
+{
+    long count = static_cast<long>(rnd * GetNPartBranches(force, part));
+    Branchiter brit;
+
+    for(brit = m_branches.begin(); brit != m_branches.end(); ++brit)
+        if ((*brit)->GetPartition(force) == part)
+        {
+            if (!count) break;
+            --count;
+        }
+
+    assert(count == 0);                 // Didn't find a branch.
+    Branch_ptr retbranch = *brit;
+    Remove(brit);
+
+    return retbranch;
+} // BranchBuffer::RemovePartitionBranch
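+
+// Worked example (illustrative, assuming rnd lies in [0,1)): with 4 branches in the
+// requested partition and rnd = 0.6, count starts at static_cast<long>(0.6 * 4) = 2,
+// so the loop above skips the first two matching branches and removes the third.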
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr BranchBuffer::RemoveBranch(force_type forceid, long xpartindex, double rnd)
+{
+    const LongVec1d & membership = registry.GetDataPack().GetBranchMembership(xpartindex);
+    long count = static_cast<long>(rnd * GetNBranches(forceid, membership));
+    const Force & force(**registry.GetForceSummary().GetForceByTag(forceid));
+    Branchiter brit;
+
+    for(brit = m_branches.begin(); brit != m_branches.end(); ++brit)
+    {
+        if ((*brit)->IsAMember(force, membership))
+        {
+            if (!count) break;
+            --count;
+        }
+    }
+
+    assert(count == 0);                 // Didn't find a branch.
+    assert(brit != m_branches.end());
+
+    Branch_ptr retbranch = *brit;
+    Remove(brit);
+
+    return retbranch;
+} // BranchBuffer::RemoveBranch
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr BranchBuffer::RemoveBranch(force_type forceid, const LongVec1d & membership, double rnd)
+{
+    long count = static_cast<long>(rnd * GetNBranches(forceid, membership));
+    const Force & force(**registry.GetForceSummary().GetForceByTag(forceid));
+    Branchiter brit;
+
+    for(brit = m_branches.begin(); brit != m_branches.end(); ++brit)
+        if ((*brit)->IsAMember(force, membership))
+        {
+            if (!count) break;
+            --count;
+        }
+
+    assert(count == 0); // didn't find a branch
+
+    Branch_ptr retbranch = *brit;
+    Remove(brit);
+
+    return retbranch;
+} // BranchBuffer::RemoveBranch
+
+//------------------------------------------------------------------------------------
+
+double BranchBuffer::IntervalBottom()
+{
+    return GetFirst()->Parent(0)->m_eventTime;
+} // BranchBuffer::IntervalBottom()
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> BranchBuffer::ExtractConstBranches() const
+{
+    vector<Branch_ptr> br;
+    Branchconstiter brit;
+
+    for(brit = m_branches.begin(); brit != m_branches.end(); ++brit)
+        br.push_back(*brit);
+
+    return br;
+} // BranchBuffer::ExtractConstBranches
+
+//------------------------------------------------------------------------------------
+
+long BranchBuffer::Nsites() const
+{
+    if (m_branches.empty()) return 0L;
+
+    return (*BeginBranch())->Nsites();
+} // BranchBuffer::Nsites
+
+//------------------------------------------------------------------------------------
+
+void BranchBuffer::IncreaseFCCount(FC_Status & fcstatus) const
+{
+#if FINAL_COALESCENCE_ON
+    Branchconstiter brit;
+    for(brit = m_branches.begin(); brit != m_branches.end(); ++brit)
+    {
+        fcstatus.Increment_FC_Counts((*brit)->GetLiveSites());
+    }
+#endif
+} // BranchBuffer::IncreaseFCCount
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void BranchBuffer::PrintEvents() const
+{
+    Branchconstiter brit;
+
+    for(brit = m_branches.begin(); brit != m_branches.end(); ++brit)
+        cerr << ToString((*brit)->Event()) << ", ";
+
+    cerr << endl;
+} // BranchBuffer::PrintEvents
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void BranchBuffer::PrintRanges() const
+{
+    cerr << "Ranges of branches in this list:" << endl;
+    Branchconstiter brit;
+
+    for(brit = m_branches.begin(); brit != m_branches.end(); ++brit)
+    {
+        (*brit)->GetRangePtr()->PrintInfo();
+    }
+} // BranchBuffer::PrintRanges
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void BranchBuffer::PrintXParts() const
+{
+    cerr << "Xpartition values for this buffer:" << endl;
+    LongVec1d::size_type i;
+
+    for(i = 0; i < m_branchxparts.size(); i++)
+    {
+        cerr << " pop: " << i
+             << " xpart: " << m_branchxparts[i]
+             << endl;
+    }
+} // BranchBuffer::PrintXParts
+
+//------------------------------------------------------------------------------------
+// Debugging function (used only in ASSERT); call for recombinant branches only.
+// Returns accumulated weight of Current Target Links over all branches in BranchBuffer object.
+
+Linkweight BranchBuffer::AccumulatedCurTargetLinkweight() const
+{
+    Linkweight recweight(ZERO);
+    Branchconstiter brit;
+
+    for(brit = m_branches.begin(); brit != m_branches.end(); ++brit)
+    {
+        recweight += (*brit)->GetRangePtr()->GetCurTargetLinkweight();
+    }
+
+    return recweight;
+} // BranchBuffer::AccumulatedCurTargetLinkweight
+
+//------------------------------------------------------------------------------------
+// Debugging function (used only in ASSERT); call for recombinant branches only.
+// Returns accumulated weight of New Target Links over all branches in BranchBuffer object.
+
+Linkweight BranchBuffer::AccumulatedNewTargetLinkweight() const
+{
+    Linkweight recweight(ZERO);
+    Branchconstiter brit;
+
+    for(brit = m_branches.begin(); brit != m_branches.end(); ++brit)
+    {
+        recweight += (*brit)->GetRangePtr()->GetNewTargetLinkweight();
+    }
+
+    return recweight;
+} // BranchBuffer::AccumulatedNewTargetLinkweight
+
+//____________________________________________________________________________________
diff --git a/src/tree/branchbuffer.h b/src/tree/branchbuffer.h
new file mode 100644
index 0000000..daa56be
--- /dev/null
+++ b/src/tree/branchbuffer.h
@@ -0,0 +1,115 @@
+// $Id: branchbuffer.h,v 1.17 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// This file defines a time-ordered container of Branch_ptr, the BranchBuffer.  The BranchBuffer
+// is a storage buffer used during rearrangement to store active and inactive branches.
+
+#ifndef BRANCHBUFFER_H
+#define BRANCHBUFFER_H
+
+#include <map>
+#include <vector>
+
+#include "branch.h"
+#include "defaults.h"
+#include "definitions.h"
+#include "range.h"                      // For Link-related typedefs and constants.
+#include "types.h"
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+// #include "branch.h"--to create the "base" branch, TimeList constructor
+//                      to create a Tbranch, CreateTip
+//                      to initialize branch innards, both of the above
+//                      to maintain tree child and parent relationships
+//                         in CopyBody()
+//                      to maintain tree child and parent relationships
+//                         in CopyPartialBody()
+//                      to track ncuttable via branch.Cuttable()
+//                      to track marked status via branch.marked,
+//                         branch.SetUpdateDL()
+
+class TipData;
+class DataPack;
+class FC_Status;
+
+//------------------------------------------------------------------------------------
+
+class BranchBuffer
+{
+  private:
+    Branchlist m_branches;              // We do not own these branches!
+    LongVec1d m_branchxparts;           // dim: cross partitions
+    LongVec2d m_branchparts;            // dim: partition force X partitions
+
+    BranchBuffer(const BranchBuffer & src);             // undefined
+    BranchBuffer & operator=(const BranchBuffer & src); // undefined
+    BranchBuffer();                                     // undefined
+
+    long GetNBranches(force_type force, const LongVec1d & membership) const;
+    long GetNPartBranches(force_type force, long part) const;
+
+  public:
+    BranchBuffer(const DataPack& dpack);
+    ~BranchBuffer() {};
+    void Clear();
+    LongVec2d GetBranchParts()                    const { return m_branchparts; };
+    LongVec1d GetBranchParts(force_type force)    const;
+    LongVec1d GetBranchXParts()                   const;
+    void Append(Branch_ptr newbranch);
+    void Append(std::vector<Branch_ptr> newbranches);
+    void AddAfter(Branchiter here, Branch_ptr newbranch);
+    void Collate(Branch_ptr newbranch);
+    long Size()                          const { return m_branches.size(); };
+    Branchiter BeginBranch()                   { return m_branches.begin(); };
+    Branchiter EndBranch()                     { return m_branches.end(); };
+    Branchconstiter BeginBranch() const        { return m_branches.begin(); };
+    Branchconstiter EndBranch() const          { return m_branches.end(); };
+    void       Remove(Branchiter here);
+    void       Remove(Branch_ptr branch);
+    Branch_ptr GetFirst();
+    Branch_ptr RemoveFirst();
+    Branch_ptr RemoveBranch(force_type force, long xpartindex, double rnd);
+    Branch_ptr RemoveBranch(force_type force, const LongVec1d & membership, double rnd);
+    Branch_ptr RemovePartitionBranch(force_type force, long part, double rnd);
+
+    // This routine updates the "current branch count" storage in the BranchBuffer to make
+    // its count of partitions and crosspartitions accurate for a new time slice.  Passing
+    // true means that the partitions being passed are newly added; passing false means that they
+    // are newly removed.  For example, if you were moving down past a coalescence, you would want
+    // to remove the two lineages that were coalescing, and add the lineage they coalesce into.
+
+    void UpdateBranchCounts(const LongVec1d & newpartitions, bool addbranch = true);
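+    // Illustrative call pattern (the variable names here are hypothetical and
+    // not part of this header): moving down past a coalescence of lineages
+    // with partition memberships kid1 and kid2 into a lineage with
+    // membership parent, one would call
+    //     buffer.UpdateBranchCounts(kid1, false);   // remove first child
+    //     buffer.UpdateBranchCounts(kid2, false);   // remove second child
+    //     buffer.UpdateBranchCounts(parent);        // add the parent (default true)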
+
+    // Meant to only be used on arranger::inactivelist().
+    double  IntervalBottom();
+    double  NextIntervalBottom();
+
+    // Used by TreeSizeArranger.
+    vector<Branch_ptr> ExtractConstBranches() const;
+
+    // Used by ResimArranger::Resimulate & DropAll to aid in constructing a fc_status object.
+    long   Nsites() const;
+
+    // Used by ResimArranger::Resimulate & DropAll to set up the fc object's site counts.
+    void   IncreaseFCCount(FC_Status & fcstatus) const;
+
+    // Debugging functions.
+    void PrintEvents() const;
+    void PrintRanges() const;
+    void PrintXParts() const;
+    Linkweight AccumulatedCurTargetLinkweight() const;
+    Linkweight AccumulatedNewTargetLinkweight() const;
+};
+
+#endif // BRANCHBUFFER_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/branchtag.cpp b/src/tree/branchtag.cpp
new file mode 100644
index 0000000..45ba60a
--- /dev/null
+++ b/src/tree/branchtag.cpp
@@ -0,0 +1,112 @@
+// $Id: branchtag.cpp,v 1.6 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#ifdef DMALLOC_FUNC_CHECK
+#include <dmalloc.h>
+#endif
+
+#include "branchtag.h"
+
+//------------------------------------------------------------------------------------
+
+BranchTag::BranchTag()
+{
+    assert(!NextID().empty());
+    m_id = NextID().front();
+    NextID().pop_front();
+    assert(m_id < RefCount().size());
+    RefCount()[m_id] = 1;
+} // BranchTag::ctor
+
+//------------------------------------------------------------------------------------
+
+BranchTag::BranchTag(const BranchTag& src)
+{
+    m_id = src.m_id;
+    RefCount()[m_id]++;
+}
+
+//------------------------------------------------------------------------------------
+
+BranchTag& BranchTag::operator=(const BranchTag& src)
+{
+    DecrementID(m_id);
+    m_id = src.m_id;
+    RefCount()[m_id]++;
+    return *this;
+}
+
+//------------------------------------------------------------------------------------
+
+void BranchTag::DecrementID(long oldid)
+{
+    assert(RefCount()[oldid] > 0);
+    if (--RefCount()[oldid])
+        return;
+
+    NextID().push_back(oldid);
+}
+
+//------------------------------------------------------------------------------------
+
+BranchTag::~BranchTag()
+{
+    DecrementID(m_id);
+}
+
+//------------------------------------------------------------------------------------
+
+void BranchTag::BeginBranchIDs(long nids)
+{
+    assert(RefCount().empty());
+    RefCount().assign(nids, 0L);
+    assert(RefCount().size()==static_cast<LongVec1d::size_type>(nids));
+    assert(NextID().empty());
+    long i;
+    for(i = 0; i < nids; ++i)
+        NextID().push_back(i);
+}
+
+//------------------------------------------------------------------------------------
+
+bool BranchTag::operator==(const BranchTag& src) const
+{
+    return m_id == src.m_id;
+}
+
+//------------------------------------------------------------------------------------
+
+long BranchTag::ID() const
+{
+    return m_id;
+}
+
+//------------------------------------------------------------------------------------
+// See FAQ 16.15 for an explanation of this opaque code....
+//------------------------------------------------------------------------------------
+
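+// The two accessors below use the "construct on first use" idiom: each static
+// local pointer is allocated on its first call and deliberately never deleted,
+// so the shared containers remain valid for the lifetime of the program and
+// order-of-destruction problems among static objects are avoided.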
+LongVec1d& BranchTag::RefCount()
+{
+    static LongVec1d* refcount = new LongVec1d();
+    return *refcount;
+}
+
+//------------------------------------------------------------------------------------
+
+std::deque<long>& BranchTag::NextID()
+{
+    // clearly it is not behaving as I expected when I wrote it!
+    static std::deque<long> * nextid = new std::deque<long>();
+    return *nextid;
+}
+
+//____________________________________________________________________________________
diff --git a/src/tree/branchtag.h b/src/tree/branchtag.h
new file mode 100644
index 0000000..df87e92
--- /dev/null
+++ b/src/tree/branchtag.h
@@ -0,0 +1,62 @@
+// $Id: branchtag.h,v 1.4 2011/03/07 06:08:52 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*******************************************************************
+ Class BranchTag tracks and manages the semi-unique ID number of each
+ "branch".  Semi-unique, in that if two (or more) branches represent the
+ same "branch", then they will share the same ID number.
+
+ Implementation-wise, a BranchTag behaves like a shared pointer: construction
+ pulls a new unique ID number and initializes a counter.  Copying one
+ increments the counter.  Destroying one decrements the counter.  If the
+ counter reaches zero on a decrement, the ID is freed for reuse.
+
+ This class makes heavy use of static members--the member BeginBranchIDs,
+ with an argument equal to the number of needed ids, must be called before
+ any branch ids are handed out.  The call will look something like this:
+     long max_number_of_concurrent_branches = 1000000;
+     BranchTag::BeginBranchIDs(max_number_of_concurrent_branches);
+
+ Written by Jon Yamato
+********************************************************************/
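+/* A minimal illustrative sketch (the variable names are hypothetical):
+
+       BranchTag::BeginBranchIDs(100);   // must precede creating any BranchTag
+       BranchTag a;                      // pulls a fresh ID; its refcount is 1
+       BranchTag b(a);                   // shares a's ID; the refcount becomes 2
+       assert(a == b);                   // operator== compares IDs
+       // destroying both a and b returns the shared ID to the free list
+*/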
+
+#ifndef BRANCHTAG_H
+#define BRANCHTAG_H
+
+#include <deque>
+
+#include "vectorx.h"
+
+class BranchTag
+{
+  private:
+    LongVec1d::size_type m_id;
+
+    void DecrementID(long oldid);
+    static LongVec1d& RefCount();
+    static std::deque<long>& NextID();
+
+  protected:
+
+  public:
+    BranchTag();
+    BranchTag(const BranchTag& src);
+    BranchTag& operator=(const BranchTag& src);
+    virtual ~BranchTag();
+
+    static void BeginBranchIDs(long nids);
+
+    bool operator==(const BranchTag& src) const;
+    long ID() const;
+};
+
+#endif // BRANCHTAG_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/chain.cpp b/src/tree/chain.cpp
new file mode 100644
index 0000000..e4f6bc7
--- /dev/null
+++ b/src/tree/chain.cpp
@@ -0,0 +1,684 @@
+// $Id: chain.cpp,v 1.99 2013/11/07 22:52:33 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <algorithm>
+#include <cassert>
+#include <fstream>
+#include <iostream>                     // debugging
+#include <map>
+#include <vector>
+
+#include "local_build.h"
+
+#include "arrangervec.h"
+#include "chain.h"
+#include "chainparam.h"
+#include "constants.h"
+#include "defaults.h"
+#include "errhandling.h"
+#include "forcesummary.h"
+#include "likelihood.h"
+#include "newick.h"                     // for TREETRACK writing out of trees
+#include "random.h"
+#include "region.h"                     // for Region::MakeUserTree() and Region::HasUserTree()
+#include "registry.h"
+#include "runreport.h"
+#include "timemanager.h"                // for StartReplicate's use of MakeStickTilTime()
+                                        // when dealing with a user defined tree.
+#include "tree.h"
+#include "treesum.h"
+#include "types.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+Chain::Chain (Random & rnd, RunReport & runrep,
+              const ChainParameters & chainparam,
+              CollectionManager& collmanager, double temper)
+    : m_tempident(temper, chainparam.GetAllStringsForActiveArrangers()),
+      m_ndiscard(0),
+      m_chaintype(0),
+      m_nsamples(0),
+      m_sampleinterval(0),
+      m_realstep(0),
+      m_arrangers(chainparam.CloneArrangers()),
+      m_randomsource (rnd),
+      m_runreport (runrep),
+      m_collmanager(collmanager)
+{
+    // deliberately blank
+} // Chain::Chain
+
+//------------------------------------------------------------------------------------
+
+Chain::Chain (const Chain & src)
+    :
+    m_tempident(src.m_tempident),
+    m_arrangers(src.m_arrangers),
+    m_randomsource(src.m_randomsource),
+    m_runreport(src.m_runreport),
+    m_collmanager(src.m_collmanager)
+{
+    CopyMembers(src);
+} // Chain::Chain
+
+//------------------------------------------------------------------------------------
+
+Chain::~Chain ()
+{
+    m_arrangers.ClearAll();
+} // Chain::~Chain
+
+//------------------------------------------------------------------------------------
+
+Chain & Chain::operator=(const Chain & src)
+{
+    if (&src != this)                   // self assignment test
+    {
+        m_arrangers.ClearAll();
+
+        CopyMembers(src);
+    }
+
+    return *this;
+
+} // Chain::operator=
+
+//------------------------------------------------------------------------------------
+
+// Does not set the reference members: Chain::m_randomsource,
+// Chain::m_runreport and Chain::m_collmanager.
+
+void Chain::CopyMembers(const Chain & src)
+{
+    // temperature identity variables
+    m_tempident = src.m_tempident;
+
+    // non-temperature-identity variables
+    m_nsamples = src.m_nsamples;
+    m_ndiscard = src.m_ndiscard;
+    m_chaintype = src.m_chaintype;
+    m_sampleinterval = src.m_sampleinterval;
+    m_realstep = src.m_realstep;
+    m_chainstate = src.m_chainstate;
+
+    m_arrangers = src.m_arrangers;
+
+} // Chain::CopyMembers
+
+//------------------------------------------------------------------------------------
+
+Arranger * Chain::ChooseArranger (double rnd)
+{
+    return m_arrangers.GetRandomArranger(rnd);
+} // Chain::ChooseArranger
+
+//------------------------------------------------------------------------------------
+
+void Chain::SetChainType (long chtype, const ChainParameters & chparam)
+{
+    m_chaintype = chtype;
+    m_nsamples = chparam.GetNSamples(chtype);
+    m_ndiscard = chparam.GetNDiscard(chtype);
+    m_sampleinterval = chparam.GetInterval(chtype);
+
+} // Chain::SetChainType
+
+//------------------------------------------------------------------------------------
+
+double Chain::GetCurrentDataLlike()
+{
+    return m_chainstate.GetTree()->GetDLValue();
+} // Chain::GetCurrentDataLlike
+
+//------------------------------------------------------------------------------------
+
+void Chain::StartRegion (Tree * regiontree)
+{
+#ifdef DENOVO
+    m_runreport.ReportNormal("Running denovo case--no rearrangement!");
+#endif // DENOVO
+    m_chainstate.SetTree(regiontree);
+
+    if (regiontree->NoPhaseUnknownSites())
+    {
+        m_arrangers.ZeroHapArranger();
+    }
+    if (registry.GetForceSummary().CheckForce(force_REC))
+    {
+        if (dynamic_cast<RecTree*>(regiontree)->GetNumMovingLoci() == 0)
+        {
+            m_arrangers.ZeroLocusArranger();
+        }
+        if (dynamic_cast<RecTree*>(regiontree)->AnyRelativeHaplotypes() == false)
+        {
+            m_arrangers.ZeroProbHapArranger();
+        }
+    }
+} // Chain::StartRegion
+
+//------------------------------------------------------------------------------------
+
+void Chain::EndRegion()
+{
+    // In case we turned haplotyping off in a previous region, turn it back on.
+    m_arrangers.RestoreTiming();
+
+} // Chain::EndRegion
+
+//------------------------------------------------------------------------------------
+// StartRegion() must be called before StartReplicate().
+
+void Chain::StartReplicate (const ForceSummary & forcesum, Region& region)
+{
+    bool tree_is_bad = false;
+    long bad_denovo = 0;
+    Arranger* denovoarranger = m_arrangers.GetDenovoArranger();
+    ForceParameters fp(forcesum.GetStartParameters(), region.GetID());
+    m_chainstate.SetParameters(fp);
+
+    if (region.HasUserTree())
+    {
+        // JDEBUG--this creates a tree that ignores the stick.  If the user
+        // hasn't given us branch lengths, I don't know what this will do
+        // to the rest of the program.
+        region.MakeUserTree(m_chainstate.GetTree());
+        if (forcesum.UsingStick())
+            m_chainstate.GetTree()->GetTimeManager()->
+                MakeStickTilTime(fp, m_chainstate.GetTree()->RootTime());
+    }
+    else
+    {
+        // no user tree is provided, so we make a de novo tree
+        denovoarranger->SetParameters (m_chainstate);
+        DestroyStick(forcesum);
+        do {
+            tree_is_bad = false;
+            try
+            {
+                denovoarranger->Rearrange(m_chainstate);
+                m_chainstate.SimulateDataIfNeeded();
+                if (region.AnySimulatedLoci())
+                {
+                    string msg = "Number of recombinations in this tree:  "
+                        + ToString(m_chainstate.GetTree()->NumberOfRecombinations());
+                    registry.GetRunReport().ReportNormal(msg);
+                }
+#ifndef STATIONARIES
+                m_chainstate.GetTree()->CalculateDataLikes();
+                //LS NOTE:  It's possible to reject a tree during dl calculation.
+#endif // STATIONARIES
+            }
+            catch (rejecttree_error& e)
+            {
+                tree_is_bad = true;
+                ++bad_denovo;
+                // LS TEST:  First-tree rejection messages.
+#if 1
+                switch (e.GetType())
+                {
+                    case OVERRUN:
+                        cerr << "error = OVERRUN" << endl;
+                        break;
+                    case TINYPOP:
+                        cerr << "error = TINYPOP" << endl;
+                        break;
+                    case ZERODL:
+                        cerr << "error = ZERODL" << endl;
+                        break;
+                    case STRETCHED:
+                        cerr << "error = STRETCHED" << endl;
+                        break;
+                    case EPOCHBOUNDARYCROSS:
+                        cerr << "error = EPOCHBOUNDS" << endl;
+                        break;
+                }
+#endif
+
+                if (bad_denovo > defaults::tooManyDenovo)
+                {
+                    string errorMessage = "Unable to create initial tree.  Starting parameter ";
+                    errorMessage += "\n values may be too extreme; try using more conservative ones.";
+                    denovo_failure e(errorMessage);
+                    throw e;
+                }
+                // reject the unsatisfactory tree and prepare to try again
+                denovoarranger->AcceptAndSynchronize(m_chainstate, m_tempident.GetTemperature(), tree_is_bad);
+            }
+        } while(tree_is_bad);
+    }
+
+    assert(m_chainstate.GetTree()->IsValidTree());
+
+    // accept the tree and make the oldtree identical to it
+    denovoarranger->AcceptAndSynchronize(m_chainstate, m_tempident.GetTemperature(), tree_is_bad);
+    assert(m_chainstate.GetOldTree()->IsValidTree());
+
+} // Chain::StartReplicate
+
+//------------------------------------------------------------------------------------
+// SetChainType() must be called before StartChain()
+
+void Chain::StartChain (long chainnumber, long chaintype, const ForceSummary & forces, const ForceParameters & starts)
+{
+    if (IsCold())
+    {
+        long nsteps = m_nsamples * m_sampleinterval + m_ndiscard;
+        m_runreport.SetBarParameters(nsteps, m_ndiscard, chainnumber, chaintype);
+        m_runreport.PrintBar(0L);       // initialize printed bar
+    }
+
+    m_tempident.StartChain();  // set up temperature identity tracking info
+    m_chainstate.AllChanged();
+
+    m_realstep = 0;
+
+    m_chainstate.SetParameters(starts);
+    m_chainstate.OverwriteOldParameters();
+    m_chainstate.UpdateOldStickParams();
+    m_chainstate.UpdateNewStickParams();
+
+    arrangerit arr;
+    for (arr = m_arrangers.begin (); arr != m_arrangers.end (); ++arr)
+        (*arr).second->SetParameters(m_chainstate);
+
+#ifdef TREETRACK
+    if (m_tempident.IsCold())
+    {
+        ofstream of;
+        of.open("treedls", ios::app);
+        of << "chain" << chainnumber << "={" << endl;
+        of.close();
+    }
+#endif
+
+} // Chain::StartChain
+
+//------------------------------------------------------------------------------------
+// SetChainType() must be called before StartBayesianChain()
+// This is a version of StartChain which does not change the
+// parameters.
+
+void Chain::StartBayesianChain(long chainnumber, long chaintype, const ForceSummary& forces)
+{
+    if (IsCold())
+    {
+        long nsteps = m_nsamples * m_sampleinterval + m_ndiscard;
+        m_runreport.SetBarParameters(nsteps, m_ndiscard, chainnumber, chaintype);
+        m_runreport.PrintBar(0L);       // initialize printed bar
+    }
+    m_tempident.StartChain();  // set up temperature identity tracking info
+    m_chainstate.AllChanged();
+
+    m_realstep = 0;
+
+    m_chainstate.OverwriteOldParameters();
+    m_chainstate.UpdateOldStickParams();
+    m_chainstate.UpdateNewStickParams();
+
+    // The following is still needed in case this is the first use of
+    // this Chain object
+    arrangerit arr;
+    for (arr = m_arrangers.begin (); arr != m_arrangers.end (); ++arr)
+        (*arr).second->SetParameters(m_chainstate);
+
+#ifdef TREETRACK
+    if (m_tempident.IsCold())
+    {
+        ofstream of;
+        of.open("treedls", ios::app);
+        of << "chain" << chainnumber << "={" << endl;
+        of.close();
+    }
+#endif
+} // Chain::StartBayesianChain
+
+//------------------------------------------------------------------------------------
+
+void Chain::DoOneChain (long nsteps, bool lastchain)
+{
+    long step;
+    bool tree_is_bad;
+
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+    long denovo_tree_reject_count = 0;
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+    for (step = 0; step < nsteps; ++step)
+    {
+        // The eventloop() call used to be here, back when we were
+        // supporting Mac OS 9.
+
+        ++m_realstep;  // running total of all steps
+
+        // Make sure all Arrangers are up to date.
+        m_collmanager.UpdateArrangers(m_arrangers, m_chainstate);
+
+        // If we're doing purely DENOVO arrangement, don't let
+        // ourselves choose any other arranger.
+
+#ifndef DENOVO
+#ifndef LAMARC_QA_SINGLE_DENOVOS
+        Arranger * arranger = ChooseArranger(m_randomsource.Float());
+#else
+        Arranger * arranger = m_arrangers.GetDenovoArranger();
+#endif // LAMARC_QA_SINGLE_DENOVOS
+#else
+        Arranger * arranger = m_arrangers.GetDenovoArranger();
+#endif // DENOVO
+
+        assert(m_chainstate.GetTree()->IsValidTree());
+        assert(m_chainstate.GetOldTree()->IsValidTree());
+
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+        tree_is_bad = true;
+        while(tree_is_bad)
+        {
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+            tree_is_bad = false;
+            try
+            {
+                arranger->Rearrange(m_chainstate);
+#ifndef STATIONARIES
+                arranger->ScoreRearrangement(m_chainstate);
+#endif // STATIONARIES
+                // In the Stationaries code, the tree retains its initial likelihood.
+                // We must nevertheless call Accept because of the Hastings ratio issue.
+                // Similarly, we call Accept with a "bad" tree to clean it up correctly.
+            }
+            catch (rejecttree_error& e)
+            {
+                tree_is_bad = true;
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+                denovo_tree_reject_count++;
+#endif // LAMARC_QA_SINGLE_DENOVOS
+                switch (e.GetType())
+                {
+                    case OVERRUN:
+                        m_tempident.NoteBadTree();
+                        break;
+                    case TINYPOP:
+                        m_tempident.NoteTinyPopTree();
+                        break;
+                    case ZERODL:
+                        m_tempident.NoteZeroDLTree();
+                        break;
+                    case STRETCHED:
+                        m_tempident.NoteStretchedTree();
+                        break;
+                    case EPOCHBOUNDARYCROSS:
+                        // We don't bookkeep the number of such events at this time.
+                        break;
+                }
+            }
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+        }
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+        bool accepted = arranger->AcceptAndSynchronize(m_chainstate, m_tempident.GetTemperature(), tree_is_bad);
+
+// MREMOVE special-case divergence stationaries code
+        if (m_tempident.IsCold())
+        {
+           ofstream of;
+           of.open("divstat", ios::app);
+           TimeList& tl = m_chainstate.GetTree()->GetTimeList();
+           Branchconstiter br;
+           for (br = tl.FirstBody(); br != tl.EndBranch(); br = tl.NextBody(br)) {
+              if ((*br)->Event() == btypeCoal) {
+                of << "coal" << endl;
+                break;
+              }
+              if ((*br)->Event() == btypeEpoch) {
+                of << "epoch" << endl;
+                break;
+              }
+           }
+           of.close();
+        }
+
+// end MREMOVE
+
+#ifdef TREETRACK
+        if (m_tempident.IsCold())
+        {
+            ofstream of;
+            of.open("treedls", ios::app);
+            //   of << m_chainstate.GetTree()->GetDLValue() << "," << endl;
+            of << m_chainstate.GetTree()->GetDLValue() << ",";
+            of << "step=" << step << ",";
+
+#if 0
+            TreeSummary* trsum = m_chainstate.GetTree()->SummarizeTree();
+
+            double pscore = registry.GetSinglePostLike().Calc_lnProbGP
+                (m_chainstate.GetParameters().GetRegionalParameters(),
+                 m_chainstate.GetParameters().GetRegionalLogParameters(),
+                 trsum);
+
+            delete trsum;
+            of << pscore << endl;
+#endif
+
+            NewickConverter nc;
+            of << nc.LamarcTreeToNewickString(*(m_chainstate.GetTree())) << endl;
+
+            of.close();
+
+            of.open("mstrees", ios::app);
+            of << "//" << endl;
+            of << nc.LamarcTreeToMSNewickString(*(m_chainstate.GetTree()));
+            of << endl;
+            of.close();
+        }
+#endif // TREETRACK
+
+#ifndef LAMARC_QA_SINGLE_DENOVOS
+        // this fouls up LAMARC_QA_SINGLE_DENOVOS, not sure why
+
+        // We don't count acceptances during burn-in.
+        if (m_realstep > m_ndiscard)
+        {
+            string arrangername = arranger->GetName();
+            m_tempident.ScoreRearrangement(arrangername, accepted);
+        }
+#endif  // ! LAMARC_QA_SINGLE_DENOVOS
+
+#if LIKETRACK
+        ofstream of;
+        of.open("likes1", ios::app);
+        of << newtree << endl;
+        of.close();
+#endif
+
+        if ((m_realstep > m_ndiscard) &&
+            ((m_realstep - m_ndiscard) % m_sampleinterval == 0))
+        {
+            m_tempident.Sample(m_collmanager, m_chainstate, m_chaintype, lastchain);   // EWFIX.CHAINTYPE
+        }
+
+        if (IsCold())
+        {
+            m_runreport.PrintBar(m_realstep);
+        }
+    } // end of "for" loop over steps
+
+#ifdef LAMARC_QA_SINGLE_DENOVOS
+    registry.AddDenovoTreeRejectCount(denovo_tree_reject_count);
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+} // Chain::DoOneChain
+
+//------------------------------------------------------------------------------------
+
+ChainOut Chain::EndChain()
+{
+    ChainOut& chout = m_tempident.EndChain();
+    chout.SetLlikedata(m_chainstate.GetTree()->GetDLValue());
+    chout.SetEndtime();
+
+#ifdef TREETRACK
+    if (m_tempident.IsCold())
+    {
+        ofstream of;
+        of.open("treedls", ios::app);
+        of << "}" << endl;
+        of.close();
+    }
+#endif
+
+    return chout;
+
+} // Chain::EndChain
+
+//------------------------------------------------------------------------------------
+
+/* Swap trees between two different-temperature Chains, if
+   appropriate.  The equation is:
+
+   hotlike ^ (1/coldtemp) * coldlike ^ (1/hottemp)
+   -----------------------------------------------
+   hotlike ^ (1/hottemp) * coldlike ^ (1/coldtemp)
+
+   but we work in log units to protect against numerical overflow and underflow.
+   This is a Chain function in order to use private parts of
+   Chain; it is actually symmetrical in its two Chain arguments.
+
+   We actually swap the temperatures (and associated variables)
+   between the Chains, not the Trees, as it's easier to
+   parallelize this way.
+
+   NB:  Since we DO NOT swap the Tree, we also DO NOT swap anything
+   that should stay with the Tree, such as (in a Bayesian run) the
+   ForceParameters.  Watch out for this!  Only things that go with
+   the temperature, which should all be in the TempIdent package,
+   are swapped.
+
+   NB:  The equation above uses likelihoods; the code below
+   uses log likelihoods.
+*/
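+// In the log units used below, the swap criterion reduces to
+//
+//     test = (hotlike/coldtemp + coldlike/hottemp)
+//          - (hotlike/hottemp + coldlike/coldtemp)
+//
+// (with the log prior added to each log likelihood in the Bayesian case);
+// the swap is accepted when test >= 0, or otherwise when test >= log(U)
+// for a uniform random deviate U drawn from (0,1).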
+
+bool Chain::SwapTemperatureIdentities(Chain& hot)
+{
+    bool bayesian = registry.GetChainParameters().IsBayesian();
+
+    Chain& cold = *this;
+    double hottemp = hot.m_tempident.GetTemperature();
+    double coldtemp = cold.m_tempident.GetTemperature();
+    cold.m_tempident.SwapTried();
+
+    // these are log likelihoods
+    double hotlike = hot.m_chainstate.GetTree()->GetDLValue();
+    double coldlike = cold.m_chainstate.GetTree()->GetDLValue();
+
+    double test;
+
+    if (bayesian)
+    {
+        SinglePostLike& postlike = registry.GetSinglePostLike();
+
+        ForceParameters& hotparams = hot.m_chainstate.GetParameters();
+        ForceParameters& coldparams = cold.m_chainstate.GetParameters();
+        vector<double> hotlogs = hotparams.GetRegionalLogParameters();
+        vector<double> coldlogs = coldparams.GetRegionalLogParameters();
+
+        // WARNING program is not exception safe while these pointers
+        // are held!
+        TreeSummary* hotsum = hot.m_chainstate.GetTree()->SummarizeTree();
+        TreeSummary* coldsum = cold.m_chainstate.GetTree()->SummarizeTree();
+
+        double hotprior = postlike.Calc_lnProbGP(hotparams.GetRegionalParameters(),
+                                                 hotlogs, hotsum);
+        double coldprior = postlike.Calc_lnProbGP(coldparams.GetRegionalParameters(),
+                                                  coldlogs, coldsum);
+
+        delete hotsum;
+        delete coldsum;
+
+        test = ((hotlike + hotprior) / coldtemp) + ((coldlike + coldprior) / hottemp)
+            - (((hotlike + hotprior) / hottemp) + ((coldlike + coldprior) / coldtemp));
+
+    }
+    else
+    {   // non-bayesian
+        test = (hotlike/coldtemp) + (coldlike/hottemp) -
+            ((hotlike/hottemp) + (coldlike/coldtemp));
+    }
+
+    if (test >= 0.0)
+    {
+        // always accept improvements
+        cold.SwapTemperatures(hot);
+        // DO NOT DO THIS: if (bayesian) cold.SwapParameters(hot);
+        return true;
+    }
+
+    // conditionally accept non-improvements
+    double comparison = log(m_randomsource.Float());
+    if (test < comparison)
+    {
+        return false;
+    }
+    else
+    {
+        cold.SwapTemperatures(hot);
+        // DO NOT DO THIS:  if (bayesian) cold.SwapParameters(hot);
+        return true;
+    }
+} // SwapTemperatureIdentities
+
+//------------------------------------------------------------------------------------
+
+void Chain::SwapTemperatures(Chain& hot)
+{
+    // We swap only the "temperature identity" variables.
+    // NB:  this code assumes that it's been called on the colder
+    // of the two arguments!
+    m_tempident.NoteSwap(hot.m_tempident, hot.m_chainstate, m_chainstate);
+    swap(m_tempident, hot.m_tempident);
+} // SwapTemperatures
+
+//------------------------------------------------------------------------------------
+
+void Chain::SwapParameters(Chain& hot)
+{
+    // swaps the working parameters of two chains; needed in
+    // Bayesian runs only.
+    m_chainstate.SwapParameters(hot.m_chainstate);
+} // SwapParameters
+
+//------------------------------------------------------------------------------------
+
+void Chain::DestroyStick(const ForceSummary & forces)
+{
+    if (forces.UsingStick())
+    {
+        m_chainstate.GetTree()->DestroyStick();
+        if (m_chainstate.GetOldTree())
+            m_chainstate.GetOldTree()->DestroyStick();
+    }
+} // DestroyStick
+
+//------------------------------------------------------------------------------------
+
+void Chain::RecalculateDataLikes()
+{
+    m_chainstate.GetTree()->GetTimeList().SetAllUpdateDLs();
+    m_chainstate.GetTree()->CalculateDataLikes();
+    m_chainstate.OverwriteOldTree();
+}
+
+//____________________________________________________________________________________
diff --git a/src/tree/chain.h b/src/tree/chain.h
new file mode 100644
index 0000000..40913a5
--- /dev/null
+++ b/src/tree/chain.h
@@ -0,0 +1,143 @@
+// $Id: chain.h,v 1.34 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*******************************************************************
+  Chain is a helper class of the ChainManager whose main
+  responsiblities are to choose the right arranger to use, call that
+  arranger, then manage tree sampling using Arranger::Accept() and
+  TreeSummary::SummarizeTree() as appropriate.  It also manages the
+  runtime progress bar using RunReport expertise.  Finally it tracks
+  some summary statistics on a chain such as the number of
+  rearrangements accepted.
+
+  Each Chain owns a Tree representing the chain's previous state
+  (Chain::oldtree) and a stable of arrangers used in the rearrangement
+  process (Chain::arrangers).
+
+  To use a Chain, you must call StartRegion() and StartReplicate()
+  (in that order) and SetChainType() (at any point before StartChain())
+  to initialize the Chain.  Then the functions StartChain(), DoOneChain(),
+  and EndChain() run the actual single chain.
+
+  Statistics on the run are passed back in the form of a ChainOut by
+  EndChain().
+
+  Written by Jon Yamato
+
+  Changes:
+  Mary:  changed from swapping Trees (expensive, and I can't
+  swap the pointers without rewriting a lot of code) to swapping
+  temperatures and associated variables.  This fixes a bug in
+  heating, and will also make parallelization easier.
+
+  Mary:  refactored "temperature identity" into a class, moved it to
+  a separate file (tempident.h/cpp).  2004/03/23
+
+********************************************************************/
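+/* A minimal illustrative call sequence (all lower-case names below are
+   placeholders for objects and values the caller already holds; they are
+   not declared in this header):
+
+       Chain chain(rnd, runreport, chainparams, collmanager, temperature);
+       chain.StartRegion(regiontree);                 // regiontree is a Tree*
+       chain.StartReplicate(forcesum, region);
+       chain.SetChainType(chaintype, chainparams);
+       chain.StartChain(chainnumber, chaintype, forcesum, startparams);
+       chain.DoOneChain(nsteps, lastchain);
+       ChainOut results = chain.EndChain();
+       chain.EndRegion();
+*/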
+
+#ifndef CHAIN_H
+#define CHAIN_H
+
+#include "chainout.h"
+#include "arrangervec.h"
+#include "tempident.h"
+#include "collmanager.h"
+#include "chainstate.h"
+
+class ChainParameters;
+class RunReport;
+class Random;
+class Region;
+
+class Chain
+{
+  private:
+
+    // This set of variables is part of a Chain's "temperature
+    // identity" and will be swapped if the Chain's temperature is
+    // swapped.
+
+    TemperatureIdentity m_tempident;
+
+    // This set of variables is not part of "temperature identity"
+    // and remains the same when temperatures are swapped.
+
+    long m_ndiscard;                     // length of burn-in
+    long m_chaintype;                    // initial versus final
+    long m_nsamples;                     // number to sample
+    long m_sampleinterval;               // how long between samples
+    long m_realstep;                     // what step of the chain are we on?
+    ArrangerVec m_arrangers;             // we own these
+    Random& m_randomsource;
+    RunReport& m_runreport;
+    CollectionManager& m_collmanager;
+    ChainState m_chainstate;
+
+    Chain();                              // undefined
+
+    void CopyMembers(const Chain& src);
+    Arranger* ChooseArranger(double rnd);
+    void SwapTemperatures(Chain& other);
+    void SwapParameters(Chain& other);
+    void DestroyStick(const ForceSummary & forces);
+
+  public:
+
+    Chain(Random& rnd, RunReport& runrep, const ChainParameters& chainparam,
+          CollectionManager& collection, double temper);
+    Chain(const Chain& src);
+    ~Chain();
+    Chain& operator=(const Chain& src);
+
+    void SetChainType(long chtype, const ChainParameters& chparam);
+
+    void SetTemperature(double n)      { m_tempident.SetTemperature(n); };
+    void ClearSwaps() { m_tempident.ClearSwaps(); };
+    void ClearTotalSwaps() { m_tempident.ClearTotalSwaps(); };
+    void SwapTried() { m_tempident.SwapTried(); };
+    void SetChainStateOldTree(Tree *thistree) { m_chainstate.SetOldTree(thistree); };
+
+    double GetCurrentDataLlike();
+    double GetTemperature()       const { return m_tempident.GetTemperature(); };
+    double GetSwapRate()          const { return m_tempident.GetSwapRate(); };
+    double GetTotalSwapRate()     const { return m_tempident.GetTotalSwapRate(); };
+
+    void StartRegion(Tree* regiontree);
+    void EndRegion();
+
+    // must call StartRegion() before calling
+    void StartReplicate(const ForceSummary & forcesum, Region& region);
+    void EndReplicate()                {};
+
+    // must call SetChainType() before calling
+    void StartChain(long chnumber, long chtype, const ForceSummary & forces,
+                    const ForceParameters& starts);
+
+    // StartBayesianChain() does not change the parameters that the
+    // Chain currently has; this fixes pervasive problems with Divergence
+    // where trying to change parameters without changing the tree corrupts
+    // the tree.
+    void StartBayesianChain(long chnumber, long chtype, const ForceSummary & forces);
+
+    void DoOneChain(long nsteps, bool lastchain);
+    ChainOut EndChain();
+
+    // for heating
+    bool SwapTemperatureIdentities(Chain& hot);
+    bool IsCold() const { return m_tempident.IsCold(); };
+
+    // for optimizing the data model (MixedKS)
+    Tree* GetTree() { return m_chainstate.GetTree(); };
+    void  RecalculateDataLikes();
+};
+
+#endif // CHAIN_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/chainstate.cpp b/src/tree/chainstate.cpp
new file mode 100644
index 0000000..0c36ff7
--- /dev/null
+++ b/src/tree/chainstate.cpp
@@ -0,0 +1,238 @@
+// $Id: chainstate.cpp,v 1.20 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "chainstate.h"
+#include "tree.h"
+
+// WARNING:  This code assumes that no operation changes more than one
+// of the Tree, the Jointed Stick, and the Parameters at the same time!
+// If such changes can occur this scheme will have to be heavily revised!
+
+//------------------------------------------------------------------------------------
+
+ChainState::ChainState()
+    : m_tree(NULL),
+      m_oldtree(NULL),
+      m_parameters(global_region),
+      m_oldparameters(global_region),
+      // Since the ChainState works in region space, both of these member
+      // variables must be overwritten before they are used.  In other words,
+      // 'global_region' is deliberately the wrong value; it serves as a flag
+      // that the real value has not yet been set.
+      unsampled_tree(true),
+      unsampled_parameters(true),
+      unsampled_stick(true),
+      unsampled_map(true),
+      unexported_parameters(true)
+{
+    // deliberately blank
+} // ctor
+
+//------------------------------------------------------------------------------------
+
+ChainState::ChainState(const ChainState& src)
+    : m_tree(src.m_tree), // yes, a shallow copy
+      m_parameters(src.m_parameters),
+      m_oldparameters(src.m_oldparameters),
+      unsampled_tree(src.unsampled_tree),
+      unsampled_parameters(src.unsampled_parameters),
+      unsampled_stick(src.unsampled_stick),
+      unsampled_map(src.unsampled_map),
+      unexported_parameters(src.unexported_parameters)
+{
+    if (m_tree) m_oldtree = src.m_tree->Clone();  // deep copy
+    else m_oldtree = NULL;
+} // copy ctor
+
+//------------------------------------------------------------------------------------
+
+ChainState::~ChainState()
+{
+    delete m_oldtree;
+} // dtor
+
+//------------------------------------------------------------------------------------
+
+ChainState& ChainState::operator=(const ChainState& src)
+{
+    m_tree = src.m_tree;  // yes, a shallow copy
+
+    delete m_oldtree;
+    if (m_tree) m_oldtree = src.m_tree->Clone();  // deep copy
+    else m_oldtree = NULL;
+
+    m_parameters = src.m_parameters;
+    m_oldparameters = src.m_oldparameters;
+    unsampled_tree = src.unsampled_tree;
+    unsampled_parameters = src.unsampled_parameters;
+    unsampled_stick = src.unsampled_stick;
+    unsampled_map = src.unsampled_map;
+    unexported_parameters = src.unexported_parameters;
+
+    return *this;
+} // operator=
+
+//------------------------------------------------------------------------------------
+
+void ChainState::SetTree(Tree* tree)
+// This routine sets m_tree and then makes m_oldtree a tips-and-stump-and-stick
+// copy of it
+{
+    assert(tree);   // We can't set things up with no tree!
+    m_tree = tree;
+    delete m_oldtree;
+    m_oldtree = m_tree->MakeStump();
+    m_oldtree->CopyTips(m_tree);
+    m_oldtree->CopyStick(m_tree);
+    TreeChanged();
+    if (m_tree->UsingStick()) StickChanged();
+    else unsampled_stick = false;
+
+} // SetTree
+
+//------------------------------------------------------------------------------------
+
+void ChainState::SetOldTree(Tree* tree)
+// This routine sets m_oldtree to be a full, valid copy of tree.
+// Added October 2004 by erynes for use by ChainManager.
+{
+    assert(tree);   // We can't set things up with no tree!
+    m_oldtree->CopyTips(tree);
+    m_oldtree->CopyBody(tree);
+    m_oldtree->CopyStick(tree);
+    // NB This is called only at the start of a chain; therefore, it does
+    // not need to mark the tree as modified.  Be careful if using it
+    // anywhere else!  --Mary
+} // SetOldTree
+
+//------------------------------------------------------------------------------------
+
+void ChainState::UpdateOldStickParams()
+{
+    m_oldtree->SetStickParams(m_parameters);
+} // UpdateOldStickParams
+
+//------------------------------------------------------------------------------------
+
+void ChainState::UpdateNewStickParams()
+{
+    m_tree->SetStickParams(m_parameters);
+} // UpdateNewStickParams
+
+//------------------------------------------------------------------------------------
+
+void ChainState::SimulateDataIfNeeded()
+{
+    assert(m_tree);   // We can't set things up with no tree!
+    if (m_tree->SimulateDataIfNeeded())
+    {
+        //This changes the tips, so:
+        m_oldtree->CopyTips(m_tree);
+        m_oldtree->CopyBody(m_tree);
+        // we assume the stick is still germane
+    }
+} // SimulateDataIfNeeded
+
+//------------------------------------------------------------------------------------
+
+void ChainState::SetParameters(const ForceParameters& params)
+{
+    m_parameters = params;
+    ParametersChanged();
+} // SetParameters
+
+//------------------------------------------------------------------------------------
+
+void ChainState::OverwriteOldTree()
+{
+    m_oldtree->CopyBody(m_tree);
+    m_oldtree->CopyStick(m_tree);
+} // OverwriteOldTree
+
+//------------------------------------------------------------------------------------
+
+void ChainState::OverwriteTree()
+{
+    m_tree->CopyBody(m_oldtree);
+    m_tree->CopyStick(m_oldtree);
+    TreeChanged();
+} // OverwriteTree
+
+//------------------------------------------------------------------------------------
+
+void ChainState::OverwriteParameters()
+{
+    m_parameters = m_oldparameters;
+} // OverwriteParameters
+
+//------------------------------------------------------------------------------------
+
+void ChainState::OverwriteOldParameters()
+{
+    m_oldparameters = m_parameters;
+} // OverwriteOldParameters
+
+//------------------------------------------------------------------------------------
+
+void ChainState::TreeChanged()
+{
+    unsampled_tree = true;
+    unsampled_map  = true;              // Only actually true if we're doing a floating
+                                        // analysis, but probably not worth checking.
+} // TreeChanged
+
+//------------------------------------------------------------------------------------
+
+void ChainState::ParametersChanged()
+{
+    unsampled_parameters = true;
+    unexported_parameters = true;
+} // ParametersChanged
+
+//------------------------------------------------------------------------------------
+
+void ChainState::StickChanged()
+{
+    unsampled_stick = true;
+} // StickChanged
+
+//------------------------------------------------------------------------------------
+
+void ChainState::MapChanged()
+{
+    unsampled_map = true;
+} // MapChanged
+
+//------------------------------------------------------------------------------------
+
+void ChainState::AllChanged()
+{
+    TreeChanged();
+    ParametersChanged();
+    if (m_tree->UsingStick())
+    {
+        StickChanged();
+    }
+    MapChanged();
+} // AllChanged
+
+//------------------------------------------------------------------------------------
+
+void ChainState::SwapParameters(ChainState& other)
+{
+    std::swap(m_parameters, other.m_parameters);
+    std::swap(m_oldparameters, other.m_oldparameters);
+    ParametersChanged();
+    other.ParametersChanged();
+} // SwapParameters
+
+//____________________________________________________________________________________
diff --git a/src/tree/chainstate.h b/src/tree/chainstate.h
new file mode 100644
index 0000000..ee0e2ec
--- /dev/null
+++ b/src/tree/chainstate.h
@@ -0,0 +1,102 @@
+// $Id: chainstate.h,v 1.13 2011/03/07 06:08:52 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// ChainState encapsulates the parts of Chain's internal state that
+// need to be used by Arranger, Event, CollectionManager and Collector.
+// It is owned and managed by Chain.
+// This is a "leaf" class, and is not meant to be derived from.
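+//
+// The boolean flags below implement a simple change-tracking protocol: code
+// that modifies a piece of state calls the matching ...Changed() routine, and
+// code that consumes it calls the matching ...Sampled() routine (or
+// ParametersExported()), so the Needs... queries report exactly the work that
+// is still outstanding.  For example, an Arranger that alters the tree calls
+// TreeChanged(); a Collector that records it calls TreeSampled(); in between,
+// TreeNeedsSampling() returns true, so the tree is summarized once per change.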
+
+#ifndef CHAINSTATE_H
+#define CHAINSTATE_H
+
+#include <cassert>
+#include "vectorx.h"
+#include "forceparam.h"
+
+//------------------------------------------------------------------------------------
+
+class Tree;
+
+//------------------------------------------------------------------------------------
+
+class ChainState
+{
+  private:
+    Tree* m_tree;               // non-owning pointer to regional tree
+    // (but every chainstate points to a distinct tree!)
+    Tree* m_oldtree;            // owning pointer to a copy of regional tree
+    ForceParameters m_parameters;
+    ForceParameters m_oldparameters;
+
+    bool unsampled_tree;        // tree has changed since last sampling
+    bool unsampled_parameters;  // parameters have changed since last sampling
+    bool unsampled_stick;       // stick has changed since last sampling
+    bool unsampled_map;         // map position or tree changed since last sampling
+
+    bool unexported_parameters; // parameters have changed since last
+    // arranger update
+
+  public:
+    ChainState();
+    ~ChainState();
+    ChainState(const ChainState& src);
+    ChainState& operator=(const ChainState& src);
+
+    // Setters
+    void SetTree(Tree* tree);   // sets m_tree and makes m_oldtree a partial copy of it
+    void SetParameters(const ForceParameters& params);
+    void OverwriteTree();       // makes m_tree a full copy of m_oldtree
+    void OverwriteOldTree();    // makes m_oldtree a full copy of m_tree
+    void OverwriteParameters();
+    void OverwriteOldParameters();
+    void SetOldTree(Tree* tree);
+    void UpdateOldStickParams();
+    void UpdateNewStickParams();
+
+    void SimulateDataIfNeeded();
+
+    // The following are called to record modification to an object
+    // The first three are used by Arranger, the last by Chain
+    void TreeChanged();
+    void ParametersChanged();
+    void StickChanged();
+    void MapChanged();
+    void AllChanged();   // used at start of a new chain
+
+    // The following are called to record sampling of an object
+    void TreeSampled()          { unsampled_tree = false; };
+    void ParametersSampled()    { unsampled_parameters = false; };
+    void StickSampled()         { unsampled_stick = false; };
+    void MapSampled()           { unsampled_map = false; };
+    void ParametersExported()   { unexported_parameters = false; };
+
+    // Getters; used by Chain and Arrangers
+    Tree*       GetTree()                  { assert(m_tree); return m_tree; };
+    Tree*       GetOldTree()               { assert(m_oldtree); return m_oldtree; };
+    ForceParameters& GetParameters()       { return m_parameters; };
+    ForceParameters& GetOldParameters()    { return m_oldparameters; };
+
+    // The following test if an object has been modified since it was last sampled
+    // Used by Collectors
+    bool TreeNeedsSampling()      const      { return unsampled_tree; };
+    bool ParametersNeedSampling() const      { return unsampled_parameters; };
+    bool StickNeedsSampling()     const      { return unsampled_stick; };
+    bool MapNeedsSampling()       const      { return unsampled_map; };
+    bool ParametersNeedExport()   const      { return unexported_parameters; };
+
+    // Swapping routine for heated Bayesian runs
+    void SwapParameters(ChainState& other);
+};
+
+//------------------------------------------------------------------------------------
+
+#endif  // CHAINSTATE_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/collector.cpp b/src/tree/collector.cpp
new file mode 100644
index 0000000..c132ce2
--- /dev/null
+++ b/src/tree/collector.cpp
@@ -0,0 +1,467 @@
+// $Id: collector.cpp,v 1.32 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2003 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>
+#include <fstream>
+
+#include "local_build.h"
+
+#include "arranger.h"
+#include "arrangervec.h"
+#include "collector.h"
+#include "registry.h"
+#include "region.h"
+#include "sumfilehandler.h"
+#include "treesum.h"
+#include "xmlsum_strings.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+Collector::Collector()
+    : m_areWriting(false), m_sumout()
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+void Collector::WriteCollectionWhenScore(ofstream* out)
+{
+    m_areWriting = true;
+    m_sumout = out;
+}
+
+//------------------------------------------------------------------------------------
+
+TreeCollector::TreeCollector()
+    : forceParameters(unknown_region),
+      m_findbesttree(true /* false */), // JDEBUG--set from menu/xml/compileflag?
+      m_besttreedatallike(NEGMAX)
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+TreeCollector::~TreeCollector()
+{
+    vector<TreeSummary*>::iterator treeiter;
+    for(treeiter = treeSummaries.begin(); treeiter != treeSummaries.end(); treeiter++)
+    {
+        delete(*treeiter);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void TreeCollector::Score(ChainState& chstate)
+{
+    if (chstate.TreeNeedsSampling() || chstate.StickNeedsSampling())
+    {
+        // a novel tree; summarize it
+        if (m_areWriting)
+        {
+            WriteLastSummary();
+            // Write the last tree summary.  Note that this will fail to write
+            //  the one we're collecting--do that later.
+        }
+        treeSummaries.push_back(chstate.GetTree()->SummarizeTree());
+    }
+    else
+    {
+        // a previously seen tree; increment its count
+        treeSummaries.back()->AddCopy();
+    }
+    chstate.TreeSampled();
+
+} // Score
+
+//------------------------------------------------------------------------------------
+
+void TreeCollector::WriteLastSummary()
+{
+    if (m_areWriting && treeSummaries.size())
+    {
+        treeSummaries[treeSummaries.size()-1]->WriteTreeSummary(*m_sumout);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void TreeCollector::WriteAllSummaries(ofstream& out)
+{
+    for (unsigned long index=0; index<treeSummaries.size(); index++)
+    {
+        treeSummaries[index]->WriteTreeSummary(out);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void TreeCollector::CorrectForFatalAttraction(long region)
+{
+    vector<TreeSummary*>::iterator treesum = treeSummaries.begin();
+    map<force_type, DoubleVec1d> totals;
+    map<force_type, DoubleVec1d>::iterator totit;
+    map<force_type, DoubleVec1d>::iterator newit;
+
+    if (treeSummaries.empty()) return;  // nothing to do--possible in the
+    // case of a Bayesian run
+
+    // accumulate, over all treesummaries, total events for each force
+    totals = (*treesum)->InspectSummary();   // first summary establishes number of params
+    ++treesum;
+
+    for (; treesum != treeSummaries.end(); ++treesum)
+    {
+        map<force_type, DoubleVec1d> newtot = (*treesum)->InspectSummary();
+        assert(totals.size() == newtot.size());  // different number of forces?!
+        for (totit = totals.begin(), newit = newtot.begin();
+             totit != totals.end();
+             ++totit, ++newit)
+        {
+            assert(totit->second.size() == newit->second.size());  // different n. of parameters?!
+            // this line assumes that forces are in same order everywhere!
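+            // i.e. element-wise accumulation: totals[force][i] += newtot[force][i]
+            // for every parameter index i of this force.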
+            transform(totit->second.begin(), totit->second.end(),
+                      newit->second.begin(),
+                      totit->second.begin(),
+                      plus<double>());
+        }
+    }
+
+    // We adjust the bin counts in the first tree summary.  This is
+    // an arbitrary choice; as far as I know any tree summary would do.
+    // The first tree might be atypical anyway, and therefore messing it
+    // up is relatively harmless.
+
+    // This call will do nothing if all parameters have at least one
+    // event, but that condition cannot be detected here because of
+    // legitimate zero entries on the diagonal for some forces.
+
+#ifndef LAMARC_QA_SINGLE_DENOVOS
+    treeSummaries.front()->AdjustSummary(totals, region);
+#endif // LAMARC_QA_SINGLE_DENOVOS
+
+} // CorrectForFatalAttraction
+
+//------------------------------------------------------------------------------------
+
+void TreeCollector::AddTreeSummary(TreeSummary* ts)
+{
+    assert(ts);  // bad tree summary!
+    treeSummaries.push_back(ts);
+
+} // AddTreeSummary
+
+//------------------------------------------------------------------------------------
+
+long TreeCollector::TreeCount() const
+{
+    long ntrees(0L);
+
+    vector<TreeSummary*>::const_iterator treesum = treeSummaries.begin();
+    for (; treesum != treeSummaries.end(); ++treesum)
+        ntrees += (*treesum)->GetNCopies();
+
+    return ntrees;
+} // TreeCount
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+void ParamCollector::Score(ChainState& chstate)
+{
+    if (chstate.ParametersNeedSampling())
+    {
+        // a novel set of parameters; summarize them
+        if (m_areWriting)
+        {
+            WriteLastSummary();
+        }
+        ForceParameters& fp = chstate.GetParameters();
+        AddParamSummary(fp, 1L);
+    }
+    else
+    {
+        // a previously seen set; increment their count
+        pair<ForceParameters, long>& previous = m_paramsum.back();
+        ++previous.second;
+    }
+    chstate.ParametersSampled();
+
+} // Score
+
+//------------------------------------------------------------------------------------
+
+void ParamCollector::AddParamSummary(ForceParameters fp, long ncopy)
+{
+    m_paramsum.push_back(make_pair(fp, ncopy));
+}
+
+//------------------------------------------------------------------------------------
+
+void ParamCollector::WriteLastSummary()
+{
+    long index = static_cast<long>(m_paramsum.size())-1;
+    if (index >= 0)
+    {
+        WriteParamSumm(index);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ParamCollector::WriteAllSummaries(ofstream& out)
+{
+    m_sumout = &out;
+    m_areWriting = true;
+    for (unsigned long index = 0; index<m_paramsum.size(); index++)
+    {
+        WriteParamSumm(index);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ParamCollector::WriteParamSumm(unsigned long index)
+{
+    assert (m_paramsum.size() > index);
+    assert (m_areWriting);
+
+    ForceParameters fp = m_paramsum[index].first;
+    long ncopy         = m_paramsum[index].second;
+    if ( m_sumout->is_open() )
+    {
+        *m_sumout << "\t" << xmlsum::PARAM_SUMMARY_START << endl;
+        *m_sumout << "\t\t" << xmlsum::NCOPY_START << " " << ncopy
+                  << " "    << xmlsum::NCOPY_END   << endl;
+        fp.WriteForceParameters(*m_sumout, 2);
+        *m_sumout << "\t" << xmlsum::PARAM_SUMMARY_END << endl;
+    }
+    else
+        SumFileHandler::HandleSumOutFileFormatError("WriteParamSumm");
+}
+
+//------------------------------------------------------------------------------------
+
+void ParamCollector::UpdateArrangers(ArrangerVec& arrangers, ChainState& chstate) const
+{
+    // If the parameters have changed, inform the Arrangers
+    if (chstate.ParametersNeedExport())
+    {
+        arrangerit arr = arrangers.begin();
+        for ( ; arr != arrangers.end(); ++arr)
+        {
+            (*arr).second->SetParameters(chstate);
+        }
+        chstate.UpdateOldStickParams();
+        chstate.UpdateNewStickParams();
+        chstate.ParametersExported();
+    }
+} // UpdateArrangers
+
+//------------------------------------------------------------------------------------
+
+const DoubleVec1d ParamCollector::GetLastParameterVec() const
+{
+    assert (m_paramsum.size() > 0);
+    return m_paramsum[m_paramsum.size()-1].first.GetGlobalParameters();
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+MapCollector::MapCollector(long region)
+    : Collector(),
+      m_region(region),
+      m_nsamples(registry.GetChainParameters().GetNSamples(defaults::final)),
+      m_mapsum(),
+      m_lastmapsum(),
+      m_lastcount(0)
+{
+    DoubleVec1d zeroes(registry.GetDataPack().GetRegion(region).GetNumSites(), 0.0);
+    DoubleVec2d mapsum(registry.GetDataPack().GetNumMovingLoci(region), zeroes);
+    m_mapsum = mapsum;
+}
+
+//------------------------------------------------------------------------------------
+
+bool MapCollector::DoWeCollectFor(bool lastchain)
+{
+    if (m_mapsum.size() == 0)
+    {
+        return false;
+    }
+    if ((registry.GetDataPack().GetRegion(m_region).AnyJumpingAnalyses()) || (lastchain))
+    {
+        return true;
+    }
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+void MapCollector::Score(ChainState& chstate)
+{
+    if (chstate.MapNeedsSampling())
+    {
+        // a novel tree; summarize it
+        if (m_areWriting)
+        {
+            WriteLastSummary();
+            // Write the last tree summary.  Note that this will fail to write
+            //  the one we're collecting--do that later.
+        }
+        AccumulateMap();
+        m_lastmapsum = dynamic_cast<RecTree*>(chstate.GetTree())->GetMapSummary();
+        m_lastcount = 1;
+    }
+    else
+    {
+        // a previously seen tree; increment its count
+        m_lastcount++;
+    }
+    chstate.MapSampled();
+
+} // Score
+
+//------------------------------------------------------------------------------------
+
+void MapCollector::AddMapSummary(DoubleVec2d& maplikes, long ncopy)
+{
+    AccumulateMap();
+    m_lastmapsum = maplikes;
+    m_lastcount = ncopy;
+}
+
+//------------------------------------------------------------------------------------
+
+void MapCollector::WriteLastSummary()
+{
+    if (m_lastmapsum.size() == 0) return;
+    if ( m_sumout->is_open() )
+    {
+        *m_sumout << "\t" << xmlsum::MAP_SUMMARIES_START << endl;
+        *m_sumout << "\t\t" << xmlsum::NCOPY_START << " " << m_lastcount
+                  << " "    << xmlsum::NCOPY_END   << endl;
+        for (unsigned long summary=0; summary < m_lastmapsum.size(); summary++)
+        {
+            *m_sumout << "\t\t" << xmlsum::MAP_SUMMARY_START;
+            rangeset range = registry.GetDataPack().GetRegion(m_region).GetMovingLocus(summary).GetAllowedRange();
+            WriteVec1D(*m_sumout, m_lastmapsum[summary], range);
+            *m_sumout << " " << xmlsum::MAP_SUMMARY_END << endl;
+        }
+        *m_sumout << "\t" << xmlsum::MAP_SUMMARIES_END << endl;
+    }
+    else
+        SumFileHandler::HandleSumOutFileFormatError("WriteLastSummary");
+}
+
+//------------------------------------------------------------------------------------
+
+void MapCollector::WriteAllSummaries(ofstream& out)
+{
+    AccumulateMap();
+    if ( out.is_open() )
+    {
+        out << "\t" << xmlsum::MAP_SUMMARIES_START << endl;
+        out << "\t\t" << xmlsum::NCOPY_START << " " << m_nsamples
+            << " "    << xmlsum::NCOPY_END   << endl;
+        for (unsigned long mlocus=0; mlocus < m_mapsum.size(); mlocus++)
+        {
+            out << "\t\t" << xmlsum::MAP_SUMMARY_START;
+            const Locus& mloc = registry.GetDataPack().GetRegion(m_region).GetMovingLocus(mlocus);
+            rangeset range = mloc.GetAllowedRange();
+            if (mloc.GetAnalysisType() == mloc_mapfloat)
+            {
+                DoubleVec1d logs = SafeLog(m_mapsum[mlocus]);
+                WriteVec1D(out, logs, range);
+            }
+            else
+            {
+                WriteVec1D(out, m_mapsum[mlocus], range);
+            }
+            out << " " << xmlsum::MAP_SUMMARY_END << endl;
+        }
+        out << "\t" << xmlsum::MAP_SUMMARIES_END << endl;
+    }
+    else
+        SumFileHandler::HandleSumOutFileFormatError("WriteAllSummaries");
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec2d MapCollector::GetMapSummary()
+{
+    AccumulateMap();
+    return m_mapsum;
+}
+
+//------------------------------------------------------------------------------------
+
+void MapCollector::AccumulateMap()
+{
+    if (m_lastmapsum.size() == 0)
+    {
+        //Nothing to accumulate
+        return;
+    }
+    for (unsigned long locus=0; locus<m_lastmapsum.size(); locus++)
+    {
+        DoubleVec1d reallikes = m_lastmapsum[locus];
+        if (registry.GetDataPack().GetRegion(m_region).GetMovingLocus(locus).GetAnalysisType() == mloc_mapfloat)
+        {
+            reallikes = SafeExp(m_lastmapsum[locus]);
+        }
+        //Multiply m_lastmapsum by m_lastcount/m_nsamples
+        double weight = static_cast<double>(m_lastcount) / static_cast<double>(m_nsamples);
+        transform(reallikes.begin(),
+                  reallikes.end(),
+                  reallikes.begin(),
+                  bind2nd(multiplies<double>(),weight));
+        assert(reallikes.size() == m_mapsum[locus].size());
+
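+        // add the weighted per-site likelihoods into the running per-locus map summary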
+        transform(reallikes.begin(),
+                  reallikes.end(),
+                  m_mapsum[locus].begin(),
+                  m_mapsum[locus].begin(),
+                  plus<double>());
+    }
+    m_lastmapsum.clear();
+    m_lastcount = 0;
+}
+
+//------------------------------------------------------------------------------------
+
+void MapCollector::WriteVec1D(ofstream& sumout, DoubleVec1d& vec, rangeset range)
+{
+    if (sumout.is_open())
+    {
+        StringVec1d strings(vec.size(), "-");
+        for(rangeset::iterator currRange=range.begin(); currRange != range.end(); currRange++)
+        {
+            for (long site = currRange->first; site < currRange->second; ++site)
+            {
+                strings[site] = ToString(vec[site]);
+            }
+        }
+        for (StringVec1d::iterator num = strings.begin(); num != strings.end(); ++num)
+        {
+            sumout << *num << " ";
+        }
+    }
+    else
+        SumFileHandler::HandleSumOutFileFormatError("WriteVec1D");
+} // WriteVec1D
+
+//____________________________________________________________________________________
diff --git a/src/tree/collector.h b/src/tree/collector.h
new file mode 100644
index 0000000..5fb80fa
--- /dev/null
+++ b/src/tree/collector.h
@@ -0,0 +1,138 @@
+// $Id: collector.h,v 1.20 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2003 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// Collector classes store information from the ongoing chain for
+// later analysis.  They are related via a base class for convenience
+// in initialization, but mostly used via pointer-to-derived because
+// they summarize such different information.
+
+// Mary 2003/12/17
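+
+// Minimal usage sketch (illustrative only; the driver code around these calls is an
+// assumption, but Score() and WriteAllSummaries() are declared below):
+//
+//     Collector* coll = new TreeCollector();
+//     coll->Score(chainState);            // summarize the current chain state
+//     coll->WriteAllSummaries(sumfile);   // flush the stored summaries to the .sum file
+//     delete coll;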
+
+#ifndef COLLECTOR_H
+#define COLLECTOR_H
+
+#include <fstream>
+#include <utility>                      // for pair
+#include <vector>
+#include "chainstate.h"
+#include "tree.h"
+#include "vectorx.h"
+#include "mathx.h"
+
+//------------------------------------------------------------------------------------
+
+typedef std::vector<std::pair<ForceParameters,long> > ParamSumm;
+typedef std::vector<std::pair<std::vector<double>,long> > StickSumm;
+
+class ArrangerVec;
+
+//------------------------------------------------------------------------------------
+// Abstract base class for all Collectors
+//------------------------------------------------------------------------------------
+
+class Collector
+{
+  public:
+    Collector();
+    virtual ~Collector() {};
+    virtual void Score(ChainState& chstate) = 0;
+    virtual void CorrectForFatalAttraction(long region) { };
+    virtual void UpdateArrangers(ArrangerVec& arrangers, ChainState& chstate) const { };
+    virtual void WriteCollectionWhenScore(std::ofstream* out);
+    virtual void WriteLastSummary() = 0;
+    virtual void WriteAllSummaries(std::ofstream& out) = 0;
+
+  private:
+    // This class is not meant to be copied
+    Collector(const Collector&);  // not defined
+    Collector& operator=(const Collector&);  // not defined
+
+  protected:
+    bool m_areWriting;
+    std::ofstream *m_sumout; //non-owning pointer
+};
+
+//------------------------------------------------------------------------------------
+// Tree summary collector, also includes jointed stick (if any)
+//------------------------------------------------------------------------------------
+
+class TreeCollector : public Collector
+{
+  public:
+    TreeCollector(); //Can't be default because of ForceParameters object.
+    virtual ~TreeCollector();
+    // these two are used directly by postlike routines
+    std::vector<TreeSummary*> treeSummaries;  // public for speed of access
+    ForceParameters forceParameters;     // public for speed of access
+
+    virtual void Score(ChainState& chstate);
+    virtual void WriteLastSummary();
+    virtual void WriteAllSummaries(std::ofstream& out);
+    virtual void CorrectForFatalAttraction(long region);
+    virtual void AddTreeSummary(TreeSummary* ts);
+    void SetStartParameters(const ForceParameters& src)
+    { forceParameters = src; };
+    long TreeCount() const;
+
+  private:
+    bool m_findbesttree;
+    double m_besttreedatallike;
+};
+
+//------------------------------------------------------------------------------------
+// Parameter summary collectors
+//------------------------------------------------------------------------------------
+
+class ParamCollector : public Collector
+{
+  public:
+    virtual void Score(ChainState& chstate);
+    virtual void AddParamSummary(ForceParameters fp, long ncopy);
+    virtual void WriteLastSummary();
+    virtual void WriteAllSummaries(std::ofstream& out);
+    virtual void UpdateArrangers(ArrangerVec& arrangers, ChainState& chstate) const;
+    const ParamSumm& GetParamSumm() const { return m_paramsum; }
+    const DoubleVec1d GetLastParameterVec() const;
+
+  private:
+    ParamSumm m_paramsum;
+    virtual void WriteParamSumm(unsigned long index);
+};
+
+//------------------------------------------------------------------------------------
+// Map summary collector
+//------------------------------------------------------------------------------------
+
+class MapCollector : public Collector
+{
+  public:
+    MapCollector(long region);
+    virtual bool DoWeCollectFor(bool lastchain);
+    virtual void Score(ChainState& chstate);
+    virtual void AddMapSummary(DoubleVec2d& maplikes, long ncopy);
+    virtual void WriteLastSummary();
+    virtual void WriteAllSummaries(std::ofstream& out);
+    virtual DoubleVec2d GetMapSummary();
+
+  private:
+    MapCollector(); //undefined
+    long m_region;
+    long m_nsamples;
+    DoubleVec2d m_mapsum;     //Never logs!
+    DoubleVec2d m_lastmapsum; //Logs if we're 'floating', 0's and a 1 if 'jumping'
+    long m_lastcount;
+
+    virtual void WriteVec1D(std::ofstream& sumout, DoubleVec1d& vec, rangeset range);
+    virtual void AccumulateMap();
+};
+
+#endif // COLLECTOR_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/collmanager.cpp b/src/tree/collmanager.cpp
new file mode 100644
index 0000000..4154224
--- /dev/null
+++ b/src/tree/collmanager.cpp
@@ -0,0 +1,451 @@
+// $Id: collmanager.cpp,v 1.35 2012/07/05 04:31:23 bobgian Exp $
+
+/*
+  Copyright 2003 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>
+
+#include "local_build.h"
+
+#include "collmanager.h"
+#include "registry.h"
+#include "stringx.h"
+#include "newick.h"
+#include "parameter.h"
+
+#include "tinyxml.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+CollectionManager::CollectionManager(long numreg, long numrep)
+    : m_region(0),
+      m_replicate(0)
+{
+    //This used to be the EstablishSamplingStrategy function, but it works
+    // semantically better here --LS
+
+    // This constructor implements a very simple rule for which aspects
+    // of the chain to sample.  It will need to get more complex
+    // eventually.
+    ChainParameters& chainparam = registry.GetChainParameters();
+
+    bool bayesian = chainparam.IsBayesian();
+
+    if (bayesian)
+    {
+        m_sample_params = true;
+#ifdef STATIONARIES
+        m_sample_trees = true;
+#else // STATIONARIES
+        m_sample_trees = false;
+#endif // STATIONARIES
+    }
+    else
+    {
+        m_sample_params = false;
+        m_sample_trees = true;
+    }
+
+    m_sample_maps = false;    //We'll set this to true in StartChain if needed.
+
+    // set up internal storage
+    long reg, rep;
+    vector<TreeCollector*> temptree;
+    vector<ParamCollector*> tempparam;
+    vector<MapCollector*> tempmap;
+    for (reg = 0; reg < numreg; ++reg)
+    {
+        for (rep = 0; rep < numrep; ++rep)
+        {
+            temptree.push_back(new TreeCollector);
+            tempparam.push_back(new ParamCollector);
+            tempmap.push_back(new MapCollector(reg));
+            //Note:  These usually get destroyed by StartChain(), but not in the
+            // case of reading in from a summary file (ChainManager::ReadInSumFile)
+        }
+        m_treecoll.push_back(temptree);
+        m_paramcoll.push_back(tempparam);
+        m_mapcoll.push_back(tempmap);
+        temptree.clear();
+        tempparam.clear();
+        tempmap.clear();
+    }
+
+} // CollectionManager ctor
+
+//------------------------------------------------------------------------------------
+
+CollectionManager::~CollectionManager()
+{
+    // Clean up the Collectors
+    unsigned long reg, rep;
+    for (reg = 0; reg < m_treecoll.size(); ++reg)
+    {
+        for (rep = 0; rep < m_treecoll[reg].size(); ++rep)
+        {
+            delete m_treecoll[reg][rep];
+            delete m_paramcoll[reg][rep];
+            delete m_mapcoll[reg][rep];
+        }
+    }
+} // CollectionManager dtor
+
+//------------------------------------------------------------------------------------
+
+void CollectionManager::Collect(ChainState& chstate, long initialOrFinal, bool lastchain)
+{
+    // stick sampling is automatically done as part of tree sampling
+    if (m_sample_trees) m_treecoll[m_region][m_replicate]->Score(chstate);
+    if (m_sample_params) m_paramcoll[m_region][m_replicate]->Score(chstate);
+    if (m_sample_maps) m_mapcoll[m_region][m_replicate]->Score(chstate);
+    chstate.ParametersSampled();             // Might as well make it consistent.
+    WriteTraceFile(chstate, initialOrFinal); // EWFIX.CHAINTYPE
+    WriteNewickTreeFile(chstate);
+    WriteReclocsFile(chstate, lastchain);
+#ifdef LAMARC_QA_TREE_DUMP
+    WriteArgFile(chstate,lastchain);
+#endif
+} // Collect
+
+//------------------------------------------------------------------------------------
+
+void CollectionManager::WriteTraceFile(ChainState& chstate, long initialOrFinal)
+{
+    //Note:  The column headers for this file are written in
+    // UserParameters::UpdateFileNamesAndSteps(...) in userparam.cpp
+    UserParameters& userparams = registry.GetUserParameters();
+    if (userparams.GetWriteTraceFiles())
+    {
+        string tracefilename = userparams.GetCurrentTraceFileName();
+        long stepnumber(userparams.GetNextStep(initialOrFinal));  // EWFIX.CHAINTYPE
+        DoubleVec1d values;
+        values.push_back(chstate.GetTree()->GetDLValue());
+        if (registry.GetChainParameters().IsBayesian())
+        {
+            DoubleVec1d params = m_paramcoll[m_region][m_replicate]->GetLastParameterVec();
+            const ParamVector pvec(true);
+            assert(pvec.size() == params.size());
+            for (size_t p=0; p<pvec.size(); p++)
+            {
+                if (pvec[p].IsValidParameter())
+                {
+                    values.push_back(params[p]);
+                }
+            }
+        }
+        ofstream of;
+        of.open(tracefilename.c_str(),ios::out | ios::app);
+        of << ToString(stepnumber);
+        for (size_t i=0; i<values.size(); i++)
+        {
+            of << "\t" << ToString(values[i]);
+        }
+        of << endl;
+        of.close();
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void CollectionManager::WriteNewickTreeFile(ChainState& chstate)
+{
+    UserParameters& userparams = registry.GetUserParameters();
+    if (userparams.GetWriteNewickTreeFiles())
+    {
+        const Tree* tree = chstate.GetTree();
+        double dl = tree->GetDLValue();
+        double bestlike = userparams.GetCurrentBestLike();
+        string newickfilename = userparams.GetCurrentNewickTreeFileName();
+        if (dl > bestlike)
+        {
+            userparams.SetCurrentBestLike(dl);
+            NewickConverter nc;
+            string newick = nc.LamarcTreeToNewickString(*tree);
+            ofstream of;
+            of.open(newickfilename.c_str(),ios::out | ios::trunc);
+            of << dl << endl;
+            of << newick << endl;
+            of.close();
+            userparams.AddNewickTreeFileName(newickfilename);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void CollectionManager::WriteReclocsFile(ChainState& chstate, bool lastchain)
+{
+    if(lastchain)
+    {
+        if (registry.GetForceSummary().CheckForce(force_REC) && registry.GetUserParameters().GetWriteReclocFiles())
+        {
+            const Tree* tree = chstate.GetTree();
+
+            UserParameters& userparams = registry.GetUserParameters();
+            string reclocfilename = userparams.GetCurrentReclocFileName();
+            ofstream of;
+            of.open(reclocfilename.c_str(),ios::out | ios::app);
+
+            long offset = registry.GetCurrentReclocOffset();
+            bool mustConvert = registry.GetConvertOutputToEliminateZeroes();
+
+            const TimeList & timeList = tree->GetTimeList();
+            Branchconstiter brit;
+            std::set<long> recs;
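+            // Collect the distinct recombination breakpoints present in the current tree.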
+            for (brit = timeList.FirstBody(); brit != timeList.EndBranch(); brit = timeList.NextBody(brit))
+            {
+                Branch_ptr pBranch = *brit;
+                if (pBranch->Event() == btypeRec)
+                {
+                    long site = boost::dynamic_pointer_cast<RBranch>(*brit)->GetRecpoint();
+                    recs.insert(site);
+                }
+            }
+
+            std::set<long>::iterator it;
+            for(it=recs.begin(); it != recs.end(); it++)
+            {
+                long newindex = *it + offset;
+                if (mustConvert && (newindex <= 0))
+                {
+                    newindex = newindex - 1;
+                }
+
+                of << newindex << endl;
+            }
+
+            of.close();
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+#ifdef LAMARC_QA_TREE_DUMP
+
+FILE* TiXmlFOpen( const char* filename, const char* mode );  // pick this up from tinyxml
+
+void CollectionManager::WriteArgFile(ChainState& chstate, bool lastchain)
+{
+    if(lastchain)
+    {
+        if (registry.GetUserParameters().GetWriteArgFiles())
+        {
+            const Tree* tree = chstate.GetTree();
+            const TimeList & tl = tree->GetTimeList();
+
+            TiXmlDocument * docP = tl.AssembleGraphML();
+
+            UserParameters& userparams = registry.GetUserParameters();
+            string argfilename = userparams.GetCurrentArgFileName();
+            bool writeMany = userparams.GetWriteManyArgs();
+            if (!writeMany)
+            {
+                ofstream of;
+                of.open(argfilename.c_str(),ios::trunc);
+                of.close();
+            }
+
+            FILE * argOutput = TiXmlFOpen(argfilename.c_str(),"a");
+
+            docP->Print(argOutput,0);
+            fclose(argOutput);
+
+            // EWFIX.TREEDUMP -- clean up pointer storage
+        }
+    }
+}
+
+#endif
+
+//------------------------------------------------------------------------------------
+
+void CollectionManager::CorrectForFatalAttraction(long region)
+{
+    if (m_sample_trees)
+        m_treecoll[m_region][m_replicate]->CorrectForFatalAttraction(region);
+    if (m_sample_params)
+        m_paramcoll[m_region][m_replicate]->CorrectForFatalAttraction(region);
+    if (m_sample_maps)
+        m_mapcoll[m_region][m_replicate]->CorrectForFatalAttraction(region);
+} // CorrectForFatalAttraction
+
+//------------------------------------------------------------------------------------
+
+void CollectionManager::StartChain(long reg, long rep, bool lastchain)
+{
+    //LS Note:  If you add code in this function, make sure that this doesn't
+    // break reading in partially-written summary files.
+    // See ChainManager::ReadInSumFile and ChainManager::ReadInRecover.
+
+    // this is another chain for the same region and replicate.
+    // We store only one chain per region/replicate pair, so throw
+    // away the old one and store this one.
+    delete m_treecoll[reg][rep];
+    delete m_paramcoll[reg][rep];
+    delete m_mapcoll[reg][rep];
+
+    m_region = reg;
+    m_replicate = rep;
+
+    m_treecoll[reg][rep]  = new TreeCollector;
+    m_paramcoll[reg][rep] = new ParamCollector;
+    m_mapcoll[reg][rep] = new MapCollector(reg);
+
+    m_sample_maps = m_mapcoll[reg][rep]->DoWeCollectFor(lastchain);
+} // StartChain
+
+//------------------------------------------------------------------------------------
+
+void CollectionManager::WriteThisChainsCollections(ofstream* out) const
+{
+    if (m_sample_trees)
+        m_treecoll[m_region][m_replicate]->WriteCollectionWhenScore(out);
+    if (m_sample_params)
+        m_paramcoll[m_region][m_replicate]->WriteCollectionWhenScore(out);
+    if (m_sample_maps)
+        m_mapcoll[m_region][m_replicate]->WriteCollectionWhenScore(out);
+}
+
+//------------------------------------------------------------------------------------
+
+void CollectionManager::WriteLastSummaries() const
+{
+    if (m_sample_trees)
+        m_treecoll[m_region][m_replicate]->WriteLastSummary();
+    if (m_sample_params)
+        m_paramcoll[m_region][m_replicate]->WriteLastSummary();
+    if (m_sample_maps)
+        m_mapcoll[m_region][m_replicate]->WriteLastSummary();
+}
+
+//------------------------------------------------------------------------------------
+
+void CollectionManager::WriteAllSummaries(long reg, long rep, ofstream& out) const
+{
+    if (m_sample_trees)
+        m_treecoll[reg][rep]->WriteAllSummaries(out);
+    if (m_sample_params)
+        m_paramcoll[reg][rep]->WriteAllSummaries(out);
+    if (m_sample_maps)
+        m_mapcoll[reg][rep]->WriteAllSummaries(out);
+}
+
+//------------------------------------------------------------------------------------
+
+TreeCollector* CollectionManager::GetTreeColl(long region, long rep) const
+{
+    // are we trying to retrieve a chain we didn't expect?  That's bad.
+    assert(region < static_cast<long>(m_treecoll.size()));
+    assert(rep < static_cast<long>(m_treecoll[region].size()));
+    assert(m_sample_trees);
+
+    return m_treecoll[region][rep];
+} // GetTreeColl
+
+//------------------------------------------------------------------------------------
+
+vector<TreeCollector*> CollectionManager::GetTreeColl(long region) const
+{
+    // are we trying to retrieve a chain we didn't expect?  That's bad.
+    assert(region < static_cast<long>(m_treecoll.size()));
+    assert(m_sample_trees);
+
+    return m_treecoll[region];
+} // GetTreeColl
+
+//------------------------------------------------------------------------------------
+
+vector<vector<TreeCollector*> > CollectionManager::GetTreeColl() const
+{
+    // are we trying to retrieve a chain we didn't expect?  That's bad.
+    assert(m_sample_trees);
+
+    return m_treecoll;
+} // GetTreeColl
+
+//------------------------------------------------------------------------------------
+
+ParamCollector* CollectionManager::GetParamColl(long region, long rep) const
+{
+    // are we trying to retrieve a chain we didn't expect?  That's bad.
+    assert(region < static_cast<long>(m_paramcoll.size()));
+    assert(rep < static_cast<long>(m_paramcoll[region].size()));
+    assert(m_sample_params);
+
+    return m_paramcoll[region][rep];
+} // GetParamColl
+
+//------------------------------------------------------------------------------------
+
+vector<ParamCollector*> CollectionManager::GetParamColl(long region) const
+{
+    // are we trying to retrieve a chain we didn't expect?  That's bad.
+    assert(region < static_cast<long>(m_paramcoll.size()));
+    assert(m_sample_params);
+
+    return m_paramcoll[region];
+} // GetParamColl
+
+//------------------------------------------------------------------------------------
+
+vector<vector<ParamCollector*> > CollectionManager::GetParamColl() const
+{
+    // are we trying to retrieve a chain we didn't expect?  That's bad.
+    assert(m_sample_params);
+
+    return m_paramcoll;
+
+} // GetParamColl
+
+//------------------------------------------------------------------------------------
+
+MapCollector* CollectionManager::GetMapColl(long region, long rep) const
+{
+    // are we trying to retrieve a chain we didn't expect?  That's bad.
+    assert(region < static_cast<long>(m_mapcoll.size()));
+    assert(rep < static_cast<long>(m_mapcoll[region].size()));
+    assert(m_mapcoll[region][rep]->DoWeCollectFor(true));
+
+    return m_mapcoll[region][rep];
+} // GetMapColl
+
+//------------------------------------------------------------------------------------
+
+vector<MapCollector*> CollectionManager::GetMapColl(long region) const
+{
+    // are we trying to retrieve a chain we didn't expect?  That's bad.
+    assert(region < static_cast<long>(m_mapcoll.size()));
+    assert(m_mapcoll[region][0]->DoWeCollectFor(true));
+
+    return m_mapcoll[region];
+} // GetMapColl
+
+//------------------------------------------------------------------------------------
+
+vector<vector<MapCollector*> > CollectionManager::GetMapColl() const
+{
+    // We only collect for some regions, and can't tell (here) which ones.
+    // assert(m_sample_maps);
+
+    return m_mapcoll;
+} // GetMapColl
+
+//------------------------------------------------------------------------------------
+
+void CollectionManager::UpdateArrangers(ArrangerVec& arrangers, ChainState& chstate) const
+{
+    // Currently, only parameter changes trigger arranger updates.
+    // This can easily be changed here.
+    m_paramcoll[m_region][m_replicate]->UpdateArrangers(arrangers, chstate);
+} // UpdateArrangers
+
+//____________________________________________________________________________________
diff --git a/src/tree/collmanager.h b/src/tree/collmanager.h
new file mode 100644
index 0000000..8dd290d
--- /dev/null
+++ b/src/tree/collmanager.h
@@ -0,0 +1,98 @@
+// $Id: collmanager.h,v 1.18 2012/07/05 04:31:23 bobgian Exp $
+
+/*
+  Copyright 2003 Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// The CollectionManager class contains all the Collectors and hands out
+// information about them.
+
+// Mary 2003/12/17
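+
+// Typical call sequence (illustrative sketch only; the surrounding driver loop is an
+// assumption, but every call below matches a declaration in this header):
+//
+//     CollectionManager coll(nregions, nreplicates);
+//     coll.StartChain(reg, rep, lastchain);
+//     for (long step = 0; step < nsteps; ++step)
+//         coll.Collect(chainState, initialOrFinal, lastchain);
+//     coll.WriteAllSummaries(reg, rep, sumfile);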
+
+#ifndef COLLMANAGER_H
+#define COLLMANAGER_H
+
+#include <vector>
+#include <utility>    // for pair
+#include "vectorx.h"
+#include "arranger.h"
+#include "collector.h"
+#include "forcesummary.h"
+#include "chainstate.h"
+#include "local_build.h"
+
+class ArrangerVec;
+
+//------------------------------------------------------------------------------------
+// Collection manager
+//------------------------------------------------------------------------------------
+
+// typedef std::vector<std::pair<ForceSummary, long> > ParamSummary;
+// ForceSummary object is supposed to be a Singleton.  Commenting typedef
+// out to make sure it's not being used.  Jon 2012/05/21
+
+class CollectionManager
+{
+  private:
+    // This class is a singleton and is not to be copied.
+    CollectionManager();                                    // not defined
+    CollectionManager(const CollectionManager&);            // not defined
+    CollectionManager& operator=(const CollectionManager&); // not defined
+
+    // Dimensions of these are regions x replicates.
+    std::vector<std::vector<TreeCollector*> > m_treecoll;
+    std::vector<std::vector<ParamCollector*> > m_paramcoll;
+    std::vector<std::vector<MapCollector*> > m_mapcoll;
+
+    long m_region;
+    long m_replicate;
+    bool m_sample_trees;
+    bool m_sample_params;
+    bool m_sample_maps;
+
+  public:
+    CollectionManager(long nregions, long nreplicates);
+    ~CollectionManager();
+
+    // Sample
+    void Collect(ChainState& chstate, long initialOrFinal, bool lastchain); // EWFIX.CHAINTYPE
+    void WriteTraceFile(ChainState& chstate, long initialOrFinal);          // EWFIX.CHAINTYPE
+    void WriteNewickTreeFile(ChainState& chstate);
+    void WriteReclocsFile(ChainState& chstate, bool lastchain);             // EWFIX.CHAINTYPE
+
+#ifdef LAMARC_QA_TREE_DUMP
+    void WriteArgFile(ChainState& chstate, bool lastchain);
+#endif
+
+    void CorrectForFatalAttraction(long region);
+    void StartChain(long reg, long rep, bool lastchain);
+    void WriteThisChainsCollections(std::ofstream* out) const;
+    void WriteLastSummaries() const;
+    void WriteAllSummaries(long reg, long rep, std::ofstream& out) const;
+
+    // Fetch packages of samples.
+    TreeCollector* GetTreeColl(long region, long rep) const;
+    std::vector<TreeCollector*> GetTreeColl(long region) const;
+    std::vector<std::vector<TreeCollector*> > GetTreeColl() const;
+
+    ParamCollector* GetParamColl(long region, long rep) const;
+    std::vector<ParamCollector*> GetParamColl(long region) const;
+    std::vector<std::vector<ParamCollector*> > GetParamColl() const;
+
+    MapCollector* GetMapColl(long region, long rep) const;
+    std::vector<MapCollector*> GetMapColl(long region) const;
+    std::vector<std::vector<MapCollector*> > GetMapColl() const;
+
+    // Inform Arrangers of changes
+    void UpdateArrangers(ArrangerVec& arrangers, ChainState& chstate) const;
+
+    bool GetSampleTrees() const { return m_sample_trees; };
+};
+
+#endif  // COLLMANAGER_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/fc_status.cpp b/src/tree/fc_status.cpp
new file mode 100644
index 0000000..88875a8
--- /dev/null
+++ b/src/tree/fc_status.cpp
@@ -0,0 +1,359 @@
+// $Id: fc_status.cpp,v 1.15 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ * Copyright 2010 Mary Kuhner, Jon Yamato, Joseph Felsenstein, and Bob Giansiracusa
+ *
+ * This software is distributed free of charge for non-commercial use
+ * and is copyrighted.  Of course, we do not guarantee that the software
+ * works, and are not responsible for any damage you may cause or have.
+ */
+
+//------------------------------------------------------------------------------------
+
+#include <cassert>
+#include <set>
+#include <map>
+#include <iostream>
+
+#include "fc_status.h"
+#include "range.h"
+
+//------------------------------------------------------------------------------------
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+// Construct object representing live-site-branch-counts for arbitrary set of sites.
+// Site indices are arbitrary; site indices (keys in map) are constructed as entries are made via update functions.
+// Initial object is empty.
+
+FC_Status::FC_Status()
+    : m_site_index_lower_limit(MAXLONG), // LOWER end of lowest range inserted so far; the BEGINNING of the FC_Grid.
+      m_site_index_upper_limit(0L)       // UPPER end of highest range inserted so far; the END of the FC_Grid.
+{
+    // Member m_fc_grid is an empty map, representing a live-site-branch-count of zero for all sites.
+    // Member m_coalesced_sites contains an empty rangeset at object construction,
+    // representing an empty set of final coalesced sites.
+}
+
+//------------------------------------------------------------------------------------
+// Returns set of ranges representing all sites that have coalesced, where "coalesced" means "have branch count one",
+// whether the count was higher before and has decremented to one in "true coalescence" or the count just started off
+// at zero and has now incremented to one.
+//
+// Objects (RANGESETs) returned are by-value copies and thus remain constant despite changes in internal state
+// of FC_Status object (as horserace proceeds to other time intervals).  Client owns copied return-value objects.
+
+rangeset FC_Status::Coalesced_Sites()
+{
+    // This function is called often and simply returns a value pre-computed and cached by Adjust_FC_Counts().
+    return m_coalesced_sites;
+}
+
+//------------------------------------------------------------------------------------
+// Verification of final-coalescence algorithm: test for equality of two FC_Status objects computed different ways.
+// Simply checks that both internal subobjects are equal (equality defined by the respective container classes).
+
+bool operator==(const FC_Status & fc_counter_lhs, const FC_Status & fc_counter_rhs)
+{
+    return ((fc_counter_lhs.m_fc_grid == fc_counter_rhs.m_fc_grid)
+            &&
+            (fc_counter_lhs.m_coalesced_sites == fc_counter_rhs.m_coalesced_sites));
+}
+
+//----------------------------------------
+// Both operators are provided for completeness.
+
+inline bool operator!=(const FC_Status & fc_counter_lhs, const FC_Status & fc_counter_rhs)
+{
+    return !(fc_counter_lhs == fc_counter_rhs);
+}
+
+//------------------------------------------------------------------------------------
+// Increment_FC_Counts() and Decrement_FC_Counts() are defined inline in class declaration.
+// They both call this function, which does all the work.
+
+void FC_Status::Adjust_FC_Counts(const rangeset & sites_to_adjust, long int adjustment)
+{
+    // Even though "adjustment" can only be +1 or -1, all the operands it is added to are long ints,
+    // so it will be coerced to long anyway.  Might as well start out that way.
+
+    // Used mostly for debugging and error-checking.  Once fully debugged, may remove this.
+    int iteration(0);
+
+    // "Current range" is the range that iterator "range_iter" is pointing at for current iteration.
+    for(rangeset::const_iterator range_iter(sites_to_adjust.begin()) ;
+        range_iter != sites_to_adjust.end() ;
+        ++range_iter)
+    {
+        // This loop inserts new nodes if there is none pre-existing at the site index marking the LOWER or UPPER
+        // edge of a range (one of the ranges in the "sites_to_adjust" set).  A new node always indicates a change
+        // in the associated branch count.  However, it is possible that one endpoint (perhaps both) of an input
+        // range might coincide with pre-existing node in the FC_Grid (if the endpoint index happens to be a value
+        // at which the count stored internally changes).  In such a "coincident edge endpoint" case, the
+        // pre-existing node can simply be updated to reflect the new count for the region it represents (upward).
+
+        const long int range_lower_index(range_iter->first);
+        const long int range_upper_index(range_iter->second);
+
+        if(range_lower_index < range_upper_index) // Test for empty current range.  Shouldn't happen  ...
+        {
+            // Iterator "lower_edge" starts pointing to first node with key equal to or greater than current range
+            // LOWER index or equal to m_fc_grid.end() if no nodes have yet been inserted into the FC_Grid above the
+            // current range (because this is the first being inserted or just the highest so far being inserted).
+            FC_Grid::iterator lower_edge(m_fc_grid.lower_bound(range_lower_index));
+
+            // No nodes have yet been inserted into FC_Grid with keys HIGHER than (or EQUAL to) the LOWER end
+            // of the current range (this range is either the first one inserted or the highest one inserted so far).
+            // Insert one with branch count zero (will be adjusted later).
+            if(lower_edge == m_fc_grid.end())
+            {
+                // "Hint" form of insert(), using END iterator.  Now "lower_edge" points to the node just inserted.
+                lower_edge = m_fc_grid.insert(lower_edge, pair<const long int, long int>(range_lower_index, 0L));
+
+                if(range_lower_index < m_site_index_lower_limit)
+                {
+                    m_site_index_lower_limit = range_lower_index;
+                }
+            }
+
+            // Find node marking LOWER end of current range.  If it exists, denote it with iterator "lower_edge"
+            // and leave its pre-adjustment count in place (to be updated in traversal of current range later).
+            else if(lower_edge->first == range_lower_index)
+            {
+                // Iterator "lower_edge" points to pre-existing node marking current range LOWER end.
+                // This marks the BEGINNING of the upcoming traversal (the first node that WILL be adjusted).
+            }
+
+            // If no existing node was found with key searched for above (ie, LOWER end of current range is between
+            // two existing nodes in FC_Grid), find first node going lower from the one pointed at by iterator
+            // "lower_edge)" (which is the first existing node with key going UP from current range LOWER end),
+            // extract its count, and insert new node marking actual LOWER end of current range (using just-extracted
+            // pre-adjustment branch count).  If NO node exists with a key below the value being searched for, DON'T
+            // decrement the iterator and DO use zero as the branch count.
+            else
+            {
+                long int branch_count = 0L;     // Default branch count if no node exists "below" insert location.
+
+                // Don't attempt to point iterator "below" the LOWEST node in the entire FC_Grid.
+                if(range_lower_index > m_site_index_lower_limit)
+                {
+                    // If a node "below" LOWER end of current range exists, read branch count and reset iterator.
+                    branch_count = (--lower_edge)->second;
+                    ++lower_edge;               // Reset iterator to value outside this conditional.
+                }
+
+                // "Hint" form of insert() used here, with "hint" being original position returned by lower_bound().
+                // Now "lower_edge" points to the node just inserted.
+                lower_edge = m_fc_grid.insert(lower_edge, pair<const long, long>(range_lower_index, branch_count));
+
+                if(range_lower_index < m_site_index_lower_limit)
+                {
+                    m_site_index_lower_limit = range_lower_index;
+                }
+            }
+            // Now iterator "lower_edge" points to node (not yet adjusted) at included LOWER end of current range.
+
+            // Iterator "upper_edge" starts pointing to first node with key equal to or greater than current range
+            // UPPER index or equal to m_fc_grid.end() if no nodes have yet been inserted into the FC_Grid above the
+            // current range (because this is first range inserted or just highest range so far inserted).
+            FC_Grid::iterator upper_edge(m_fc_grid.lower_bound(range_upper_index));
+
+            // No nodes have been inserted into FC_Grid with keys HIGHER than (or EQUAL to) the UPPER end of current
+            // range (this range is either first inserted or highest inserted so far).  Insert one with branch count
+            // zero (to be adjusted later).
+            if(upper_edge == m_fc_grid.end())
+            {
+                // "Hint" form of insert(), with "hint" being END iterator.  Now "upper_edge" points to node inserted.
+                upper_edge = m_fc_grid.insert(upper_edge, pair<const long int, long int>(range_upper_index, 0L));
+
+                if(range_upper_index > m_site_index_upper_limit)
+                {
+                    m_site_index_upper_limit = range_upper_index;
+                }
+            }
+
+            // Find node marking UPPER end of current range.  If it exists, denote it with iterator "upper_edge"
+            // and leave its pre-adjustment count in place (to be updated in traversal of current range later).
+            else if(upper_edge->first == range_upper_index)
+            {
+                // Iterator "upper_edge" points to pre-existing node marking current range UPPER end.
+                // This marks the END of the upcoming traversal (the first node that will NOT be adjusted).
+            }
+
+            // If no existing node was found with key searched for above (ie, UPPER end of current range is between
+            // two existing nodes in FC_Grid), find first node going lower from the one pointed at by iterator
+            // "upper_edge)" (which is the first existing node with key going DOWN from current range UPPER end),
+            // extract its count, and insert new node marking actual UPPER end of current range (using just-extracted
+            // pre-adjustment branch count).
+            //
+            // If NO node exists with a key below the value being searched for,
+            // DON'T decrement the iterator and DO use zero as the branch count.
+            else
+            {
+                long int branch_count = 0L;     // Branch count if no node exists below current insert location.
+
+                // Don't attempt to point iterator "below" the LOWEST node in the entire FC_Grid.
+                if(range_upper_index > m_site_index_lower_limit)
+                {
+                    // If a node "below" UPPER end of current range exists, read branch count and reset iterator.
+                    branch_count = (--upper_edge)->second;
+                    ++upper_edge;               // Reset iterator to value outside this conditional.
+                }
+
+                // "Hint" form of insert(), with "hint" being original position returned by lower_bound() above.
+                // Now "upper_edge" points to the node just inserted.
+                upper_edge = m_fc_grid.insert(upper_edge, pair<const long, long>(range_upper_index, branch_count));
+
+                if(range_upper_index > m_site_index_upper_limit)
+                {
+                    m_site_index_upper_limit = range_upper_index;
+                }
+            }
+            // Now iterator "upper_edge" points to node (not yet adjusted) at included UPPER end of current range
+            // (whether pre-existing or just inserted).
+            //
+            // Now we can traverse all nodes within the current range, starting with the node marking the LOWER edge
+            // (inclusive - will be adjusted) and continuing to (but excluded - will not be adjusted) the node
+            // marking UPPER edge of current range.
+            for(FC_Grid::iterator node_iter(lower_edge); node_iter != upper_edge; ++node_iter)
+            {
+                node_iter->second += adjustment;
+            }
+        }
+        else
+        {
+            // ERROR: Empty or malformed range presented.  Here now for debugging purposes.
+            cerr << "Adjust_FC_Counts[14]: Empty or malformed range presented.  Iteration: " << iteration << endl << endl;
+            assert(false);
+        }
+
+        ++iteration;                    // Count completed iterations for the diagnostic message above.
+    }
+
+    // Now re-compute and cache the "coalesced sites", which is simply a rangeset of all sites whose branch count
+    // is now one. Note that a site may have a count of one either via coalescence (decrement of count to one from a
+    // higher value) or simply because the count is now one (initialized to zero by constructor, incremented
+    // once, and never changed again).  Thus we must recompute the cache after a range's incremented branch count
+    // (first increment, in case no more occur) as well as after a decremented branch count (a "true" coalescence).
+    //
+    // Hopefully soon this section will contain a beautiful functional "STL algorithm-style" method for updating a
+    // data-structure.  For now, we simply start from scratch each time and build a set holding the appropriate data.
+    // Since we are scanning the entire FC_Grid here in order to construct the cached rangeset each time,
+    // we also do a bunch of consistency tests which can be eliminated once this code passes its startup sanity tests.
+
+    // Clear it and start from scratch each time.  Later we will update rather than rebuild this object.
+    m_coalesced_sites.clear();
+
+    FC_Grid::iterator fc_grid_limit(m_fc_grid.end());
+    long int previous_site_index = 0L;
+    long int previous_branch_count = 0L;
+    long int current_site_index = 0L;
+    long int current_branch_count = 0L;
+
+    iteration = 0;                              // Reset iteration variable to count iters through a different loop.
+
+    for(FC_Grid::iterator node_iter( m_fc_grid.begin()) ; node_iter != fc_grid_limit ; /* increment inside loop */ )
+    {
+        current_site_index = node_iter->first;
+        current_branch_count = node_iter->second;
+
+        if(current_branch_count < 0L)
+        {
+            cerr << "Adjust_FC_Counts[17]: Negative branch count on computing cached coalescences.  Iteration: "
+                 << iteration << "  Previous: (" << previous_site_index << ' ' << previous_branch_count
+                 << "),  Current: (" << current_site_index << ' ' << current_branch_count << ')' << endl;
+            cerr << endl << "Emergency exit." << endl << endl;
+            assert(false);
+        }
+
+        if(previous_branch_count == current_branch_count)
+        {
+            // If two adjacent ranges meet in a node and have the same (post-adjustment) branch counts, we can merge
+            // those regions by deleting the node marking their junction.  It is also OK to delete the LOWEST and/or
+            // HIGHEST node(s) if their branch count values are zero (on the LOWER end)
+            // or if they match that to the left (on the UPPER end).
+
+            FC_Grid::iterator delete_me(node_iter);
+            ++node_iter;                // Increment iterator before deleting node; then continue traversal.
+            m_fc_grid.erase(delete_me);
+        }
+        else
+        {
+            // Regions abutting at this node have different branch counts.  If LOWER region (to left of current node)
+            // has count of 1, then it represents a final coalescence (or a count climbing up from zero).  Insert that
+            // region into the cache.  Otherwise, keep looking.
+            if(previous_branch_count == 1L)
+            {
+                // "Hinted" insertion location is always just preceding the END (UPPER end of entire FC_Grid) iterator
+                // position, because we are always doing a "push_back" insertion.
+                m_coalesced_sites.insert(m_coalesced_sites.end(),
+                                         pair<const long, long>(previous_site_index, current_site_index));
+            }
+
+            ++node_iter;                // Increment inside loop since we already incremented inside other branch of conditional.
+
+            previous_site_index = current_site_index;
+            previous_branch_count = current_branch_count;
+        }
+
+        ++iteration;                    // Count iterations for the diagnostic message above.
+    }
+}
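+
+// Worked example (illustrative only; {[a,b)} is shorthand for a rangeset entry, an
+// assumption about notation rather than code).  Starting from an empty FC_Status,
+//
+//     Increment_FC_Counts( {[0,10)} );    // one branch now carries sites 0..9
+//     Increment_FC_Counts( {[0,10)} );    // a second branch carries sites 0..9
+//     Decrement_FC_Counts( {[0,4)}  );    // one branch stops carrying sites 0..3
+//
+// leaves m_fc_grid == { 0:1, 4:2, 10:0 }, so Coalesced_Sites() returns {[0,4)}:
+// sites 0..3 are carried by exactly one branch and have therefore finally coalesced.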
+
+//------------------------------------------------------------------------------------
+// JDEBUG: quick and dirty GDB Debugging functions.
+
+long int FC_Status::GridSize() const
+{
+    return m_fc_grid.size();
+}
+
+void FC_Status::PrintGrid() const
+{
+    for(FC_Grid::const_iterator node_iter = m_fc_grid.begin(); node_iter != m_fc_grid.end(); ++node_iter)
+    {
+        cerr << node_iter->first << ":" << node_iter->second << ",";
+    }
+
+    cerr << endl;
+}
+
+//------------------------------------------------------------------------------------
+// Testing and debugging function; remove in production version.
+
+#if 0
+
+void FC_Status::PrintFCStatus(bool print_output, const rangeset & sites_to_adjust, long int adjustment)
+{
+    cerr << "Input rangeset:           " << ToString(sites_to_adjust) << "  <==>  ( ";
+
+    for(rangeset::const_iterator range_iter(sites_to_adjust.begin()) ; range_iter != sites_to_adjust.end() ; ++range_iter)
+    {
+        cerr << '(' << range_iter->first << ' ' << range_iter->second << ") ";
+    }
+
+    cerr << ')' << endl
+         << "Adjustment:               " << adjustment << endl;
+
+    cerr << "site_index_limits:        " << m_site_index_lower_limit << " (lower), "
+         << m_site_index_upper_limit << " (upper)" << endl;
+
+    cerr << "m_fc_grid:                ( ";
+    for(FC_Grid::const_iterator node_iter(m_fc_grid.begin()); node_iter != m_fc_grid.end(); ++node_iter)
+    {
+        cerr << '(' << node_iter->first << ' ' << node_iter->second << ") ";
+    }
+    cerr << ')' << endl;
+
+    if(print_output)
+    {
+        cerr << "m_coalesced_sites:        ( ";
+        for(rangeset::const_iterator range_iter(m_coalesced_sites.begin()) ;
+            range_iter != m_coalesced_sites.end() ;
+            ++range_iter)
+        {
+            cerr << '(' << range_iter->first << ' ' << range_iter->second << ") ";
+        }
+        cerr << ')' << endl;
+    }
+}
+
+#endif
+
+//____________________________________________________________________________________
diff --git a/src/tree/fc_status.h b/src/tree/fc_status.h
new file mode 100644
index 0000000..2d4a7ac
--- /dev/null
+++ b/src/tree/fc_status.h
@@ -0,0 +1,186 @@
+// $Id: fc_status.h,v 1.14 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ * Copyright 2010 Mary Kuhner, Jon Yamato, Joseph Felsenstein, and Bob Giansiracusa
+ *
+ * This software is distributed free of charge for non-commercial use
+ * and is copyrighted.  Of course, we do not guarantee that the software
+ * works, and are not responsible for any damage you may cause or have.
+ */
+
+//------------------------------------------------------------------------------------
+
+#ifndef FC_STATUS_H
+#define FC_STATUS_H
+
+#include <set>
+#include <map>
+
+#include "range.h"
+
+//------------------------------------------------------------------------------------
+
+// An object of this type (FC_Grid) maps site range-starting-indices to coalescence branch-counts in the FC_Status
+// object.  The FC_Grid data member is a map populated by objects of type FC_Node, each a PAIR whose KEY
+// (first of the pair) represents the (included) beginning startpoint of a range of site index values whose
+// (excluded) endpoint is given by the starting site index of the next range (in the increasing-index direction).
+//
+// Each FC_Node object in the FC_Grid consists of a pair containing two values: a "key" (site index value) and a
+// "value"  (the count of branches at current tree level carrying the corresponding site "live" to some non-empty
+// set of tips).
+//
+// The term "site index" refers to the index number identifying a site, which value is used as the "key" component
+// of an FC_Node.  The term "branch count" refers to the number of branches crossing the current "level" in the tree
+// which are carrying the set of sites live to the tips, which value is used as the "value" component of an FC_Node.
+
+// Type of the container object comprising the site-index -> branch-count map:
+typedef   std::map<long int, long int, std::less<long int> >   FC_Grid;
+
+// Type of objects ("nodes") inserted into FC_Grid map.
+// FIRST (key):    Site index (included starting site number for current range,
+//                 also serves as excluded site number for "previous" range).
+// SECOND (value): Live site branch count (number of branches carrying the sites represented by the current range).
+//                 ALL sites in this range are indicated as being carried "live" by this value as a count of the
+//                 number of branches carrying these sites live to at least one tip.  The branch count is the number
+//                 of branches that cross a surface cutting the tree at a given level (this "level" is defined by the
+//                 "time interval" or phase of the "horserace" portion of the tree-rearrangement algorithm); it is
+//                 NOT the total number of branches in the tree which carry these sites "live" to some tips.
+//
+typedef   std::pair<long int, long int>   FC_Node;
+
+//------------------------------------------------------------------------------------
+
+// A class to support maintenance of information on which sites have attained Final Coalescence.  Given a site number,
+// the functions of this class maintain counts of how many branches at a given "level" are "passing" sites through
+// to the tips.  "Final Coalescence" is attained when that count (for a given site or range of sites) decreases to
+// unity.  The class object contains a cached representation (a rangeset) of such sites.  This cache is simply the
+// set of all ranges whose count value is currently unity, and it is updated whenever the count computation results
+// in a change in count.
+//
+// Note that the "coalesced sites rangeset cache" must be updated whenever branch counts for sites change, both on
+// increment and on decrement.  Decrement makes sense: when the number of branches carrying a site decreases to one,
+// that is a "final coalescence".  However, it is possible that the count may increase from zero to one and then never
+// change again.  If such an increment were not tallied, those sites (which have already coalesced in the tree being
+// sampled at that level) would be missed altogether.
+
+// Operation of FC_Status object -- the object contains two sub-objects:
+//
+//   Data member "m_fc_grid" is an FC_Grid object, which is a map containing nodes, each of whose key is a site index
+//   (an integer representing the site index at which a branch-count changes) and whose "value" is a count of the
+//   number of branches carrying this site "live" (visible at some set of tips) for the region STARTING (inclusive)
+//   at the associated key index value and ENDING (exclusive) at the next higher (in the map's ordering) node's key
+//   index value.  To determine the branch count for an arbitrary site, one can invoke the lower_bound() map member
+//   function and interpret the resulting iterator:
+//
+//     - If the iterator equals end(), then there are no node(s) with keys equal to or greater than the given site
+//       index.  Either there are no nodes at all in the FC_Grid, or the branch count is that of the value component
+//       of the first node in the DECREASING direction.  It is an error for the value component of the HIGHEST node
+//       in the map to be anything other than zero.  Such a lookup can be interpreted (if the assumption is that such
+//       a site is in a range which HAS been entered into the map) as an error condition or as an indication that
+//       information for a range containing the site in question has not yet been entered.
+//
+//     - If the iterator points to a node whose key matches the input search site index, then the associated branch
+//       count is the value component of the corresponding node.
+//
+//     - If there is no node with this key (so that lower_bound() returns an iterator to the next "higher" node),
+//       one must decrement the iterator to obtain the next "lower" node, whose "value" slot is the count to apply
+//       to the region represented by the range of site indices bounded by the keys of the two nodes (which includes
+//       the site of interest).  If there is no "lower" node, then the input site index corresponds to a site for
+//       which no information has been stored, which can be interpreted either as an error condition or as a branch
+//       count of zero.
+//
+//   As the site-to-branch-count-maintenance operation proceeds, the algorithm watches for regions which abut
+//   at a common node and have the same count value on both sides, combining them into a single region by deleting
+//   the common middle node.
+//
+//   Data member "m_coalesced_sites" is simply a rangeset (set of PAIRs, each indicating a range of sites by index,
+//   with the same convention of including START edge and excluding END edge).  The union of the ranges denoted by
+//   the pairs in the rangeset denotes the set of all sites which have come to Final Coalescence (in other words,
+//   whose branch counts have decreased to or now are unity).  This information is updated by the FC_Grid maintenance
+//   algorithm when computing branch counts and cached in this data member, making the information easy to retrieve.
+
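+// Editor's illustrative sketch (not part of the original source): a minimal free
+// function showing the lower_bound() interpretation rule described above.  The
+// name "LookupBranchCount" is hypothetical and does not exist in this codebase.
+#if 0
+inline long int LookupBranchCount(const FC_Grid & grid, long int site)
+{
+    FC_Grid::const_iterator it = grid.lower_bound(site);
+    if (it != grid.end() && it->first == site)
+        return it->second;              // exact key match: this node's count applies
+    if (it == grid.begin())
+        return 0L;                      // no lower node: no information stored for this site
+    --it;                               // step back to the range containing "site"
+    return it->second;                  // covers [it->first, next key); for the end() case,
+                                        // the highest node's value is required to be zero
+}
+#endif
+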
+class FC_Status
+{
+  public:
+
+    // Construct an object representing the number of live-site-carrying branches, indexed by (sparse array of) site numbers.
+    // Initial object stores coalescence count of zero for each site.
+    // Prevent accidental conversion from integer to FC_Status object via "explicit".
+    explicit FC_Status();
+
+    // Interface to the count-maintaining algorithm: Increase the count at a set of sites by ONE.
+    void Increment_FC_Counts(const rangeset & sites_to_increment)
+    {
+        Adjust_FC_Counts(sites_to_increment, +1);
+    }
+
+    // Interface to the count-maintaining algorithm: Decrease the count at a set of sites by ONE.
+    void Decrement_FC_Counts(const rangeset & sites_to_decrement)
+    {
+        Adjust_FC_Counts(sites_to_decrement, -1);
+    }
+
+    // Returns set of ranges (a RANGESET) representing all sites that have coalesced.  Once coalesced at a given
+    // level, those sites are coalesced at all levels rootward; the rangeset represents cumulative coalescence
+    // (this level and all rootward), not just sites that coalesce AT this level.   Objects (RANGESETs) returned
+    // are by-value copies and thus remain valid despite changes in the internal state of the FC_Status object cache
+    // (as the "horserace" proceeds to other time intervals).
+    rangeset Coalesced_Sites();
+
+    // The FC_Status object is allocated on the stack and is destructed when its containing variable goes out
+    // of scope.  The FC_Status object contains two STL containers (which manage their own memory) and built-in
+    // integers as data members (i.e., no dynamically-allocated objects or pointers), so nothing beyond the
+    // default destructor is needed.
+    ~FC_Status() {}
+
+    // Verification of final-coalescence algorithm: test for equality of two FC_Status objects holding data
+    // computed different ways.
+    friend bool operator==(const FC_Status & lhs, const FC_Status & rhs);
+    friend bool operator!=(const FC_Status & lhs, const FC_Status & rhs);
+
+#if 0                                   // For debugging/testing; remove in production version.
+    void PrintFCStatus(bool print_output, const rangeset & sites_to_adjust, long int adjustment);
+#endif
+
+    // JDEBUG: quick and dirty GDB Debugging functions.
+    long GridSize() const;              // m_fc_grid.size()
+    void PrintGrid() const;             // print the contents of m_fc_grid
+
+  private:
+
+    // Site index value for LOWER end of lowest range inserted so far; effectively, the BEGINNING of the FC_Grid.
+    long int m_site_index_lower_limit;
+
+    // Site index value for UPPER end of highest range inserted so far; effectively, the END of the FC_Grid.
+    long int m_site_index_upper_limit;
+
+    // This FC_Grid maps keys (representing site number ranges) to coalescence counts.  The (included) lower endpoint
+    // site index of the range is the key of the FC_Node object (a PAIR) stored in the FC_Grid (the container).
+    // The range represented is from the node's site index (included) to the site index (excluded) stored in the
+    // adjacent (next in upward direction) FC_Node in the grid.
+    //
+    // The lowest LOWER endpoint is represented by the LOWER range endpoint site index for the LOWEST range entered
+    // so far.  This site index value is remembered by the private data member "m_site_index_lower_limit".
+    //
+    // The highest UPPER endpoint is represented by the UPPER range endpoint site index for the HIGHEST range entered
+    // so far.  This site index value is remembered by the private data member "m_site_index_upper_limit".
+    //
+    FC_Grid m_fc_grid;
+
+    // Cached representation of coalesced sites (those whose coalescence count has reached unity).
+    // This result is updated by the increment/decrement member functions.
+    rangeset m_coalesced_sites;
+
+    // Privately-declared and undefined functions.
+    FC_Status(FC_Status &);             // Don't let clients copy this object.
+    FC_Status & operator=(FC_Status &); // Don't let clients assign to this object.
+
+    // Does all the work of Increment_FC_Counts() and Decrement_FC_Counts().
+    void Adjust_FC_Counts(const rangeset & sites_to_adjust, long int adjustment);
+};
+
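+// Editor's illustrative sketch (not part of the original source): typical use of the
+// public interface during one level of the rootward sweep.  The rangesets
+// "sites_entering" and "sites_leaving" are hypothetical names for whatever the
+// caller computes for the current time interval.
+#if 0
+inline rangeset ExampleFCStatusUsage(const rangeset & sites_entering, const rangeset & sites_leaving)
+{
+    FC_Status fcstatus;
+    fcstatus.Increment_FC_Counts(sites_entering);    // more branches now carry these sites
+    fcstatus.Decrement_FC_Counts(sites_leaving);     // a coalescence reduced the branch count
+    return fcstatus.Coalesced_Sites();               // cumulative finally-coalesced sites
+}
+#endif
+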
+//------------------------------------------------------------------------------------
+
+#endif // FC_STATUS_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/individual.cpp b/src/tree/individual.cpp
new file mode 100644
index 0000000..a3db677
--- /dev/null
+++ b/src/tree/individual.cpp
@@ -0,0 +1,431 @@
+// $Id: individual.cpp,v 1.30 2012/10/02 02:27:06 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "branch.h"
+#include "errhandling.h"
+#include "individual.h"
+#include "random.h"
+#include "registry.h"
+#include "stringx.h"
+#include "xml_strings.h"
+#include "dlmodel.h"  // for GetAllelesFromDLs()
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Individual::GetAllTipNames() const
+{
+    StringVec1d names;
+    vector<Branch_ptr>::const_iterator tip;
+
+    for(tip = m_ptips.begin(); tip != m_ptips.end(); ++tip)
+    {
+        assert(boost::dynamic_pointer_cast<TBranch>(*tip));
+        names.push_back(boost::dynamic_pointer_cast<TBranch>(*tip)->m_label);
+    }
+
+    return names;
+
+} // GetAllTipNames
+
+//------------------------------------------------------------------------------------
+
+bool Individual::AnyPhaseUnknownSites() const
+{
+    LongVec2d::const_iterator locus;
+    for(locus = m_phasemarkers.begin(); locus < m_phasemarkers.end(); ++locus)
+        if (!(locus->empty())) return true;
+
+    return false;
+
+} // AnyPhaseUnknownSites
+
+//------------------------------------------------------------------------------------
+
+bool Individual::MultipleTraitHaplotypes() const
+{
+    map<pair<string, long>, Haplotypes >::const_iterator hap;
+    for (hap = m_haplotypesmap.begin(); hap != m_haplotypesmap.end(); hap++)
+    {
+        if (hap->second.MultipleHaplotypes()) return true;
+    }
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+void Individual::PruneSamePhaseUnknownSites()
+{
+    // remove homozygous sites from list of phase-unknown sites
+    unsigned long locus;
+    LongVec2d newphases;
+
+    for(locus = 0; locus < m_phasemarkers.size(); ++locus)
+    {
+        LongVec1d newphase;
+        const LongVec1d& locusphase = m_phasemarkers[locus];
+        if (locusphase.empty()) continue;
+        LongVec1d::const_iterator marker;
+        for (marker = locusphase.begin(); marker != locusphase.end(); ++marker)
+        {
+            Branch_ptr firsttip = m_ptips.front();
+            vector<Branch_ptr>::const_iterator tip = m_ptips.begin();
+            for(++tip; tip != m_ptips.end(); ++tip)
+            {
+                if (firsttip->DiffersInDLFrom(*tip, locus, *marker))
+                {
+                    newphase.push_back(*marker);
+                    break;
+                }
+            }
+        }
+        newphases.push_back(newphase);
+    }
+
+    m_phasemarkers = newphases;
+
+} // Individual::PruneSamePhaseUnknownSites
+
+//------------------------------------------------------------------------------------
+
+pair<long,long> Individual::PickRandomPhaseMarker(Random& rs) const
+{
+    long nmarkers(0L);
+    LongVec2d::const_iterator locus;
+    for(locus = m_phasemarkers.begin(); locus < m_phasemarkers.end(); ++locus)
+    {
+        nmarkers += locus->size();
+    }
+
+    long picked = rs.Long(nmarkers);
+    unsigned long pickedlocus;
+    for(pickedlocus = 0; pickedlocus < m_phasemarkers.size(); ++pickedlocus)
+    {
+        picked -= m_phasemarkers[pickedlocus].size();
+        if (picked < 0)
+        {
+            picked += m_phasemarkers[pickedlocus].size();
+            break;
+        }
+    }
+
+    return make_pair(pickedlocus, m_phasemarkers[pickedlocus][picked]);
+
+} // Individual::PickRandomPhaseMarker
+
+//------------------------------------------------------------------------------------
+
+pair<string,long> Individual::PickRandomHaplotypeMarker() const
+{
+    assert(m_haplotypesmap.size() > 0); //ChooseAllHaplotypes needs to be run.
+    unsigned long whichhap = registry.GetRandom().Long(m_haplotypesmap.size());
+    map<pair<string, long>, Haplotypes >::const_iterator hapmap=m_haplotypesmap.begin();
+    for (unsigned long hapnum=0;
+         hapnum<m_haplotypesmap.size() && hapmap != m_haplotypesmap.end();
+         hapnum++, hapmap++)
+    {
+        if (hapnum == whichhap)
+        {
+            return hapmap->first;
+        }
+    }
+    assert(false); //should have found one.
+    return m_haplotypesmap.begin()->first;
+
+} // Individual::PickRandomHaplotypeMarker
+
+//------------------------------------------------------------------------------------
+
+void Individual::SetPhaseMarkers(const LongVec2d& pm)
+{
+    m_phasemarkers = pm;
+}
+
+//------------------------------------------------------------------------------------
+
+void Individual::SetPhaseSites(const LongVec2d& ps)
+{
+    m_phasesites = ps;
+}
+
+//------------------------------------------------------------------------------------
+
+void Individual::AddHaplotype(long regnum, string lname, long marker,
+                              const StringVec1d& alleles, double penetrance)
+{
+    map<pair<string, long>, Haplotypes>::iterator hap;
+    hap = m_haplotypesmap.find(make_pair(lname, marker));
+    if (hap == m_haplotypesmap.end())
+    {
+        Haplotypes haplo(regnum, lname);
+        m_haplotypesmap.insert(make_pair(make_pair(lname, marker),haplo));
+        hap = m_haplotypesmap.find(make_pair(lname, marker));
+    }
+    hap->second.AddHaplotype(alleles, penetrance);
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Individual::GetAllelesFor(string lname, long marker) const
+{
+    map<pair<string, long>, Haplotypes>::const_iterator hap;
+    hap = m_haplotypesmap.find(make_pair(lname, marker));
+
+    if (hap == m_haplotypesmap.end())
+    {
+        StringVec1d emptyvec;
+        return emptyvec;
+    }
+
+    return hap->second.GetAlleles();
+}
+
+//------------------------------------------------------------------------------------
+
+vector<LocusCell> Individual::GetLocusCellsFor(string lname, long marker) const
+{
+    map<pair<string, long>, vector<LocusCell> >::const_iterator hap;
+    hap = m_currentHapsMap.find(make_pair(lname, marker));
+    if (hap == m_currentHapsMap.end())
+    {
+        assert(false); // Shouldn't ask for a haplotype we haven't chosen.
+        // This is probably another phase 1/phase 2 issue
+        string msg = "Can't find haplotypes for locus " + lname
+            + ", marker " + ToString(marker) + ", individual " + m_name
+            + ".";
+        throw implementation_error(msg);
+    }
+    return hap->second;
+}
+
+//------------------------------------------------------------------------------------
+
+Haplotypes Individual::GetHaplotypesFor(string lname, long marker) const
+{
+    map<pair<string, long>, Haplotypes >::const_iterator hap;
+    hap = m_haplotypesmap.find(make_pair(lname, marker));
+    if (hap == m_haplotypesmap.end())
+    {
+        assert(false); // Shouldn't ask for a haplotype we haven't chosen.
+        // This is probably another phase 1/phase 2 issue
+        string msg = "Can't find haplotypes for segment " + lname
+            + ", marker " + ToString(marker) + ", individual " + m_name
+            + ".";
+        throw implementation_error(msg);
+    }
+    return hap->second;
+}
+
+//------------------------------------------------------------------------------------
+
+string Individual::GetMarkerDataFor(string lname, long marker) const
+{
+    map<pair<string, long>, Haplotypes>::const_iterator hap;
+    hap = m_haplotypesmap.find(make_pair(lname, marker));
+    if (hap == m_haplotypesmap.end())
+    {
+        return "";
+    }
+    return hap->second.GetMarkerData();
+}
+
+//------------------------------------------------------------------------------------
+
+void Individual::ChooseNewHaplotypesFor(string lname, long marker)
+{
+    map<pair<string, long>, Haplotypes>::iterator hap;
+    pair<string, long> tag = make_pair(lname, marker);
+    hap = m_haplotypesmap.find(tag);
+    if (hap == m_haplotypesmap.end())
+    {
+        assert(false);                  // We shouldn't ask about haplotypes for which we don't have data.
+        throw implementation_error("Can't choose a haplotype for locus" +
+                                   lname + ".");
+        //If we want to handle this anyway, the proper thing to do would
+        // be to make some DLCells with the equivalent of ?s in them.
+    }
+    vector<LocusCell> cells = hap->second.ChooseNewHaplotypes();
+    map<pair<string, long>, vector<LocusCell> >::iterator oldhap;
+    oldhap = m_currentHapsMap.find(tag);
+    bool newhap = true;
+    if (oldhap != m_currentHapsMap.end())
+    {
+        if (oldhap->second == cells)
+        {
+            newhap = false;
+        }
+    }
+    m_currentHapsMap.erase(tag);
+    m_currentHapsMap.insert(make_pair(tag, cells));
+    assert(newhap); //Should always pick new site DLs.
+}
+
+//------------------------------------------------------------------------------------
+
+bool Individual::ChooseRandomHaplotypesFor(string lname, long marker)
+{
+    map<pair<string, long>, Haplotypes>::iterator hap;
+    pair<string, long> tag = make_pair(lname, marker);
+    hap = m_haplotypesmap.find(tag);
+    if (hap == m_haplotypesmap.end())
+    {
+        assert(false);                  // We shouldn't ask about haplotypes for which we don't have data.
+        throw implementation_error("Can't choose a haplotype for locus" + lname + ".");
+        //If we want to handle this anyway, the proper thing to do would
+        // be to make some DLCells with the equivalent of ?s in them.
+    }
+    vector<LocusCell> cells = hap->second.ChooseRandomHaplotypes();
+    map<pair<string, long>, vector<LocusCell> >::iterator oldhap;
+    oldhap = m_currentHapsMap.find(tag);
+    bool newhap = true;
+    if (oldhap != m_currentHapsMap.end())
+    {
+        if (oldhap->second == cells)
+        {
+            newhap = false;
+        }
+    }
+    m_currentHapsMap.erase(tag);
+    m_currentHapsMap.insert(make_pair(tag, cells));
+    return newhap;
+}
+
+//------------------------------------------------------------------------------------
+
+void Individual::RandomizeAllHaplotypes()
+{
+    map<pair<string, long>, Haplotypes>::iterator hap;
+    for (hap = m_haplotypesmap.begin(); hap != m_haplotypesmap.end(); hap++)
+    {
+        ChooseRandomHaplotypesFor(hap->first.first, hap->first.second);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void Individual::ChooseFirstHaplotypeFor(string lname, long marker)
+{
+    map<pair<string, long>, Haplotypes>::iterator hap;
+    pair<string, long> tag = make_pair(lname, marker);
+    hap = m_haplotypesmap.find(tag);
+    if (hap == m_haplotypesmap.end())
+    {
+        assert(false);                  // We shouldn't ask about haplotypes for which we don't have data.
+        string msg("Can't choose a haplotype for segment" + lname + ".");
+        throw implementation_error(msg);
+    }
+    vector<LocusCell> cells = hap->second.ChooseFirstHaplotypes();
+    m_currentHapsMap.erase(tag);
+    m_currentHapsMap.insert(make_pair(tag, cells));
+}
+
+//------------------------------------------------------------------------------------
+
+bool Individual::ChooseNextHaplotypeFor(string lname, long marker)
+{
+    map<pair<string, long>, Haplotypes>::iterator hap;
+    pair<string, long> tag = make_pair(lname, marker);
+    hap = m_haplotypesmap.find(tag);
+    if (hap == m_haplotypesmap.end())
+    {
+        assert(false);                  // We shouldn't ask about haplotypes for which we don't have data.
+        string msg("Can't choose a haplotype for segment" + lname + ".");
+        throw implementation_error(msg);
+    }
+    vector<LocusCell> cells = hap->second.ChooseNextHaplotypes();
+    if (cells.size() == 0)
+    {
+        //returning an empty vector means that we've gone through all the haplotypes
+        return false;
+    }
+    m_currentHapsMap.erase(tag);
+    m_currentHapsMap.insert(make_pair(tag, cells));
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+void Individual::SetHaplotypes(string lname, long marker, Haplotypes haps)
+{
+    map<pair<string, long>, Haplotypes>::iterator hap;
+    pair<string, long> whichlocus = make_pair(lname, marker);
+    hap = m_haplotypesmap.find(whichlocus);
+    if (hap != m_haplotypesmap.end())
+    {
+        m_haplotypesmap.erase(hap);
+    }
+    m_haplotypesmap.insert(make_pair(whichlocus, haps));
+    //LS DEBUG:  Probably change this back to ChooseRandom--was trying
+    //to not use the random number generator here.
+    ChooseFirstHaplotypeFor(lname, marker);
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Individual::GetAllelesFromDLs(long locus, long marker, bool moving,
+                                          DataModel_ptr model)
+{
+    StringVec1d alleles;
+    for (size_t tip=0; tip<m_ptips.size(); tip++)
+    {
+        Cell_ptr cell = m_ptips[tip]->GetDLCell(locus, marker, moving);
+        string allele = model->CellToData(cell, marker);
+        alleles.push_back(allele);
+    }
+    return alleles;
+}
+
+//------------------------------------------------------------------------------------
+
+bool Individual::IsValidIndividual() const
+{
+    // all we can check is that the pointers are non-NULL
+
+    if (m_ptips.empty()) return true;
+    unsigned long i;
+    for (i = 0; i < m_ptips.size(); ++i)
+    {
+        if (m_ptips[i] == NULL) return false;  // bad branch!
+    }
+
+    return true;
+} // IsValidIndividual
+
+//------------------------------------------------------------------------------------
+
+StringVec1d Individual::GetTraitXML(long nspaces) const
+{
+    StringVec1d retvec;
+    if (m_haplotypesmap.size() == 0) return retvec;
+
+    string spaces(nspaces, ' ');
+    nspaces += 2;
+
+    map<pair<string, long>, Haplotypes >::const_iterator hap;
+    for (hap = m_haplotypesmap.begin(); hap != m_haplotypesmap.end(); hap++)
+    {
+        retvec.push_back(spaces + MakeTag(xmlstr::XML_TAG_GENOTYPE_RESOLUTIONS));
+        retvec.push_back(spaces + "  " + MakeTag(xmlstr::XML_TAG_TRAIT_NAME) + " "
+                         + hap->first.first + " " + MakeCloseTag(xmlstr::XML_TAG_TRAIT_NAME));
+        StringVec1d haplotypes = hap->second.GetHaplotypesXML(nspaces);
+        retvec.insert(retvec.end(), haplotypes.begin(), haplotypes.end());
+        retvec.push_back(spaces + MakeCloseTag(xmlstr::XML_TAG_GENOTYPE_RESOLUTIONS));
+    }
+    return retvec;
+}
+
+//____________________________________________________________________________________
diff --git a/src/tree/individual.h b/src/tree/individual.h
new file mode 100644
index 0000000..d9a110e
--- /dev/null
+++ b/src/tree/individual.h
@@ -0,0 +1,104 @@
+// $Id: individual.h,v 1.21 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// This file defines the class that stores "individual"-specific information.
+
+#ifndef INDIVIDUAL_H
+#define INDIVIDUAL_H
+
+#include <vector>
+#include <string>
+#include <map>
+
+#include "haplotypes.h"
+#include "types.h"                      // for Branch_ptr declaration
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+class Branch;
+class Random;
+class DLCell;
+
+//------------------------------------------------------------------------------------
+
+class Individual
+{
+  private:
+    long               m_id;
+    LongVec2d          m_phasemarkers; // dim:  loci by markers
+    LongVec2d          m_phasesites;   // dim:  loci by sites (for XML output)
+    string             m_name;
+    vector<Branch_ptr> m_ptips;
+
+    std::map<std::pair<string, long>, Haplotypes> m_haplotypesmap;
+    std::map<std::pair<string, long>, vector<LocusCell> > m_currentHapsMap;
+    // The string is the name of the locus, and the long is the marker (will
+    //  almost always just be 0, but might not be).
+
+  public:
+    Individual()                                  {};
+    ~Individual()                                 {};
+    //We accept the default for:
+    //Individual& operator=(const Individual& src);
+    //Individual(const Individual& src);
+
+    string          GetName()         const       { return m_name; };
+    vector<Branch_ptr> GetAllTips()   const       { return m_ptips; };
+    StringVec1d     GetAllTipNames()  const;
+    long            GetId()           const       { return m_id; };
+    const LongVec2d& GetPhaseMarkers() const      { return m_phasemarkers; };
+    const LongVec2d& GetPhaseSites() const        { return m_phasesites; };
+    bool            AnyPhaseUnknownSites() const;
+    bool            MultipleTraitHaplotypes() const;
+
+    void            PruneSamePhaseUnknownSites();
+    std::pair<long,long> PickRandomPhaseMarker(Random& rs) const;
+    std::pair<string,long> PickRandomHaplotypeMarker() const;
+
+    void SetPhaseMarkers(const LongVec2d& pm);
+    void SetPhaseSites(const LongVec2d& ps);
+    void SetName(const string& newname)           { m_name = newname; };
+    void SetTips(vector<Branch_ptr> tps)          { m_ptips = tps; };
+    void AddTip(Branch_ptr tip)                   { m_ptips.push_back(tip); };
+    void SetId(long newid)                        { m_id = newid; };
+    void AddHaplotype(long regnum, string lname, long marker, const StringVec1d& alleles, double penetrance);
+    StringVec1d GetAllelesFor(string lname, long marker) const; //phase 1
+    vector<LocusCell> GetLocusCellsFor(string lname, long marker) const; //phase 2
+    Haplotypes GetHaplotypesFor(string lname, long marker) const;
+    string GetMarkerDataFor(string lname, long marker) const; //phase 3/Output.
+    void ChooseNewHaplotypesFor(string lname, long marker);
+    bool ChooseRandomHaplotypesFor(string lname, long marker);
+    void RandomizeAllHaplotypes();
+
+    void ChooseFirstHaplotypeFor(string lname, long marker);
+    bool ChooseNextHaplotypeFor(string lname, long marker);
+
+    // For simulated data.
+    void SetHaplotypes(string lname, long marker, Haplotypes haps);
+    StringVec1d GetAllelesFromDLs(long locus, long marker, bool moving, DataModel_ptr model);
+
+    bool IsValidIndividual() const;
+    StringVec1d GetTraitXML(long nspaces) const;
+
+    // Debugging function.
+    void PrintHaplotypesFor(string lname, long marker) const;
+
+};
+
+//------------------------------------------------------------------------------------
+
+typedef vector<Individual> IndVec;
+
+//------------------------------------------------------------------------------------
+
+#endif // INDIVIDUAL_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/intervaldata.cpp b/src/tree/intervaldata.cpp
new file mode 100644
index 0000000..7bcd5f9
--- /dev/null
+++ b/src/tree/intervaldata.cpp
@@ -0,0 +1,208 @@
+// $Id: intervaldata.cpp,v 1.25 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <fstream>
+#include <iostream>
+
+#include "force.h"
+#include "intervaldata.h"
+#include "range.h"                      // For Link-related typedefs and constants.
+#include "runreport.h"
+#include "stringx.h"
+#include "summary.h"
+#include "xmlsum_strings.h"             // for xml sumfile strings
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+Interval * IntervalData::AddInterval(Interval * prev, double time, const LongVec2d & pk,
+                                     const LongVec1d & xk, Linkweight recweight, xpart_t ostat, xpart_t nstat,
+                                     long int recpoint, const LongVec1d & picks, force_type type)
+{
+    // "recweight" is a Link recombination weight (Biglink weight or number of Littlelinks).
+    // "recpoint" is a Littlelink recombination location (or FLAGLONG).
+    // Intervals are assumed to be in time-sorted order.
+    Interval interval(time, pk, xk, recweight, ostat, nstat, recpoint, picks, type);
+    m_intervals.push_back(interval);
+    Interval * thisinterval = &(m_intervals.back());
+    if (prev)
+    {
+        prev->m_next = thisinterval;
+    }
+    return thisinterval;
+} // AddInterval
+
+//------------------------------------------------------------------------------------
+// This AddInterval variant is used when a dummy interval is being added as part
+// of fatal attraction prevention code.  It is always called with arguments which
+// specify a "nonexistent" interval (enough to enable non-zero recombination rate,
+// but "fake" in that it does not "distort" the summary's intervals.
+
+Interval * IntervalData::AddDummyInterval(Interval * prev, Linkweight recweight, xpart_t ostat,
+                                          xpart_t nstat, long int recpoint, force_type type)
+{
+    // We will construct some dummy vectors of appropriate size.
+    // NB:  All previous code made them gratuitously big.
+
+    // "recweight" is a Link recombination weight (Biglink weight or number of Littlelinks).
+    // This function is always called with "recpoint" either = 0 (recombinant case) or FLAGLONG (non-recombinant case).
+
+    // "fake_xk" is number of lineages per crosspartition.
+    xpart_t nxparts = registry.GetDataPack().GetNCrossPartitions();
+    LongVec1d fake_xk(nxparts, 0L);
+
+    // "fake_pk" is partition-forces x number of lineages per partition.
+    LongVec2d fake_pk;
+    const ForceVec& partforces = registry.GetForceSummary().GetPartitionForces();
+    unsigned long int i;
+
+    for (i = 0; i < partforces.size(); ++i)
+    {
+        xpart_t nparts = partforces[i]->GetNParams();
+        LongVec1d fake_partlines(nparts, 0L);
+        fake_pk.push_back(fake_partlines);
+    }
+
+    // MDEBUG:  should this have real info in it??
+    // "npicks" is number of local partition-forces.
+    long int npicks = registry.GetForceSummary().GetNLocalPartitionForces();
+    LongVec1d fake_picks(npicks, 0L);
+
+    return AddInterval(prev, 0.0, fake_pk, fake_xk, recweight, ostat, nstat, recpoint, fake_picks, type);
+} // AddDummyInterval
+
+//------------------------------------------------------------------------------------
+
+void IntervalData::PrintIntervalData() const
+{
+    list<Interval>::const_iterator it;
+
+    for (it = m_intervals.begin(); it != m_intervals.end(); ++it)
+    {
+        const Interval& inter = *it;
+        cerr << "time " << inter.m_endtime << " recweight " <<
+            inter.m_recweight << " old status " << inter.m_oldstatus <<
+            " new status " << inter.m_newstatus << " recpoint " << inter.m_recpoint <<
+            endl;
+        cerr << "xpartition lines are ";
+        unsigned long int i;
+        for (i = 0; i < inter.m_xpartlines.size(); ++i)
+            cerr << inter.m_xpartlines[i] << " ";
+        cerr << endl;
+        cerr << "partition lines are ";
+        for (i = 0; i < inter.m_partlines.size(); ++i)
+        {
+            unsigned long int j;
+            for(j = 0; j < inter.m_partlines[i].size(); ++j)
+                cerr << inter.m_partlines[i][j] << " ";
+            cerr << endl;
+        }
+        cerr << "Next is " << inter.m_next << endl;
+    }
+} // PrintIntervalData
+
+//------------------------------------------------------------------------------------
+// WriteIntervalData is used when writing tree summaries and growth
+// has been turned on, since the tree summaries cannot be summarized.
+
+void IntervalData::WriteIntervalData(ofstream& sumout) const
+{
+    list<Interval>::const_iterator it;
+    string error;
+
+    for (it = m_intervals.begin(); it != m_intervals.end(); ++it)
+    {
+        const Interval& inter = *it;
+
+        force_type type = inter.m_type;
+
+        // All forces have a type and an endtime.
+
+        sumout << "\t\t" << xmlsum::FORCE_START << " " << ToString(type) << " "
+               << xmlsum::FORCE_END << endl;
+        sumout << "\t\t\t" << xmlsum::ENDTIME_START << " "
+               << inter.m_endtime << " " << xmlsum::ENDTIME_END << endl;
+
+        // For clarity, we'll do a big if/else loop here for the member
+        // variables that are only used for certain forces.
+
+        switch (type)
+        {
+            case force_COAL:
+                // Oldstatus is used for all partition forces and coalescence, not recombination.
+                sumout << "\t\t\t" << xmlsum::OLDSTATUS_START << " "
+                       << inter.m_oldstatus << " " << xmlsum::OLDSTATUS_END << endl;
+                break;
+
+            case force_MIG:
+            case force_DIVMIG:
+            case force_DISEASE:
+                // Oldstatus is used for all partition forces and coalescence, not recombination.
+                sumout << "\t\t\t" << xmlsum::OLDSTATUS_START << " "
+                       << inter.m_oldstatus << " " << xmlsum::OLDSTATUS_END << endl;
+
+                // Newstatus is used for all partition forces (and not coalescence).
+                sumout << "\t\t\t" << xmlsum::NEWSTATUS_START << " "
+                       << inter.m_newstatus << " " << xmlsum::NEWSTATUS_END << endl;
+                break;
+
+            case force_REC:
+                // m_recpoint (Littlelink location) and m_recweight (Link weight) used only for recombination.
+                sumout << "\t\t\t" << xmlsum::RECWEIGHT_START << " "
+                       << inter.m_recweight << " " << xmlsum::RECWEIGHT_END << endl;
+                sumout << "\t\t\t" << xmlsum::RECPOINT_START << " "
+                       << inter.m_recpoint << " " << xmlsum::RECPOINT_END << endl;
+                // m_partnerpicks is only used for recombination; this vector will be
+                // empty if no local partition force is also active.
+                if (!inter.m_partnerpicks.empty())
+                {
+                    sumout << "\t\t\t" << xmlsum::PARTNERPICKS_START << " ";
+                    for (unsigned long int i = 0; i < inter.m_partnerpicks.size(); ++i)
+                        sumout << inter.m_partnerpicks[i] << " ";
+                    sumout << xmlsum::PARTNERPICKS_END << endl;
+                }
+                break;
+
+            case force_GROW:
+                error = "Error:  No interval of type " + lamarcstrings::GROW
+                    + " should be possible.  Exiting IntervalData::WriteIntervalData.";
+                throw implementation_error(error);
+                break;
+
+            default:
+                error = "Error:  unknown interval type '" + ToString(type)
+                    + "' encountered.  Exiting IntervalData::WriteIntervalData.";
+                throw implementation_error(error);
+                break;
+        }
+
+        // All forces have m_xpartlines.
+        sumout << "\t\t\t" << xmlsum::XPARTLINES_START << " ";
+        for (unsigned long int i = 0; i < inter.m_xpartlines.size(); ++i)
+            sumout << inter.m_xpartlines[i] << " ";
+        sumout << xmlsum::XPARTLINES_END << endl;
+
+        // Partlines can be tested vs. their size, so we don't need to check the force type.
+        if (inter.m_partlines.size())
+        {
+            sumout << "\t\t\t" << xmlsum::PARTLINES_START << " ";
+            for (unsigned long int i = 0; i < inter.m_partlines.size(); ++i)
+            {
+                for(unsigned long int j = 0; j < inter.m_partlines[i].size(); ++j)
+                    sumout << inter.m_partlines[i][j] << " ";
+                sumout << ". ";
+            }
+            sumout << xmlsum::PARTLINES_END << endl;
+        }
+    }
+} // WriteIntervalData
+
+//____________________________________________________________________________________
diff --git a/src/tree/intervaldata.h b/src/tree/intervaldata.h
new file mode 100644
index 0000000..2a6f420
--- /dev/null
+++ b/src/tree/intervaldata.h
@@ -0,0 +1,148 @@
+// $Id: intervaldata.h,v 1.25 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*
+  This file contains two closely related classes used in storing
+  TreeSummaries.  The first class, Interval, represents all data
+  gathered from one time interval in a given tree.  It holds a
+  "next" pointer to the next Intervals *of a given event type* (for
+  example, a coalescent Interval, one with a coalescence event at
+  the bottom, will hold a pointer to the next coalescent Interval).
+  These pointers are NULL when there is no such Interval.
+
+  Intervals are meant to be kept within an ordered std::list that
+  keeps them in time order; the "next" system is independent of
+  that and is used by maximizer code that requires lightning-fast
+  access to the next event of the same type.
+
+  The second class, IntervalData, contains and manages Intervals
+  and also contains and manages the force-specific Summary objects
+  that offer access to them.
+
+  Written by Mary Kuhner
+*/
+
+#ifndef INTERVALDATA_H
+#define INTERVALDATA_H
+
+#include <fstream>
+#include <list>
+#include <map>
+
+#include "constants.h"
+#include "defaults.h"
+#include "range.h"                      // For Link-related typedefs and constants.
+#include "types.h"
+#include "vectorx.h"
+// #include "summary.h" for management of summaries
+
+//------------------------------------------------------------------------------------
+
+class Summary;
+
+typedef std::map<string, double> Forcemap;
+typedef std::map<string, double>::iterator Forcemapiter;
+typedef std::map<force_type, Summary*> Summap;
+
+//------------------------------------------------------------------------------------
+
+struct Interval
+{
+    double m_endtime;                   // time at the rootward end of the interval
+    // We do not store starttime but rely on computing it on the fly.
+    LongVec1d m_xpartlines;             // length is cross partition
+    LongVec2d m_partlines;              // length is partition force by partition
+    Linkweight m_recweight;             // used only for recombination (Link weight)
+    xpart_t m_oldstatus;                // used for partition events
+    // Also used for coal events, tracks xpart index.
+    xpart_t m_newstatus;                // used for partition events
+    long int m_recpoint;                // used only for recombination (Littlelink)
+    Interval * m_next;                  // points to the next interval of the same force or NULL
+
+    // The following is used only if both recombination and a local partition force are in effect.
+    LongVec1d m_partnerpicks;           // length is (local) partition forces
+
+    force_type m_type;
+
+    // The Interval class could be refactored to be polymorphic by force, which would alleviate
+    // some issues here with writing out the data, but would be tricky elsewhere, since
+    // the interval list would have to hold pointers.
+    Interval(double etime, const LongVec2d& plines,
+             const LongVec1d& xlines, Linkweight recweight, long int ostat, long int nstat,
+             long int recpoint, const LongVec1d& picks, force_type ftype)
+        : m_endtime(etime),
+          m_xpartlines(xlines),
+          m_partlines(plines),
+          m_recweight(recweight),
+          m_oldstatus(ostat),
+          m_newstatus(nstat),
+          m_recpoint(recpoint),
+          m_next(NULL),
+          m_partnerpicks(picks),
+          m_type(ftype)
+    {};
+
+    // Warning WARNING -- this will lead to intertwined lists if much usage
+    //   is made of std::list functionality!
+    // Believe it or not, we accept the default copy constructor and
+    // operator=.  The pointers are NOT owning, and copying them is
+    // necessary for its current usage.
+};
+
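+// Editor's illustrative sketch (not part of the original source): walking every
+// interval of one event type via the "next" pointers described in the header
+// comment above.  The starting pointer "first_coal" is a hypothetical name; in
+// practice it comes from the force-specific Summary object that owns the chain.
+#if 0
+inline void WalkCoalescentIntervals(Interval * first_coal)
+{
+    for (Interval * iv = first_coal; iv != NULL; iv = iv->m_next)
+    {
+        double endtime = iv->m_endtime;   // rootward end of this coalescent interval
+        (void) endtime;                   // ... inspect iv->m_xpartlines, iv->m_oldstatus, etc.
+    }
+}
+#endif
+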
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class IntervalData
+{
+  private:
+    IntervalData& operator=(const IntervalData&); // not defined
+    IntervalData(const IntervalData& src);        // not defined
+
+  public:
+
+    // The following is a std::list, chosen for iterator stability.
+    // warning WARNING -- do not use std::list functionality that
+    // modifies the whole list at once (like splice or operator=).
+    // Non-modifying or single element modifying are usable.
+    std::list<Interval> m_intervals;
+
+    // We must define the default constructor, since we disallow
+    // the copy constructor and that prevents compiler generation of it.
+    IntervalData() {};
+
+    Interval * AddInterval(Interval * prev, double time, const LongVec2d & pk,
+                           const LongVec1d & xk, Linkweight recweight, xpart_t ostat, xpart_t nstat,
+                           long int recpoint, const LongVec1d & picks, force_type type);
+
+    // This AddInterval variant is used when a dummy interval is being added as part of fatal attraction
+    // prevention code.  Its sole purpose is to prevent "fatal attraction" to a zero recombination rate.
+    // It is always called with arguments specifying a "fake" interval.
+    Interval * AddDummyInterval(Interval * prev, Linkweight recweight, xpart_t ostat,
+                                xpart_t nstat, long int recpoint, force_type type);
+
+    unsigned long int size() { return m_intervals.size(); };
+
+    std::list<Interval>::iterator begin() { return m_intervals.begin(); };
+    std::list<Interval>::const_iterator begin() const { return m_intervals.begin(); };
+    std::list<Interval>::iterator end() { return m_intervals.end(); };
+    std::list<Interval>::const_iterator end() const { return m_intervals.end(); };
+    void clear() { m_intervals.clear(); };
+
+    // This function is used when writing tree summaries with growth.
+    void WriteIntervalData(std::ofstream& sumout) const;
+
+    // debug function
+    void PrintIntervalData() const;
+
+};
+
+#endif // INTERVALDATA_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/newick.cpp b/src/tree/newick.cpp
new file mode 100644
index 0000000..0dfbaaf
--- /dev/null
+++ b/src/tree/newick.cpp
@@ -0,0 +1,437 @@
+// $Id: newick.cpp,v 1.32 2013/11/07 22:52:33 jyamato Exp $
+
+#include <cassert>
+#include <utility>                     // for make_pair in recombinant LamarcTreeToNewickString
+
+#include "newick.h"
+#include "tree.h"
+#include "stringx.h"
+#include "constants.h"                  // for FLAGDOUBLE
+#include "branch.h"
+#include "timelist.h"                   // to build the tree
+#include "registry.h"
+#include "force.h"                      // for Force::GetMaximum()
+#include "runreport.h"                  // for ReportUrgent() in NewickConverter::
+                                        //   LamarcTreeToNewickString()
+
+//------------------------------------------------------------------------------------
+
+const double DEFAULTLENGTH = 0.01;
+
+// DEBUG Comments, including migration nodes, are ignored
+// DEBUG Nested comments do not work yet (need to fix ProcessComments)
+// NB No provision is made for recombination
+
+//------------------------------------------------------------------------------------
+
+NewickTree::NewickTree(const string& tree)
+    : m_newicktree(tree),
+      m_curr_char(0),
+      m_numbers("0123456789.-+eE"),     // things found in numbers
+      m_terminators(":,)[")             // things terminating names
+{
+    // intentionally blank
+} // NewickTree constructor
+
+//------------------------------------------------------------------------------------
+
+void NewickTree::ToLamarcTree(Tree& stump)
+{
+    // create a holder structure to store ongoing info about the tree
+    NewickNode base(stump);
+
+    // create a working pointer into the holder
+    NewickNode* current = &base;
+
+    // begin at start of tree string
+    m_curr_char = 0;
+
+    while (m_newicktree[m_curr_char] != ';') // semicolon is end of tree
+    {
+        switch(m_newicktree[m_curr_char])
+        {
+            case '(':
+                ++m_curr_char;
+                // create first daughter
+                current = current->AddChild();
+                break;
+            case ',':
+                ++m_curr_char;
+                // create additional daughter
+                current = current->GetParent()->AddChild();
+                break;
+            case ')':
+                ++m_curr_char;
+                // coalesce daughters
+                current = current->GetParent()->Terminate();
+                break;
+            case ' ':
+            case '\n':
+            case '\t':
+            case '\r':
+                ++m_curr_char;
+                // skip whitespace
+                break;
+            case ':':
+                current->SetLength(ProcessLength());
+                break;
+            case '[':
+                ProcessComment();
+                break;
+            default:
+                // Anything unrecognized must be a tip name
+                current->AddBranch(ProcessName(stump));
+                break;
+        }
+    }
+
+    // hookup the lamarc tree
+    base.Coalesce();
+
+    stump.AttachBase(base.GetBranch());
+
+} // ToLamarcTree
+
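+// Editor's illustrative sketch (not part of the original source): a typical call
+// sequence for this parser.  "stump" is a hypothetical Tree that already contains
+// tips named "A", "B", and "C".
+#if 0
+void ExampleNewickParse(Tree& stump)
+{
+    NewickTree usertree("((A:0.1,B:0.2):0.05,C:0.25):0.0;");
+    usertree.ToLamarcTree(stump);   // '(' opens a clade, ',' adds a sibling,
+                                    // ')' closes it, ':' precedes a branch length
+}
+#endif
+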
+//------------------------------------------------------------------------------------
+
+double NewickTree::ProcessLength()
+{
+    ++m_curr_char;  // skip the colon
+    unsigned long endpos = m_newicktree.find_first_not_of(m_numbers, m_curr_char);
+    unsigned long ndigits = endpos - m_curr_char;
+    string lengthstr = m_newicktree.substr(m_curr_char, ndigits);
+    double length;
+    if (FromString(lengthstr,length))
+    {
+        m_curr_char = endpos;
+        return length;
+    }
+    else
+    { // error handling
+        assert(false); // bad newick tree, not handled yet
+        return FLAGDOUBLE;
+    }
+
+} // ProcessLength
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr NewickTree::ProcessName(const Tree& stump)
+{
+    unsigned long endpos = m_newicktree.find_first_of(m_terminators, m_curr_char);
+    unsigned long nchars = endpos - m_curr_char;
+    string name = m_newicktree.substr(m_curr_char, nchars);
+    m_curr_char = endpos;
+    return stump.GetTip(name);
+} // ProcessName
+
+//------------------------------------------------------------------------------------
+
+void NewickTree::ProcessComment()
+// NB: We don't do anything with the contents of the comment yet
+{
+    unsigned long endpos = m_newicktree.find(']', m_curr_char);
+    // comment parsing would go in here
+    m_curr_char = endpos;
+} // ProcessComment
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+NewickNode::NewickNode(Tree& stump)
+    : m_tree(stump),
+      m_parent(NULL),
+      m_length(0.0)
+{
+    // deliberately blank
+} // NewickTree constructor
+
+//------------------------------------------------------------------------------------
+
+NewickNode::~NewickNode()
+{
+    vector<NewickNode*>::iterator it;
+
+    for (it = m_children.begin(); it != m_children.end(); ++it)
+    {
+        delete *it;
+    }
+} // NewickNode dtor
+
+//------------------------------------------------------------------------------------
+
+NewickNode* NewickNode::AddChild()
+{
+    NewickNode* newnode = new NewickNode(m_tree);
+    m_children.push_back(newnode);
+    newnode->m_parent = this;
+    return newnode;
+} // AddChild
+
+//------------------------------------------------------------------------------------
+
+NewickNode* NewickNode::Terminate()
+{
+    assert(m_children.size() > 1);
+
+    return this;
+} // Terminate
+
+//------------------------------------------------------------------------------------
+
+NewickNode* NewickNode::GetParent() const
+{
+    return m_parent;
+} // GetParent
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr NewickNode::GetBranch() const
+{
+    return m_branch;
+} // GetBranch
+
+//------------------------------------------------------------------------------------
+
+void NewickNode::AddBranch(Branch_ptr br)
+{
+    m_branch = br;
+} // AddBranch
+
+//------------------------------------------------------------------------------------
+
+void NewickNode::SetLength(double newlength)
+{
+    m_length = newlength;
+} // SetLength
+
+//------------------------------------------------------------------------------------
+
+double NewickNode::Coalesce()
+{
+    if (m_children.empty())             // we're at a tip
+    {
+        return m_length;
+    }
+
+    double eventtime = FLAGDOUBLE;      // initialized to invalid value
+
+    vector<NewickNode*>::const_iterator kid;
+    for(kid = m_children.begin(); kid != m_children.end(); ++kid)
+    {
+        eventtime = (*kid)->Coalesce(); // These should all be the same.
+                                        // There may be differences due to
+                                        // rounding, but we ignore them.
+    }
+
+    assert(m_children[0]->m_branch && m_children[1]->m_branch);
+    assert(eventtime != FLAGDOUBLE);
+
+    rangeset fcsites;                   // No sites are fc, so this is empty.
+    m_branch = m_tree.Coalesce(m_children[0]->m_branch, m_children[1]->m_branch, eventtime, fcsites);
+
+    return eventtime + m_length;
+} // Coalesce
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+string NewickConverter::LamarcTreeToMSNewickString(const Tree& tree) const
+{
+    string newick;
+
+    if (registry.GetForceSummary().CheckForce(force_REC))
+    {
+        rangepair span(std::make_pair(0L, tree.GetNsites()));
+        rangevector subtrees(tree.GetLocusSubtrees(span));
+        long nsubtrees(subtrees.size());
+        long tr;
+        for(tr = 0; tr < nsubtrees; ++tr)
+        {
+            // print the range
+            long length = subtrees[tr].second - subtrees[tr].first;
+            newick += "[" + ToString(length) + "]";
+            // print the rangetree
+            long endval(subtrees[tr].second);
+            --endval; // to deal with open ended ranges
+            RecurseWriteIntervalTreeNewick(newick,endval,tree.GetTimeList().Root());
+            newick += ":0.0;\n";
+        }
+    }
+    else
+    {
+        if (tree.GetTimeList().ContainsOnlyTipsAndCoals())
+        {
+            RecurseWriteNewick(newick,tree.GetTimeList().Root());
+            newick += ":0.0;\n";
+        }
+        else
+        {
+            string msg("WARNING--tried to make a newick tree from an ");
+            msg += "enhanced coalescent tree.  Newick formatting failed.\n";
+            registry.GetRunReport().ReportUrgent(msg);
+        }
+    }
+
+    return newick;
+} // LamarcTreeToMSNewickString
+
+//------------------------------------------------------------------------------------
+
+string NewickConverter::LamarcTreeToNewickString(const Tree& tree) const
+{
+    string newick;
+
+    if (registry.GetForceSummary().CheckForce(force_REC))
+    {
+        rangepair span(std::make_pair(0L, tree.GetNsites()));
+        rangevector subtrees(tree.GetLocusSubtrees(span));
+        long nsubtrees(subtrees.size());
+        long tr;
+        for(tr = 0; tr < nsubtrees; ++tr)
+        {
+            // print the range
+            newick += "[" + ToString(subtrees[tr].first);
+            long endval(subtrees[tr].second);
+            --endval; // to deal with open ended ranges
+            newick += "," + ToString(endval) + "]\n";
+            // print the rangetree
+            RecurseWriteIntervalTreeNewick(newick,endval,tree.GetTimeList().Root());
+            newick += ":0.0;\n";
+        }
+    }
+    else
+    {
+        if (tree.GetTimeList().ContainsOnlyTipsAndCoals())
+        {
+            RecurseWriteNewick(newick,tree.GetTimeList().Root());
+            newick += ":0.0;\n";
+        }
+        else
+        {
+            string msg("WARNING--tried to make a newick tree from an ");
+            msg += "enhanced coalescent tree.  Newick formatting failed.\n";
+            registry.GetRunReport().ReportUrgent(msg);
+        }
+    }
+
+    return newick;
+} // LamarcTreeToNewickString
+
+//------------------------------------------------------------------------------------
+
+double NewickConverter::RecurseWriteNewick(string& newick, Branch_ptr pBranch) const
+{
+    // NB This code makes some simplifying assumptions:
+    // No recombination; mutation and disease are ignored.
+    // No more than 2 children per branch; no "one-legged coalescence nodes".
+
+    double time, newtime;
+
+    // Skip a disease, epoch, or migration branch.
+    if (pBranch->Event() == btypeMig || pBranch->Event() == btypeDisease ||
+        pBranch->Event() == btypeEpoch || pBranch->Event() == btypeDivMig)
+    {
+        time = RecurseWriteNewick(newick, pBranch->Child(0));
+        return time + (pBranch->m_eventTime - pBranch->Child(0)->m_eventTime);
+    }
+
+    // Terminate on a tip branch.
+    if (pBranch->BranchGroup() == bgroupTip)
+    {
+        // write tip name
+        newick += boost::dynamic_pointer_cast<TBranch>(pBranch)->m_label;
+        return 0.0;
+    }
+
+    // Process a coalescence branch and recurse.
+    assert(pBranch->Event() == btypeCoal);
+    assert(pBranch->Child(0));
+    assert(pBranch->Child(1));
+
+    newick += '(';
+
+    // Recurse left.
+    time = RecurseWriteNewick(newick, pBranch->Child(0));
+    newick += ':';
+    newtime = time + pBranch->m_eventTime - pBranch->Child(0)->m_eventTime;
+    newick += ToString(newtime);
+    newick += ',';
+
+    // Recurse right.
+    time = RecurseWriteNewick(newick, pBranch->Child(1));
+    newick += ':';
+    newtime = time + pBranch->m_eventTime - pBranch->Child(1)->m_eventTime;
+    newick += ToString(newtime) + ')';
+
+    // Returning newtime here would cause cumulative node times to be printed;
+    return 0.0;  // return zero since we are not trying to accumulate lengths.
+
+} // RecurseWriteNewick
+
+//____________________________________________________________________________________
+
+double NewickConverter::RecurseWriteIntervalTreeNewick(string& newick, long int site,
+                                                       Branch_ptr pBranch) const
+{
+    // NB This code makes some simplifying assumptions:
+    // Mutation and disease are legal but ignored.
+    // No more than 2 children per branch.
+
+    double time, newtime;
+
+    // Skip a migration, disease, recombination, epoch, or div-mig branch.
+    if (pBranch->Event() == btypeMig || pBranch->Event() == btypeDisease ||
+        pBranch->Event() == btypeRec || pBranch->Event() == btypeEpoch ||
+        pBranch->Event() == btypeDivMig)
+    {
+        time = RecurseWriteIntervalTreeNewick(newick, site, pBranch->Child(0));
+        return time + (pBranch->m_eventTime - pBranch->Child(0)->m_eventTime);
+    }
+
+    // Terminate on a tip branch.
+    if (pBranch->BranchGroup() == bgroupTip)
+    {
+        // write tip name
+        newick += boost::dynamic_pointer_cast<TBranch>(pBranch)->m_label;
+        return 0.0;
+    }
+
+    // Process a coalescence branch and recurse.
+    assert(pBranch->Event() == btypeCoal);
+    assert(pBranch->Child(0));
+    assert(pBranch->Child(1));
+
+    // Deal with a "one-legged" coalescence for site "site".
+    if (!pBranch->Child(0)->GetRangePtr()->IsSiteLive(site))
+    {
+        time = RecurseWriteIntervalTreeNewick(newick, site, pBranch->Child(1));
+        return time + (pBranch->m_eventTime - pBranch->Child(1)->m_eventTime);
+    }
+
+    if (!pBranch->Child(1)->GetRangePtr()->IsSiteLive(site))
+    {
+        time = RecurseWriteIntervalTreeNewick(newick, site, pBranch->Child(0));
+        return time + (pBranch->m_eventTime - pBranch->Child(0)->m_eventTime);
+    }
+
+    // We must have a normal "two-legged" coalescence here.
+    newick += '(';
+
+    time = RecurseWriteIntervalTreeNewick(newick, site, pBranch->Child(0));
+    newick += ':';
+    newtime = time + pBranch->m_eventTime - pBranch->Child(0)->m_eventTime;
+    newick += ToDecimalString(newtime);
+    newick += ',';
+
+    time = RecurseWriteIntervalTreeNewick(newick, site, pBranch->Child(1));
+    newick += ':';
+    newtime = time + pBranch->m_eventTime - pBranch->Child(1)->m_eventTime;
+    newick += ToDecimalString(newtime) + ')';
+
+    // Returning newtime here would make the printed values cumulative node
+    // times; return zero because we are not accumulating branch lengths.
+    return 0.0;
+
+} // RecurseWriteIntervalTreeNewick
+
+//____________________________________________________________________________________
diff --git a/src/tree/newick.h b/src/tree/newick.h
new file mode 100644
index 0000000..f09fbd4
--- /dev/null
+++ b/src/tree/newick.h
@@ -0,0 +1,126 @@
+// $Id: newick.h,v 1.10 2013/11/07 22:52:33 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef NEWICK_H
+#define NEWICK_H
+
+#include <cassert>  // May be needed for inline definitions.
+#include <string>
+#include <vector>
+
+#include "branch.h" // for Branch_ptr declaration
+
+//------------------------------------------------------------------------------------
+
+class Tree;
+
+//------------------------------------------------------------------------------------
+
+class UserTree
+{
+    // we accept the default ctor, copy-ctor, and operator=
+  public:
+
+    virtual ~UserTree() {};
+
+    virtual bool Exists() { return false; };
+    virtual void ToLamarcTree(Tree& stump) = 0;
+};
+
+//------------------------------------------------------------------------------------
+
+class NullUserTree : public UserTree
+{
+    // we accept the default ctor, copy-ctor, and operator=
+  public:
+
+    virtual ~NullUserTree() {};
+
+    virtual void ToLamarcTree(Tree& stump) { assert(false); }; // instantiating NULL tree
+};
+
+//------------------------------------------------------------------------------------
+
+class NewickTree : public UserTree
+{
+  private:
+    std::string m_newicktree;
+    unsigned long m_curr_char;
+    const std::string m_numbers;
+    const std::string m_terminators;
+
+    double ProcessLength();
+    Branch_ptr ProcessName(const Tree& stump);
+    void ProcessComment();  // currently throws comments away
+
+    // we accept the default copy-ctor, and operator=
+
+  public:
+
+    NewickTree(const std::string& tree);
+    virtual ~NewickTree() {};
+
+    virtual bool Exists() { return true; };
+    void ToLamarcTree(Tree& stump);  // not const due to its use of m_curr_char
+
+};
+
+//------------------------------------------------------------------------------------
+// This is a helper class for NewickTree, representing one unfinished
+// node in the tree.
+
+class NewickNode
+{
+  private:
+
+    Tree& m_tree;
+    std::vector<NewickNode*> m_children;
+    Branch_ptr m_branch;
+    NewickNode* m_parent;
+    double m_length;
+
+    NewickNode(const NewickNode&); // not defined
+    NewickNode& operator=(const NewickNode&);  // not defined
+
+  public:
+    NewickNode(Tree& stump);
+    ~NewickNode();
+
+    NewickNode* AddChild();
+    NewickNode* Terminate();
+    NewickNode* GetParent() const;
+    Branch_ptr  GetBranch() const;
+    void AddBranch(Branch_ptr br);
+    void SetLength(double newlength);
+
+    // recursively coalesce all nodes contained within this one,
+    // returning the time (not length!) of coalescence.
+    double Coalesce();  // not const because it assigns to m_branch
+}; // NewickNode
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class NewickConverter
+{
+  private:
+    double RecurseWriteNewick(std::string& newick, Branch_ptr br) const;
+    double RecurseWriteIntervalTreeNewick(std::string& newick, long site, Branch_ptr br) const;
+
+    // We accept the default ctor, dtor, copy ctor and operator=
+  public:
+    std::string LamarcTreeToNewickString(const Tree& tree) const;
+    std::string LamarcTreeToMSNewickString(const Tree& tree) const;
+
+}; // NewickConverter
+
+#endif // NEWICK_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/parameter.cpp b/src/tree/parameter.cpp
new file mode 100644
index 0000000..2be1d19
--- /dev/null
+++ b/src/tree/parameter.cpp
@@ -0,0 +1,678 @@
+// $Id: parameter.cpp,v 1.50 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+
+#include "parameter.h"
+#include "forcesummary.h"
+#include "prior.h"
+#include "registry.h"
+#include "mathx.h"
+#include "random.h"
+#include "stringx.h"
+#include "ui_vars_prior.h"
+#include "ui_strings.h"                 // for kludgy Parameter::GetUserName()
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+class RegionGammaInfo;
+
+//------------------------------------------------------------------------------------
+
+double ResultStruct::GetMLE(long region) const
+{
+    assert(region >= 0 && static_cast<unsigned long>(region) < mles.size());
+    return mles[region];
+} // GetMLE
+
+//------------------------------------------------------------------------------------
+
+double ResultStruct::GetOverallMLE() const
+{
+    // if there is exactly one region, we will return the regional
+    // mle as the overall mle.  If there are multiple regions and
+    // no overall mle, something is wrong.
+    assert(!(overallmle.empty() && mles.size() != 1));
+
+    if (overallmle.empty()) return mles[0];
+    else return overallmle[0];
+} // GetOverallMLE
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d ResultStruct::GetAllMLEs()  const
+{
+    assert(!mles.empty());
+    DoubleVec1d result = mles;
+    if (!overallmle.empty()) result.push_back(overallmle[0]);
+    return result;
+} // GetAllMLEs
+
+//------------------------------------------------------------------------------------
+
+const ProfileStruct& ResultStruct::GetProfile(long region) const
+{
+    assert(region >= 0 && static_cast<unsigned long>(region) < profiles.size());
+    return profiles[region];
+} // GetProfile
+
+//------------------------------------------------------------------------------------
+
+const ProfileStruct& ResultStruct::GetOverallProfile() const
+{
+    // if there is exactly one region, we will return its profile.
+    // otherwise, it is an error to ask for overalls if there aren't
+    // any.
+
+    assert(!(overallprofile.empty() && profiles.size() != 1));
+
+    if (overallprofile.empty()) return profiles[0];
+    else return overallprofile[0];
+} // GetOverallProfile
+
+//------------------------------------------------------------------------------------
+
+vector<ProfileStruct> ResultStruct::GetAllProfiles() const
+{
+    assert(!profiles.empty());
+    vector<ProfileStruct> result = profiles;
+    if (!overallprofile.empty()) result.push_back(overallprofile[0]);
+    return result;
+} // GetAllProfiles
+
+const ProfileLineStruct& ResultStruct::GetProfileFor(double centile,
+                                                     long reg) const
+{
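+    // A region index equal to the total number of regions selects the overall
+    // profile; smaller indices select the corresponding regional profile.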
+    if (reg == registry.GetDataPack().GetNRegions())
+    {
+        return GetOverallProfile().GetProfileLine(centile);
+    }
+    return GetProfile(reg).GetProfileLine(centile);
+}
+
+void ResultStruct::AddMLE(double mle, long region)
+{
+    //We have to replace the MLE sometimes when reading from a summary file.
+    if (region < static_cast<long>(mles.size()))
+    {
+        mles[region] = mle;
+    }
+    else
+    {
+        assert(region == static_cast<long>(mles.size()));
+        mles.push_back(mle);
+    }
+}
+
+void ResultStruct::AddOverallMLE(double mle)
+{
+    if (overallmle.size() > 0)
+    {
+        overallmle[0] = mle;
+    }
+    else
+    {
+        overallmle.push_back(mle);
+    }
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Parameter::Parameter(   const ParamStatus& status,
+                        unsigned long paramvecIndex,
+                        const string sname,
+                        const string lname,
+                        force_type thisforce,
+                        method_type meth,
+                        proftype prof,
+                        const UIVarsPrior & uiprior,
+                        double truevalue
+    )
+    : m_status(status),
+      m_paramvecIndex(paramvecIndex),
+      m_shortname(sname),
+      m_name(lname),
+      m_force(thisforce),
+      m_method(meth),
+      m_profiletype(prof),
+      m_truevalue(truevalue),
+      m_prior(uiprior)
+{
+    if (m_status.Status() == pstat_constant)
+    {
+        m_profiletype = profile_NONE;
+        m_name += " (held constant)";
+    } // The names of grouped parameters are set in registry.cpp later.
+} // Parameter constructor
+
+//------------------------------------------------------------------------------------
+
+Parameter::Parameter(const ParamStatus& status, unsigned long paramvecIndex)
+    : m_status(status),
+      m_paramvecIndex(paramvecIndex),
+      m_method(method_PROGRAMDEFAULT),
+      m_profiletype(profile_NONE),
+      m_prior(status)
+{
+    if (m_status.Valid())
+    {
+        throw implementation_error("Tried to create a valid parameter object using the invalid parameter constructor.");
+    }
+} // Invalid parameter constructor
+
+//------------------------------------------------------------------------------------
+
+string Parameter::GetUserName() const
+{
+    return m_shortname;
+} // GetUserName
+
+//------------------------------------------------------------------------------------
+
+bool Parameter::IsEasilyBayesianRearrangeable() const
+{
+    // currently epoch boundary times are not casually movable
+    // and require their own seperate arranger (the EpochSizeArranger)
+    return (IsVariable() && m_force != force_DIVERGENCE);
+} // IsEasilyBayesianRearrangeable
+
+//------------------------------------------------------------------------------------
+
+pair<double, double> Parameter::DrawFromPrior() const
+{
+    return m_prior.RandomDraw();
+} // DrawFromPrior
+
+//------------------------------------------------------------------------------------
+
+bool Parameter::IsZeroTrueMin()
+{
+    if (m_force == force_GROW)
+        return false;
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+vector<centilepair> Parameter::GetPriorLikes(long region) const
+{
+
+    assert(IsValidParameter());
+    vector<centilepair> answer;
+
+    const ProfileStruct& profile = m_results.GetProfile(region);
+    vector<ProfileLineStruct>::const_iterator it;
+
+    for (it = profile.profilelines.begin();
+         it != profile.profilelines.end();
+         ++it)
+    {
+        centilepair newpair(it->percentile, it->loglikelihood);
+        answer.push_back(newpair);
+    }
+    return answer;
+} // GetPriorLikes
+
+//------------------------------------------------------------------------------------
+
+vector<centilepair> Parameter::GetOverallPriorLikes() const
+{
+    assert(IsValidParameter());
+
+    vector<centilepair> answer;
+
+    const ProfileStruct& profile = m_results.GetOverallProfile();
+    vector<ProfileLineStruct>::const_iterator it;
+
+    for (it = profile.profilelines.begin();
+         it != profile.profilelines.end();
+         ++it)
+    {
+        centilepair newpair(it->percentile, it->loglikelihood);
+        answer.push_back(newpair);
+    }
+    return answer;
+} // GetOverallPriorLikes
+
+//------------------------------------------------------------------------------------
+
+vector<vector<centilepair> > Parameter::GetProfiles(long region) const
+{
+    assert(IsValidParameter());
+
+    const ProfileStruct& profile = m_results.GetProfile(region);
+    vector<ProfileLineStruct>::const_iterator line;
+
+    vector<vector<centilepair> > answer;
+    vector<centilepair> answerline;
+
+    long parameter;
+    long numparams = 0;
+    // hack, sorry
+    for (line = profile.profilelines.begin(); line != profile.profilelines.end(); ++line)
+    {
+        if (!line->profparam.empty())
+        {
+            numparams = line->profparam.size();
+            break;
+        }
+    }
+
+    // if this assert fires, profiles have been requested but none
+    // whatsoever can be found....
+    // assert(numparams > 0);
+    // LS NOTE:  In a bayesian analysis, having no profiles is expected--we
+    //  use GetCIs and GetLikes, etc. instead.  It's convenient to call this
+    //  routine anyway, so I don't have to special-case the calling code in
+    //  runreports.  Hence, commenting out the above 'assert'.
+
+    //long numparams = profile.profilelines[0].profparam.size();
+
+    for (parameter = 0; parameter < numparams; ++parameter)
+    {
+        for (line = profile.profilelines.begin();
+             line != profile.profilelines.end();
+             ++line)
+        {
+            double thisparam = line->profparam[parameter];
+            centilepair newpair(line->percentile, thisparam);
+            answerline.push_back(newpair);
+        }
+        answer.push_back(answerline);
+        answerline.clear();
+    }
+
+    return answer;
+
+} // GetProfiles
+
+//------------------------------------------------------------------------------------
+
+vector<vector<centilepair> > Parameter::GetOverallProfile() const
+{
+    assert(IsValidParameter());
+
+    vector<vector<centilepair> > answer;
+    vector<centilepair> answerline;
+
+    const ProfileStruct& profile = m_results.GetOverallProfile();
+    vector<ProfileLineStruct>::const_iterator line;
+
+    long parameter;
+    // hack, sorry
+    long numparams = profile.profilelines[0].profparam.size();
+
+    for (parameter = 0; parameter < numparams; ++parameter)
+    {
+        for (line = profile.profilelines.begin();
+             line != profile.profilelines.end();
+             ++line)
+        {
+            double thisparam = line->profparam[parameter];
+            centilepair newpair(line->percentile, thisparam);
+            answerline.push_back(newpair);
+        }
+        answer.push_back(answerline);
+        answerline.clear();
+    }
+
+    return answer;
+
+} // GetOverallProfile
+
+//------------------------------------------------------------------------------------
+
+vector<centilepair> Parameter::GetCIs(long region) const
+{
+    assert(IsValidParameter());
+
+    vector<centilepair> answer;
+
+    const ProfileStruct& profile = m_results.GetProfile(region);
+    vector<ProfileLineStruct>::const_iterator it;
+
+    for (it = profile.profilelines.begin();
+         it != profile.profilelines.end();
+         ++it)
+    {
+        centilepair newpair(it->percentile, it->profilevalue);
+        answer.push_back(newpair);
+    }
+    return answer;
+
+} // GetCIs
+
+//------------------------------------------------------------------------------------
+
+vector<centilepair> Parameter::GetOverallCIs() const
+{
+    assert(IsValidParameter());
+
+    vector<centilepair> answer;
+
+    const ProfileStruct& profile = m_results.GetOverallProfile();
+    vector<ProfileLineStruct>::const_iterator it;
+
+    for (it = profile.profilelines.begin();
+         it != profile.profilelines.end();
+         ++it)
+    {
+        centilepair newpair(it->percentile, it->profilevalue);
+        answer.push_back(newpair);
+    }
+    return answer;
+
+} // GetOverallCIs
+
+bool Parameter::CentileIsExtremeLow(double centile, long reg) const
+{
+    return m_results.GetProfileFor(centile, reg).isExtremeLow;
+}
+
+bool Parameter::CentileIsExtremeHigh(double centile, long reg) const
+{
+    return m_results.GetProfileFor(centile, reg).isExtremeHigh;
+}
+
+bool Parameter::CentileHadWarning(double centile, long reg) const
+{
+    return m_results.GetProfileFor(centile, reg).maximizerWarning;
+}
+
+//------------------------------------------------------------------------------------
+
+void Parameter::AddProfile(const ProfileStruct& prof, likelihoodtype like)
+{
+    assert(IsValidParameter());
+    switch (like)
+    {
+        case ltype_ssingle:
+            m_results.AddProfile(prof);
+            break;
+        case ltype_replicate:
+            m_results.AddProfile(prof);
+            break;
+        case ltype_region:
+            m_results.AddOverallProfile(prof);
+            break;
+        case ltype_gammaregion:
+        {
+            RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+            if (!pRegionGammaInfo ||
+                !pRegionGammaInfo->CurrentlyPerformingAnalysisOverRegions())
+            {
+                string msg = "Parameter::AddProfile() was told to add a profile ";
+                msg += "for an overall estimate over all genomic regions with ";
+                msg += "gamma-distributed background mutation rates, but the ";
+                msg += "necessary RegionGammaInfo object was not found, or was ";
+                msg += "found in the \"off\" state, neither of which should happen.";
+                throw implementation_error(msg);
+            }
+            m_results.AddOverallProfile(prof);
+            if (!pRegionGammaInfo->HaveProfile() && force_REGION_GAMMA == m_force)
+                pRegionGammaInfo->AddProfile(prof);
+            break;
+        }
+    }
+} // AddProfile
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// Initialization of static variable, needs to be in .cpp
+bool ParamVector::s_locked = false;
+
+//------------------------------------------------------------------------------------
+
+// This constructor can make a read-only ParamVector
+
+ParamVector::ParamVector(bool readonly)
+    : m_readonly(readonly),
+      forcesum(registry.GetForceSummary())
+{
+    if (m_readonly)
+    {
+        parameters = forcesum.GetAllParameters();
+    }
+    else
+    {
+        assert(!s_locked);        // attempt to check out a second set of parameters!
+        parameters = forcesum.GetAllParameters();
+        s_locked = true;
+    }
+
+    const RegionGammaInfo *pRegionGammaInfo = registry.GetRegionGammaInfo();
+    if (pRegionGammaInfo && pRegionGammaInfo->CurrentlyPerformingAnalysisOverRegions())
+    {
+        UIVarsPrior gammaprior(force_REGION_GAMMA);
+        Parameter paramAlpha(pRegionGammaInfo->GetParamStatus(),
+                             parameters.size(),
+                             "alpha",
+                             "alpha (shape parameter for gamma over regions)",
+                             force_REGION_GAMMA,
+                             method_PROGRAMDEFAULT,
+                             pRegionGammaInfo->GetProfType(),
+                             gammaprior,
+                             FLAGDOUBLE);
+
+        if (pRegionGammaInfo->HaveMLE())
+        {
+            paramAlpha.AddOverallMLE(pRegionGammaInfo->GetMLE());
+            if (pRegionGammaInfo->HaveProfile())
+                paramAlpha.AddProfile(pRegionGammaInfo->GetProfile(), ltype_gammaregion);
+        }
+
+        parameters.push_back(paramAlpha);
+    }
+
+} // ParamVector
+
+//------------------------------------------------------------------------------------
+
+ParamVector::~ParamVector()
+{
+    if (m_readonly) return;
+
+    assert(s_locked);        // how did it get unlocked before destruction??
+    forcesum.SetAllParameters(parameters);
+    s_locked = false;
+
+} // ParamVector destructor
+
+//------------------------------------------------------------------------------------
+
+Parameter& ParamVector::operator[](long index)
+{
+    assert(ParamVector::s_locked);  // should only be unlocked in 'const' context
+
+    // bounds checking
+    assert(index >= 0);
+    assert(index < static_cast<long>(parameters.size()));
+
+    return parameters[index];
+
+} // ParamVector operator[]
+
+//------------------------------------------------------------------------------------
+
+const Parameter& ParamVector::operator[](long index) const
+{
+    assert(index >= 0);
+    assert(index < static_cast<long>(parameters.size()));
+
+    return parameters[index];
+
+} // ParamVector operator[] const
+
+//------------------------------------------------------------------------------------
+
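+// Returns paramlist_NO if no valid parameter requests profiling, paramlist_YES
+// if every valid parameter does (fixed or percentile), and paramlist_MIX otherwise.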
+paramlistcondition ParamVector::CheckCalcProfiles() const
+{
+    vector <Parameter> :: const_iterator pit;
+    long ok=0;
+    long nok=0;
+    for(pit=parameters.begin(); pit != parameters.end(); pit++)
+    {
+        if(pit->IsValidParameter())
+        {
+            switch(pit->GetProfileType())
+            {
+                case profile_FIX:
+                case profile_PERCENTILE:
+                    ok++;
+                    break;
+                case profile_NONE:
+                    nok++;
+                    continue;
+            }
+        }
+    }
+    long sum = ok + nok;
+    if(nok==sum)
+        return paramlist_NO;
+    else
+    {
+        if(ok==sum)
+            return paramlist_YES;
+        else
+            return paramlist_MIX;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
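+// Like CheckCalcProfiles(), but distinguishes the profile flavor: returns
+// paramlist_YES only when every valid parameter uses percentile profiles,
+// paramlist_NO when none is profiled, and paramlist_MIX otherwise.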
+paramlistcondition ParamVector::CheckCalcPProfiles() const
+{
+    vector <Parameter> :: const_iterator pit;
+    long fok=0;
+    long pok=0;
+    long nok=0;
+    for(pit=parameters.begin(); pit != parameters.end(); pit++)
+    {
+        if(pit->IsValidParameter())
+        {
+            switch(pit->GetProfileType())
+            {
+                case profile_FIX:
+                    fok++;
+                    break;
+                case profile_PERCENTILE:
+                    pok++;
+                    break;
+                case profile_NONE:
+                    nok++;
+                    continue;
+            }
+        }
+    }
+    long sum = fok + pok + nok;
+    if(nok==sum)
+        return paramlist_NO;
+    else
+    {
+        if(pok==sum)
+            return paramlist_YES;
+        else
+            return paramlist_MIX;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+long ParamVector::NumProfiledParameters() const
+{
+    vector<Parameter>::const_iterator it;
+
+    long result = 0;
+
+    for (it = parameters.begin(); it != parameters.end(); ++it)
+    {
+        if (it->IsProfiled())
+            ++result;
+    }
+
+    return result;
+} // NumProfiledParameters
+
+//------------------------------------------------------------------------------------
+
+long ParamVector::NumVariableParameters() const
+{
+    vector<Parameter>::const_iterator it;
+
+    long result = 0;
+
+    for (it = parameters.begin(); it != parameters.end(); ++it)
+    {
+        ParamStatus mystatus = ParamStatus(it->GetStatus());
+        if (mystatus.Varies()) ++result;
+    }
+
+    return result;
+} // NumVariableParameters
+
+//------------------------------------------------------------------------------------
+
+long ParamVector::ChooseSampleParameterIndex(Random * randomSource) const
+{
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+
+    long totalWeight = 0;
+    vector<Parameter>::const_iterator it;
+    for (it = parameters.begin(); it != parameters.end(); ++it)
+    {
+        if (it->IsEasilyBayesianRearrangeable())
+        {
+            long thisRate = it->GetPrior().GetSamplingRate();
+            assert(thisRate > 0); // EWFIX -- really must handle this.
+            totalWeight += thisRate;
+        }
+    }
+    assert(totalWeight > 0);
+
+    long weightedIndex = randomSource->Long(totalWeight);
+
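+    // Roulette-wheel selection: subtract each parameter's sampling rate from
+    // the drawn value; the first parameter to push it below zero is chosen.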
+    long remainingWeight = weightedIndex;
+    long chosenIndex = 0;
+    for (it = parameters.begin(); it != parameters.end(); ++it)
+    {
+        long thisWeight = it->GetPrior().GetSamplingRate();
+        remainingWeight -= thisWeight;
+        if (remainingWeight < 0)
+        {
+            return chosenIndex;
+        }
+        chosenIndex++;
+    }
+    assert(false);
+    return FLAGLONG;
+
+#else
+    while (true)
+    {
+        long chosen = randomSource->Long(size());
+        if (operator[](chosen).IsEasilyBayesianRearrangeable()) return chosen;
+        //LS DEBUG:  Potential for an infinite loop here if no parameter is set
+        // to be variable, but this should be caught when exiting the menu.
+
+    }
+    assert(false);
+    return(FLAGLONG);
+#endif
+
+}
+
+//____________________________________________________________________________________
diff --git a/src/tree/parameter.h b/src/tree/parameter.h
new file mode 100644
index 0000000..883423f
--- /dev/null
+++ b/src/tree/parameter.h
@@ -0,0 +1,257 @@
+// $Id: parameter.h,v 1.46 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*************************************************************************
+   The Parameter class and its helper ResultStruct are used to store
+   information about individual parameters of the force model.  For
+   example, a single Parameter object might represent the migration
+   rate from population 2 to population 1.
+
+   Parameters contain information needed to describe a parameter in
+   input/output, and also store information about results for that
+   parameter, such as its MLE, profiles, and perhaps someday plots.
+
+   Because everything about a parameter except its results is fixed
+   after the user-input phase, only the results have set methods
+
+   A Parameter has an IsValidParameter() method establishing its validity.
+   No other field in an invalid Parameter should be used!  Invalid
+   Parameters are used as placeholders, for example representing the
+   non-existent "migration rate from 1 to 1".
+
+   **
+
+  The ParamVector class is a structure that holds a "checked out" set
+  of all Parameters.  This is done so that parts of the program which
+  need rapid, read-write access to all Parameters can get it.  One
+  creates a ParamVector to "check out" the Parameters and destroys it
+  (or lets it go out of scope) to automatically "check in" your changes.
+  It is an error, checked by an assertion, to check out a second ParamVector
+  while the first is still out.
+
+  If you want only read access to a ParamVector, construct one with an
+  argument of "true" (meaning read-only) (and make it const to keep you
+  honest).  This ParamVector will not attempt a checkin.  It is fine to
+  have as many read-only ParamVectors as you like.  However, bear in mind
+  that if you check out a writable one and write to it, the read-only ones
+  will present a stale view (they are never updated).
+
+  NB:  There may be invalid Parameters in a ParamVector (such as
+  the nonexistent migration rates along the diagonal).  It is a fatal
+  mistake to try to extract results, names, etc. from such a Parameter.
+
+Written by Mary Kuhner October 1 2001
+****************************************************************************/
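+
+/*
+  Illustrative usage sketch (hypothetical values: parameter index 0 and the
+  MLE 0.013 are made up, and parameter 0 is assumed to be valid):
+
+      {
+          ParamVector pvec(false);        // writable: checks the Parameters out
+          pvec[0].AddMLE(0.013, 0);       // record an MLE for region 0
+      }                                   // destructor checks the changes back in
+
+      const ParamVector readonly(true);   // read-only view; never checked back in
+      long nvar = readonly.NumVariableParameters();
+*/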
+
+#ifndef PARAMETER_H
+#define PARAMETER_H
+
+#include <cassert>
+#include <vector>
+#include <string>
+#include <map>
+
+#include "plotstat.h"
+#include "prior.h"
+#include "constants.h"
+#include "defaults.h"
+#include "types.h"
+#include "paramstat.h"
+
+//------------------------------------------------------------------------------------
+
+class ForceSummary;
+class Random;
+
+//------------------------------------------------------------------------------------
+
+struct ResultStruct
+{
+  private:
+    DoubleVec1d mles;                // dim: regions
+    DoubleVec1d overallmle;          // vector of one element
+    vector<ProfileStruct> profiles;        // dim: regions
+    vector<ProfileStruct> overallprofile;  // vector of one element
+
+  public:
+
+    // Creation and Destruction
+    // ResultStruct();                         // we accept the default
+    // ResultStruct(const ResultStruct& src);  // we accept the default
+    // ~ResultStruct();                        // we accept the default
+    // ResultStruct& operator=(const ResultStruct& src); // we accept the default
+
+    // Getters (mostly not inline due to error checking code)
+    double GetMLE(long region) const;
+    DoubleVec1d GetRegionMLEs() const { return mles; };
+    double GetOverallMLE() const;
+    DoubleVec1d GetAllMLEs() const;
+
+    const ProfileStruct& GetProfile(long region) const;
+    const vector<ProfileStruct>& GetRegionProfiles() const { return profiles; };
+    const ProfileStruct& GetOverallProfile() const;
+    vector<ProfileStruct> GetAllProfiles() const;
+    const ProfileLineStruct& GetProfileFor(double centile, long reg) const;
+
+    // Setters
+    void AddMLE(double mle, long region);
+    void AddOverallMLE(double mle);
+
+    void AddProfile(const ProfileStruct& profile)
+    { profiles.push_back(profile); };
+    void AddOverallProfile(const ProfileStruct& profile)
+    { overallprofile.push_back(profile); };
+
+}; // ResultStruct definition
+
+//------------------------------------------------------------------------------------
+
+class Parameter
+// EWCOMMENT -- keep an eye on how much "special case" code is added
+// to this class. If we get a lot, then we need to think about
+// using subclasses.  We don't do it now because we stuff these
+// into vectors.
+{
+  private:
+    ParamStatus m_status;               // if this is "invalid" nothing else matters
+    unsigned long m_paramvecIndex;
+    string m_shortname;
+    string m_name;
+    force_type m_force;                 // parameter knows to which force it belongs
+
+    ResultStruct m_results;             // used to be "mutable", but no longer needed.
+    method_type m_method;               // method used to calculate this parameter
+    proftype m_profiletype;
+    double m_truevalue;                 // true value of variable
+
+    // bayesian variables
+    Prior  m_prior;                     // a 'Prior' object (can be linear, log, ...?)
+
+    Parameter();                        // not defined
+
+  public:
+
+    // Creation and destruction
+    Parameter(const ParamStatus& status,
+              unsigned long paramIndex,
+              const string sname,
+              const string lname,
+              force_type thisforce,
+              method_type meth,
+              proftype prof,
+              const UIVarsPrior & uiprior,
+              double truevalue);
+
+    //Use only to create invalid parameters.
+    Parameter(const ParamStatus& status, unsigned long paramIndex);
+
+    // We accept default copy ctor, op=, dtor
+
+    // Getters
+    ParamStatus GetStatus()   const { return m_status; };
+    unsigned long GetParamVecIndex()   const { return m_paramvecIndex; };
+    bool IsValidParameter()   const { return (m_status.Valid()); };
+    bool IsVariable()         const { return (m_status.Inferred()); };
+    bool IsForce(force_type thistag)  const { return (m_force == thistag); };
+    bool IsEasilyBayesianRearrangeable() const;
+    force_type WhichForce()       const { return m_force; };
+    string GetShortName()     const { assert (IsValidParameter()); return m_shortname; };
+    string GetName()          const { assert (IsValidParameter()); return m_name; };
+    string GetUserName()      const;
+    proftype GetProfileType() const { return m_profiletype; };
+    bool IsProfiled()         const { return (m_profiletype != profile_NONE); };
+    method_type GetMethod()   const { return m_method; };
+    double GetTruth()         const { return m_truevalue; };
+
+    // The Prior interface (for Bayesian analysis)
+    Prior      GetPrior()         const { return m_prior; };
+
+    void       SetShortName(string n) { m_shortname = n; };
+    void       SetName(string n) { m_name = n; };
+    std::pair<double, double> DrawFromPrior() const;
+    bool IsZeroTrueMin();
+
+    // MLEs
+    double GetMLE(long region) const { assert (IsValidParameter()); return m_results.GetMLE(region); };
+    DoubleVec1d GetRegionMLEs() const { assert (IsValidParameter()); return m_results.GetRegionMLEs(); };
+    double GetOverallMLE() const { assert (IsValidParameter()); return m_results.GetOverallMLE(); };
+    DoubleVec1d GetAllMLEs() const { assert (IsValidParameter()); return m_results.GetAllMLEs(); };
+
+    // Profiles
+    vector<vector<centilepair> > GetProfiles(long region) const;
+    vector<vector<centilepair> > GetOverallProfile() const;
+    vector<centilepair> GetPriorLikes(long region) const;
+    vector<centilepair> GetOverallPriorLikes() const;
+    vector<centilepair> GetCIs(long region) const;
+    vector<centilepair> GetOverallCIs() const;
+
+    bool CentileIsExtremeLow (double centile, long reg) const;
+    bool CentileIsExtremeHigh(double centile, long reg) const;
+    bool CentileHadWarning(double centile, long reg) const;
+
+    // Setters
+    //   no setters for most things outside of m_results, since everything
+    //   is set in the constructor.  The m_results setters are non-const,
+    //   since m_results is no longer mutable.  (The prior can be set, but in
+    //   that case the Parameter must be non-const.)
+
+    // MLEs
+    void AddMLE(double mle, long region) { assert (IsValidParameter()); m_results.AddMLE(mle, region); };
+    void AddOverallMLE(double mle) { assert (IsValidParameter()); m_results.AddOverallMLE(mle); };
+
+    // Profiles
+    void AddProfile(const ProfileStruct& prof, likelihoodtype like);
+};
+
+class ParamVector
+{
+  private:
+
+    // This class is a lock.  NO COPYING ALLOWED.  Don't define these.
+    ParamVector(const ParamVector&);              // not defined
+    ParamVector& operator=(const ParamVector&);   // not defined
+
+    static bool s_locked;
+    bool m_readonly;
+    ForceSummary& forcesum;      // to allow check-in in destructor
+
+    // not very private, as we hand out references....
+    vector<Parameter> parameters;
+
+  public:
+
+    // The constructor checks out the Parameters and the destructor checks them
+    // back in.  If you check out a read-only version, you will do yourself a
+    // favor if you also make it const (i.e. 'const ParamVector pvec(true)' )
+    ParamVector(bool rd);
+    ~ParamVector();
+
+    // vector emulation routines
+    typedef vector<Parameter>::iterator iterator;
+    typedef vector<Parameter>::const_iterator const_iterator;
+    Parameter&     operator[](long index);
+    const Parameter&     operator[](long index) const;
+    unsigned long  size()  const { return parameters.size(); };
+    bool           empty() const { return parameters.empty(); };
+    iterator       begin()       { return parameters.begin(); };
+    const_iterator begin() const { return parameters.begin(); };
+    iterator       end()         { return parameters.end(); };
+    const_iterator end()   const { return parameters.end(); };
+
+    paramlistcondition CheckCalcProfiles() const;
+    paramlistcondition CheckCalcPProfiles() const;
+    long           NumProfiledParameters() const;
+    long           NumVariableParameters() const;
+    long           ChooseSampleParameterIndex(Random * randomSource) const;
+};
+
+#endif // PARAMETER_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/partition.cpp b/src/tree/partition.cpp
new file mode 100644
index 0000000..fe8cb78
--- /dev/null
+++ b/src/tree/partition.cpp
@@ -0,0 +1,16 @@
+// $Id: partition.cpp,v 1.5 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "force.h"
+#include "partition.h"
+
+XPartition::~XPartition() {};
+
+//____________________________________________________________________________________
diff --git a/src/tree/partition.h b/src/tree/partition.h
new file mode 100644
index 0000000..f7768ec
--- /dev/null
+++ b/src/tree/partition.h
@@ -0,0 +1,108 @@
+// $Id: partition.h,v 1.10 2011/03/12 20:03:02 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef PARTITION_H
+#define PARTITION_H
+
+#include <map>
+#include <vector>
+#include <string>
+#include "vectorx.h"
+#include "defaults.h"                   // for force_type definition
+
+class TimeSize;
+
+class XPartition
+{
+  private:
+    LongVec1d m_membership;             // branch membership match
+    vector<XPartition*> m_partners;     // related xpartitions
+    bool m_hasparam;
+
+    // way to find jointed stick? can't be pointer due to tree copying
+    // way to find appropiate theta and g (if any)
+  public:
+    XPartition() {};
+    ~XPartition();
+
+    bool HasParam() const { return m_hasparam; };
+
+    void SetHasParam(bool val) { m_hasparam = val; };
+
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class Partition
+{
+
+  private:
+    vector<XPartition*> m_xparts;       // non-owning
+    string m_forcename;                 // what force are we part of?
+
+  public:
+    Partition(const string& fname) : m_forcename(fname) {};
+    ~Partition() {};
+
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+typedef std::map<force_type,StringVec1d> PartitionNames;
+
+class PartitionSummary
+{
+  private:
+    vector<Partition> m_partitions;
+    vector<XPartition> m_xpartitions;
+
+    PartitionNames m_partnames;
+    StringVec1d m_outputpopnames; // in the single population case
+    // save the MIG partition names here
+    LongVec1d m_finalcount; // this is set by SetFinalPartitionCounts()
+
+  public:
+    // we accept the default ctor, copy-ctor and dtor
+
+    long AddPartition(const string& forcename, const string& partname);
+
+    long GetNPartitions(const string& forcename) const;
+    string GetPartitionName(const string& forcename, long index) const;
+    long GetPartitionNumber(const string& forcename, const string& name) const;
+    StringVec1d GetAllPartitionNames(force_type) const;
+    StringVec1d GetAllCrossPartitionNames() const;
+
+    // this function is provided to interface with tipdata objects
+    // it's also used by GetAllCrossPartitionNames which needs to pass
+    // false as an argument (it cares about partition forces with only
+    // one partition).
+    std::vector<std::map<string,string> >
+    GetCrossPartitionIds(bool ignoresinglepop = true) const;
+
+    // This function will return 0 if the membership vector is empty.
+    // It does this to interface with the BranchBuffer and Branch::ScoreEvent.
+    long GetCrossPartitionIndex(const LongVec1d& membership) const;
+    LongVec1d GetBranchMembership(long xpartindex) const;
+    std::map<string,string> GetTipId(long xpartindex) const;
+
+    long GetNPartitionForces() const;
+    long GetNCrossPartitions() const;
+    long GetNPartitions(unsigned long index) const;
+    LongVec1d GetAllNPartitions() const;
+
+    // called by ForceSummary::SummarizeData
+    void SetFinalPartitionCounts(LongVec1d pcounts);
+};
+
+#endif // PARTITION_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/prior.cpp b/src/tree/prior.cpp
new file mode 100644
index 0000000..752cc92
--- /dev/null
+++ b/src/tree/prior.cpp
@@ -0,0 +1,157 @@
+// $Id: prior.cpp,v 1.13 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+
+#include "defaults.h"
+#include "mathx.h"
+#include "paramstat.h"
+#include "prior.h"
+#include "random.h"
+#include "registry.h"
+#include "stringx.h"
+#include "ui_vars_prior.h"
+
+//------------------------------------------------------------------------------------
+
+Prior::Prior(UIVarsPrior uiprior)
+    : m_priortype(uiprior.GetPriorType()),
+      m_lowerbound(uiprior.GetLowerBound()),
+      m_upperbound(uiprior.GetUpperBound()),
+      m_binwidth(uiprior.GetBinwidth())
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    , m_samplingRate(uiprior.GetRelativeSampling())
+#endif
+{
+    //We don't worry about growth, since those are constrained to not have log priors.
+    m_lnlower = SafeLog(m_upperbound);
+    m_lnupper = SafeLog(m_lowerbound);
+}
+
+//------------------------------------------------------------------------------------
+
+Prior::Prior(ParamStatus shouldBeInvalid)
+    : m_priortype(LINEAR),
+      m_lowerbound(0),
+      m_upperbound(0),
+      m_lnlower(0),
+      m_lnupper(0),
+      m_binwidth(1)
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    , m_samplingRate(defaults::samplingRate)
+#endif
+
+{
+    assert(shouldBeInvalid.Status() == pstat_invalid);
+}
+
+//------------------------------------------------------------------------------------
+
+Prior::~Prior()
+{
+    // intentionally blank
+}
+
+//------------------------------------------------------------------------------------
+
+bool Prior::operator==(const Prior src) const
+{
+    if (m_priortype != src.m_priortype) return false;
+    if (m_lowerbound != src.m_lowerbound) return false;
+    if (m_upperbound != src.m_upperbound) return false;
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    if (m_samplingRate != src.m_samplingRate) return false;
+#endif
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+std::pair<double, double> Prior::RandomDraw() const
+{
+    Random& rnd = registry.GetRandom();
+    double newparam, newlnparam;
+    switch (m_priortype)
+    {
+        case LINEAR:
+            newparam = rnd.Float() * (m_upperbound - m_lowerbound) + m_lowerbound;
+            newlnparam = log(newparam);
+            return std::make_pair<double, double>(newparam, newlnparam);
+            break;
+        case LOGARITHMIC:
+            newlnparam = rnd.Float() * (m_lnupper - m_lnlower) + m_lnlower;
+            newparam = exp(newlnparam);
+            return std::make_pair<double, double>(newparam, newlnparam);
+            break;
+    }
+    string e = "Unknown prior type " + ToString(m_priortype) +
+        " in Parameter::DrawFromPrior";
+    throw implementation_error(e);
+} // RandomDraw
+
+//____________________________________________________________________________________
+
+std::pair<double,double> Prior::RandomDrawWithReflection(double current, double windowfraction) const
+{
+    Random& rnd = registry.GetRandom();
+    double newparam;
+    double delta = windowfraction * (m_upperbound - m_lowerbound);
+    switch (m_priortype)
+    {
+         case LINEAR:
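+            // Propose uniformly in a window of width delta centered on the
+            // current value; proposals falling outside [m_lowerbound,
+            // m_upperbound] are reflected back across the violated bound.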
+            newparam = current + (rnd.Float() - 0.5) * delta;
+            if (newparam <= m_lowerbound) 
+            {
+                newparam = - newparam + 2.0 * m_lowerbound; 
+            }
+            else if (newparam >= m_upperbound) 
+            {
+                newparam = 2.0 * m_upperbound - newparam;
+            }
+            return std::make_pair<double,double>(newparam, log(newparam));
+            break;
+         case LOGARITHMIC:
+            string e = "Cannot use prior type " + ToString(m_priortype) +
+                " in Parameter::RandomDrawWithReflection";
+            throw implementation_error(e);
+            break;
+    }
+    string e = "Unknown prior type " + ToString(m_priortype) +
+        " in Parameter::RandomDrawWithReflection";
+    throw implementation_error(e);
+} // RandomDrawWithReflection
+
+//______________________________________________________________________________________________
+
+std::pair<double, double> Prior::RandomDrawWithinBounds(double lower, double upper) const
+{
+  Random& rnd = registry.GetRandom();
+  double newparam, newlnparam;
+  double lowbound = std::max(m_lowerbound, lower);
+  double upbound = std::min(m_upperbound, upper);
+  switch (m_priortype)
+  {
+         case LINEAR:
+            newparam = rnd.Float() * (upbound - lowbound) + lowbound;
+            newlnparam = log(newparam);
+            return std::make_pair<double, double>(newparam, newlnparam);
+            break;
+         case LOGARITHMIC:
+            lowbound = log(lowbound);
+            upbound = log(upbound);
+            newlnparam = rnd.Float() * (upbound - lowbound) + lowbound;
+            newparam = exp(newlnparam);
+            return std::make_pair<double, double>(newparam, newlnparam);
+            break;
+  }
+
+  string e = "Unknown prior type " + ToString(m_priortype) +
+      " in Prior::RandomDrawWithinBounds";
+  throw implementation_error(e);
+
+} // RandomDrawWithinBounds
diff --git a/src/tree/prior.h b/src/tree/prior.h
new file mode 100644
index 0000000..affcdc5
--- /dev/null
+++ b/src/tree/prior.h
@@ -0,0 +1,67 @@
+// $Id: prior.h,v 1.9 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+//A prior is a shape.  Currently, we have two possible shapes:  a rectangle
+// in linear space, and a rectangle in log space.  Those are simple enough
+// that we don't really need a class for them, but if we want more complex
+// priors in the future (say, a gaussian curve) we will probably want this.
+// RandomDraw() encapsulates the usefulness of a prior--it picks a random
+// number within its shape, with the chance based on volume.
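+//
+// Illustrative sketch (hypothetical settings): a caller might draw from a
+// configured prior like this, where "uiprior" is assumed to describe, say, a
+// LOGARITHMIC prior on [1e-4, 1e2]:
+//
+//     Prior prior(uiprior);
+//     std::pair<double, double> draw = prior.RandomDraw();
+//     double value   = draw.first;     // the drawn parameter value
+//     double lnvalue = draw.second;    // its natural log, returned alongside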
+
+//------------------------------------------------------------------------------------
+
+#ifndef PRIOR_H
+#define PRIOR_H
+
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+#include "constants.h"
+
+class UIVarsPrior;
+class ParamStatus;
+
+//------------------------------------------------------------------------------------
+
+class Prior
+{
+  private:
+    Prior();                            // undefined
+    priortype  m_priortype;
+    double     m_lowerbound;
+    double     m_upperbound;
+    double     m_lnlower;               // speed optimization--precalculated.
+    double     m_lnupper;
+    double     m_binwidth;
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    long int   m_samplingRate;
+#endif
+
+  public:
+    Prior(UIVarsPrior uiprior);
+    Prior(ParamStatus shouldBeInvalid);
+    //Use the default copy constructor.
+    virtual ~Prior();
+    bool      operator==(const Prior src) const; // compare priors
+
+    virtual priortype GetPriorType()  const { return m_priortype; };
+    virtual double    GetLowerBound() const { return m_lowerbound; };
+    virtual double    GetUpperBound() const { return m_upperbound; };
+    virtual double    GetBinwidth()   const { return m_binwidth; };
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    virtual long      GetSamplingRate()   const { return m_samplingRate; };
+#endif
+
+    virtual std::pair<double, double> RandomDraw() const;
+    virtual std::pair<double,double> RandomDrawWithReflection(double current, double windowpercent) const;
+    virtual std::pair<double, double> RandomDrawWithinBounds(double lower, double upper) const;
+};
+
+#endif // PRIOR_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/range.cpp b/src/tree/range.cpp
new file mode 100644
index 0000000..da81e30
--- /dev/null
+++ b/src/tree/range.cpp
@@ -0,0 +1,1289 @@
+// $Id: range.cpp,v 1.50 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>                     // For debugging.
+#include <iomanip>                      // For setw().
+
+#include "local_build.h"
+#include "dynatracer.h"                 // Defines some debugging macros.
+
+#include "errhandling.h"
+#include "force.h"                      // For LocalPartitionForce combos.
+#include "range.h"
+#include "rangex.h"
+#include "region.h"
+#include "registry.h"                   // For random.bool.
+#include "stringx.h"
+#include "vectorx.h"                    // For ForceVec.
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+#ifdef ENABLE_REGION_DUMP
+#include "datatype.h"                   // For the data_type enum.
+#endif
+
+using namespace std;
+
+
+//------------------------------------------------------------------------------------
+// Class BiglinkMapitem.
+//------------------------------------------------------------------------------------
+
+#ifdef RUN_BIGLINKS
+
+BiglinkMapitem::BiglinkMapitem(long int lowerlink, long int upperlink, double linkweight)
+    : m_lower_littlelink(lowerlink),
+      m_upper_littlelink(upperlink),
+      m_linkweight(linkweight)
+{
+    // Link arguments (first, second) refer to Littlelinks, and hence their type is SIGNED LONG INT.
+    // We reserve UNSIGNED LONG INTs to refer to Biglink indices.
+    // Intentionally blank; everything is accomplished in initialization list above.
+} // "Real" constructor for BiglinkMapitem class.
+
+#endif // RUN_BIGLINKS
+
+
+//------------------------------------------------------------------------------------
+// Class Range.
+//------------------------------------------------------------------------------------
+
+//------------------------------------------------------------------------------------
+// Setter, for resetting this protected static variable when starting processing of a new region.
+// Static: Sets static variable but may be called before any Range objects are created.
+// Needs to be here rather than in "range.h" as inline because of some include-file dependency.
+//
+// RSGFIXUP:  Range::s_numRegionSites is computed here.  Seems to be same value as Tree::m_totalSites.
+// Either merge the two variables or guarantee they track each other (or test with ASSERT that they do).
+
+void Range::SetNumRegionSites(const Region & curregion)
+{
+    s_numRegionSites = curregion.GetNumSites();
+}
+
+//------------------------------------------------------------------------------------
+// The "real" constructor for class Range.
+// This single-argument constructor is declared EXPLICIT to avoid accidental implicit conversion.
+
+Range::Range(long int nsites)
+{
+    // "s_numRegionSites" should change ONLY when we begin processing a new region,
+    // and it is set then by Range::SetNumRegionSites() before we ever create a Range/RecRange.
+    // So we have nothing to do here.  All we do is check argument consistency via ASSERT.
+#if 1
+    DebugAssert2(nsites == s_numRegionSites, nsites, s_numRegionSites);
+#else // Equivalent to DebugAssert2 above, in case it is removed later.
+    assert(nsites == s_numRegionSites);
+#endif
+
+} // "Real" constructor for Range class.
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+Range * Range::Clone() const
+{
+    return new Range(*this);
+} // Range::Clone
+
+//------------------------------------------------------------------------------------
+// This copy constructor is callable only by Range::Clone.
+
+Range::Range(const Range & src)
+{
+    // Intentionally blank; everything is accomplished in initialization list above.
+} // Copy constructor for Range class.
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+// A non-recombinant Range is defined only by the total number of sites ("s_numRegionSites" is its only
+// member variable), and that value (the length of the sequence) is a fixed property of the input data,
+// for a given Region (and all Ranges are valid only within a single Region at a time).
+// In this sense, all non-recombinant Ranges are equal.
+
+bool Range::operator==(const Range & other) const
+{
+    return true;
+} // Range::operator==
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void Range::UpdateCRange(const Range * const child1rangeptr, const Range * const child2rangeptr,
+                         const rangeset & fcsites, bool dofc)
+{
+    // Intentionally empty; nothing to do in non-recombinant case.
+} // Range::UpdateCRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void Range::UpdateRootRange(const Range * const child1rangeptr, const Range * const child2rangeptr,
+                            const rangeset & fcsites, bool dofc)
+{
+    // Intentionally empty; nothing to do in non-recombinant case.
+} // Range::UpdateRootRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void Range::UpdateOneLeggedCRange(const Range * const childrangeptr)
+{
+    // Intentionally empty; nothing to do in non-recombinant case.
+} // Range::UpdateOneLeggedCRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void Range::UpdateOneLeggedRootRange(const Range * const childrangeptr)
+{
+    // Intentionally empty; nothing to do in non-recombinant case.
+} // Range::UpdateOneLeggedRootRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void Range::UpdateMRange(const Range * const child1rangeptr)
+{
+    // Intentionally empty; nothing to do in non-recombinant case.
+} // Range::UpdateMRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+// Polymorphic - Used in recombinant case; should never be called (thus ASSERTs) in non-recombinant case.
+
+void Range::UpdateRRange(const Range * const child1rangeptr, const rangeset & fcsites, bool dofc)
+{
+    assert(false);
+} // Range::UpdateRRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void Range::ClearNewTargetLinks()
+{
+    // Intentionally empty; nothing to do in non-recombinant case.
+} // Range::ClearNewTargetLinks
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void Range::SetOldInfoToCurrent()
+{
+    // Intentionally empty; nothing to do in non-recombinant case.
+} // Range::SetOldInfoToCurrent()
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void Range::ResetOldTargetSites(const rangeset & fcsites)
+{
+    // Intentionally empty; nothing to do in non-recombinant case.
+} // Range::ResetOldTargetSites
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+// Polymorphic - Used in recombinant case; should never be called (thus ASSERTs) in non-recombinant case.
+
+bool Range::AreChildTargetSitesTransmitted(const Range * const childrangeptr, const rangeset & fcsites) const
+{
+    assert(false);                      // Why are we asking this in a non-recombinant case?
+    return false;                       // To silence compiler warning.
+} // Range::AreChildTargetSitesTransmitted
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+// Called only by debugging function; virtual (must work differently on recombinant and non-recombinant Ranges).
+
+bool Range::SameAsChild(const Range * const childrangeptr) const
+{
+    return true;
+} // Range::SameAsChild
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+// Virtual function (Range and RecRange).
+// Prints in "internal" units, not "user" units.
+
+void Range::PrintLive() const
+{
+    cerr << "Live Sites:             " << ToString(MakeRangeset(0L, s_numRegionSites)) << endl;
+} // Range::PrintLive
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+// Virtual function (Range and RecRange).
+
+void Range::PrintInfo() const
+{
+    cerr << "Range::PrintInfo() ..." << endl << endl;
+    cerr << "Total number of Sites in Region:   " << s_numRegionSites << endl;
+    cerr << "All Sites transmitted in non-recombinant Range." << endl;
+    PrintLive();
+    cerr << "No Current Target Links in a non-recombinant Range." << endl;
+    cerr << "No New Target Links in a non-recombinant Range." << endl;
+    cerr << "No Old Target Sites in a non-recombinant Range." << endl;
+    cerr << "No Old Target Links in a non-recombinant Range." << endl << endl;
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+// Virtual function (Range and RecRange).
+
+void Range::PrintNewTargetLinks() const
+{
+    cerr << "There are no Newly-Targetable Links in a non-recombinant Range." << endl;
+} // Range::PrintNewTargetLinks
+
+
+//------------------------------------------------------------------------------------
+// Class RecRange.
+//------------------------------------------------------------------------------------
+
+//------------------------------------------------------------------------------------
+// Builds the Biglink Vector Map from recombination-visible weighted-span Biglinks
+// to inter-site Littlelinks.  Defined only in the Biglink implementation.
+// Static member function (RecRange-only, but called before first object is created).
+
+#ifdef RUN_BIGLINKS
+
+void RecRange::BuildBiglinkMap(const Region & curregion)
+{
+    const long int numloci(curregion.GetNloci());
+    const long int numtips(curregion.GetNTips());  // All loci in region have data for same number of tips.
+
+#ifdef ENABLE_BIGLINKMAP_DUMP
+    cerr << "Region Name (name of Region):   \"" << curregion.GetRegionName() << "\"" << endl;
+    cerr << "Number of Sites in the Region:  " << curregion.GetNumSites() << endl;  // All sites, whether in a locus or between them.
+    cerr << "Number of Loci in this Region:  " << numloci << endl;
+    cerr << "Number of Tips for each Locus:  " << numtips << endl << endl;
+#endif // ENABLE_BIGLINKMAP_DUMP
+
+    // Initialize a Biglink Associative Set to an empty set.  This is used locally in this function to store
+    // the lower index of a candidate Biglink.  After all loci have been scanned and all Biglink lowerbound
+    // Littlelink indices entered, the items in this set are used to construct a vector (the Biglink Vector Map)
+    // of BiglinkMapitem objects.  This vector is stored as the value of a static member variable for use by the
+    // rest of the program.  The map construction is done anew for each Region, as each map is valid only per-Region.
+    //
+    // Note that this ASSOCIATIVE map will store one entry for each SITE (either EVERY site for EMULATE_LITTLELINKS
+    // mode or one VARIANT or DISEASE site for regular RUN_BIGLINKS non-emulation mode).  When the VECTOR Biglink Map
+    // is constructed, the one-fewer entries in it are constructed from the successive differences in the values
+    // stored into adjacent entries in this ASSOCIATIVE map.  Thus in EMULATE_LITTLELINKS mode the total number of
+    // LITTLELINKS will be one less than the total number of SITES in the Region, while in regular RUN_BIGLINKS mode
+    // the total number of BIGLINKS will be one less than the total number of VARIANT SITES plus DISEASE SITE(S) in
+    // the Region.
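+    //
+    // Worked example (hypothetical indices): if the variant (plus disease) sites found in a Region were
+    // {2, 7, 9}, the associative set would hold those three values, and the resulting Biglink Vector Map
+    // would hold two entries: [2,7) with weight 5 and [7,9) with weight 2 (for now, a weight is simply the
+    // Littlelink count of the span, since the per-Littlelink recombination probability is taken as unity).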
+    BiglinkAssocSet biglinkAssocSet;
+    BiglinkAssocSet::iterator setiter(biglinkAssocSet.end());
+
+#ifdef EMULATE_LITTLELINKS
+    //
+    // To emulate Littlelinks, we must iterate over ALL SITES in the REGION, not over all MARKERS in the LOCUS.
+    // We must allocate a Map of size the total number of Littlelinks (one less than total number of sites in Region)
+    // and store ONE BIGLINK PER SLOT in this map, with trivial component values.
+    const long int numsites(curregion.GetNumSites());
+    for (long int siteindex = 0L; siteindex < numsites; ++siteindex)
+    {
+        // "Hint" form of insert(), using END iterator.  The old value of "setiter" is used as the hint,
+        // since within a locus we scan in increasing site-index order.  The updated "setiter" points to the
+        // site index just inserted and will be used as the hint for the next insertion.
+        setiter = biglinkAssocSet.insert(setiter, siteindex);
+    }
+    //
+#else // EMULATE_LITTLELINKS
+    //
+    // If disease site(s) are present, then include them; ie, treat them as variant sites in map construction.
+    if (registry.GetForceSummary().CheckForce(force_DISEASE))
+    {
+        const ForceSummary & fs(registry.GetForceSummary());
+        //
+        if (fs.GetNLocalPartitionForces() > 0)
+        {
+#ifdef ENABLE_BIGLINKMAP_DUMP
+            cerr << "BuildBiglinkMap: Disease Site(s) present ..." << endl;
+#endif
+            //
+            const ForceVec lpforces(fs.GetLocalPartitionForces());
+            ForceVec::const_iterator lpforce;
+            for (lpforce = lpforces.begin(); lpforce != lpforces.end(); ++lpforce)
+            {
+                const LocalPartitionForce * lpf(dynamic_cast<const LocalPartitionForce *>(* lpforce));
+                assert(lpf);            // Make sure pointer is non-NULL.
+                const long int disease_site(lpf->GetLocalSite());
+                biglinkAssocSet.insert(setiter, disease_site);
+                //
+#ifdef ENABLE_BIGLINKMAP_DUMP
+                cerr << "Disease Site at index: " << disease_site << endl;
+#endif
+            }
+            //
+#ifdef ENABLE_BIGLINKMAP_DUMP
+            cerr << endl;
+#endif
+        }
+    }
+    //
+    for (long int locIdx = 0L; locIdx < numloci; ++locIdx)
+    {
+        const Locus & locus(curregion.GetLocus(locIdx));
+        const long int nmarkers(locus.GetNmarkers());
+        const LongVec1d markerVector(locus.GetMarkerLocations());
+        const vector<TipData> tipdatavec(locus.GetTipData());
+        const DataType_ptr datatype_ptr(locus.GetDataTypePtr());
+        //
+#ifdef ENABLE_BIGLINKMAP_DUMP
+        const data_type datatype_enum(locus.GetDataType());
+        bool found_marker = false;
+        cerr << "Name of this Locus in Region:   " << locus.GetName() << endl;
+        cerr << "Index of this Locus in Region:  " << locIdx << " (" << locIdx + 1 << " of " << numloci << ")" << endl;
+        cerr << "DataType stored in this Locus:  " << datatype_enum << " (type " << ToString(datatype_enum) << ")" << endl;
+        cerr << "Number of Sites in this Locus:  " << locus.GetNsites() << endl;
+        cerr << "Number of Markers, this Locus:  " << nmarkers << endl;
+        cerr << "RegionalMapPosition for Locus:  " << locus.GetRegionalMapPosition() << endl << endl;
+#endif
+        //
+        // Compare each tip vector (>= 1) with the zeroth.  If any pair of markers compares "non-IsEquivalent()",
+        // consider the site as VARIANT and compute the weighted distance (Littlelink count, for now) to the
+        // preceding such variant site.  Note that this distance is meaningful only BETWEEN variant sites;
+        // the first and last variant sites have no Biglinks toward the "outside".  Note also that this
+        // distance applies across ALL loci (ie, between last variant site in one locus and first in the
+        // next locus).  These distances must be reflected back to actual sequence site indices (as stored
+        // in vector returned by GetMarkerLocations) rather than in per-locus site indices.
+        for (long int markerindex = 0L; markerindex < nmarkers; ++markerindex)
+        {
+            const long int markerLocationSiteIndex(markerVector[markerindex]);
+            const string tipZeroMarker(tipdatavec[0].data[markerindex]);
+            for (long int tipindex = 1; tipindex < numtips; ++tipindex)
+            {
+                // If we find a variant site, insert the site index into the set.  At this point we don't know
+                // whether there are variant sites "above" or "below" this one, because there might be more loci
+                // "above" or "below" the one being processed.  So we will scan the entire set when done.
+                // If emulating Littlelinks, every Littlelink defines a Biglink.
+                // For native Biglink operation, only variant sites (and disease) define Biglink boundaries.
+                if ( ! datatype_ptr->IsEquivalent(tipZeroMarker, tipdatavec[tipindex].data[markerindex]) )
+                {
+#ifdef ENABLE_BIGLINKMAP_DUMP
+                    // The "setw" manipulator works "per output field" and so must be re-applied each time needed.
+                    found_marker = true;
+                    cerr << "Compare: " << tipZeroMarker << ":" << tipdatavec[tipindex].data[markerindex]
+                         << " at Tip " << setw(4) << tipindex << ", Tip_Array_Idx " << setw(4) << markerindex
+                         << ", Region_Site_Idx " << setw(4) << markerLocationSiteIndex
+                         << "; Inserting: " << setw(4) << markerLocationSiteIndex << endl;
+#endif
+                    //
+                    // "Hint" form of insert(), using END iterator.  The old value of "setiter" is used as the hint,
+                    // since within a locus we scan in increasing site-index order.  The updated "setiter" points to the
+                    // site index just inserted and will be used as the hint for the next insertion.
+                    setiter = biglinkAssocSet.insert(setiter, markerLocationSiteIndex);
+                    break;              // Exit inner loop once the first variant marker pair is discovered.
+                }
+            }
+        }
+#ifdef ENABLE_BIGLINKMAP_DUMP
+        if (found_marker)
+        {
+            cerr << endl;
+        }
+#endif
+    }
+    //
+#endif // EMULATE_LITTLELINKS
+
+    // (Re)initialize the Biglink Vector Map to an empty vector.  This must be done afresh for each Region processed.
+    s_biglink_vectormap.clear();
+
+    // Reserve capacity for exactly the known needed size, which is one less than the number of variant sites
+    // (including disease site(s)) discovered, since Biglinks span BETWEEN adjacent variant (or disease) sites.
+    // The Map had better be of non-zero size, ie, the associative set must hold at least two entries.  Note that
+    // the "size() - 1" subtraction below is UNSIGNED and would wrap around (rather than yield zero) on an empty
+    // set, so we assert on the set size itself.  This ASSERT will catch errors that can lead to an empty map.
+    // If this happens, we need an upstream diagnostic in the user-input phase to detect and report the situation
+    // rather than crashing here.  But this is temporary, for debugging.
+    assert(biglinkAssocSet.size() > 1UL);
+    unsigned long int biglink_map_size(biglinkAssocSet.size() - 1);
+    //
+    // Assuming everything is OK, let's allocate and fill up the Map.
+    s_biglink_vectormap.reserve(biglink_map_size);
+
+#ifdef ENABLE_BIGLINKMAP_DUMP
+    cerr << "Biglink Vector Map size:  " << biglink_map_size << endl;
+    cerr << "Biglink Vector Map (<lower_index> : <upper_index> : <biglink_weight>) ..." << endl << endl;
+#endif
+
+    // Now sweep the set accumulating Biglink start points ("low" end of a section between two variant sites, whether
+    // in the same locus or between the "highest" variant in one locus and the "lowest" in the next "higher" locus).
+    // Each Biglink interval is indexed in the Biglink Associative Set by the site index of its LOWER end.
+    long int lower_index(FLAGLONG);
+    long int mapindex(0L);
+    for (BiglinkAssocSet::iterator setiter(biglinkAssocSet.begin()) ; setiter != biglinkAssocSet.end() ; ++setiter)
+    {
+        const long int upper_index(*setiter);
+        // If there IS a variant site "before" the current entry (indicated by "lower_index" being non-negative,
+        // that is not equal to FLAGLONG), record the distance.  There is no Biglink below the "start" end of the
+        // first variant (or disease) site in the region.
+        if (lower_index != FLAGLONG)
+        {
+            // Calculate range (Littlelink count) between variant sites.  This (times a recombination probability
+            // of unity for now) provides the Link weight for this BiglinkMapitem object.
+            const double linkweight(1.0 * static_cast<double>(upper_index - lower_index));
+
+#ifdef ENABLE_BIGLINKMAP_DUMP
+            cerr << "Map item " << setw(4) << mapindex << ": " << setw(4) << lower_index
+                 << " : " << setw(4) << upper_index << " : " << linkweight << endl;
+#endif
+
+            // Now construct a BiglinkMapitem object and enter it (push_back) into the Biglink Vector Map.
+            s_biglink_vectormap.push_back(BiglinkMapitem(lower_index, upper_index, linkweight));
+            ++mapindex;                 // Increment index after map item written (not so on first iteration).
+        }
+
+        lower_index = upper_index;      // Update the index for the next iteration.
+    }
+
+#ifdef ENABLE_BIGLINKMAP_DUMP
+    cerr << endl;
+#endif
+
+} // RecRange::BuildBiglinkMap
+
+#endif // RUN_BIGLINKS
+
+//------------------------------------------------------------------------------------
+// The "real" constructor for class RecRange.
+
+RecRange::RecRange(long int nsites, const rangeset & diseasesites, const rangeset & transmittedsites,
+                   const rangeset & livesites, const linkrangeset & curtargetlinks, const linkrangeset & oldtargetlinks,
+                   const rangeset & oldtargetsites, const rangeset & oldlivesites)
+    : Range(nsites),
+      m_liveSites(livesites),
+      m_diseaseSites(diseasesites),
+      m_transmittedSites(transmittedsites),
+      m_curTargetLinks(curtargetlinks),
+      m_curTargetLinkweight(AccumulatedLinkweight(curtargetlinks)),
+      m_oldTargetSites(oldtargetsites),
+      m_oldLiveSites(oldlivesites),
+      m_oldTargetLinks(oldtargetlinks),
+      m_newTargetLinks(RemoveRangeFromRange(oldtargetlinks, m_curTargetLinks)),
+      m_newTargetLinkweight(AccumulatedLinkweight(m_newTargetLinks))
+{
+    // Intentionally blank; everything is accomplished in initialization list above.
+} // "Real" constructor for RecRange class.
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+Range * RecRange::Clone() const
+{
+    return new RecRange(*this);
+} // RecRange::Clone
+
+//------------------------------------------------------------------------------------
+// This copy constructor is callable only by RecRange::Clone.
+
+RecRange::RecRange(const RecRange & src)
+    : Range(src),
+      m_liveSites(src.m_liveSites),
+      m_diseaseSites(src.m_diseaseSites),
+      m_transmittedSites(src.m_transmittedSites),
+      m_curTargetLinks(src.m_curTargetLinks),
+      m_curTargetLinkweight(src.m_curTargetLinkweight),
+      m_oldTargetSites(src.m_oldTargetSites),
+      m_oldLiveSites(src.m_oldLiveSites),
+      m_oldTargetLinks(src.m_oldTargetLinks),
+      m_newTargetLinks(src.m_newTargetLinks),
+      m_newTargetLinkweight(src.m_newTargetLinkweight)
+{
+    // Intentionally blank; everything is accomplished in initialization list above.
+} // Copy constructor for RecRange class.
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+// The argument "other" (a reference to a Range object) is cast to RecRange so that ITS "m_liveSites"
+// (and the other members compared below) are accessed, rather than the members of the THIS object.
+
+bool RecRange::operator==(const Range & other) const
+{
+    return (m_liveSites == dynamic_cast<const RecRange &>(other).m_liveSites &&
+            m_transmittedSites == dynamic_cast<const RecRange &>(other).m_transmittedSites &&
+            m_curTargetLinks == dynamic_cast<const RecRange &>(other).m_curTargetLinks);
+
+} // RecRange::operator==
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).  Called only on recombinant trees, branches, RecRanges.
+//
+// Assume that the transmitted sites set is one contiguous set (ie, a single recombination point
+// divides the full set into two sets, each represented by a lower-closed and upper-open interval.
+// If recombination happens, one of those sets is transmitted and the other is not.  In this case,
+// this function returns the Littlelink index of the link separating those two sets.  There are
+// three cases:
+//
+//   A: No recombination; ALL sites are transmitted - lowest has index 0, high open endpoint
+//      (one above highest transmitted) has index = total number of sites.  Return FLAGLONG
+//      (not a valid value for ANY site or link index) to indicate NO RECOMBINATION.
+//
+//   B: Recombination divides set; LOW sites ARE transmitted; HIGH sites are NOT.  Recpoint
+//      Littlelink is the one just above highest transmitted site, which has index of one less
+//      than index of open upper endpoint of transmitted sites interval.  Return Littlelink index.
+//
+//   C: Recombination divides set; HIGH sites ARE transmitted; LOW sites are NOT.  Recpoint
+//      Littlelink is the one just below lowest transmitted site, which has same index as
+//      highest site NOT transmitted, which is same as index of lowest site TRANSMITTED
+//      MINUS ONE.  Return Littlelink index.
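+//
+// Worked example (hypothetical; assume a Region with s_numRegionSites == 10):
+//
+//   Case A: m_transmittedSites == { [0,10) }  ==>  returns FLAGLONG (no recombination).
+//   Case B: m_transmittedSites == { [0,4) }   ==>  returns 3 (the Littlelink just above site 3).
+//   Case C: m_transmittedSites == { [4,10) }  ==>  returns 3 (the Littlelink just below site 4).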
+
+long int RecRange::GetRecpoint() const
+{
+    if (m_transmittedSites.begin()->first == 0) // LOW sites YES, HIGH sites YES or NO.
+    {
+        if (m_transmittedSites.begin()->second == s_numRegionSites) // HIGH sites YES.
+        {
+            // ALL sites ARE transmitted ==> No recombination; return flag indicating so.
+            return FLAGLONG;            // Case A.
+        }
+        else                            // HIGH sites NO.
+        {
+            // LOW sites ARE transmitted but HIGH sites are NOT; return the Littlelink
+            // to right (just above) the HIGHEST site transmitted (numbered same as that site).
+            // SECOND indexes the first site NOT transmitted (open upper end); hence the "- 1".
+            return m_transmittedSites.begin()->second - 1; // Case B.
+        }
+    }
+    else                                // LOW sites NO, HIGH sites YES.
+    {
+        // If in debug mode, check that HIGH sites are indeed transmitted.
+        assert(m_transmittedSites.begin()->second == s_numRegionSites);
+
+        // LOW sites are NOT transmitted but HIGH sites ARE; return the Littlelink to left
+        // (just below) the LOWEST site transmitted (Link is numbered same as the Site just
+        // below it, which is the HIGHEST site NOT transmitted or one less than the LOWEST
+        // site that IS transmitted).
+        return m_transmittedSites.begin()->first - 1; // Case C.
+    }
+
+} // RecRange::GetRecpoint
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void RecRange::UpdateCRange(const Range * const child1rangeptr, const Range * const child2rangeptr,
+                            const rangeset & fcsites, bool dofc)
+{
+    m_liveSites = Union(child1rangeptr->GetLiveSites(), child2rangeptr->GetLiveSites());
+
+    rangeset targetsites(m_liveSites);
+    if (dofc) targetsites = RemoveRangeFromRange(fcsites, m_liveSites);
+    targetsites = Union(m_diseaseSites, targetsites);
+    m_curTargetLinks = LinksSpanningSites(targetsites);
+    m_curTargetLinkweight = AccumulatedLinkweight(m_curTargetLinks);
+    m_newTargetLinks = RemoveRangeFromRange(m_oldTargetLinks, m_curTargetLinks);
+    m_newTargetLinkweight = AccumulatedLinkweight(m_newTargetLinks);
+
+#ifndef NDEBUG
+    TestInvariants();
+#endif
+
+} // RecRange::UpdateCRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void RecRange::UpdateRootRange(const Range * const child1rangeptr, const Range * const child2rangeptr,
+                               const rangeset & fcsites, bool dofc)
+{
+    m_liveSites = Union(child1rangeptr->GetLiveSites(), child2rangeptr->GetLiveSites());
+
+    rangeset targetsites(m_liveSites);
+    if (dofc) targetsites = RemoveRangeFromRange(fcsites, m_liveSites);
+    targetsites = Union(m_diseaseSites, targetsites);
+    m_curTargetLinks = LinksSpanningSites(targetsites);
+    m_curTargetLinkweight = AccumulatedLinkweight(m_curTargetLinks);
+
+    // All sites were live and targetable on the old root, so there are no new target sites.
+    m_newTargetLinks.clear();
+    m_newTargetLinkweight = ZERO;
+    m_oldTargetSites = GetAllSites();
+    m_oldTargetLinks = GetAllLinks();   // See "NOTA BENE" in definition of class RecRange, file "range.h".
+
+#ifndef NDEBUG
+    TestInvariants();
+#endif
+
+} // RecRange::UpdateRootRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void RecRange::UpdateOneLeggedCRange(const Range * const childrangeptr)
+{
+    m_liveSites = childrangeptr->GetLiveSites();
+
+    m_curTargetLinks = childrangeptr->GetCurTargetLinks();
+    m_curTargetLinkweight = childrangeptr->GetCurTargetLinkweight();
+
+    // As a one-legged coalescence may coincide with a change in m_oldTargetLinks,
+    // we must recalculate m_newTargetLinks here.
+    m_newTargetLinks = RemoveRangeFromRange(m_oldTargetLinks, m_curTargetLinks);
+    m_newTargetLinkweight = AccumulatedLinkweight(m_newTargetLinks);
+
+#ifndef NDEBUG
+    TestInvariants();
+#endif
+
+} // RecRange::UpdateOneLeggedCRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void RecRange::UpdateOneLeggedRootRange(const Range * const childrangeptr)
+{
+    m_liveSites = childrangeptr->GetLiveSites();
+
+    m_curTargetLinks = childrangeptr->GetCurTargetLinks();
+    m_curTargetLinkweight = childrangeptr->GetCurTargetLinkweight();
+
+    // All sites were live and targetable on the old root, so there are no new target sites.
+    m_newTargetLinks.clear();
+    m_newTargetLinkweight = ZERO;
+    m_oldTargetSites = GetAllSites();
+    m_oldTargetLinks = GetAllLinks();   // See "NOTA BENE" in definition of class RecRange, file "range.h".
+
+#ifndef NDEBUG
+    TestInvariants();
+#endif
+
+} // RecRange::UpdateOneLeggedRootRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void RecRange::UpdateMRange(const Range * const child1rangeptr)
+{
+    m_liveSites = child1rangeptr->GetLiveSites();
+
+    m_curTargetLinks = child1rangeptr->GetCurTargetLinks();
+    m_curTargetLinkweight = child1rangeptr->GetCurTargetLinkweight();
+
+    m_newTargetLinks = child1rangeptr->GetNewTargetLinks();
+    m_newTargetLinkweight = child1rangeptr->GetNewTargetLinkweight();
+
+#ifndef NDEBUG
+    TestInvariants();
+#endif // NDEBUG
+
+} // RecRange::UpdateMRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+// Called only in RBranch::UpdateBranchRange (on a recombinant branch); ASSERTs in a non-recombinant situation.
+
+void RecRange::UpdateRRange(const Range * const child1rangeptr, const rangeset & fcsites, bool dofc)
+{
+    m_liveSites = Intersection(child1rangeptr->GetLiveSites(), m_transmittedSites);
+
+    rangeset targetsites(m_liveSites);
+    if (dofc) targetsites = RemoveRangeFromRange(fcsites, targetsites);
+    targetsites = Union(m_diseaseSites, targetsites);
+
+    m_curTargetLinks = LinksSpanningSites(targetsites);
+    m_curTargetLinkweight = AccumulatedLinkweight(m_curTargetLinks);
+
+    m_newTargetLinks = RemoveRangeFromRange(m_oldTargetLinks, m_curTargetLinks);
+    m_newTargetLinkweight = AccumulatedLinkweight(m_newTargetLinks);
+
+#ifndef NDEBUG
+    TestInvariants();
+#endif // NDEBUG
+
+} // RecRange::UpdateRRange
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+// Called only in RecTree::RecombineInactive (recombinant case).
+
+bool RecRange::AreLowSitesOnInactiveBranch(long int recpoint) const
+{
+    // If all disease site(s) are on one side, that's the inactive side.
+    // For now, we consider only a single site as possibly being "diseased".
+    if ( ! m_diseaseSites.empty() )
+    {
+        // -1 because it's a half-open interval; we want the actual last site.
+        // Less-than or EQUAL because RECPOINT is indexed same as the SITE to its LEFT.
+        return (m_diseaseSites.rbegin()->second - 1) <= recpoint;
+    }
+
+    // If all old target sites are on one side, that's the inactive side.
+    if ( ! m_oldTargetSites.empty() )
+    {
+        // There are some old target sites, and if ALL of them are on one side, that's the inactive side.
+        bool recpointBelowLowestOldTargetSite = (recpoint < (m_oldTargetSites.begin()->first));
+        bool recpointAboveHighestOldTargetSite = ((m_oldTargetSites.rbegin()->second - 1) <= recpoint);
+
+        // Assert that all the old target sites are on the same side of the recpoint.
+        assert( recpointBelowLowestOldTargetSite || recpointAboveHighestOldTargetSite );
+
+        // If all the old target sites are on the low side, the low side is the inactive side.
+        return recpointAboveHighestOldTargetSite;
+    }
+
+    // Beyond this point, for the branch to be valid (and thus not pruned out by now):
+    //    (1) Final Coalescence must be activated,
+    //    (2) Some old live sites must exist on this branch,
+    //    (3) SOME of those old live sites must have been FC on the previous arrangement, and
+    //    (4) ALL of those old live sites must have been FC.
+    // In other words, ALL non-DEAD sites on this branch must have been FC on previous arrangement.
+    // Since we don't store FC sites from previous arrangements, we can't test those conditions.
+    //
+#ifndef NDEBUG                          // We will ASSERT here, so may as well print a message as to why.
+    if ( ! FINAL_COALESCENCE_ON || m_oldLiveSites.empty() )
+    {
+        DebugPrint2(FINAL_COALESCENCE_ON, recpoint);
+        PrintInfo();
+        assert( FINAL_COALESCENCE_ON );      // Final Coalescence must be activated.
+        assert( ! m_oldLiveSites.empty() );  // SOME old live sites must exist on this branch.
+    }
+#endif
+
+    // There are some old live sites (which have gone FC), and if ALL are on one side, that's the inactive side.
+    bool recpointBelowLowestOldLiveSite = (recpoint < (m_oldLiveSites.begin()->first));
+    bool recpointAboveHighestOldLiveSite = ((m_oldLiveSites.rbegin()->second - 1) <= recpoint);
+    if (recpointBelowLowestOldLiveSite || recpointAboveHighestOldLiveSite)
+    {
+        return recpointAboveHighestOldLiveSite;
+    }
+
+    // But if they are NOT all on the same side, then we make a 50/50 random choice of inactive side.
+    return registry.GetRandom().Bool();
+
+} // RecRange::AreLowSitesOnInactiveBranch
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+bool RecRange::AreDiseaseSitesTransmitted() const
+{
+    if (m_diseaseSites.empty()) return true;
+
+    for (set<rangepair>::const_iterator ri = m_diseaseSites.begin(); ri != m_diseaseSites.end(); ++ri)
+    {
+        // RSGNOTE:  If there is only a single disease site, this works.  But we may want it to work
+        // in general for arbitrary sets of (multiple) disease sites.  Will have to fix it then.
+        if (!IsInRangeset(m_transmittedSites, ri->first)) return false;
+    }
+
+    return true;
+
+} // RecRange::AreDiseaseSitesTransmitted
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void RecRange::ClearNewTargetLinks()
+{
+    m_newTargetLinks = linkrangeset();
+    m_newTargetLinkweight = ZERO;
+
+} // RecRange::ClearNewTargetLinks
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void RecRange::SetOldInfoToCurrent()
+{
+    m_oldLiveSites = m_liveSites;
+    m_oldTargetLinks = m_curTargetLinks;
+
+} // RecRange::SetOldInfoToCurrent()
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+void RecRange::ResetOldTargetSites(const rangeset & fcsites)
+{
+    m_oldTargetSites = Union(m_diseaseSites, RemoveRangeFromRange(fcsites, m_liveSites));
+} // RecRange::ResetOldTargetSites
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+
+bool RecRange::AreChildTargetSitesTransmitted(const Range * const childrangeptr, const rangeset & fcsites) const
+{
+    rangeset childtargetsites(Union(childrangeptr->GetDiseaseSites(),
+                                    RemoveRangeFromRange(fcsites, childrangeptr->GetLiveSites())));
+    rangeset included =  Intersection(childtargetsites, m_transmittedSites);
+    return (!included.empty());
+
+} // RecRange::AreChildTargetSitesTransmitted
+
+//------------------------------------------------------------------------------------
+// Virtual function (Range and RecRange).
+// Called only by debugging function; virtual (must work differently on recombinant and non-recombinant Ranges).
+
+bool RecRange::SameAsChild(const Range * const childrangeptr) const
+{
+    return m_liveSites == childrangeptr->GetLiveSites() && m_curTargetLinks == childrangeptr->GetCurTargetLinks();
+} // RecRange::SameAsChild
+
+//------------------------------------------------------------------------------------
+// Utility function in RecRange class only.
+// Accumulates the total weight represented by a LINKRANGESET of Links.
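+// Worked example (hypothetical weights): given a Biglink Vector Map holding two entries with weights 5.0 and 2.0,
+// the linkrangeset { [0,2) } accumulates 5.0 + 2.0 = 7.0 in the Biglink build; in the Littlelink build the same
+// linkrangeset simply counts its two Littlelinks, giving 2.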
+
+Linkweight RecRange::AccumulatedLinkweight(const linkrangeset & setoflinks)
+{
+    if (setoflinks.empty()) return ZERO;
+
+    Linkweight recweight(ZERO);
+    for (linkrangesetconstiter link_iter = setoflinks.begin() ; link_iter != setoflinks.end() ; ++link_iter)
+    {
+#ifdef RUN_BIGLINKS
+        // Biglink version is static member function; needs access to Biglink Vector Map.
+        const unsigned long int lower_index(link_iter->first);
+        const unsigned long int upper_index(link_iter->second);
+        //
+#if 1
+        // Since "lower_index" is UNSIGNED, "lower_index >= 0" is trivially true; only the upper-bound check below is needed.
+        DebugAssert2(lower_index < s_biglink_vectormap.size(), lower_index, s_biglink_vectormap.size());
+#else   // Equivalent to DebugAssert2 above, in case it is removed later.
+        assert(lower_index < s_biglink_vectormap.size());
+#endif
+        //
+#if 1
+        // "upper_index" is the "one-beyond" value; it is the limit at which a vector index value becomes invalid.
+        // Since "upper_index" is UNSIGNED, "upper_index >= 0" is trivially true; only the upper-bound check below is needed.
+        DebugAssert2(upper_index <= s_biglink_vectormap.size(), upper_index, s_biglink_vectormap.size());
+#else   // Equivalent to DebugAssert2 above, in case it is removed later.
+        assert(upper_index <= s_biglink_vectormap.size());
+#endif
+        //
+        for (unsigned long int index = lower_index ; index < upper_index ; ++index)
+        {
+            recweight += s_biglink_vectormap[index].GetBiglinkWeight();
+        }
+        //
+#else   // RUN_BIGLINKS
+        //
+        // Littlelink version simply counts number of Littlelinks in its input LINKRANGESET.
+        recweight += link_iter->second - link_iter->first;
+        //
+#endif  // RUN_BIGLINKS
+    }
+
+#if 0
+    DebugPrint1(recweight);
+#endif // 0
+
+    return recweight;
+
+} // RecRange::AccumulatedLinkweight
+
+//------------------------------------------------------------------------------------
+// Utility function in RecRange class only.
+// Static member function; needs access to Biglink Vector Map but is otherwise independent of RecRange objects.
+//
+// Returns a LINKRANGESET consisting of a singleton LINKRANGEPAIR denoting a set of Biglinks spanning the input sites.
+//
+// Disease sites (if any) are treated as variant sites in construction of the Biglink Map.
+// Therefore, the output is the interval spanning from the lowest Biglink bounded (below) by a variant or disease
+// site to the highest Biglink bounded (above) by such a site, given the RANGESET of target sites presented as input.
+//
+// "Spanning" means returning the LINKRANGESET consisting of a single LINKRANGEPAIR whose first element denotes the Biglink
+// bounded (on its LOWER edge) by the LOWEST included target site and whose second element denotes the Biglink
+// bounded (on its UPPER edge) by the HIGHEST included target site (or by the FIRST EXCLUDED target site on its
+// upper boundary, using "half-open interval terminology").  That "upper Biglink", again using "half-open interval
+// terminology", has as its index (in the Biglink Vector Map) the index of the FIRST EXCLUDED Biglink, the one
+// just above the "last INCLUDED" Biglink.  Such an "upper Biglink" would be non-existent, that is, identified by
+// the index of the "just beyond the end" vector entry in the Map, if the LINKRANGEPAIR second element is denoting the
+// highest-indexed entry in the Map.
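+//
+// Worked example (hypothetical indices): suppose the Biglink Vector Map holds three entries spanning Littlelinks
+// [2,7), [7,9), and [9,14) (variant or disease sites at 2, 7, 9, and 14).  For the input target rangeset { [7,10) }
+// (sites 7 through 9), the lowest qualifying entry is map index 1 and the first excluded entry is map index 2,
+// so the function returns the LINKRANGESET { [1,2) }, ie, the single Biglink [7,9).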
+
+#ifdef RUN_BIGLINKS
+
+linkrangeset RecRange::LinksSpanningSites(const rangeset & targetsites)
+{
+    linkrangeset curtargetlinks;        // Create an empty LINKRANGESET.
+
+    if (targetsites.empty())
+    {
+        return curtargetlinks;          // If no targetsites, return the empty LINKRANGESET.
+    }
+    else
+    {
+        const long int lower_targetsite(targetsites.begin()->first);       // Lowest INCLUDED target site.
+        const long int upper_targetsite(targetsites.rbegin()->second - 1); // Highest target site (LAST-INCLUDED).
+        const unsigned long int mapend_index(s_biglink_vectormap.size());  // Excluded upper map end index.
+
+        // If the First-to-Last INCLUDED site indices span a non-empty interval and are in the correct order, proceed.
+        if (lower_targetsite < upper_targetsite)
+        {
+            unsigned long int track_biglinkmap_idx;               // Tracks from below here.
+            unsigned long int lower_biglinkmap_idx(mapend_index); // Included lower index starts at high (EXCLUDED) end.
+            unsigned long int upper_biglinkmap_idx(0UL);          // Excluded upper index starts at low (INCLUDED) end.
+
+            // Seek up from bottom; find lowest Biglink whose low end is at or above low end of target site interval.
+            BiglinkVectormap::const_iterator vecmapiter;
+            //
+            // "track_biglinkmap_idx" here finds the first (from the bottom) BiglinkMapitem entry
+            // whose lower-end Littlelink index is EQUAL TO or ABOVE "lower_targetsite".
+            //
+            for (vecmapiter = s_biglink_vectormap.begin(), track_biglinkmap_idx = 0UL;
+                 vecmapiter != s_biglink_vectormap.end();
+                 ++vecmapiter, ++track_biglinkmap_idx)
+            {
+                if (vecmapiter->GetBiglinkLowerLittlelink() >= lower_targetsite)
+                {
+                    lower_biglinkmap_idx = track_biglinkmap_idx;
+                    break;
+                }
+            }
+
+            // Seek down from top; find highest Biglink whose high end is at or below high end of target site interval.
+            BiglinkVectormap::const_reverse_iterator revmapiter;
+            //
+            // "track_biglinkmap_idx" here tracks the first (from the top) BiglinkMapitem entry whose upper-end
+            // Littlelink index is BELOW or EQUAL TO "upper_targetsite".  It is initialized to the "one-beyond" or
+            // "FIRST-EXCLUDED" value (size of Biglink Vector Map) and decrements from there; "revmapiter" starts
+            // with the highest INCLUDED Biglink and decrements.  Thus when the IF triggers, the value assigned to
+            // "upper_biglinkmap_idx" will be the index "one-beyond" that Biglink, ie, a "FIRST-EXCLUDED" value.
+            //
+            for (revmapiter = s_biglink_vectormap.rbegin(), track_biglinkmap_idx = mapend_index;
+                 revmapiter != s_biglink_vectormap.rend();
+                 ++revmapiter, --track_biglinkmap_idx)
+            {
+                if (revmapiter->GetBiglinkUpperLittlelink() <= upper_targetsite)
+                {
+                    upper_biglinkmap_idx = track_biglinkmap_idx;     // Already is FIRST-EXCLUDED value; don't add one.
+                    break;
+                }
+            }
+
+            // "lower_biglinkmap_idx" points to lower (INCLUDED) BiglinkMapitem entry and "upper_biglinkmap_idx" points
+            // to upper (EXCLUDED) BiglinkMapitem entry of a contiguous set of Biglinks spanning the target sites.
+            if (lower_biglinkmap_idx < upper_biglinkmap_idx)
+            {
+                // Normally, these endpoints will be in the proper order (this branch of IF), and we
+                // return a LINKRANGESET consisting of the single LINKRANGEPAIR spanning that interval.
+                linkrangepair newlinks(lower_biglinkmap_idx, upper_biglinkmap_idx);
+                return AddPairToRange(newlinks, curtargetlinks);
+            }
+            else
+            {
+                // But it is possible (if enough sites are removed due to going FC that ALL target sites are inside
+                // a single Biglink) that the "lowest above low end" Biglink is ABOVE the "highest below high end"
+                // Biglink.  In that case, NO Biglinks "span" the target site set, and we return an empty rangeset.
+                return curtargetlinks;
+            }
+        }
+        else
+        {
+            // Sites ("targetsites") exist but don't span anything (maybe the set consists of only a single site);
+            // return an empty LINKRANGESET again.
+            return curtargetlinks;
+        }
+    }
+} // RecRange::LinksSpanningSites
+
+#else // RUN_BIGLINKS
+
+// Also static, since called in RecRange namespace and without reference to an object,
+// although Littlelink version does not need access to any static member variables.
+
+linkrangeset RecRange::LinksSpanningSites(const rangeset & targetsites)
+{
+    linkrangeset curtargetlinks;
+
+    // If the targetsites are empty we really do want to return an empty LINKRANGESET.
+    if (!targetsites.empty())
+    {
+        const unsigned long int firstsite(targetsites.begin()->first);
+        const unsigned long int lastsite(targetsites.rbegin()->second - 1);
+
+        if (firstsite < lastsite)
+        {
+            linkrangepair newlinks(firstsite, lastsite);
+            curtargetlinks = AddPairToRange(newlinks, curtargetlinks);
+        }
+    }
+
+    return curtargetlinks;
+} // RecRange::LinksSpanningSites
+
+#endif // RUN_BIGLINKS
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+// Virtual function (Range and RecRange).
+// Prints in "internal" units, not "user" units.
+
+void RecRange::PrintLive() const
+{
+    cerr << "Live Sites:             " << ToString(m_liveSites) << endl;
+} // RecRange::PrintLive
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+// Virtual function (Range and RecRange).
+// Prints in "internal" units, not "user" units.
+
+void RecRange::PrintInfo() const
+{
+    cerr << "RecRange::PrintInfo() ..." << endl << endl;
+    cerr << "Total number of Sites:  " << s_numRegionSites << endl;
+    cerr << "Disease Sites:          " << ToString(m_diseaseSites) << endl;
+    cerr << "Transmitted Sites:      " << ToString(m_transmittedSites) << endl;
+
+    PrintLive();
+
+    cerr << "Old Live Sites:         " << ToString(m_oldLiveSites) << endl;
+    cerr << "Current Target Links:   ";
+    PrintLinks(m_curTargetLinks);
+
+    PrintNewTargetLinks();
+
+    cerr << "Old Target Sites:       " << ToString(m_oldTargetSites) << endl;
+    cerr << "Old Target Links:       ";
+    PrintLinks(m_oldTargetLinks);
+
+    cerr << "Cur Target Linkweight:  " << m_curTargetLinkweight << endl;
+    cerr << "New Target Linkweight:  " << m_newTargetLinkweight << endl << endl;
+
+} // RecRange::PrintInfo
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+// Virtual function (Range and RecRange).
+// Prints in "internal" units, not "user" units.
+
+void RecRange::PrintNewTargetLinks() const
+{
+    cerr << "Newly Targetable Links:  ";
+    PrintLinks(m_newTargetLinks);
+
+} // RecRange::PrintNewTargetLinks
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+// Utility function (RecRange-only).
+// Static member function; needs access to Biglink Vector Map but is otherwise independent of RecRange objects.
+
+void RecRange::PrintLinks(const linkrangeset & setoflinks)
+{
+    if (setoflinks.empty())
+    {
+        cerr << "(none)" << endl;
+    }
+    else
+    {
+        linkrangesetconstiter biglink_iter = setoflinks.begin();
+
+#ifdef RUN_BIGLINKS
+        //
+        for ( ; biglink_iter != setoflinks.end() ; ++biglink_iter)
+        {
+            const unsigned long int lower_index(biglink_iter->first);
+            const unsigned long int upper_index(biglink_iter->second);
+            double weightsum(0.0);
+            long int counter(0);
+            //
+            assert(lower_index < s_biglink_vectormap.size());
+            assert(upper_index <= s_biglink_vectormap.size());
+            //
+            cerr << "Biglink, map index range [" << lower_index << "," << upper_index << "), includes these Littlelinks:" << endl;
+            for (unsigned long int index = lower_index ; index < upper_index ; ++index)
+            {
+                double linkweight(s_biglink_vectormap[index].GetBiglinkWeight());
+                cerr << "    [" << s_biglink_vectormap[index].GetBiglinkLowerLittlelink()
+                     << "," << s_biglink_vectormap[index].GetBiglinkUpperLittlelink()
+                     << ")Wt:" << linkweight;
+                weightsum += linkweight;
+                if (++counter == 10)
+                {
+                    cerr << endl;
+                    counter = 0;
+                }
+            }
+            if (counter > 0)
+            {
+                cerr << endl;
+            }
+            cerr << "    Total Accumulated Weight:  " << weightsum << endl;
+        }
+        //
+#else
+        cerr << ToString(setoflinks) << endl;
+        //
+#endif // RUN_BIGLINKS
+    }
+
+} // RecRange::PrintLinks
+
+//------------------------------------------------------------------------------------
+// Debugging function; RecRange only.
+// Prints in "internal" units, not "user" units.
+
+void RecRange::PrintDiseaseSites() const
+{
+    cerr << "Disease Sites:           " << ToString(m_diseaseSites) << endl;
+} // RecRange::PrintDiseaseSites
+
+//------------------------------------------------------------------------------------
+// Debugging function; RecRange-only.
+
+void RecRange::PrintRightOrLeft() const
+{
+    cerr << "Right or left:  ";
+    if (m_transmittedSites.begin()->first != 0L)
+    {
+        cerr << "left";
+    }
+    else if (m_transmittedSites.begin()->second != s_numRegionSites)
+    {
+        cerr << "right";
+    }
+    else
+    {
+        cerr << "unset";
+    }
+
+    cerr << endl;
+
+} // RecRange::PrintRightOrLeft
+
+//------------------------------------------------------------------------------------
+// Debugging function in RecRange class only.
+
+void RecRange::TestInvariants() const
+{
+    // JDEBUG -- augment to test that the "num" variables are correct; possibly also add a rangeset validator.
+    bool good = true;
+
+    for (set<rangepair>::const_iterator ri = m_diseaseSites.begin(); ri != m_diseaseSites.end(); ++ri)
+    {
+        if (ri->first < 0L) good = false;
+        if (ri->first >= s_numRegionSites) good = false;
+    }
+
+    assert(good);
+
+} // RecRange::TestInvariants
+
+//------------------------------------------------------------------------------------
+// Helper debugging function at global scope.
+//------------------------------------------------------------------------------------
+
+//------------------------------------------------------------------------------------
+// Dumps data used for building Biglink Vector Map (for debugging).
+
+#ifdef ENABLE_REGION_DUMP
+
+//-----------------
+
+void PrintRegionData(long int regionnum, const Region & curregion)
+{
+    // This function writes to STANDARD-ERROR, which (hopefully) redirects to a file in a unique directory for each test run.
+    const long int numloci(curregion.GetNloci());
+    const long int numtips(curregion.GetNTips());   // All loci in region have data for same number of tips.
+
+    cerr << "Region Name (name of Region):            \"" << curregion.GetRegionName() << "\"" << endl;
+    cerr << "Region Number (index of Region):         " << regionnum << endl;
+    cerr << "Number of Sites in this Region:          " << curregion.GetNumSites() << endl; // All sites, whether in a locus or between them.
+    cerr << "Number of Loci in this Region:           " << numloci << endl;
+    cerr << "Number of Tips for each Locus:           " << numtips << endl << endl;
+
+    for (long int locIdx = 0L; locIdx < numloci; ++locIdx)
+    {
+        const Locus & locus(curregion.GetLocus(locIdx));
+        const long int regionalmapposition(locus.GetRegionalMapPosition());
+        const long int nmarkers(locus.GetNmarkers());
+        const data_type datatype_enum(locus.GetDataType());
+        const DataType_ptr datatype_ptr(locus.GetDataTypePtr());
+
+        cerr << "Name of this Locus in Region:            " << locus.GetName() << endl;
+        cerr << "Index of this Locus in Region:           " << locus.GetIndex() << " (locus " << locIdx + 1 << " of " << numloci << ")" << endl;
+        cerr << "DataType stored in this Locus:           " << datatype_enum << " (type " << ToString(datatype_enum) << ")" << endl;
+        cerr << "Number of Markers in this Locus:         " << nmarkers << endl;
+        cerr << "Number of Sites in this Locus:           " << locus.GetNsites() << endl;
+        cerr << "Offset of this Locus's sites in Region:  " << regionalmapposition << endl << endl;
+
+        const LongVec1d markerVector(locus.GetMarkerLocations());
+        assert(nmarkers == static_cast<long int>(markerVector.size())); // One is signed, other is unsigned.
+        cerr << "GetMarkerLocations size (marker vector size for this locus):  " << nmarkers << " (shown if location of Site != index + RegionalMapPosition) ..." << endl;
+        for (long int markerindex = 0L; markerindex < nmarkers; ++markerindex)
+        {
+            const long int val(markerVector[markerindex]);
+            if ( val != (markerindex + regionalmapposition) )
+            {
+                // The "setw" manipulator works "per output field" and so must be re-applied each time needed.
+                cerr << "GetMarkerLocations[" << setw(4) << markerindex << "]:  " << val << endl;
+            }
+        }
+        cerr << endl;
+
+        const vector<TipData> tipdatavec(locus.GetTipData());
+        assert(nmarkers == static_cast<long int>(tipdatavec[0].data.size())); // One is signed, other is unsigned.
+
+        StringVec1d compdata;
+        compdata.reserve(nmarkers);     // Pre-allocate for speed.
+        // Preload the comparison vector with one of the tip vectors (the zeroth, known to exist).
+        for (long int markerindex = 0L; markerindex < nmarkers; ++markerindex)
+        {
+            compdata.push_back(tipdatavec[0].data[markerindex]);
+        }
+
+        // If disease site(s) are present, then include them; ie, treat them as variant sites in map construction.
+        if (registry.GetForceSummary().CheckForce(force_DISEASE))
+        {
+            const ForceSummary & fs(registry.GetForceSummary());
+            if (fs.GetNLocalPartitionForces() > 0L)
+            {
+                cerr << "Disease Site(s) present ..." << endl;
+                const ForceVec lpforces(fs.GetLocalPartitionForces());
+                for (ForceVec::const_iterator lpforce = lpforces.begin(); lpforce != lpforces.end(); ++lpforce)
+                {
+                    const LocalPartitionForce * lpf(dynamic_cast<const LocalPartitionForce *>(* lpforce));
+                    assert(lpf);        // Make sure pointer is non-NULL.
+                    cerr << "Disease Site at index:  " << lpf->GetLocalSite() << endl;
+                }
+            }
+            cerr << endl;
+        }
+
+        for (long int tipindex = 0L; tipindex < numtips; ++tipindex)
+        {
+            // The "setw" manipulator works "per output field" and so must be re-applied each time needed.
+            cerr << "Tip " << setw(4) << tipindex << " data:                    ";
+            const TipData tipdata(tipdatavec[tipindex]);
+            const StringVec1d markerdata(tipdata.data);
+            for (long int markerindex = 0L; markerindex < nmarkers; ++markerindex)
+            {
+                cerr << markerdata[markerindex]; // Draw all marker data (iteratively printing each single-character string) for one Tip.
+                // Compare each tip vector (>= 1) with the zeroth (stored in comparison vector).
+                // If any pair of markers compares "non-IsEquivalent()", mark comparison vector slot with "*".
+                // On zeroth iteration, tip 0 will compare with itself (this iteration is needed only to print that tip),
+                // but since that comparison will always succeed, we waste time but don't change comparison vector then.
+                if ( ! datatype_ptr->IsEquivalent(markerdata[markerindex], compdata[markerindex]) )
+                {
+                    compdata[markerindex] = "*";
+                }
+            }
+            cerr << endl;
+        }
+
+        cerr << "Variant data:                     ";
+        for (long int markerindex = 0L; markerindex < nmarkers; ++markerindex)
+        {
+            cerr << compdata[markerindex];
+        }
+        cerr << endl << endl;
+    }
+}
+
+//-----------------
+
+#endif // ENABLE_REGION_DUMP
+
+//____________________________________________________________________________________
diff --git a/src/tree/range.h b/src/tree/range.h
new file mode 100644
index 0000000..1dbd7f4
--- /dev/null
+++ b/src/tree/range.h
@@ -0,0 +1,536 @@
+// $Id: range.h,v 1.48 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+//------------------------------------------------------------------------------------
+// General comments about the meanings of terms as they apply to the Biglink versus Littlelink implementations.
+//------------------------------------------------------------------------------------
+//
+// Classes "Range" and "RecRange" implement an ordered set of half-open intervals of "Links".  Both obey the same
+// convention of being half-open intervals (closed at the lower end and open at the upper end).  RecRange objects
+// are meant to be embedded within branches which allow for the possibility of recombination events.  Range objects
+// (the base class) handle non-recombination-only events.
+//
+// RecRanges track two different ranges, the active sites and the newly-active sites, plus derived quantities,
+// the active Links and newly-active Links.
+//
+// These classes provide getter/setter pairs for all of their internal variables.
+//
+// Range provides IsSiteLive() which answers whether the passed site is active on the owning branch.
+//
+// Range and RecRange also provide a set of virtual functions (1 per force) for handling active
+// and newly-active site management during rearrangement.  These functions are currently:
+//    UpdateCRange(), UpdateOneLeggedCRange(), UpdateMRange(), UpdateRootRange(),
+//    UpdateOneLeggedRootRange(), and UpdateRRange().
+//
+// In RecRange objects, a LINK represents generically an inter-site span of genomic information.
+// Specifically, in the Littlelink implementation, a LINK is the span connecting ADJACENT SITES.
+// In the Biglink implementation, a LINK is the span connecting adjacent VARIANT SITES, where "variant"
+// also includes any site denoted as a "disease" site.
+//
+// When the Biglink optimization is enabled, recombination is modeled as happening in the middle of the Biglink
+// (no matter where it "really" happens) as an approximation, since the available data suffice only to tell that
+// it happened between two particular variant sites (that is, "somewhere inside" a given Biglink).  Transmitted
+// sites are calculated based on splitting the possible sites by that "fictitious" midpoint.
+//
+// Note that the word "Link" in any context (by itself in a comment or as part of a function or variable name)
+// always means "Biglink" (interval between variant sites) if the Biglink optimization is enabled or "Littlelink"
+// (single link between adjacent sites) if the Biglink optimization is NOT enabled.  If the Biglink optimization
+// IS enabled but the implementation is emulating Littlelinks, then the two meanings are synonymous (the native
+// implementation is Biglinks, but each Biglink is modeling a single Littlelink via a degenerate Biglink Map).
+//
+// A RANGESET is an ordered set of PAIRs of LONG INTs.  In each pair, the first element is the start of a group
+// of objects (Sites or Links), and the second element is one past the upper end of that group.  So for example,
+// you could express the idea that sites 1 - 10 and 15 - 19 are active with a rangeset containing (1 - 11), (15 - 20).
+//
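+// As an illustrative sketch only (assuming the rangeset helpers declared in "rangex.h"), that example could be
+// built as follows:
+//
+//     rangeset active(MakeRangeset(1L, 11L));             // sites 1 through 10 active
+//     active = Union(active, MakeRangeset(15L, 20L));     // plus sites 15 through 19
+//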
+// A LINKRANGESET is an ordered set of PAIRs of UNSIGNED LONG INTs.  Same as for a rangeset, except that
+// this type is defined to work only with Links (rather than Sites).
+//
+// However, the meaning of those integer pairs is different in the Biglink versus Littlelink case.  For Littlelinks,
+// they are Littlelink indices denoting the interval of Littlelinks (ie, index of the lowest included Littlelink
+// at the low end and index of the first excluded Littlelink at the high end).  For Biglinks, they are Biglink Map
+// indices, that is, indices into the Biglink Vector Map whose stored values at that entry represent the Littlelink
+// indices (again, as a half-open interval).  Each entry in the Map represents a single Biglink, and therefore a
+// LINKRANGEPAIR of Biglink indices represents a SET of Map entries and thus a SET of Biglinks (each one in turn
+// representing an interval of Littlelinks).  Within a single LINKRANGEPAIR representing an interval of Biglinks,
+// the Littlelinks encompassed by those Biglinks will be contiguous.
+//
+// So the generic term "LINK" always denotes an interval of genomic information, represented as a half-open interval
+// of indices.  In the Littlelink model, LINK means Littlelink exactly (the prior meaning of the word throughout
+// Lamarc).  In the Biglink model, it means a Biglink (a contiguous interval of Littlelinks bounded by variant or
+// disease sites), and it represents a genomic region over which recombination is detectable in the data.
+//
+// A Biglink integer conveys the same information but via one level of indirection: it indicates an INDEX
+// into the Biglink Vector Map, each of whose entries (valid while processing a single region but changed
+// between processing of different regions) is an object of class BiglinkMapitem, which in turn contains
+// three items of information:
+//
+//   1. The index of the LOWER-bound Littlelink INCLUDED in the represented Biglink.
+//
+//   2. The index of the UPPER-bound Littlelink (the first one EXCLUDED) at the upper end of the represented
+//      Biglink.  Note that (by coincidence) this index is numerically equal to the index of the upper-bounding
+//      SITE of the interval included in that Biglink.
+//
+//   3. The "weight" of the current Biglink, which currently is simply a double-float number whose value is
+//      a count of the number of Littlelinks included in the current Biglink's span.  Later this simple count
+//      of spanned Littlelinks will be generalized to weight each by a possibly non-uniform recombination
+//      probability, allowing the Biglink system to represent recombination hotspots and other variations.
+//
+//   "m_curTargetLinks" is a LINKRANGESET of links currently targetable - Littlelinks in one implementation and
+//   Biglinks (indices into the Biglink Vector Map) in the other, but in BOTH implementations this variable
+//   denotes a LINKRANGESET of spans of the genome in which recombination can happen.
+//
+//   "GetAllLinks()" is a member function which returns a LINKRANGESET of Links, of either type as appropriate
+//   for the particular implementation (ie, Littlelinks implementation: a LINKRANGESET of indices of Littlelinks;
+//   Biglink implementation: a LINKRANGESET of indices of BiglinkMapitem class objects in the Biglink Vector Map).
+//
+//   "GetNewTargetLinks()" is a member function which returns a LINKRANGESET of Links, again, as appropriate
+//   for the particular implementation.
+//
+//   "m_curTargetLinkweight" is a "Linkweight" of links currently targetable, where "Linkweight" is defined
+//   via a typedef to be a long int (count of Littlelinks currently targetable) in one implementation and a
+//   double-float (sum of the weights of all currently targetable Biglinks) in the other.
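+//
+//   For illustration only (a hedged sketch using the static helpers declared below in this header,
+//   combined the same way as in RecTree::ActivateTips in rectree.cpp):
+//
+//     Linkweight total = RecRange::AccumulatedLinkweight(RecRange::GetAllLinks());
+//     // Littlelink system: "total" is a long int equal to the number of Littlelinks in the region.
+//     // Biglink system:    "total" is a double summing the weights of every Biglink Map entry, which
+//     //                     (with the current unity weighting) is numerically the same count.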
+//
+// RSGNOTE (not relevant to anything about BigLinks; just put here to avoid losing the idea): Perhaps precision
+// with respect to time can be attained by changing some usages of TIME from FLOAT to LONG INT, whose values,
+// unlike floats, are EXACT (as long as they remain in range).
+//
+//------------------------------------------------------------------------------------
+
+#ifndef RANGE_H
+#define RANGE_H
+
+#include <algorithm>
+#include <cassert>
+#include <iostream>
+#include <cstdlib>
+
+#include "local_build.h"
+
+#include "constants.h"
+#include "rangex.h"
+
+class Region;
+
+//------------------------------------------------------------------------------------
+
+#ifdef RUN_BIGLINKS
+
+// Used to indicate zero Biglink weight in Biglink system.
+#define ZERO 0.0
+#define FLAGFAULTY FLAGDOUBLE
+
+#include <set>
+
+class BiglinkMapitem;
+
+// Typedef for the Biglink Associative Set object (temporary object used to build the Biglink Vector Map).
+typedef   std::set < long int, std::less < long int > >   BiglinkAssocSet;
+
+// Typedef for the Biglink Vector Map object (held by static member variable in class RecRange).
+typedef   std::vector < BiglinkMapitem >   BiglinkVectormap;
+
+// Type for recombination weights in Biglink system.
+typedef   double   Linkweight;
+
+
+//------------------------------------------------------------------------------------
+// Class BiglinkMapitem.
+//------------------------------------------------------------------------------------
+// Class used to represent objects describing spans of Littlelinks in the Biglink Vector Map.
+// This class contains, and its accessor functions return, Littlelinks, which are declared
+// as type LONG INT.  We reserve UNSIGNED LONG INT for Biglink object indicators, ie, indices
+// into the BiglinkVectormap.
+
+class BiglinkMapitem
+{
+  private:
+
+    BiglinkMapitem();                           // Default constructor is undefined.
+
+    // SITE just below lowermost Littlelink included at lower end of the interval.
+    // Equivalently, lowermost Littlelink included at lower end of the interval.
+    long int m_lower_littlelink;
+
+    // SITE just above uppermost Littlelink INCLUDED at higher end of the interval.
+    // Equivalently, lowermost Littlelink EXCLUDED at higher end of the interval.
+    long int m_upper_littlelink;
+
+    // Recombination weight over the interval represented by this Biglink (currently a count of the spanned
+    // Littlelinks, ie, unity weight per Littlelink; see item 3 in the header comment above).
+    double m_linkweight;
+
+  public:
+
+    // We accept the default (compiler-generated) COPY CONSTRUCTOR, ASSIGNMENT OPERATOR, and DESTRUCTOR.
+
+    // The "real" constructor for class BiglinkMapitem.
+    // Link arguments (first, second) refer to Littlelinks, and hence their type is SIGNED LONG INT.
+    // We reserve UNSIGNED LONG INTs to refer to Biglink indices.
+    BiglinkMapitem(long int lowerlink, long int upperlink, double linkweight);
+
+    // Accessors.
+    long int GetBiglinkLowerLittlelink() const { return m_lower_littlelink; };
+    long int GetBiglinkUpperLittlelink() const { return m_upper_littlelink; };
+    double   GetBiglinkWeight() const { return m_linkweight; };
+};
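+
+// For illustration only (a hedged sketch; the real Map is built by RecRange::BuildBiglinkMap below):
+// if adjacent variant sites sit at sites 100 and 110, the Biglink between them spans Littlelinks
+// 100 through 109 and its Map entry could look like
+//
+//   BiglinkMapitem item(100L, 110L, 10.0);   // lower included Littlelink, first excluded Littlelink, weight
+//
+// so that GetBiglinkLowerLittlelink() == 100, GetBiglinkUpperLittlelink() == 110, and
+// GetBiglinkWeight() == 10.0 (unity weight per spanned Littlelink, per the header comment above).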
+
+#else // RUN_BIGLINKS
+
+// Used to indicate zero Littlelink count or weight in Littlelink system.
+#define ZERO 0L
+#define FLAGFAULTY FLAGLONG
+
+// Type for recombination weights (Littlelink counts) in Littlelink system.
+typedef   long int   Linkweight;
+
+#endif // RUN_BIGLINKS
+
+
+//------------------------------------------------------------------------------------
+// Class Range.
+//------------------------------------------------------------------------------------
+// Base class used to represent non-recombinant Ranges and as superclass for RecRange class.
+
+class Range
+{
+  private:
+
+    Range();                              // Default constructor is undefined.
+    Range & operator=(const Range & src); // Assignment operator is undefined.
+
+    // No processing is done by Range except to present the total number of sites.
+    // This is used when there is no recombination.
+
+  protected:
+
+    // Information about permanent state (it is data-dependent and never changes).
+    // In a non-recombinant Range, ALL sites are LIVE sites; they pass through every node and reach every tip.
+    // Value of this variable is fixed for all Ranges (and RecRanges) within a Region, and Range objects
+    // are valid only within a single Region at a time anyway.  This variable is initialized to FLAGLONG
+    // at program startup and then reset to the correct value for each Region when processing enters a new
+    // Region (same time that the Biglink Map is built for a new Region, if Biglinks are enabled).
+    // Note that this number includes sites within all Loci in the region plus sites in all inter-locus
+    // areas, for which recombination must be modeled even though we have no data for these sites.
+    static long int s_numRegionSites;
+
+    Range(const Range & src);           // Copy constructor is callable only by Range::Clone and RecRange copy CTORs.
+
+    rangeset GetAllSites() const { assert(s_numRegionSites > 0L); return MakeRangeset(0L, s_numRegionSites); };
+
+  public:
+
+    // Setter, for resetting this protected static variable when starting processing of a new region.
+    // Static: Sets static variable but may be called before any Range objects are created.
+    // This is declaration only; definition is in "range.cpp" because it needs "region.h", but
+    // including that here would cause a circular dependency.
+    static void SetNumRegionSites(const Region & curregion);
+
+    // Setter, for setting this protected static variable when reading in an ARG tree.
+    // This is used ONLY when reading in an ARG tree.  Using it ANYWHERE ELSE is likely DISASTROUS.
+    // This would not need to be STATIC were it not merely an overloaded definition of the STATIC function above.
+    // RSGNOTE: Commented out by JMcGill - test whether still called.
+#if 0
+    static void SetNumRegionSites(long int totalsites)  {s_numRegionSites = totalsites; };
+#endif
+
+    // "explicit" to prevent accidental type conversion via assignment.
+    //
+    // RSGNOTE:  The argument "nsites" is not really needed, because its value can only be "s_numRegionSites".
+    // I've left it unchanged to minimize upsetting the interface when going from Littlelink to Biglink
+    // version.  A later optimization could remove this argument.
+    explicit Range(long int nsites);    // The "real" constructor.
+
+    // Destructor.
+    //
+    // We accept an empty destructor, which does nothing other than to call the destructor for each
+    // element (STL object) needing deallocation.  Even though it is empty, we need to declare it
+    // and define it so we can make it VIRTUAL (the compiler default destructor is NON-virtual).
+    virtual ~Range() {};
+
+    // Clone serves as a polymorphic copy constructor (handles memory allocation and returns a pointer).
+    virtual Range * Clone() const;
+
+    // Getter, inherited and used by both Range and RecRange (only such function).
+    long int GetNumRegionSites() const { assert(s_numRegionSites > 0L); return s_numRegionSites; };
+
+    // Tests.
+    virtual bool operator==(const Range & other)     const;
+    virtual bool SameLiveSites(const rangeset & src) const { return true; };
+    virtual bool LiveSitesOnly()                     const { return true; };
+    virtual bool IsSiteLive(long int site)           const { return true; };
+
+    // Getters.
+    // Functions which ASSERT here are defined to return a RANGESET (or LINKRANGESET) simply to satisfy the compiler
+    // on return type.  They should never be called; hence we don't care what the rangeset they "return" contains.
+    virtual rangeset     GetLiveSites()            const { return GetAllSites(); };
+    virtual rangeset     GetDiseaseSites()         const { assert(false); return rangeset(); };
+    virtual linkrangeset GetCurTargetLinks()       const { assert(false); return linkrangeset(); };
+    virtual linkrangeset GetNewTargetLinks()       const { assert(false); return linkrangeset(); };
+    virtual linkrangeset GetOldTargetLinks()       const { assert(false); return linkrangeset(); };
+    virtual Linkweight   GetCurTargetLinkweight()  const { assert(false); return FLAGFAULTY; };
+    virtual Linkweight   GetNewTargetLinkweight()  const { assert(false); return FLAGFAULTY; };
+    virtual rangeset     GetOldTargetSites()       const { assert(false); return rangeset(); };
+    virtual rangeset     GetOldLiveSites()         const { assert(false); return rangeset(); };
+    virtual rangeset     GetTransmittedSites()     const { assert(false); return rangeset(); };
+    virtual long int     GetRecpoint()             const { assert(false); return FLAGLONG; };
+
+    // Updaters.
+    // The following six functions all are NO-OPs on Range objects.
+    // They change member variables only on RecRange objects.
+    virtual void UpdateCRange(const Range * const child1rangeptr, const Range * const child2rangeptr,
+                              const rangeset & fcsites, bool dofc);
+    virtual void UpdateRootRange(const Range * const child1rangeptr, const Range * const child2rangeptr,
+                                 const rangeset & fcsites, bool dofc);
+    virtual void UpdateOneLeggedCRange(const Range * const childrangeptr);
+    virtual void UpdateOneLeggedRootRange(const Range * const childrangeptr);
+    virtual void UpdateMRange(const Range * const childrangeptr);
+    virtual void UpdateRRange(const Range * const childrangeptr, const rangeset & fcsites, bool dofc);
+
+    virtual bool AreLowSitesOnInactiveBranch(long int recpoint) const
+    { assert(false); return false; };
+
+    virtual bool AreDiseaseSitesTransmitted() const { return true; };
+
+    // Used by RecTree::Prune and Branch::ResetBuffersForNextRearrangement.
+    virtual void ClearNewTargetLinks();
+    virtual void SetOldInfoToCurrent();
+    virtual void ResetOldTargetSites(const rangeset & fcsites);
+
+    // Used by RBranch::IsRemovableRecombinationLeg (ASSERTs on Range object).
+    virtual bool AreChildTargetSitesTransmitted(const Range * const childrangeptr, const rangeset & fcsites) const;
+
+    // Used in revalidation to compare a branch to its child.
+    virtual bool SameAsChild(const Range * const childrangeptr) const;
+
+    // Debugging functions for Range and RecRange (virtual).
+    virtual void PrintLive() const;
+    virtual void PrintInfo() const;
+    virtual void PrintNewTargetLinks() const;
+    virtual bool NoLiveAndNoTransmittedDiseaseSites() const { return false; };
+    virtual bool DifferentCurTargetLinks(const linkrangeset & src) const { return false; };
+    virtual bool DifferentNewTargetLinks(const linkrangeset & src) const { return false; };
+
+};
+
+
+//------------------------------------------------------------------------------------
+// Class RecRange
+//------------------------------------------------------------------------------------
+// Derived class used to represent recombinant Ranges.
+// Full recombinant processing is done by RecRange.
+//
+// The member variable whose name starts with "s_..." is STATIC (value is a BiglinkVectormap object).
+//
+// Member variables whose names end with "...Linkweight" have Linkweight numbers as their values (double for Biglink,
+// long int for Littlelink); the value is the accumulated weight (count for Littlelinks) summed over all Links.
+//
+// In the future (in Biglink implementation), a location-dependent recombination probability will weight
+// the Littlelinks in this summation.  Currently, that probability is flat with a unity weighting.  Thus
+// double-float weight sums (for Biglink system) and long int Littlelink counts (for Littlelink system)
+// are equivalent numerically (aside from double-vs-int datatypes).
+//
+// The rest of the member variables have values which are a RANGESET (a set of PAIRs of LONG INTs representing SITEs)
+// or a LINKRANGESET (a set of PAIRs of UNSIGNED LONG INTs representing Links - either Biglinks or Littlelinks,
+// depending on the system enabled) where the integer values denote a half-open (inclusive start, exclusive end)
+// interval of Link indices.  For the Biglink system, these are BiglinkMapitem indices; for the Littlelink system,
+// they are Littlelink indices.
+
+class RecRange : public Range
+{
+  private:
+
+    RecRange();                         // Default constructor is undefined.
+    RecRange(const RecRange & src);     // Copy constructor is callable only by RecRange::Clone.
+    RecRange & operator=(const RecRange & src); // Assignment operator is undefined.
+
+#ifdef RUN_BIGLINKS
+    static BiglinkVectormap s_biglink_vectormap;
+#endif
+
+    // LIVE sites pass through the branch holding this RecRange and reach one or more tips.
+    rangeset m_liveSites;               // Sites currently live (transmitted from here to at least one tip).
+
+    rangeset m_diseaseSites;            // Locations (SITEs) of all disease traits.
+    rangeset m_transmittedSites;        // Sites transmitted through this branch.
+
+    // "m_curTargetLinks" are legal targets (Links) for recombination (variable during life of a RecRange object).
+    // Remember that in Biglink system, "Links" means "Biglinks"; in Littlelink system, "Links" means "Littlelinks".
+    linkrangeset m_curTargetLinks;      // Links currently targetable.
+    Linkweight m_curTargetLinkweight;   // Weight of Links currently targetable.
+
+    // Stored so that recombinations that result from "m_newTargetLinks" can be handled reversibly.
+    // Modified by recombinations in RecRange.
+    rangeset m_oldTargetSites;          // Sites that framed targetable Links when tree last created/modified.
+    rangeset m_oldLiveSites;            // Sites that were live when this tree was last created/modified.
+
+    // "m_oldTargetLinks" were targetable before rearrangement began.
+    // Modified by recombinations in RecRange.
+    linkrangeset m_oldTargetLinks;      // Links previously targetable.
+
+    // "m_newTargetLinks" were not legal targets before rearrangement began, but have become legal.
+    linkrangeset m_newTargetLinks;      // Links newly targetable; m_curTargetLinks - m_oldTargetLinks.
+    Linkweight m_newTargetLinkweight;   // Weight of Links newly targetable.
+
+  public:
+
+#ifdef RUN_BIGLINKS
+    // Builds map from inter-site Littlelinks to recombination-visible Biglinks.
+    // Static member function (RecRange-only, but may be called before first object is created).
+    static void BuildBiglinkMap(const Region & curregion);
+    //
+    // Static member function (gives access to Biglink Map from outside class; independent of RecRange objects).
+    static BiglinkVectormap GetBiglinkVectormap() { return s_biglink_vectormap; };
+#endif
+
+    // NOTA BENE: The meaning of "GetAllLinks" is different for Littlelinks vs Biglinks.
+    // For Littlelinks, the function returns the LINKRANGESET containing ALL links between the first and last site.
+    // For Biglinks, it returns a LINKRANGESET denoting all the Biglinks between the first and last site, but such
+    // Biglinks MIGHT NOT EXTEND down to the lowest and/or up to the highest site (and, in general, they WON'T,
+    // unless both end sites happen to be either disease or variant sites).  In other words, for Biglinks,
+    // it returns the set denoting all possible Biglinks, even though the Littlelinks directly spanned by that
+    // set of Biglinks might not include all sites out to the very ends.
+    //
+    // Also, corresponding to the difference between Littlelinks and Biglinks, a Littlelink is denoted directly
+    // by a SITE index (that of the site to its "left"), while a Biglink is denoted by the index in the
+    // Biglink Vector Map of the BiglinkMapitem object describing an interval of Littlelinks.
+    //
+#ifdef RUN_BIGLINKS
+    // Returns a LINKRANGESET of all indices in the Biglink Vector Map.  This is a static member function;
+    // it needs access to the Biglink Vector Map, but it is otherwise independent of RecRange objects.
+    // Also, it could be called before the first RecRange object is created.
+    // For each Region, this may be called only after BuildBiglinkMap() has built the map.
+    // Second argument to MakeRangeset is EXCLUDED endpoint; size of Map is how many Biglinks are in the set.
+    static linkrangeset GetAllLinks() { return MakeRangeset(0UL, s_biglink_vectormap.size()); };
+#else
+    // Simply returns a LINKRANGESET of all links (ie, those BETWEEN the first and last site,
+    // NOT including the non-existent "link" after the last site).
+    // Second argument to MakeRangeset is EXCLUDED endpoint; "s_numRegionSites - 1" is how many Littlelinks are in the set.
+    static linkrangeset GetAllLinks() { assert(s_numRegionSites > 0L); return MakeRangeset(0UL, s_numRegionSites - 1UL); };
+#endif
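+
+    // Illustrative contrast (a hedged sketch, not code from this file): with s_numRegionSites == 1000,
+    // the Littlelink version above returns MakeRangeset(0UL, 999UL), ie, all 999 Littlelinks; the Biglink
+    // version returns MakeRangeset(0UL, map.size()), ie, one index per BiglinkMapitem, and those entries
+    // need not reach the outermost sites unless the end sites are themselves variant or disease sites.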
+
+    // The "real" constructor.
+    // Note that args 5 and 6 are "Link" linkrangesets (Biglink or Littlelink, depending on system).
+    RecRange(long int nsites, const rangeset & diseasesites,
+             const rangeset & transmittedsites, const rangeset & livesites,
+             const linkrangeset & curtargetlinks, const linkrangeset & oldtargetlinks,
+             const rangeset & oldtargetsites, const rangeset & oldlivesites);
+
+    // Destructor
+    // We accept an empty destructor, which does nothing other than to call the destructor for each
+    // element (STL object) needing deallocation.  Even though it is empty, we need to declare it
+    // and define it so we can make it VIRTUAL (the compiler default destructor is NON-virtual).
+    virtual ~RecRange() {};
+
+    // Clone serves as a polymorphic copy constructor (handles memory allocation and returns a pointer).
+    virtual Range * Clone() const;
+
+    // Tests.
+    virtual bool operator==(const Range & other)     const;
+    virtual bool SameLiveSites(const rangeset & src) const { return m_liveSites == src; };
+    virtual bool LiveSitesOnly()                     const { return false; };
+    virtual bool IsSiteLive(long int site)           const { return IsInRangeset(m_liveSites, site); };
+
+    // Getters.
+    virtual rangeset     GetLiveSites()            const { return m_liveSites; };
+    virtual rangeset     GetDiseaseSites()         const { return m_diseaseSites; };
+    virtual linkrangeset GetCurTargetLinks()       const { return m_curTargetLinks; };
+    virtual linkrangeset GetNewTargetLinks()       const { return m_newTargetLinks; };
+    virtual linkrangeset GetOldTargetLinks()       const { return m_oldTargetLinks; };
+    virtual Linkweight   GetCurTargetLinkweight()  const { return m_curTargetLinkweight; };
+    virtual Linkweight   GetNewTargetLinkweight()  const { return m_newTargetLinkweight; };
+    virtual rangeset     GetOldTargetSites()       const { return m_oldTargetSites; };
+    virtual rangeset     GetOldLiveSites()         const { return m_oldLiveSites; };
+    virtual rangeset     GetTransmittedSites()     const { return m_transmittedSites; };
+    virtual long int     GetRecpoint()             const;
+
+    // Setters
+    // This is used ONLY when reading in an ARG tree.  Using it ANYWHERE ELSE is likely DISASTROUS.
+#if 0 // RSGNOTE:  Unused so far; reserved for future expansion in ARGtree hackery.
+    virtual void SetLiveSites(rangeset livesites)  {m_liveSites = livesites; };
+#endif
+
+    // Updaters.
+    // The following six functions modify a RecRange to reflect tipward changes during rearrangement,
+    // including modifying m_liveSites, m_curTargetLinks, and m_newTargetLinks plus their respective counts.
+    virtual void UpdateCRange(const Range * const child1rangeptr, const Range * const child2rangeptr,
+                              const rangeset & fcsites, bool dofc);
+    virtual void UpdateRootRange(const Range * const child1rangeptr, const Range * const child2rangeptr,
+                                 const rangeset & fcsites, bool dofc);
+    virtual void UpdateOneLeggedCRange(const Range * const childrangeptr);
+    virtual void UpdateOneLeggedRootRange(const Range * const childrangeptr);
+    virtual void UpdateMRange(const Range * const childrangeptr);
+    virtual void UpdateRRange(const Range * const childrangeptr, const rangeset & fcsites, bool dofc);
+
+    // When placing an inactive (hidden passages) recombination, should the branch that remains
+    // in the residual tree carry the low-numbered sites (as opposed to the high-numbered sites)?
+    virtual bool AreLowSitesOnInactiveBranch(long int recpoint) const;
+
+    virtual bool AreDiseaseSitesTransmitted() const;
+
+    // Used by RecTree::Prune and Branch::ResetBuffersForNextRearrangement.
+    virtual void ClearNewTargetLinks();
+    virtual void SetOldInfoToCurrent();
+    virtual void ResetOldTargetSites(const rangeset & fcsites);
+
+    // Used in RBranch::IsRemovableRecombinationLeg (recombinant case only).
+    virtual bool AreChildTargetSitesTransmitted(const Range * const childrangeptr, const rangeset & fcsites) const;
+
+    // Used in revalidation to compare a branch to its child; does not require m_transmittedSites to match.
+    virtual bool SameAsChild(const Range * const childrangeptr) const;
+
+    // Utility functions, RecRange only.
+
+    // Static member function (Biglink version needs access to the Biglink Vector Map).
+    static Linkweight AccumulatedLinkweight(const linkrangeset & setoflinks);
+
+    // Biglink version returns a LINKRANGESET of Biglinks spanning lowest to highest target site (including Disease sites).
+    // Static member function (needs access to the Biglink Vector Map).
+    //
+    // Littlelink version returns a LINKRANGESET of Littlelinks.
+    // Needs no access to statics, but declared static for calling consistency with the one just above.
+    //
+    // Both versions have same declaration, but function definitions have different semantics.
+    static linkrangeset LinksSpanningSites(const rangeset & targetsites);
+
+    virtual bool NoLiveAndNoTransmittedDiseaseSites() const
+    { return (m_liveSites.empty() && !AreDiseaseSitesTransmitted()); };
+
+    virtual bool DifferentCurTargetLinks(const linkrangeset & src) const { return m_curTargetLinks != src; };
+    virtual bool DifferentNewTargetLinks(const linkrangeset & src) const { return m_newTargetLinks != src; };
+
+    // Debugging functions for Range and RecRange (virtual).
+    virtual void PrintLive() const;
+    virtual void PrintInfo() const;
+    virtual void PrintNewTargetLinks() const;
+
+    // Utility function, RecRange only, used in above two functions.
+    // Static member function; needs access to Biglink Vector Map but is otherwise independent of RecRange objects.
+    static void PrintLinks(const linkrangeset & setoflinks);
+
+    // Debugging functions for RecRange only.
+    void PrintDiseaseSites() const;
+    void PrintRightOrLeft()  const;
+    void TestInvariants()    const;
+
+};
+
+
+//------------------------------------------------------------------------------------
+// Helper debugging function at global scope.
+//------------------------------------------------------------------------------------
+
+#ifdef ENABLE_REGION_DUMP
+void PrintRegionData(long int regionnum, const Region & curregion);
+#endif // ENABLE_REGION_DUMP
+
+//------------------------------------------------------------------------------------
+
+#endif // RANGE_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/rectree.cpp b/src/tree/rectree.cpp
new file mode 100644
index 0000000..72ec9d5
--- /dev/null
+++ b/src/tree/rectree.cpp
@@ -0,0 +1,1168 @@
+// $Id: rectree.cpp,v 1.81 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>                     // debugging
+
+#include "local_build.h"
+#include "dynatracer.h"                 // Defines some debugging macros.
+
+#include "branchbuffer.h"               // Used in SetCurTargetLinkweightFrom for BranchBuffer::ExtractConstBranches.
+#include "errhandling.h"                // Can throw overrun_error.
+#include "fc_status.h"                  // Used in Prune() to determine what recombinations can be removed.
+#include "force.h"                      // For TimeSize object.
+#include "mathx.h"
+#include "range.h"
+#include "runreport.h"
+#include "tree.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+typedef boost::shared_ptr<RBranch> RBranch_ptr;
+
+//------------------------------------------------------------------------------------
+
+RecTree::RecTree()
+    : Tree(),
+      m_pMovingLocusVec(),
+      m_protoMovingCells()
+{
+    m_curTargetLinkweight = ZERO;
+    m_newTargetLinkweight = ZERO;
+}
+
+//------------------------------------------------------------------------------------
+
+RecTree::RecTree(const RecTree & tree, bool makestump)
+    : Tree(tree, makestump),
+      m_pMovingLocusVec(tree.m_pMovingLocusVec),
+      m_protoMovingCells(tree.m_protoMovingCells)
+
+{
+    m_curTargetLinkweight = ZERO;
+    m_newTargetLinkweight = ZERO;
+}
+
+//------------------------------------------------------------------------------------
+
+Tree * RecTree::Clone() const
+{
+    RecTree * tree = new RecTree(*this, false);
+    return tree;
+} // RecTree::Clone
+
+//------------------------------------------------------------------------------------
+
+Tree * RecTree::MakeStump() const
+{
+    RecTree * tree = new RecTree(*this, true);
+    return tree;
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::Clear()
+{
+    Tree::Clear();
+    m_curTargetLinkweight = ZERO;
+    m_newTargetLinkweight = ZERO;
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::CopyTips(const Tree * tree)
+{
+    Tree::CopyTips(tree);
+    m_curTargetLinkweight = ZERO;
+    m_newTargetLinkweight = ZERO;
+    for (vector<Locus>::const_iterator locus = m_pMovingLocusVec->begin(); locus != m_pMovingLocusVec->end(); ++locus)
+    {
+        m_aliases.push_back(locus->GetDLCalc()->RecalculateAliases(*this, *locus));
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::CopyBody(const Tree * tree)
+{
+    Tree::CopyBody(tree);
+    m_curTargetLinkweight = ZERO;
+    m_newTargetLinkweight = ZERO;
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::CopyPartialBody(const Tree * tree)
+{
+    Tree::CopyPartialBody(tree);
+    m_curTargetLinkweight = ZERO;
+    m_newTargetLinkweight = ZERO;
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::Break(Branch_ptr pBranch)
+{
+    Branch_ptr pParent = pBranch->Parent(0);
+
+    if (pParent->CanRemove(pBranch))
+    {
+        Break(pParent);
+        m_timeList.Remove(pParent);
+
+        pParent = pBranch->Parent(1);
+        if (pParent)
+        {
+            Break(pParent);
+            m_timeList.Remove(pParent);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+const vector<LocusCell> & RecTree::CollectMovingCells()
+{
+    if (m_protoMovingCells.empty())
+    {
+        unsigned long int i;
+        for (i = 0; i < m_pMovingLocusVec->size(); ++i)
+        {
+            m_protoMovingCells.push_back((*m_pMovingLocusVec)[i].GetProtoCell());
+        }
+    }
+    return m_protoMovingCells;
+} // RecTree::CollectMovingCells
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> RecTree::ActivateTips(Tree * othertree)
+{
+    vector<Branch_ptr> tips = Tree::ActivateTips(othertree);
+
+#ifdef RUN_BIGLINKS
+    m_curTargetLinkweight = m_timeList.GetNTips() * RecRange::AccumulatedLinkweight(RecRange::GetAllLinks());
+#else
+    m_curTargetLinkweight = m_timeList.GetNTips() * (m_totalSites - 1); // -1 because there is no link after last site.
+#endif
+
+    m_newTargetLinkweight = ZERO;
+
+#if defined(EMULATE_LITTLELINKS) || ! defined(RUN_BIGLINKS)
+#if 1
+    DebugAssert2(m_curTargetLinkweight == m_timeList.GetNTips() * (m_totalSites - 1),
+                 m_curTargetLinkweight,
+                 m_timeList.GetNTips() * (m_totalSites - 1));
+#else // Equivalent to DebugAssert2 above, in case it is removed later.
+    assert(m_curTargetLinkweight == m_timeList.GetNTips() * (m_totalSites - 1));
+#endif
+#endif // defined(EMULATE_LITTLELINKS) || ! defined(RUN_BIGLINKS)
+
+    return tips;
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr RecTree::ActivateBranch(Tree * othertree)
+{
+    Branch_ptr pActive = Tree::ActivateBranch(othertree);
+    m_curTargetLinkweight += pActive->GetRangePtr()->GetCurTargetLinkweight();
+    return pActive;
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr RecTree::ActivateRoot(FC_Status & fcstatus)
+{
+    Branch_ptr pRoot = Tree::ActivateRoot(fcstatus);
+    m_curTargetLinkweight += pRoot->GetRangePtr()->GetCurTargetLinkweight();
+    m_newTargetLinkweight = ZERO;
+    return pRoot;
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::AttachBase(Branch_ptr newroot)
+{
+    Tree::AttachBase(newroot);
+    m_curTargetLinkweight = ZERO;
+}
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> RecTree::FirstInterval(double eventT)
+{
+    Branch_ptr pBranch;
+
+    vector<Branch_ptr> newinactives = Tree::FirstInterval(eventT);
+
+    m_newTargetLinkweight = ZERO;       // Initialize the weight of active Links.
+
+    return newinactives;
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::NextInterval(Branch_ptr pBranch)
+{
+    Branch_ptr pParent = pBranch->Parent(0);
+    m_newTargetLinkweight -= pParent->Child(0)->GetRangePtr()->GetNewTargetLinkweight();
+
+    // If the branch has a sibling, we must remove it as well (it's a coalescence event replaced by its parent).
+    if (pParent->Child(1))
+    {
+        m_newTargetLinkweight -= pParent->Child(1)->GetRangePtr()->GetNewTargetLinkweight();
+    }
+    else if (pBranch->Parent(1))        // If the branch has a second parent, insert it.
+    {
+        m_newTargetLinkweight += pBranch->Parent(1)->GetRangePtr()->GetNewTargetLinkweight();
+    }
+
+    m_newTargetLinkweight += pParent->GetRangePtr()->GetNewTargetLinkweight();
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::Prune()
+{
+    // This routine DEMANDS that all current-state Range information in the tree is correct!  Thus:
+    assert(m_timeList.RevalidateAllRanges());
+
+    // This routine locates recombinations which are no longer relevant due to occurring at Links that
+    // are no longer targetable.  It removes such recombinations and everything logically dependent
+    // on them.  It also makes all "old state" information in the Ranges equal to the current
+    // state, in preparation for the next cycle.
+
+    m_newTargetLinkweight = ZERO;
+    Branch_ptr pBranch, pChild;
+    // The original cut branch never needs to be updated or removed by Prune().
+    Branchiter start(m_timeList.NextNonTimeTiedBranch(m_firstInvalid));
+
+    FC_Status fcstatus;
+    vector<Branch_ptr> branches(FindBranchesImmediatelyTipwardOf(start));
+    vector<Branch_ptr>::iterator branch;
+
+#if FINAL_COALESCENCE_ON
+    for (branch = branches.begin(); branch != branches.end(); ++branch)
+    {
+        fcstatus.Increment_FC_Counts((*branch)->GetLiveSites());
+    }
+#endif
+
+    // Loop over pBranch starting from cut point.
+    Branchiter brit;
+    for (brit = start; brit != m_timeList.EndBranch(); /* increments inside loop */ )
+    {
+        pBranch = *brit;
+
+#if FINAL_COALESCENCE_ON
+        // If we're a fully functional, two-legged coalescence ...
+        if (pBranch->Event() == btypeCoal && !pBranch->m_marked)
+        {
+            rangeset coalesced_sites =
+                Intersection(pBranch->Child(0)->GetLiveSites(), pBranch->Child(1)->GetLiveSites());
+            fcstatus.Decrement_FC_Counts(coalesced_sites);
+        }
+#endif
+
+        rangeset fcsites;
+#if FINAL_COALESCENCE_ON
+        fcsites = fcstatus.Coalesced_Sites();
+#endif
+
+        // Update this branch's "m_oldTargetSites" info, which is no longer needed in this cycle,
+        // so that it will be right for the next cycle.  Done here because it requires FC information.
+        pBranch->ResetOldTargetSites(fcsites);
+
+        if (pBranch->IsRemovableRecombinationLeg(fcsites))
+        {
+#ifndef NDEBUG
+            // Validation check.
+            Range * childrangeptr = pBranch->Child(0)->GetRangePtr();
+            rangeset childtargetsites(Union(childrangeptr->GetDiseaseSites(),
+                                            RemoveRangeFromRange(fcsites, childrangeptr->GetLiveSites())));
+            linkrangeset childtargetlinks(RecRange::LinksSpanningSites(childtargetsites));
+            //
+            if (!(childtargetlinks == childrangeptr->GetCurTargetLinks()))
+            {
+                cerr << "RecTree::Prune (invalid self or child CurTargetLinks) call to Self->RecRange::PrintInfo()" << endl << endl;
+                pBranch->GetRangePtr()->PrintInfo();
+                cerr << "RecTree::Prune (invalid self or child CurTargetLinks) call to Child->RecRange::PrintInfo()" << endl << endl;
+                childrangeptr->PrintInfo();
+                cerr << "RecRange::LinksSpanningSites(childtargetsites) (Supposed Child Current Target Links):  ";
+                RecRange::PrintLinks(childtargetlinks);
+                cerr << endl << "childrangeptr->GetCurTargetLinks() (Actual Child Current Target Links):  ";
+                RecRange::PrintLinks(childrangeptr->GetCurTargetLinks());
+                cerr << endl;
+                assert(false);
+            }
+            //
+#endif // NDEBUG
+
+            Branch_ptr pSpouse = pBranch->GetRecPartner();
+            Branch_ptr pRemove(pBranch), pRemain(pSpouse);
+            // Which one to remove completely?
+            if (pSpouse->IsRemovableRecombinationLeg(fcsites))
+            {
+                // They are both removable based on targetable sites.
+                if (pBranch->GetRangePtr()->GetLiveSites().empty() && pSpouse->GetRangePtr()->GetLiveSites().empty())
+                {
+                    // ... and we can't break the tie on live sites, so we randomize.
+                    bool removespouse = registry.GetRandom().Bool();
+                    if (removespouse)
+                    {
+                        pRemove = pSpouse;
+                        pRemain = pBranch;
+                    }
+                }
+                else
+                {
+                    // ... and we break the tie on live sites.
+                    if (pSpouse->GetRangePtr()->GetLiveSites().empty())
+                    {
+                        pRemove = pSpouse;
+                        pRemain = pBranch;
+                    }
+                }
+            }
+
+            // Unhook branch which will be completely removed.
+            Branch_ptr pChild = pRemove->Child(0);
+            if (pChild->Parent(0) == pRemove)
+            {
+                pChild->SetParent(0, pChild->Parent(1));
+            }
+            pChild->SetParent(1, Branch::NONBRANCH);
+
+            // Remove descending material below it.
+            Break(pRemove);
+
+            // Get an iterator to the next still-valid interval.
+            brit = m_timeList.NextBody(brit);
+            if (*brit == pSpouse) brit = m_timeList.NextBody(brit);
+
+            // Splice out branch which remains in tree.
+            Branch_ptr pParent0 = pRemain->Parent(0);
+            pParent0->ReplaceChild(pRemain, pRemain->Child(0));
+            m_timeList.SetUpdateDLs(pParent0);
+            Branch_ptr pParent1 = pRemain->Parent(1);
+            if (pParent1)
+            {
+                pParent1->ReplaceChild(pRemain, pRemain->Child(0));
+                m_timeList.SetUpdateDLs(pParent1);
+            }
+
+            // Remove the dead stuff.
+            m_timeList.Remove(pRemove);
+            m_timeList.Remove(pRemain);
+        }
+        else
+        {
+#if FINAL_COALESCENCE_ON
+            pBranch->UpdateBranchRange(fcsites, true);
+#else
+            pBranch->UpdateBranchRange(fcsites, false);
+#endif
+            brit = m_timeList.NextBody(brit);
+        }
+    }
+
+    Tree::Prune();
+
+    // validating the ranges
+    assert(m_timeList.RevalidateAllRanges());
+
+} // RecTree::Prune
+
+//------------------------------------------------------------------------------------
+
+void RecTree::ReassignDLsFor(string lname, long int marker, long int ind)
+{
+    // Find out which locus we're dealing with.  We have its name.  If the locus in question
+    // is in m_pLocusVec, we call through to Tree::ReassignDLsFor.
+    long int locus = FLAGLONG;
+    for (unsigned long int lnum = 0; lnum < m_pMovingLocusVec->size(); ++lnum)
+    {
+        if ((*m_pMovingLocusVec)[lnum].GetName() == lname)
+        {
+            locus = lnum;
+        }
+    }
+    if (locus == FLAGLONG)
+    {
+        // The locus must be in m_pLocusVec--call the base function.
+        return Tree::ReassignDLsFor(lname, marker, ind);
+    }
+
+    vector<Branch_ptr> haps = m_individuals[ind].GetAllTips();
+    vector<LocusCell> cells = m_individuals[ind].GetLocusCellsFor(lname, marker);
+
+    for (unsigned long int tip = 0; tip < haps.size(); ++tip)
+    {
+        Cell_ptr origcell = haps[tip]->GetDLCell(locus, markerCell, true);
+        Cell_ptr newcell  = cells[tip][0];
+        origcell->SetSiteDLs(marker, newcell->GetSiteDLs(marker));
+        MarkForDLRecalc(haps[tip]);
+    }
+
+    // Don't recalculate aliases, since there are no aliases for the moving locus.
+} // RecTree::ReassignDLsFor
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr RecTree::CoalesceActive(double eventT, Branch_ptr active1, Branch_ptr active2, const rangeset & fcsites)
+{
+    Branch_ptr pBranch = Tree::CoalesceActive(eventT, active1, active2, fcsites);
+
+    pBranch->SetMovingDLCells(CollectMovingCells());
+
+    m_curTargetLinkweight -= active1->GetRangePtr()->GetCurTargetLinkweight();
+    m_curTargetLinkweight -= active2->GetRangePtr()->GetCurTargetLinkweight();
+    m_curTargetLinkweight += pBranch->GetRangePtr()->GetCurTargetLinkweight();
+
+    return pBranch;
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr RecTree::CoalesceInactive(double eventT, Branch_ptr active, Branch_ptr inactive, const rangeset & fcsites)
+{
+    Branch_ptr pBranch = Tree::CoalesceInactive(eventT, active, inactive, fcsites);
+
+    pBranch->SetMovingDLCells(CollectMovingCells());
+
+    m_newTargetLinkweight -= inactive->GetRangePtr()->GetNewTargetLinkweight();
+    m_curTargetLinkweight -= active->GetRangePtr()->GetCurTargetLinkweight();
+    m_newTargetLinkweight += pBranch->GetRangePtr()->GetNewTargetLinkweight();
+
+    return pBranch;
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr RecTree::Migrate(double eventT, long int topop, long int maxEvents, Branch_ptr active)
+{
+    Branch_ptr pBranch = Tree::Migrate(eventT, topop, maxEvents, active);
+
+    //    pBranch->GetRangePtr()->SetMRange(pBranch->Child(0)->GetRangePtr());  // PRUNE?
+    // A migration does not change the active or newly active Links.
+    return pBranch;
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr RecTree::DiseaseMutate(double eventT, long int endstatus, long int maxEvents, Branch_ptr active)
+{
+    Branch_ptr pBranch = Tree::DiseaseMutate(eventT, endstatus, maxEvents, active);
+
+    //    pBranch->GetRangePtr()->SetMRange(pBranch->Child(0)->GetRangePtr());  // PRUNE?
+    // A disease mutation does not change the active or newly active Links.
+    return pBranch;
+}
+
+//------------------------------------------------------------------------------------
+
+branchpair RecTree::RecombineActive(double eventT, long int maxEvents, FPartMap fparts,
+                                    Branch_ptr pActive, long int recpoint, const rangeset & fcsites, bool lowSitesOnLeft)
+{
+    // "recpoint" is a recombination Littlelink (middle of target Biglink).
+
+    if (NumberOfRecombinations() >= maxEvents)
+    {
+        rec_overrun e;
+        throw e;
+    }
+
+    // Create left parent of the active branch, saving partition handling until end.
+    // When done, "transmittedsites1" will be a RANGESET containing a single RANGEPAIR;
+    // that is, it will represent a single contiguous interval of sites.
+    bool newbranchisinactive(false);
+    rangeset transmittedsites1;
+    long int startleft;
+    long int endleft;
+
+    if (lowSitesOnLeft)
+    {
+        startleft = 0;                  // Transmitted sites are ZERO to first NOT transmitted (open upper end).
+        endleft = recpoint + 1;
+    }
+    else
+    {
+        startleft = recpoint + 1;       // Transmitted sites are first above RECPOINT to (open) upper end.
+        endleft = m_totalSites;
+    }
+
+    rangepair transmit1(startleft, endleft);
+    transmittedsites1 = AddPairToRange(transmit1, transmittedsites1);
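+    // Illustrative example (values are hypothetical): with recpoint == 249, m_totalSites == 1000, and
+    // lowSitesOnLeft true, "transmittedsites1" is {[0, 250)} for this parent while the other parent
+    // (built below) receives {[250, 1000)}.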
+
+    RBranch_ptr pParentA = RBranch_ptr(new RBranch(pActive->GetRangePtr(), newbranchisinactive, transmittedsites1, fcsites));
+    assert(pParentA);
+
+    pParentA->m_eventTime  = eventT;    // Set the event time.
+    pParentA->SetChild(0, pActive);
+    pActive->SetParent(0, pParentA);
+
+    // Create right parent of the active branch, saving partition handling until end.  Same as for left branch above.
+    rangeset transmittedsites2;
+
+    long int startright;
+    long int endright;
+
+    if (lowSitesOnLeft)
+    {
+        startright = recpoint + 1;      // Transmitted sites are first above RECPOINT to (open) upper end.
+        endright = m_totalSites;
+    }
+    else
+    {
+        startright = 0;                 // Transmitted sites are ZERO to first NOT transmitted (open upper end).
+        endright = recpoint + 1;
+    }
+
+    rangepair transmit2(startright, endright);
+    transmittedsites2 = AddPairToRange(transmit2, transmittedsites2);
+
+    RBranch_ptr pParentB = RBranch_ptr(new RBranch(pActive->GetRangePtr(), false, transmittedsites2, fcsites));
+    assert(pParentB);
+
+    pParentB->m_eventTime  = eventT;    // Set the event time.
+    pParentB->SetChild(0, pActive);
+    pActive->SetParent(1, pParentB);
+
+    // Now deal with partitions.
+    if (fparts.empty())
+    {
+        pParentA->CopyPartitionsFrom(pActive);
+        pParentB->CopyPartitionsFrom(pActive);
+    }
+    else
+    {
+        pParentA->RecCopyPartitionsFrom(pActive, fparts, true);
+        pParentB->RecCopyPartitionsFrom(pActive, fparts, false);
+    }
+
+    m_curTargetLinkweight -= pActive->GetRangePtr()->GetCurTargetLinkweight();
+    m_curTargetLinkweight += pParentA->GetRangePtr()->GetCurTargetLinkweight();
+    m_curTargetLinkweight += pParentB->GetRangePtr()->GetCurTargetLinkweight();
+
+    // Now put both branches in place.
+    m_timeList.Collate(pParentA, pParentB);
+
+    return make_pair(pParentA, pParentB);
+}
+
+//------------------------------------------------------------------------------------
+
+branchpair RecTree::RecombineInactive(double eventT, long int maxEvents, FPartMap fparts,
+                                      Branch_ptr pBranch, long int recpoint, const rangeset & fcsites)
+{
+    // "recpoint" is a recombination Littlelink (middle of target Biglink).
+
+    if (NumberOfRecombinations() >= maxEvents)
+    {
+        rec_overrun e;
+        throw e;
+    }
+
+    // This is the branch already in the tree.
+    bool inactive_is_low = pBranch->GetRangePtr()->AreLowSitesOnInactiveBranch(recpoint);
+
+    // All this might eventually be one constructor call.
+    rangepair transmit0;
+    if (inactive_is_low)
+    {
+        transmit0.first = 0L;
+        transmit0.second = recpoint + 1; // Transmitted sites are ZERO to first site NOT transmitted (open upper end).
+    }
+    else
+    {
+        transmit0.first = recpoint + 1;  // Transmitted sites are first site above "recpoint" to (open) upper end.
+        transmit0.second = m_totalSites;
+    }
+
+    // When done, "transmittedsitesInactive" will be a RANGESET containing a single RANGEPAIR;
+    // that is, it will represent a single contiguous interval of sites.
+    rangeset transmittedsitesInactive;
+    transmittedsitesInactive = AddPairToRange(transmit0, transmittedsitesInactive);
+
+    RBranch_ptr pParentInactive = RBranch_ptr(new RBranch(pBranch->GetRangePtr(), true, transmittedsitesInactive, fcsites));
+    pParentInactive->m_eventTime  = eventT;
+    pParentInactive->CopyPartitionsFrom(pBranch);
+
+    pParentInactive->SetChild(0, pBranch);
+    pBranch->Parent(0)->ReplaceChild(pBranch, pParentInactive);
+    pBranch->SetParent(0, pParentInactive);
+    if (pBranch->Parent(1))
+    {
+        pBranch->Parent(1)->ReplaceChild(pBranch, pParentInactive);
+        pBranch->SetParent(1, Branch::NONBRANCH);
+    }
+
+    // This is the new, active branch.
+    if (!inactive_is_low)
+    {
+        transmit0.first = 0L;
+        transmit0.second = recpoint + 1; // Transmitted sites are ZERO to first site NOT transmitted (open upper end).
+    }
+    else
+    {
+        transmit0.first = recpoint + 1;  // Transmitted sites are first site above "recpoint" to (open) upper end.
+        transmit0.second = m_totalSites;
+    }
+
+    // When done, "transmittedsitesActive" will be a RANGESET containing a single RANGEPAIR;
+    // that is, it will represent a single contiguous interval of sites.
+    rangeset transmittedsitesActive;
+    transmittedsitesActive = AddPairToRange(transmit0, transmittedsitesActive);
+    assert(Intersection(transmittedsitesActive, pBranch->GetRangePtr()->GetOldTargetSites()).empty());
+
+    RBranch_ptr pParentActive = RBranch_ptr(new RBranch(pBranch->GetRangePtr(), false, transmittedsitesActive, fcsites));
+    pParentActive->m_eventTime = eventT;
+
+    pParentActive->SetChild(0, pBranch);
+    pBranch->SetParent(1, pParentActive);
+
+    if (fparts.empty())
+    {
+        pParentActive->CopyPartitionsFrom(pBranch);
+    }
+    else
+    {
+        pParentActive->RecCopyPartitionsFrom(pBranch, fparts, !inactive_is_low);
+    }
+
+    // Now put both branches in place.
+    m_timeList.Collate(pParentInactive, pParentActive);
+
+    m_newTargetLinkweight -= pBranch->GetRangePtr()->GetNewTargetLinkweight();
+    m_newTargetLinkweight += pParentInactive->GetRangePtr()->GetNewTargetLinkweight();
+    m_curTargetLinkweight += pParentActive->GetRangePtr()->GetCurTargetLinkweight();
+
+    return make_pair(pParentInactive, pParentActive);
+}
+
+//------------------------------------------------------------------------------------
+
+rangevector RecTree::GetLocusSubtrees(rangepair span) const
+{
+    // Get Interval tree start sites (each is a site just to the right of a recombination breakpoint or is an endmarker).
+    set<long int> startsites(GetIntervalTreeStartSites());
+
+    // Push rangepairs representing each breakpoint into the subtree vector.  The (pt + 1) avoids creating a rangepair
+    // starting at the last site.  The two-iterator maneuver is needed because (pt + 1) is not legal on std::set
+    // iterators (they are bidirectional, not random-access).  We do not risk an empty set; we know it contains
+    // at least 2 elements.
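+    //
+    // Worked example (values are hypothetical): with recombinations at Littlelinks 249 and 599 and
+    // span == (0, 1000), startsites is {0, 250, 600, 1000} and the result is (0,250), (250,600), (600,1000).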
+    rangevector subtrees;
+    set<long int>::const_iterator pt = startsites.begin();
+    set<long int>::const_iterator nextpt = pt;
+    ++nextpt;
+    long int begin = span.first;
+    long int end = span.second;
+
+    for ( ; nextpt != startsites.end(); ++pt, ++nextpt)
+    {
+        // No overlap so we don't add it.
+        if (*nextpt <= begin || *pt >= end) continue;
+        // Some overlap, but ends may need adjustment.
+        long int first = *pt;
+        long int last = *nextpt;
+        if (first < begin) first = begin;
+        if (last > end) last = end;
+        subtrees.push_back(rangepair(first, last));
+    }
+
+    return subtrees;
+} // RecTree::GetLocusSubtrees
+
+//------------------------------------------------------------------------------------
+
+void RecTree::SetMovingLocusVec(vector<Locus> * loc)
+{
+    m_pMovingLocusVec = loc;
+    // We don't have to worry about the range--the moving loci move around within the range of the fixed loci.
+} // RecTree::SetMovingLocusVec
+
+//------------------------------------------------------------------------------------
+
+void RecTree::SetMovingMapPosition(long int mloc, long int site)
+{
+    assert(mloc < static_cast<long int>(m_pMovingLocusVec->size()));
+    (*m_pMovingLocusVec)[mloc].SetRegionalMapPosition(site);
+}
+
+//------------------------------------------------------------------------------------
+
+TBranch_ptr RecTree::CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                               const vector<LocusCell> & movingcells, const rangeset & diseasesites)
+{
+    TBranch_ptr pTip = m_timeList.CreateTip(tipdata, cells, movingcells, m_totalSites, diseasesites);
+    return pTip;
+}
+
+//------------------------------------------------------------------------------------
+
+TBranch_ptr RecTree::CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                               const vector<LocusCell> & movingcells, const rangeset & diseasesites,
+                               const vector<Locus> & loci)
+{
+    TBranch_ptr pTip = m_timeList.CreateTip(tipdata, cells, movingcells, m_totalSites, diseasesites, loci);
+    return pTip;
+}
+
+//------------------------------------------------------------------------------------
+
+bool RecTree::DoesThisLocusJump(long int mloc) const
+{
+    assert(mloc < static_cast<long int>(m_pMovingLocusVec->size()));
+    return ((*m_pMovingLocusVec)[mloc].GetAnalysisType() == mloc_mapjump);
+}
+
+//------------------------------------------------------------------------------------
+
+bool RecTree::AnyRelativeHaplotypes() const
+{
+    for (IndVec::const_iterator ind = m_individuals.begin(); ind != m_individuals.end(); ++ind)
+    {
+        if (ind->MultipleTraitHaplotypes()) return true;
+    }
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::CalculateDataLikes()
+{
+    m_overallDL = CalculateDataLikesForFixedLoci();
+    //The base function accumulates a likelihood into m_overallDL.
+    for (unsigned long int loc = 0; loc < m_pMovingLocusVec->size(); ++loc)
+    {
+        if ((*m_pMovingLocusVec)[loc].GetAnalysisType() == mloc_mapjump)
+        {
+            m_overallDL += CalculateDataLikesForMovingLocus(loc);
+        }
+    }
+    m_timeList.ClearUpdateDLs();        // Reset the updating flags.
+}
+
+//------------------------------------------------------------------------------------
+
+double RecTree::CalculateDataLikesForMovingLocus(long int loc)
+{
+    double likelihood;
+
+    const Locus & locus = (*m_pMovingLocusVec)[loc];
+    try                                 // Check for need to switch to normalization.
+    {
+        likelihood = locus.CalculateDataLikelihood(*this, true);
+    }
+
+    catch (datalikenorm_error & ex)     // Normalization is set by thrower.
+    {
+        m_timeList.SetAllUpdateDLs();   // All subsequent loci will recalculate the entire tree.
+        RunReport & runreport = registry.GetRunReport();
+        runreport.ReportChat("\n", 0);
+        runreport.ReportChat("Subtree of likelihood 0.0 found:  Turning on normalization and re-calculating.");
+
+        likelihood = locus.CalculateDataLikelihood(*this, true);
+    }
+
+    return likelihood;
+} // RecTree::CalculateDataLikesForMovingLocus
+
+//------------------------------------------------------------------------------------
+// Called only when underlying data structures (trees, branches, ranges)
+// are potentially recombinant (ie, contain RecRanges, not Ranges).
+
+set<long int> RecTree::GetIntervalTreeStartSites() const
+{
+    // Create set of Interval tree start sites (each is a site just to the right
+    // of a recombination breakpoint or is an endmarker).
+    set<long int> startsites;
+    startsites.insert(0);
+    startsites.insert(m_totalSites);
+
+    Branchconstiter brit;
+    long int littlelink;
+    for (brit = m_timeList.FirstBody(); brit != m_timeList.EndBranch(); brit = m_timeList.NextBody(brit))
+    {
+        // RBranch::GetRecpoint() returns Littlelink (Biglink midpoint) or "no-recombination" flag.
+        littlelink = (*brit)->GetRecpoint();
+        if (littlelink != FLAGLONG)     // FLAGLONG means there is no recombination here.
+        {
+            // Insert "littlelink + 1" because what we are inserting is the START SITE of an Interval,
+            // that is, the first site AFTER the Littlelink which marks the recombination point.
+            startsites.insert(littlelink + 1);
+        }
+    }
+
+    return startsites;
+} // RecTree::GetIntervalTreeStartSites
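+
+// Illustrative sketch (not part of the upstream source): how recombination
+// Littlelinks map to Interval tree start sites.  Assumes a hypothetical tree
+// whose branches carry recombinations at Littlelinks 4 and 9 on a region of
+// 20 sites (m_totalSites == 20).
+#if 0
+    set<long int> expected;
+    expected.insert(0);                 // left endmarker, always present
+    expected.insert(4 + 1);             // first site right of the breakpoint at Littlelink 4
+    expected.insert(9 + 1);             // first site right of the breakpoint at Littlelink 9
+    expected.insert(20);                // right endmarker (m_totalSites), always present
+    assert(GetIntervalTreeStartSites() == expected);
+#endif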
+
+//------------------------------------------------------------------------------------
+
+DoubleVec2d RecTree::GetMapSummary()
+{
+    DoubleVec2d retvec;
+    DoubleVec1d zeroes(m_totalSites, 0.0);
+    for (unsigned long int mloc = 0; mloc < m_pMovingLocusVec->size(); ++mloc)
+    {
+        DoubleVec1d mlikes = zeroes;
+        long int currentsite;
+        switch((*m_pMovingLocusVec)[mloc].GetAnalysisType())
+        {
+            case mloc_mapjump:
+                //Find out where it is, make the likelihood at that value 1.
+                currentsite = (*m_pMovingLocusVec)[mloc].GetRegionalMapPosition();
+                mlikes[currentsite] = 1.0;
+                break;
+            case mloc_mapfloat:
+                //Calculate the likelihoods over all subtrees, return vectors with it.
+                //mlikes = CalculateDataLikesWithRandomHaplotypesForFloatingLocus(mloc);
+                CalculateDataLikesForAllHaplotypesForFloatingLocus(mloc, mlikes);
+                break;
+            case mloc_data:
+            case mloc_partition:
+                assert(false);
+                throw implementation_error("We seem to want to collect mapping data for a segment without that type"
+                                           " of analysis.  This is our fault; e-mail us at lamarc at gs.washington.edu");
+        }
+        retvec.push_back(mlikes);
+    }
+
+    return retvec;
+}
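+
+// Illustrative note (not upstream code) on the shape of GetMapSummary()'s result:
+// one row per moving locus, one column per site.  For a mloc_mapjump locus the row
+// is 1.0 at its current map position and 0.0 elsewhere; for a mloc_mapfloat locus
+// the row holds the accumulated per-site likelihood profile computed above.
+#if 0
+    DoubleVec2d mapsum = GetMapSummary();
+    assert(mapsum.size() == m_pMovingLocusVec->size());
+    assert(mapsum.empty() || mapsum[0].size() == static_cast<unsigned long int>(m_totalSites));
+#endif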
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d RecTree::CalculateDataLikesWithRandomHaplotypesForFloatingLocus(long int mloc)
+{
+    RandomizeMovingHaplotypes(mloc);
+    return CalculateDataLikesForFloatingLocus(mloc);
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::CalculateDataLikesForAllHaplotypesForFloatingLocus(long int mloc, DoubleVec1d & mlikes)
+{
+    long int ind = 0;
+    //We need to change the vector of mlikes from zeroes to EXPMINS.
+    mlikes = SafeLog(mlikes);
+    UpdateDataLikesForIndividualsFrom(ind, mloc, mlikes);
+}
+
+//------------------------------------------------------------------------------------
+
+bool RecTree::UpdateDataLikesForIndividualsFrom(long int ind, long int mloc, DoubleVec1d & mlikes)
+{
+    if (static_cast<unsigned long int>(ind) == m_individuals.size()) return true;
+    string lname = (*m_pMovingLocusVec)[mloc].GetName();
+    //LS NOTE: If this ASSERTs, we are mapping something with more than one marker.
+    // Might be OK, but should be checked.
+    assert ((*m_pMovingLocusVec)[mloc].GetNmarkers() == 1);
+    for (long int marker = 0; marker < (*m_pMovingLocusVec)[mloc].GetNmarkers(); ++marker)
+    {
+        bool newhaps = true;
+        for (m_individuals[ind].ChooseFirstHaplotypeFor(lname, marker);
+             newhaps;
+             newhaps = m_individuals[ind].ChooseNextHaplotypeFor(lname, marker))
+        {
+            ReassignDLsFor(lname, marker, ind);
+            if (UpdateDataLikesForIndividualsFrom(ind + 1, mloc, mlikes))
+            {
+                //We're on the last one--update the data likelihood.
+                DoubleVec1d newlikes = CalculateDataLikesForFloatingLocus(mloc);
+                //LS TEST:
+                //cerr << ToString(newlikes, 5) << endl;
+                mlikes = AddValsOfLogs(mlikes, newlikes);
+            }
+        }
+    }
+    return false;
+}
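+
+// Illustrative note (not upstream code): the recursion above enumerates every joint
+// haplotype assignment across individuals.  With a hypothetical three individuals,
+// each offering two resolutions at the single moving marker, it visits 2*2*2 = 8
+// assignments.  "firstAssignmentLogLikes" and "secondAssignmentLogLikes" below are
+// hypothetical per-site log-likelihood vectors for two such assignments.
+#if 0
+    // Start from log(0) and fold in each assignment, assuming AddValsOfLogs()
+    // performs an elementwise log-sum as its use above suggests.
+    DoubleVec1d total(SafeLog(DoubleVec1d(m_totalSites, 0.0)));
+    total = AddValsOfLogs(total, firstAssignmentLogLikes);
+    total = AddValsOfLogs(total, secondAssignmentLogLikes);
+    // total now holds log(L1 + L2) at each site.
+#endif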
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d RecTree::CalculateDataLikesForFloatingLocus(long int mloc)
+{
+    m_timeList.SetAllUpdateDLs();
+
+    // We always need the update-DL flags on because there's only enough storage for a
+    // single subtree, and the last subtree we used was at the other end of the range.
+    DoubleVec1d datalikes(m_totalSites);
+    rangeset allowedranges = (*m_pMovingLocusVec)[mloc].GetAllowedRange();
+
+    // "startsites" is a set of Interval tree start sites (each is a site
+    // just to the right of a recombination breakpoint or is an endmarker).
+    set<long int> startsites = GetIntervalTreeStartSites();
+    startsites = IgnoreDisallowedSubTrees(startsites, allowedranges);
+    set<long int>::iterator left(startsites.begin()), right(startsites.begin());
+
+    ++right;
+    for ( ; right != startsites.end(); ++left, ++right )
+    {
+        (*m_pMovingLocusVec)[mloc].SetRegionalMapPosition(*left);
+        double datalike = CalculateDataLikesForMovingLocus(mloc);
+        for (long int siteindex = *left; siteindex < *right; ++siteindex)
+        {
+            datalikes[siteindex] = datalike;
+        }
+    }
+
+    datalikes = ZeroDisallowedSites(datalikes, allowedranges);
+    m_timeList.ClearUpdateDLs();
+
+    //LS NOTE: We could theoretically skip calling ClearUpdateDLs if there were only one subtree,
+    // but only if the *next* call to this function also had the exact same subtree, which
+    // we can't enforce.  So, always call ClearUpdateDLs.
+    return datalikes;
+}
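+
+// Illustrative worked example (not upstream code) of the interval loop above, assuming
+// hypothetical start sites {0, 5, 20}: every site within one interval tree shares the
+// same marginal genealogy, so the data likelihood is evaluated once per interval and
+// copied to each of its sites.
+#if 0
+    // left == 0, right == 5  : datalikes[0..4]  <- likelihood with SetRegionalMapPosition(0)
+    // left == 5, right == 20 : datalikes[5..19] <- likelihood with SetRegionalMapPosition(5)
+    // Sites outside the allowed ranges are then floored by ZeroDisallowedSites().
+#endif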
+
+//------------------------------------------------------------------------------------
+
+set<long int> RecTree::IgnoreDisallowedSubTrees(set<long int> startsites, rangeset allowedranges)
+{
+    // "startsites" and "newstartsites" are sets of Interval tree start sites.
+    // Each member is a site just to the right of a recombination breakpoint or is an endmarker).
+    set<long int> newstartsites;
+    newstartsites.insert(allowedranges.begin()->first);
+    newstartsites.insert(m_totalSites); // To close out the final range.
+
+    for (rangeset::iterator range = allowedranges.begin(); range != allowedranges.end(); ++range)
+    {
+        for (long int site = range->first; site < range->second; ++site)
+        {
+            if (startsites.find(site) != startsites.end())
+            {
+                newstartsites.insert(site);
+            }
+        }
+    }
+    return newstartsites;
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d RecTree::ZeroDisallowedSites(DoubleVec1d datalikes, rangeset allowedranges)
+{
+    DoubleVec1d newdatalikes(datalikes.size(), -DBL_BIG);
+    for (rangeset::iterator range = allowedranges.begin(); range != allowedranges.end(); ++range)
+    {
+        assert(range->first >= 0);
+        assert(static_cast<unsigned long int>(range->second) <= datalikes.size());
+        for (long int site = range->first; site < range->second; ++site)
+        {
+            newdatalikes[site] = datalikes[site];
+        }
+    }
+    return newdatalikes;
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::RandomizeMovingHaplotypes(long int mlocus)
+{
+    //This function loops over all individuals in m_individuals and randomizes the haplotypes.
+    string lname = (*m_pMovingLocusVec)[mlocus].GetName();
+    long int nmarkers = (*m_pMovingLocusVec)[mlocus].GetNmarkers();
+    for (long int marker = 0; marker < nmarkers; ++marker)
+    {
+        for (unsigned long int ind = 0; ind < m_individuals.size(); ++ind)
+        {
+            if (m_individuals[ind].ChooseRandomHaplotypesFor(lname, marker))
+            {
+                //The chosen one is different from last time.
+                ReassignDLsFor(lname, marker, ind);
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+bool RecTree::SimulateDataIfNeeded()
+{
+    if (Tree::SimulateDataIfNeeded())
+    {
+        //LS DEBUG: The current version uses the same data for all moving loci.  If we want to simulate
+        // data individually, uncomment the following lines and replace the subsequent loops over lnum
+        // with calls on "locus".
+        //
+        //  for (unsigned long int mloc = 0; mloc < m_pMovingLocusVec->size(); ++mloc)
+        //  {
+        //      Locus & locus = (*m_pMovingLocusVec)[mloc];
+        if ((*m_pMovingLocusVec).size() > 0)
+        {
+            Locus & locus = (*m_pMovingLocusVec)[0];
+            if (locus.GetShouldSimulate())
+            {
+                for (size_t lnum = 0; lnum < (*m_pMovingLocusVec).size(); ++lnum)
+                {
+                    (*m_pMovingLocusVec)[lnum].ClearVariability();
+                    (*m_pMovingLocusVec)[lnum].SetNewMapPositionIfMoving();
+                }
+
+                // Check to see if this locus's site was already simulated.
+                Locus * simlocus = NULL; //to prevent compiler warnings
+                for (unsigned long int loc = 0; loc < m_pLocusVec->size(); ++loc)
+                {
+                    Locus & oldlocus = (*m_pLocusVec)[loc];
+                    if (oldlocus.GetShouldSimulate())
+                    {
+                        if (oldlocus.SiteInLocus(locus.GetRegionalMapPosition()))
+                        {
+                            simlocus = & oldlocus;
+                        }
+                    }
+                }
+                if (simlocus != NULL)
+                {
+                    //If there was at least one site in the original that overlapped with the range on the moving
+                    // locus, we take that chance as the chance that we hit the original at all, but we want
+                    // to choose a variable site from that range.  This introduces an ascertainment bias that I'm
+                    // not exactly sure how to compensate for, or whether it needs to be compensated for at all.
+                    long int newsite = simlocus->ChooseVariableSiteFrom(locus.GetAllowedRange());
+                    if (newsite != FLAGLONG)
+                    {
+                        for (size_t lnum = 0; lnum < (*m_pMovingLocusVec).size(); ++lnum)
+                        {
+                            (*m_pMovingLocusVec)[lnum].SetRegionalMapPosition(newsite);
+                            (*m_pMovingLocusVec)[lnum].SetTrueSite(newsite);
+                            //LS TEST
+                            //simlocus->PrintOnesAndZeroesForVariableSites();
+                            (*m_pMovingLocusVec)[lnum].SetVariableRange(simlocus->CalculateVariableRange());
+                        }
+                        //LS DEBUG:  Not really for release.
+                        //simlocus->CalculateDisEqFor(newsite);
+                        //If there were no variable sites in the original, ChooseVariableSiteFrom() returned FLAGLONG
+                        // and we keep the originally chosen site.  Our trait isn't going to be variable, but them's the breaks.
+                    }
+                    for (size_t lnum = 0; lnum < (*m_pMovingLocusVec).size(); ++lnum)
+                    {
+                        (*m_pMovingLocusVec)[lnum].CopyDataFrom(*simlocus, *this);
+                    }
+                    if (locus.IsNighInvariant())
+                    {
+                        registry.GetRunReport().ReportNormal("All simulated data was nigh invariant for the"
+                                                             " source segment, giving us nigh invariant data"
+                                                             " for the simulated segment "
+                                                             + locus.GetName() + " as well.");
+                    }
+                    //LS DEBUG SIM:  This would delete information again, for simulations where
+                    // the data exists but you never sequenced it.
+                    // simlocus->RandomizeHalf(*this);
+                }
+                else
+                {
+                    locus.SimulateData(*this, m_totalSites);
+                    // Now copy this data into any other moving loci.
+                    long int newsite = locus.GetRegionalMapPosition();
+                    for (size_t lnum = 1; lnum < (*m_pMovingLocusVec).size(); ++lnum)
+                    {
+                        (*m_pMovingLocusVec)[lnum].SetRegionalMapPosition(newsite);
+                        (*m_pMovingLocusVec)[lnum].SetTrueSite(newsite);
+                        (*m_pMovingLocusVec)[lnum].CopyDataFrom(locus, *this);
+                    }
+                }
+                for (size_t lnum = 0; lnum < (*m_pMovingLocusVec).size(); ++lnum)
+                {
+                    (*m_pMovingLocusVec)[lnum].MakePhenotypesFor(m_individuals);
+                }
+            }
+        }
+        //Redo the aliases, in case we randomized the data in the original locus.
+        SetupAliases(*m_pLocusVec);
+        return true;
+    }
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+long int RecTree::NumberOfRecombinations()
+{
+    return (m_timeList.HowMany(btypeRec) / 2);
+}
+
+//------------------------------------------------------------------------------------
+
+void RecTree::SetCurTargetLinkweightFrom(const BranchBuffer & brbuffer)
+{
+    m_curTargetLinkweight = ZERO;
+
+    vector<Branch_ptr> branches(brbuffer.ExtractConstBranches());
+    vector<Branch_ptr>::const_iterator branch;
+
+    for (branch = branches.begin(); branch != branches.end(); ++branch)
+    {
+        m_curTargetLinkweight += (*branch)->GetRangePtr()->GetCurTargetLinkweight();
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void RecTree::SetNewTargetLinkweightFrom(const BranchBuffer & brbuffer)
+{
+    m_newTargetLinkweight = ZERO;
+
+    vector<Branch_ptr> branches(brbuffer.ExtractConstBranches());
+    vector<Branch_ptr>::const_iterator branch;
+
+    for (branch = branches.begin(); branch != branches.end(); ++branch)
+    {
+        m_newTargetLinkweight += (*branch)->GetRangePtr()->GetNewTargetLinkweight();
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void RecTree::PrintTipData(long int mloc, long int marker)
+{
+    cerr << endl << "Tip data:" << endl;
+    for (Branchconstiter tip = m_timeList.FirstTip(); tip != m_timeList.FirstBody(); ++tip)
+    {
+        cerr << " " << (*m_pMovingLocusVec)[mloc].GetDataModel()->CellToData((*tip)->GetDLCell(mloc, marker, true), marker);
+    }
+    cerr << endl;
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void RecTree::PrintRangeSetCount(const rangesetcount & rsc)
+{
+    for (RSCcIter rsci = rsc.begin(); rsci != rsc.end(); ++rsci)
+    {
+        cerr << (*rsci).first << ": " << ToString((*rsci).second) << endl;
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+rangesetcount RecTree::RemoveEmpty(const rangesetcount & rsc)
+{
+    rangesetcount result;
+    for (RSCcIter rsci = rsc.begin(); rsci != rsc.end(); ++rsci)
+    {
+        if (!(*rsci).second.empty())
+        {
+            result.insert(make_pair((*rsci).first, (*rsci).second));
+        }
+    }
+    return result;
+}
+
+//____________________________________________________________________________________
diff --git a/src/tree/sticksum.h b/src/tree/sticksum.h
new file mode 100644
index 0000000..f4790c3
--- /dev/null
+++ b/src/tree/sticksum.h
@@ -0,0 +1,55 @@
+// $Id: sticksum.h,v 1.5 2011/04/23 02:02:50 bobgian Exp $
+
+/*
+ *    Copyright 2006  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph
+ *    Felsenstein
+ *
+ *    This software is distributed free of charge for non-commercial
+ *    use and is copyrighted.  Of course, we do not guarantee that the
+ *    software works, and are not responsible for any damage you may
+ *    cause or have.
+ */
+
+/********************************************************************
+Class StickSummary.
+Summarizes the information from a single stick (or run of identical
+sticks) for the use of the maximizer.  Conceptually, it has the same
+polymorphism as the stick object.
+
+Written by Jon Yamato
+*********************************************************************/
+
+#ifndef STICKSUM_H
+#define STICKSUM_H
+
+#include "vectorx.h"
+
+class Tree;
+
+class StickSummary
+{
+  private:
+    StickSummary();  // no default ctor
+
+  public:
+    // We accept default copy ctor, and operator=
+    StickSummary(Tree& tr);
+    virtual ~StickSummary() {};
+
+    long ncopies;
+};
+
+class StairStepStickSummary : public StickSummary
+{
+  private:
+    StairStepStickSummary();  // no default ctor
+
+  public:
+    // We accept default copy ctor, and operator=
+    StairStepStickSummary(Tree& tr);
+    virtual ~StairStepStickSummary() {};
+};
+
+#endif  // STICKSUM_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/summary.cpp b/src/tree/summary.cpp
new file mode 100644
index 0000000..e882bbb
--- /dev/null
+++ b/src/tree/summary.cpp
@@ -0,0 +1,707 @@
+// $Id: summary.cpp,v 1.54 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <cmath>
+#include <iostream>
+
+#include "constants.h"
+#include "force.h"
+#include "intervaldata.h"
+#include "range.h"                      // For Link-related typedefs and constants.
+#include "region.h"
+#include "summary.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+const DoubleVec1d & Summary::GetShortPoint() const
+{
+    return m_shortpoint;
+
+} // Summary::ShortPoint
+
+//------------------------------------------------------------------------------------
+
+const DoubleVec1d & Summary::GetShortWait() const
+{
+    return m_shortwait;
+} // Summary::ShortWait
+
+//------------------------------------------------------------------------------------
+
+const LongVec2d & Summary::GetShortPicks() const
+{
+    return m_shortpicks;
+} // Summary::ShortPicks
+
+//------------------------------------------------------------------------------------
+
+Interval const * Summary::GetLongPoint() const
+{
+    assert(!m_shortness);  // tried to get long-form inappropriately?
+    return m_front;
+} // Summary::LongPoint
+
+//------------------------------------------------------------------------------------
+
+const list<Interval> & Summary::GetLongWait() const
+{
+    assert(!m_shortness);  // tried to get long-form inappropriately?
+    return m_intervalData.m_intervals;
+}  // Summary::LongWait
+
+//------------------------------------------------------------------------------------
+
+bool Summary::GetShortness() const
+{
+    return m_shortness;
+}
+
+//------------------------------------------------------------------------------------
+
+void Summary::AddInterval(double time, const LongVec2d & pk, const LongVec1d & xk,
+                          Linkweight recweight, xpart_t ostat, xpart_t nstat,
+                          long int recpoint, const LongVec1d & picks, force_type type)
+{
+    // "recweight" is a Link recombination weight (Biglink weight or number of Littlelinks).
+    // "recpoint" is a recombination Littlelink (middle of target Biglink) or FLAGLONG.
+    // Add interval, retaining a pointer to it.
+    Interval* thisinterval = m_intervalData.AddInterval(m_back, time, pk, xk, recweight, ostat, nstat, recpoint, picks, type);
+    // Put that pointer in appropriate place(s).
+    if (!m_front) m_front = thisinterval;
+    m_back = thisinterval;
+    // m_intervalData.PrintIntervalData(); // JRM debug
+
+} // Summary::AddInterval
+
+//------------------------------------------------------------------------------------
+
+bool Summary::Compress()
+{
+    ComputeShortWait();
+    ComputeShortPoint();
+    ComputeShortPicks();
+    return m_shortness;
+} // Summary::Compress
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Summary * CoalSummary::Clone(IntervalData& interval) const
+{
+    // NB:  This takes a reference to the IntervalData to which
+    // the new Summary will belong.  It does *not* copy the
+    // contents of the old Summary; only the type and m_shortness.
+
+    return new CoalSummary(interval, m_shortness);
+} // Clone
+
+//------------------------------------------------------------------------------------
+
+void CoalSummary::ComputeShortPoint()
+{
+    // Set up the recipient vector.
+    m_shortpoint.assign(m_nparams, 0.0);
+
+    Interval* in;
+
+    for (in = m_front; in != NULL; in = in->m_next)
+    {
+        ++m_shortpoint[in->m_oldstatus];
+    }
+
+} // CoalSummary::ComputeShortPoint
+
+//------------------------------------------------------------------------------------
+
+void CoalSummary::ComputeShortWait()
+{
+    // Set up the recipient vector.
+    m_shortwait.assign(m_nparams, 0.0);
+
+    list<Interval>::const_iterator interval = m_intervalData.begin();
+    list<Interval>::const_iterator end = m_intervalData.end();
+    double starttime = 0.0;
+
+    for ( ; interval != end; ++interval)
+    {
+        double deltaTime = interval->m_endtime - starttime;
+        long int param;
+        for (param = 0; param < m_nparams; ++param)
+        {
+            // OPT:  pre-calculate k(k-1) in storage routines....
+            double k = interval->m_xpartlines[param];
+            m_shortwait[param] += deltaTime * k * (k - 1);
+        }
+        starttime = interval->m_endtime;
+    }
+
+} // CoalSummary::ComputeShortWait
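+
+// Illustrative worked example (not upstream code) of the k(k-1)t accumulation above,
+// assuming a single population and two hypothetical intervals.
+#if 0
+    // Interval 1: 3 lineages until t = 0.2  ->  0.2 * 3 * 2         == 1.2
+    // Interval 2: 2 lineages until t = 0.5  ->  (0.5 - 0.2) * 2 * 1 == 0.6
+    // m_shortwait[0] == 1.8, the waiting-time statistic the maximizer pairs with
+    // the coalescence counts produced by ComputeShortPoint().
+#endif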
+
+//------------------------------------------------------------------------------------
+
+void CoalSummary::ComputeShortPicks()
+{
+    const ForceSummary & fs = registry.GetForceSummary();
+    long int numlocalforces = fs.GetNLocalPartitionForces();
+
+    // If there are no local-partition-forces, do nothing.
+    if (numlocalforces == 0) return;
+    // If there is no recombination, do nothing.
+    if (!fs.CheckForce(force_REC)) return;
+
+    m_shortpicks.clear();
+    const ForceVec& forces = fs.GetPartitionForces();
+    unsigned long int i;
+    long int index = 0;
+    for (i = 0; i < forces.size(); ++i)
+    {
+        if (forces[i]->IsLocalPartitionForce())
+        {
+            const PartitionForce* partforce =
+                dynamic_cast<const PartitionForce*>(forces[i]);
+            long int nparts = partforce->GetNPartitions();
+            LongVec1d counts(nparts, 0L);
+            list<Interval>::const_iterator interval = m_intervalData.begin();
+            list<Interval>::const_iterator end = m_intervalData.end();
+            for ( ; interval != end; ++interval)
+            {
+                if (!interval->m_partnerpicks.empty())
+                {
+                    ++counts[interval->m_partnerpicks[index]];
+                }
+            }
+            m_shortpicks.push_back(counts);
+            ++index;
+        }
+    }
+} // CoalSummary::ComputeShortPicks
+
+//------------------------------------------------------------------------------------
+
+void CoalSummary::AdjustSummary(const DoubleVec1d & totals, long int region)
+{
+
+    Interval* fakefront = NULL;
+    Interval* fakeback = NULL;
+    assert(totals.size()==m_shortpoint.size()); // inconsistent input?
+    unsigned long int nparams(totals.size());
+    bool didadjust = false;
+
+    unsigned long int param;
+    for(param = 0; param < nparams; ++param)
+    {
+        if (totals[param] != 0.0) continue;
+
+        if (!m_shortness)
+        {
+            // Add fake interval to correct for "fatal attraction"; args specify "nonexistent" interval.
+            Interval * newinterval = m_fakeIntervals.AddDummyInterval(fakeback, ZERO, param, FLAGLONG, FLAGLONG, force_COAL);
+
+            if (!fakefront) fakefront = newinterval;
+            fakeback = newinterval;
+
+            didadjust = true;
+        }
+
+        // adjust short-form summary statistics
+        m_shortpoint[param] += 1.0;
+    }
+
+    if (didadjust)
+    {
+        // Hook fake intervals onto front of real intervals.
+        fakeback->m_next = m_front;
+        m_front = fakefront;
+    }
+
+} // CoalSummary::AdjustSummary
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Summary * MigSummary::Clone(IntervalData& interval) const
+{
+    // NB:  This takes a reference to the IntervalData to which
+    // the new Summary will belong.  It does *not* copy the
+    // contents of the old Summary; only the type and m_shortness.
+
+    return new MigSummary(interval, m_shortness);
+} // Clone
+
+//------------------------------------------------------------------------------------
+
+void MigSummary::ComputeShortPoint()
+{
+    // Set up the recipient vector.
+    m_shortpoint.assign(m_npop * m_npop, 0.0);
+
+    Interval* in;
+
+    for (in = m_front; in != NULL; in = in->m_next)
+    {
+        // Migration rates vary newstatus on rows and oldstatus on columns.
+        ++m_shortpoint[m_npop * in->m_newstatus + in->m_oldstatus];
+    }
+
+} // MigSummary::ComputeShortPoint
+
+//------------------------------------------------------------------------------------
+
+void MigSummary::ComputeShortWait()
+{
+    // Set up the recipient vector.
+    m_shortwait.assign(m_npop, 0.0);
+
+    list<Interval>::const_iterator interval = m_intervalData.begin();
+    list<Interval>::const_iterator end = m_intervalData.end();
+    long int pop;
+    double starttime = 0.0;
+
+    for ( ; interval != end; ++interval)
+    {
+        double deltaTime = interval->m_endtime - starttime;
+        for (pop = 0; pop < m_npop; ++pop)
+        {
+            double k = interval->m_partlines[m_migpartindex][pop];
+            m_shortwait[pop] += deltaTime * k;
+        }
+        starttime = interval->m_endtime;
+    }
+
+} // MigSummary::ComputeShortWait
+
+//------------------------------------------------------------------------------------
+
+void MigSummary::AdjustSummary(const DoubleVec1d & totals, long int region)
+{
+    assert(static_cast<xpart_t>(totals.size()) == m_npop*m_npop);
+    Interval* fakefront = NULL;
+    Interval* fakeback = NULL;
+
+    //LS NOTE: It's possible that this will add a migration event for a
+    // migration that's been constrained to be zero.  However, the maximizer
+    // ignores all such events in this case, so adding one is not fatal.  Though
+    // one should be wary.
+
+    bool didadjust = false;
+
+    xpart_t frompop, topop;
+
+    for (frompop = 0; frompop != m_npop; ++frompop)
+    {
+        for (topop = 0; topop != m_npop; ++topop)
+        {
+
+            // no adjustment to diagonal entries
+            if (frompop == topop) continue;
+
+            long int param = frompop * m_npop + topop; // index into linearized vector
+            if (totals[param] == 0.0)                  // no events of this type
+            {
+                if (!m_shortness)
+                {
+                    // Add fake interval to correct for "fatal attraction"; args specify "nonexistent" interval.
+                    Interval* newinterval = m_fakeIntervals.AddDummyInterval(fakeback, ZERO, frompop, topop, FLAGLONG, force_MIG);
+
+                    if (!fakefront) fakefront = newinterval;
+                    fakeback = newinterval;
+
+                    didadjust = true;
+                }
+
+                // adjust short-form summary statistics
+                m_shortpoint[param] += 1.0;
+            }
+        }
+    }
+
+    if (didadjust)
+    {
+        // Hook fake intervals onto front of real intervals.
+        fakeback->m_next = m_front;
+        m_front = fakefront;
+    }
+} // MigSummary::AdjustSummary
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Summary * DiseaseSummary::Clone(IntervalData& interval) const
+{
+    // NB:  This takes a reference to the IntervalData to which
+    // the new Summary will belong.  It does *not* copy the
+    // contents of the old Summary; only the type and m_shortness.
+
+    return new DiseaseSummary(interval, m_shortness);
+} // Clone
+
+//------------------------------------------------------------------------------------
+
+void DiseaseSummary::ComputeShortPoint()
+{
+    // Set up the recipient vector.
+    m_shortpoint.assign(m_nstati * m_nstati, 0.0);
+
+    Interval* in;
+
+    for (in = m_front; in != NULL; in = in->m_next)
+    {
+        // Disease rates vary newstatus on rows and oldstatus on columns.
+        ++m_shortpoint[m_nstati * in->m_newstatus + in->m_oldstatus];
+    }
+
+} // DiseaseSummary::ComputeShortPoint
+
+//------------------------------------------------------------------------------------
+
+void DiseaseSummary::ComputeShortWait()
+{
+    // Set up the recipient vector.
+    m_shortwait.assign(m_nstati, 0.0);
+
+    list<Interval>::const_iterator interval = m_intervalData.begin();
+    list<Interval>::const_iterator end = m_intervalData.end();
+    long int status;
+    double starttime = 0.0;
+
+    for ( ; interval != end; ++interval)
+    {
+        double deltaTime = interval->m_endtime - starttime;
+        for (status = 0; status < m_nstati; ++status)
+        {
+            double k = interval->m_partlines[m_dispartindex][status];
+            m_shortwait[status] += deltaTime * k;
+        }
+        starttime = interval->m_endtime;
+    }
+
+} // DiseaseSummary::ComputeShortWait
+
+//------------------------------------------------------------------------------------
+
+void DiseaseSummary::AdjustSummary(const DoubleVec1d & totals, long int region)
+{
+    assert(static_cast<xpart_t>(totals.size()) == m_nstati*m_nstati);
+    Interval* fakefront = NULL;
+    Interval* fakeback = NULL;
+
+    bool didadjust = false;
+
+    xpart_t oldstatus, newstatus;
+
+    for (oldstatus = 0; oldstatus != m_nstati; ++oldstatus)
+    {
+        for (newstatus = 0; newstatus != m_nstati; ++newstatus)
+        {
+
+            // no adjustment to diagonal entries
+            if (oldstatus == newstatus) continue;
+
+            // index into linearized vector
+            long int param = oldstatus * m_nstati + newstatus;
+
+            if (totals[param] == 0.0)   // no events of this type
+            {
+                if (!m_shortness)
+                {
+                    // Add fake interval to correct for "fatal attraction"; args specify "nonexistent" interval.
+                    Interval* newinterval = m_fakeIntervals.AddDummyInterval(fakeback, ZERO, oldstatus, newstatus, FLAGLONG, force_DISEASE);
+
+                    if (!fakefront) fakefront = newinterval;
+                    fakeback = newinterval;
+
+                    didadjust = true;
+                }
+
+                // adjust short-form summary statistics
+                m_shortpoint[param] += 1.0;
+            }
+        }
+    }
+
+    if (didadjust)
+    {
+        // Hook fake intervals onto front of real intervals.
+        fakeback->m_next = m_front;
+        m_front = fakefront;
+    }
+} // DiseaseSummary::AdjustSummary
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Summary * EpochSummary::Clone(IntervalData& interval) const
+{
+    // NB:  This takes a reference to the IntervalData to which
+    // the new Summary will belong.  It does *not* copy the
+    // contents of the old Summary; only the type and m_shortness.
+
+    return new EpochSummary(interval, m_shortness);
+} // Clone
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+RecSummary::RecSummary(IntervalData& interval, bool shortform)
+    :
+    Summary(interval,registry.GetForceSummary().GetNParameters(force_REC), shortform),
+    m_nrecsbyxpart(registry.GetForceSummary().GetNParameters(force_COAL), 0L),
+    m_nrecs(0L)
+{
+    long int i(0L);
+    const ForceVec forces(registry.GetForceSummary().GetPartitionForces());
+    ForceVec::const_iterator force;
+
+    (void)i;  // Silence compiler warning about unused variable.
+
+    for(force = forces.begin(); force != forces.end(); ++force)
+    {
+        if ((*force)->IsLocalPartitionForce())
+        {
+            LongVec1d fparts((*force)->GetNPartitions(), 0L);
+            m_nrecsbydispart = fparts;
+        }
+    }
+
+} // RecSummary::ctor
+
+//------------------------------------------------------------------------------------
+
+Summary * RecSummary::Clone(IntervalData& interval) const
+{
+    // NB:  This takes a reference to the IntervalData to which
+    // the new Summary will belong.  It does *not* copy the
+    // contents of the old Summary; only the type and m_shortness.
+
+    return new RecSummary(interval, m_shortness);
+} // Clone
+
+//------------------------------------------------------------------------------------
+
+void RecSummary::ComputeShortPoint()
+{
+    // Set up the recipient vector.
+    m_shortpoint.assign(m_nparams, 0.0);
+
+    Interval* in;
+    for (in = m_front; in != NULL; in = in->m_next)
+    {
+        ++m_shortpoint[0];
+    }
+
+} // RecSummary::ComputeShortPoint
+
+//------------------------------------------------------------------------------------
+
+void RecSummary::ComputeShortWait()
+{
+    // Set up the recipient vector.
+    m_shortwait.assign(m_nparams, 0.0);
+
+    list<Interval>::const_iterator interval = m_intervalData.begin();
+    list<Interval>::const_iterator end = m_intervalData.end();
+    double starttime = 0.0;
+    //  long int count = 1;
+    for ( ; interval != end; ++interval)
+    {
+        double deltaTime = interval->m_endtime - starttime;
+        m_shortwait[0] += deltaTime * interval->m_recweight;
+        starttime = interval->m_endtime;
+#if 0
+        cerr << "ShortWait "
+             << count++
+             << ": ts="
+             << starttime
+             << " te="
+             << interval->m_endtime
+             << " recweight="
+             << interval->m_recweight
+             << " r="
+             << m_shortwait[0]
+             << endl;
+#endif
+    }
+
+} // RecSummary::ComputeShortWait
+
+//------------------------------------------------------------------------------------
+
+void RecSummary::AdjustSummary(const DoubleVec1d & totals, long int region)
+{
+    assert(totals.size() == 1);  // inconsistent forces?!
+    if (!registry.GetDataPack().GetRegion(region).RecombinationCanBeEstimated())
+    {
+        return;
+    }
+    Interval* fakefront = NULL;
+    Interval* fakeback = NULL;
+    bool didadjust = false;
+
+    long int param = 0;
+    if (totals[param] == 0.0)  // no events of this type
+    {
+        if (!m_shortness)
+        {
+            // Add fake interval to correct for "fatal attraction"; args specify "nonexistent" interval.
+            Interval* newinterval = m_fakeIntervals.AddDummyInterval(fakeback, ZERO, FLAGLONG, FLAGLONG, 0L, force_REC);
+
+            if (!fakefront) fakefront = newinterval;
+            fakeback = newinterval;
+
+            didadjust = true;
+        }
+
+        // adjust short-form summary statistics
+        m_shortpoint[param] += 1.0;
+    }
+    if (didadjust)
+    {
+        // Hook fake intervals onto front of real intervals.
+        fakeback->m_next = m_front;
+        m_front = fakefront;
+    }
+} // RecSummary::AdjustSummary
+
+//------------------------------------------------------------------------------------
+
+void RecSummary::AddToRecombinationCounts(const LongVec1d & membership)
+{
+    // NB: THIS CODE ASSUMES A DISEASE FORCE EXISTS!
+    // m_nrecsbyxpart
+    m_nrecsbyxpart[registry.GetDataPack().GetCrossPartitionIndex(membership)]++;
+
+    // m_nrecsbydispart
+    // it also assumes that Disease is the first (and only) local partition force
+    long int lpforce = registry.GetForceSummary().GetLocalPartitionIndexes()[0];
+    m_nrecsbydispart[membership[lpforce]]++;
+    m_nrecs++;
+
+} // RecSummary::AddToRecombinationCounts
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+Summary * DivMigSummary::Clone(IntervalData& interval) const
+{
+    // NB:  This takes a reference to the IntervalData to which
+    // the new Summary will belong.  It does *not* copy the
+    // contents of the old Summary; only the type and m_shortness.
+
+    return new DivMigSummary(interval, m_shortness);
+} // Clone
+
+//------------------------------------------------------------------------------------
+
+void DivMigSummary::ComputeShortPoint()
+{
+
+    // Set up the recipient vector.
+    m_shortpoint.assign(m_npop * m_npop, 0.0);
+
+    Interval* in;
+
+    for (in = m_front; in != NULL; in = in->m_next)
+    {
+        // Migration rates vary newstatus on rows and oldstatus on columns.
+        ++m_shortpoint[m_npop * in->m_newstatus + in->m_oldstatus];
+    }
+
+} // DivMigSummary::ComputeShortPoint
+
+//------------------------------------------------------------------------------------
+
+void DivMigSummary::ComputeShortWait()
+{
+    // Set up the recipient vector.
+    m_shortwait.assign(m_npop * m_npop, 0.0);
+
+    list<Interval>::const_iterator interval = m_intervalData.begin();
+    list<Interval>::const_iterator end = m_intervalData.end();
+    long int pop;
+    vector<Epoch>::const_iterator curepoch(m_epochs->begin());
+    LongVec1d epochpops(curepoch->PopulationsHere());
+    double starttime = 0.0;
+
+    for ( ; interval != end; ++interval)
+    {
+        double deltaTime = interval->m_endtime - starttime;
+
+        for (pop = 0; pop < m_npop; ++pop)
+        {
+            double k = interval->m_partlines[m_divmigpartindex][pop];
+
+            // now add k * deltaTime to all populations extant in this epoch
+            LongVec1d::iterator epop;
+            for(epop = epochpops.begin(); epop != epochpops.end(); ++epop)
+            {
+                if (pop == (*epop)) continue;
+                // Migration rates vary newstatus on rows and oldstatus on columns.
+                m_shortwait[pop * m_npop + (*epop)] += deltaTime * k;
+            }
+        }
+
+        if (interval->m_type == force_DIVERGENCE)
+        {
+            ++curepoch;
+            assert(curepoch != m_epochs->end());
+            epochpops = curepoch->PopulationsHere();
+        }
+
+        starttime = interval->m_endtime;
+    }
+
+} // DivMigSummary::ComputeShortWait
+
+//------------------------------------------------------------------------------------
+
+void DivMigSummary::AdjustSummary(const DoubleVec1d & totals, long int region)
+// NB:  AdjustSummary prevents fatal attraction in likelihood runs.  DivMig is
+//  not compatible with likelihood runs, therefore we do nothing here.
+{
+} // DivMigSummary::AdjustSummary
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+void Summary::AddShortPoint(string s)
+{
+    double a = static_cast<long int>(atoi(s.c_str()));
+    m_shortpoint.push_back(a);
+}
+
+//------------------------------------------------------------------------------------
+
+void Summary::AddShortWait(string s)
+{
+    double a = static_cast<double>(atof(s.c_str()));
+    m_shortwait.push_back(a);
+}
+
+//------------------------------------------------------------------------------------
+
+void Summary::AddShortPicks(const StringVec1d & svec)
+{
+    LongVec1d newpicks;
+    StringVec1d::const_iterator values;
+
+    for(values = svec.begin(); values != svec.end(); ++values)
+    {
+        long int a = static_cast<long int>(atoi(values->c_str()));
+        newpicks.push_back(a);
+    }
+
+    m_shortpicks.push_back(newpicks);
+}
+
+//____________________________________________________________________________________
diff --git a/src/tree/summary.h b/src/tree/summary.h
new file mode 100644
index 0000000..be18871
--- /dev/null
+++ b/src/tree/summary.h
@@ -0,0 +1,259 @@
+// $Id: summary.h,v 1.41 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*
+  This is a Force-polymorphic class which works with the IntervalData
+  class (non-polymorphic) to provide needed summary information about
+  a tree.  It can provide either Short or Long forms of its summary,
+  depending on the presence or absence of perturbing forces like Growth
+  (if Growth is in effect Long forms are necessary).
+
+  Written by Mary Kuhner
+*/
+
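+// Illustrative usage sketch (not part of the upstream header), assuming a caller that
+// already owns an IntervalData ("intervaldata") and wants short-form coalescent summaries:
+#if 0
+    CoalSummary summary(intervaldata, /* shortform = */ true);
+    // ... one AddInterval() call per interval of the tree being summarized ...
+    if (summary.Compress())                                    // pre-compute short-form statistics
+    {
+        const DoubleVec1d& points = summary.GetShortPoint();   // event counts per parameter
+        const DoubleVec1d& waits  = summary.GetShortWait();    // k(k-1)t terms per parameter
+    }
+#endif
+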
+#ifndef SUMMARY_H
+#define SUMMARY_H
+
+#include <iostream>
+#include <list>
+#include <string>
+
+#include "constants.h"
+#include "intervaldata.h"               // For IntervalData member.
+#include "range.h"                      // For Link-related typedefs and constants.
+#include "registry.h"                   // For use of GetDataPack() and GetForceSummary() to set npop & m_nparams.
+#include "vectorx.h"
+
+class Interval;
+
+class Summary
+{
+  private:
+    Summary(const Summary &);             // not defined
+    Summary & operator=(const Summary &); // not defined
+
+  protected:
+
+    IntervalData& m_intervalData;
+    DoubleVec1d m_shortpoint;           // pre-computed ShortPoint results
+    // dimensioned by # of parameters
+    DoubleVec1d m_shortwait;            // pre-computed ShortWait results
+    // dimensioned by # of parameters
+    LongVec2d m_shortpicks;             // pre-computed ShortPicks results
+    // dimensioned by #localpartitionforces
+    // and #alleles per force
+    long m_nparams;
+    bool m_shortness;
+    IntervalData m_fakeIntervals;
+
+    // pointers into the linked list in m_intervalData, or possibly
+    // into m_fakeIntervals instead
+    Interval* m_front;                  // front interval of our type
+    Interval* m_back;                   // most recently added interval of our type
+
+    virtual void ComputeShortPoint() = 0;
+    virtual void ComputeShortWait() = 0;
+    virtual void ComputeShortPicks() {};
+
+  public:
+
+    Summary(IntervalData& interval, long npars, bool shortform)
+        : m_intervalData(interval), m_nparams(npars), m_shortness(shortform),
+          m_front(NULL), m_back(NULL)
+    {};
+
+    virtual ~Summary() {};
+
+    virtual Summary * Clone(IntervalData& interval) const = 0;
+
+    const DoubleVec1d & GetShortPoint() const; // nCoalescences, etc.
+    const DoubleVec1d & GetShortWait() const;  // k(k-1)t, etc.
+    // GetShortPicks must return a completely empty vector
+    // if there are no localpartition+recombination forces on
+    const LongVec2d & GetShortPicks() const; // forces x alleles
+
+    Interval const * GetLongPoint() const;
+    const std::list<Interval> & GetLongWait() const;
+    // there is no GetLongPicks() because GetLongPoint() does its job
+
+    void AddInterval(double time, const LongVec2d & pk, const LongVec1d & xk,
+                     Linkweight recweight, xpart_t ostat, xpart_t nstat,
+                     long int recpoint, const LongVec1d & picks, force_type type);
+
+    virtual void AdjustSummary(const DoubleVec1d & totals, long region) = 0;
+
+    bool Compress();
+
+    virtual string GetType() const = 0;
+    bool GetShortness() const;
+
+    // Functions for reading in data from a summary file:
+    void AddShortPoint(string);
+    void AddShortWait(string);
+    void AddShortPicks(const StringVec1d &);
+    void SetShortness( bool a ) { m_shortness = a; };
+
+    // Function added for tree summarization in presence of
+    // recombination & disease
+    Interval* GetLastAdded() { return m_back; };
+};
+
+//------------------------------------------------------------------------------------
+
+class CoalSummary : public Summary
+{
+  private:
+
+    virtual void ComputeShortPoint();  // nCoalescences, etc.
+    virtual void ComputeShortWait();   // k(k-1)t, etc.
+    virtual void ComputeShortPicks();  // partitions picked during recombinations
+
+  public:
+
+    CoalSummary(IntervalData& interval, bool shortform)
+        : Summary(interval, registry.GetForceSummary().GetNParameters(force_COAL), shortform)
+    {};
+
+    virtual ~CoalSummary() {};
+    virtual Summary * Clone(IntervalData& interval) const;
+    virtual void AdjustSummary(const DoubleVec1d & totals, long region);
+    virtual string GetType() const { return lamarcstrings::COAL; };
+
+};
+
+//------------------------------------------------------------------------------------
+
+class MigSummary : public Summary
+{
+  private:
+
+    virtual void ComputeShortPoint();
+    virtual void ComputeShortWait();
+    xpart_t m_npop;
+    xpart_t m_migpartindex;
+
+  public:
+
+    MigSummary(IntervalData& interval, bool shortform)
+        : Summary(interval, registry.GetForceSummary().GetNParameters(force_MIG), shortform),
+          m_npop(registry.GetDataPack().GetNPartitionsByForceType(force_MIG)),
+          m_migpartindex(registry.GetForceSummary().GetPartIndex(force_MIG))
+    {};
+
+    virtual ~MigSummary() {};
+    virtual Summary * Clone(IntervalData& interval) const;
+    virtual void AdjustSummary(const DoubleVec1d & totals, long region);
+    virtual string GetType() const { return lamarcstrings::MIG; };
+
+};
+
+//------------------------------------------------------------------------------------
+
+class DiseaseSummary : public Summary
+{
+  private:
+    virtual void ComputeShortPoint();
+    virtual void ComputeShortWait();
+    xpart_t m_nstati;
+    xpart_t m_dispartindex;
+
+  public:
+    DiseaseSummary(IntervalData& interval, bool shortform)
+        : Summary(interval, registry.GetForceSummary().GetNParameters(force_DISEASE), shortform),
+          m_nstati(registry.GetDataPack().GetNPartitionsByForceType(force_DISEASE)),
+          m_dispartindex(registry.GetForceSummary().GetPartIndex(force_DISEASE))
+    {};
+
+    virtual ~DiseaseSummary() {};
+    virtual Summary * Clone(IntervalData& interval) const;
+    virtual void AdjustSummary(const DoubleVec1d & totals, long region);
+    virtual string GetType() const { return lamarcstrings::DISEASE; };
+
+};
+
+//------------------------------------------------------------------------------------
+
+class EpochSummary : public Summary
+{
+  private:
+    // The following two functions do nothing, as no summary statistics can be
+    // collected on the occurrence of Epochs (the number of Epochs is fixed).
+    virtual void ComputeShortPoint() {};
+    virtual void ComputeShortWait() {};
+    xpart_t m_npop;
+    xpart_t m_migpartindex;
+
+  public:
+    EpochSummary(IntervalData& interval, bool shortform)
+        : Summary(interval, registry.GetForceSummary().GetNParameters(force_DIVERGENCE), shortform),
+          m_npop(registry.GetDataPack().GetNPartitionsByForceType(force_DIVMIG)),
+          m_migpartindex(registry.GetForceSummary().GetPartIndex(force_DIVMIG))
+    {};
+
+    virtual ~EpochSummary() {};
+    virtual Summary * Clone(IntervalData& interval) const;
+    virtual void AdjustSummary(const DoubleVec1d & totals, long region) {};
+    virtual string GetType() const { return lamarcstrings::DIVERGENCE; };
+
+};
+
+//------------------------------------------------------------------------------------
+
+class RecSummary : public Summary
+{
+  private:
+    virtual void ComputeShortPoint();
+    virtual void ComputeShortWait();
+
+    LongVec1d m_nrecsbyxpart;
+    LongVec1d m_nrecsbydispart;
+    long m_nrecs;
+
+  public:
+    RecSummary(IntervalData& interval, bool shortform);
+    virtual ~RecSummary() {};
+    virtual Summary * Clone(IntervalData& interval) const;
+    virtual void AdjustSummary(const DoubleVec1d & totals, long region);
+    virtual string GetType() const { return lamarcstrings::REC; };
+
+    void AddToRecombinationCounts(const LongVec1d & membership);
+    const LongVec1d & GetNRecsByXPart() const { return m_nrecsbyxpart; };
+    const LongVec1d & GetNRecsByDiseasePart() const { return m_nrecsbydispart; };
+    long GetNRecs() const { return m_nrecs; };
+};
+
+//------------------------------------------------------------------------------------
+
+class DivMigSummary : public Summary
+{
+  private:
+    virtual void ComputeShortPoint();
+    virtual void ComputeShortWait();
+    xpart_t m_npop;
+    xpart_t m_divmigpartindex;
+    const std::vector<Epoch> * m_epochs;
+
+  public:
+    DivMigSummary(IntervalData& interval, bool shortform)
+        : Summary(interval, registry.GetForceSummary().GetNParameters(force_DIVMIG), shortform),
+          m_npop(registry.GetDataPack().GetNPartitionsByForceType(force_DIVMIG)),
+          m_divmigpartindex(registry.GetForceSummary().GetPartIndex(force_DIVMIG)),
+          m_epochs(registry.GetForceSummary().GetEpochs())
+    {};
+
+    virtual ~DivMigSummary() {};
+    virtual Summary * Clone(IntervalData& interval) const;
+    virtual void AdjustSummary(const DoubleVec1d & totals, long region);
+    virtual string GetType() const { return lamarcstrings::DIVMIG; };
+};
+
+#endif // SUMMARY_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/tempident.cpp b/src/tree/tempident.cpp
new file mode 100644
index 0000000..54d9813
--- /dev/null
+++ b/src/tree/tempident.cpp
@@ -0,0 +1,183 @@
+// $Id: tempident.cpp,v 1.23 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <map>
+
+#include "tempident.h"
+#include "registry.h"
+#include "treesum.h"
+#include "constants.h"
+#include "types.h"
+#include "chainstate.h"
+#include "collmanager.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+class Tree;
+
+//------------------------------------------------------------------------------------
+
+TemperatureIdentity::TemperatureIdentity(double temperature, StringVec1d arrstrings)
+    : m_naccepted(0),
+      m_badtrees(0),
+      m_tinypoptrees(0),
+      m_zerodltrees(0),
+      m_stretchedtrees(0),
+      m_temperature(temperature),
+      m_swapsmade(0),
+      m_swapstried(0),
+      m_totalswapsmade(0),
+      m_totalswapstried(0),
+      m_averagetemp(0),
+      m_totaltrees(0)
+{
+    // deduce the object's own coldness
+    if (m_temperature == 1.0) m_iscold = true;
+    else m_iscold = false;
+    for (size_t arr=0; arr<arrstrings.size(); arr++)
+    {
+        m_acceptances.insert(make_pair(arrstrings[arr], make_pair(0, 0)));
+    }
+} // TemperatureIdentity ctor
+
+//------------------------------------------------------------------------------------
+
+void TemperatureIdentity::NoteSwap(TemperatureIdentity& hot, ChainState & hotstate, ChainState & coldstate)
+{
+    ++m_swapsmade;
+    ++m_totalswapsmade;
+    hotstate.TreeChanged();
+    coldstate.TreeChanged();
+} // NoteSwap
+
+//------------------------------------------------------------------------------------
+
+void TemperatureIdentity::StartChain()
+{
+    m_naccepted = 0;
+    m_badtrees = 0;
+    m_tinypoptrees = 0;
+    m_stretchedtrees = 0;
+    m_zerodltrees = 0;
+    m_swapsmade = 0;
+    m_swapstried = 0;
+    m_totalswapsmade = 0;
+    m_totalswapstried = 0;
+    m_totaltrees = 0;
+    m_chainout.SetStarttime();
+    ClearAcceptances();
+} // StartChain
+
+//------------------------------------------------------------------------------------
+
+ChainOut& TemperatureIdentity::EndChain()
+{
+    m_chainout.SetNumBadTrees(m_badtrees);
+    m_chainout.SetNumTinyPopTrees(m_tinypoptrees);
+    m_chainout.SetNumStretchedTrees(m_stretchedtrees);
+    m_chainout.SetNumZeroDLTrees(m_zerodltrees);
+    m_chainout.SetAccrate (static_cast<double>(m_naccepted) / m_totaltrees);
+    if (m_iscold) m_chainout.SetAllAccrates(m_acceptances);
+    return m_chainout;
+
+} // EndChain
+
+//------------------------------------------------------------------------------------
+// Do bookkeeping based on a rearrangement just completed by the Chain.
+
+void TemperatureIdentity::ScoreRearrangement(const string & arrangername, bool accepted)
+{
+    ++m_totaltrees;
+    if (accepted)
+    {
+        ++m_naccepted;
+    }
+
+    // We keep track of per-arranger acceptance only for the cold chain
+    if (m_iscold)
+    {
+        ratemap::iterator mapit = m_acceptances.find(arrangername);
+        // is this a never-before-seen arranger?
+        if (mapit == m_acceptances.end())
+        {
+            assert(false); //under the new scheme, we should never see this.
+            mapit = m_acceptances.insert(make_pair(arrangername, make_pair(0, 0))).first;
+        }
+
+        // score an attempt at rearrangement
+        ++mapit->second.second;
+        // score a successful rearrangement
+        if (accepted) ++mapit->second.first;
+    }
+
+} // ScoreRearrangement
+
+//------------------------------------------------------------------------------------
+
+void TemperatureIdentity::Sample(CollectionManager& collmanager, ChainState & chstate, long initialOrFinal, bool lastchain)
+{
+#ifndef STATIONARIES
+    if (m_iscold)
+    {
+        collmanager.Collect(chstate, initialOrFinal, lastchain); // EWFIX.CHAINTYPE
+    }
+#else // STATIONARIES
+    if (m_iscold)
+    {
+        registry.GetProtoTreeSummary().DumpStationariesData(*(chstate.GetTree()), chstate.GetParameters());
+    }
+#endif // STATIONARIES
+
+} // Sample
+
+//------------------------------------------------------------------------------------
+
+void TemperatureIdentity::SetTemperature(double temperature)
+{
+    m_temperature = temperature;
+    if (m_temperature == 1.0) m_iscold = true;
+    else m_iscold = false;
+} // SetTemperature
+
+//------------------------------------------------------------------------------------
+
+double TemperatureIdentity::GetSwapRate() const
+{
+    if (m_swapstried != 0)
+        return static_cast<double>(m_swapsmade) / m_swapstried;
+    else return FLAGDOUBLE;
+} // GetSwapRate
+
+//------------------------------------------------------------------------------------
+
+double TemperatureIdentity::GetTotalSwapRate() const
+{
+    if (m_totalswapstried != 0)
+        return static_cast<double>(m_totalswapsmade) / m_totalswapstried;
+    else return FLAGDOUBLE;
+
+} // GetTotalSwapRate
+
+void TemperatureIdentity::ClearAcceptances()
+{
+    for (ratemap::iterator mapit = m_acceptances.begin();
+         mapit != m_acceptances.end(); mapit++)
+    {
+        mapit->second.first = 0;
+        mapit->second.second = 0;
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/tree/tempident.h b/src/tree/tempident.h
new file mode 100644
index 0000000..380477f
--- /dev/null
+++ b/src/tree/tempident.h
@@ -0,0 +1,87 @@
+// $Id: tempident.h,v 1.17 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*******************************************************************
+  TemperatureIdentity contains the temperature-dependent parts of
+  a Chain; having this as a separate object facilitates swapping two
+  Chains by swapping their TemperatureIdentities.  (This is easier
+  than swapping their Trees because too many sub-objects hold pointers
+  into the Tree.)
+
+  Written by Mary Kuhner 3/24/04
+********************************************************************/
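+
+// Illustrative sketch (not part of the upstream header): exchanging the heated and cold
+// chains by swapping their TemperatureIdentity objects.  "cold", "hot", "coldstate",
+// "hotstate", and "accepted" are hypothetical variables owned by the calling code.
+#if 0
+    cold.SwapTried();                               // record that an exchange was proposed
+    if (accepted)                                   // Metropolis decision made elsewhere
+    {
+        std::swap(cold, hot);                       // exchange the temperature-dependent state
+        cold.NoteSwap(hot, hotstate, coldstate);    // count the swap; both trees marked changed
+    }
+#endif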
+
+#ifndef TEMPIDENT_H
+#define TEMPIDENT_H
+
+#include <string>
+#include "chainout.h"
+
+class Tree;
+class CollectionManager;
+class ChainState;
+
+class TemperatureIdentity
+{
+  private:
+    // variables to manage sampling
+    bool m_iscold;                      // is this the cold chain?
+    long m_naccepted;                   // how many accepted samples so far?
+    long m_badtrees;                    // how many trees exceeding MAXEVENTS?
+    long m_tinypoptrees;                // how many trees with too small populations?
+    long m_zerodltrees;                 // how many trees with zero data likelihood?
+    long m_stretchedtrees;              // how many trees with too-long branches?
+
+    // variables to manage heating
+    double m_temperature;               // MCMCMC "temperature" for heating
+
+    // variables to manage temperature swapping (adaptive heating)
+    long m_swapsmade;                   // how many swaps since last temperature change?
+    long m_swapstried;                  // how many attempted since last change?
+    long m_totalswapsmade;              // how many swaps this chain?
+    long m_totalswapstried;             // how many attempted this chain?
+    long m_averagetemp;                 // summation of temperatures visited
+
+    // variables to manage recordkeeping
+    long m_totaltrees;                  // how many trees seen?
+    ratemap m_acceptances;              // per-arranger acceptance rates
+    ChainOut m_chainout;                // summary of chain success
+    void   ClearAcceptances();
+
+  public:
+    TemperatureIdentity(double temperature, StringVec1d arrstrings);
+
+    // We accept defaults for copy ctor, operator=, and dtor
+
+    void SetTemperature(double temperature);
+    void ClearSwaps() { m_swapstried = 0; m_swapsmade = 0; };
+    void ClearTotalSwaps() { ClearSwaps(); m_totalswapstried=0; m_totalswapsmade=0; };
+    void SwapTried() { ++m_swapstried; ++m_totalswapstried; };
+
+    double GetTemperature()             const { return m_temperature; };
+    double GetSwapRate()                const;
+    double GetTotalSwapRate()           const;
+    bool  IsCold()                      const { return m_iscold; };
+
+    void  StartChain();
+    ChainOut& EndChain();
+    void  NoteBadTree() { ++m_badtrees; };
+    void  NoteTinyPopTree() {++m_tinypoptrees; };
+    void  NoteStretchedTree() {++m_stretchedtrees; };
+    void  NoteZeroDLTree() {++m_zerodltrees; };
+    void  ScoreRearrangement(const std::string & arrangername, bool accepted);
+    void  Sample(CollectionManager& collmanager, ChainState & chstate, long initialOrFinal, bool lastchain);
+    void  NoteSwap(TemperatureIdentity& hot, ChainState & hotstate, ChainState & coldstate);
+
+};
+
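+// The swap-by-identity idea described at the top of this file, as a minimal
+// standalone sketch (disabled with "#if 0"): each chain owns a small identity
+// object holding its temperature-dependent state, so exchanging two chains is a
+// cheap swap of identities rather than a swap of their large, pointer-riddled
+// trees.  ChainSketch, IdentitySketch, and SwapTemperaturesSketch are
+// illustrative names only, not part of LAMARC.
+#if 0
+#include <utility>
+
+struct IdentitySketch { double temperature; long swapsmade; };
+
+struct ChainSketch
+{
+    IdentitySketch identity;            // small and cheap to swap
+    // Tree * tree;                     // large structure full of back-pointers; never swapped
+};
+
+inline void SwapTemperaturesSketch(ChainSketch & a, ChainSketch & b)
+{
+    std::swap(a.identity, b.identity);  // O(1); the trees stay where they are
+}
+#endif
+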
+#endif // TEMPIDENT_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/timelist.cpp b/src/tree/timelist.cpp
new file mode 100644
index 0000000..b469060
--- /dev/null
+++ b/src/tree/timelist.cpp
@@ -0,0 +1,1621 @@
+// $Id: timelist.cpp,v 1.60 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <functional>                   // for mem_fun
+#include <iostream>                     // debugging
+#include <fstream>                      // for WriteBestTree
+#include <set>                          // for PrintTreeList()
+
+#include <algorithm>                    // used only in debug code
+#include <boost/algorithm/string.hpp>   // for trim function
+
+#include "local_build.h"
+#include "dynatracer.h"                 // Defines some debugging macros.
+
+#include "force.h"                      // for SetStickParams() use of StickForce.
+#include "timelist.h"
+#include "stringx.h"                    // for access to Pretty() in PrintTimeList()
+                                        //     also for ToString() in PrintTreeList()
+#include "datapack.h"                   // for TipData class
+#include "errhandling.h"                // for exceptions
+#include "tinyxml.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+bool is_sorted(Branchconstiter begin, Branchconstiter end)
+{
+    if (begin == end) return true;
+    Branchconstiter i = begin;
+    Branchconstiter j = i;
+    for (j++; j != end && i != end; i++, j++)
+    {
+        if ((*j)->m_eventTime < (*i)->m_eventTime) return false;
+        if ((*j)->m_eventTime == (*i)->m_eventTime &&
+            (*j)->GetID() < (*i)->GetID()) return false;
+    }
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+// TimeList constructor
+
+TimeList::TimeList()
+{
+    m_base = Branch_ptr(new BBranch);
+    m_base->m_eventTime = DBL_MAX;
+
+    m_firsttip = m_branches.end();
+    m_firstbody = m_branches.end();
+    m_firstcoal = m_branches.end();
+
+    m_ntips = m_ncuttable = 0;
+
+} // TimeList::TimeList
+
+//------------------------------------------------------------------------------------
+
+TimeList::TimeList(const TimeList & src)
+{
+    m_base = Branch_ptr(new BBranch);
+    CopyTips(src);
+    CopyBody(src);
+
+} // TimeList copy ctor
+
+//------------------------------------------------------------------------------------
+
+TimeList & TimeList::operator=(const TimeList & src)
+{
+    CopyTips(src);
+    CopyBody(src);
+
+    return *this;
+
+} // TimeList::operator=
+
+//------------------------------------------------------------------------------------
+
+TimeList::~TimeList()
+{
+    // empty due to shared_ptr
+} // TimeList::~TimeList
+
+//------------------------------------------------------------------------------------
+
+void TimeList::Clear()
+{
+    m_branches.clear();
+
+    m_firsttip = m_branches.end();
+    m_firstbody = m_branches.end();
+    m_firstcoal = m_branches.end();
+
+    m_ntips = m_ncuttable = 0;
+    ClearBranchCount();
+
+} // TimeList::Clear
+
+//------------------------------------------------------------------------------------
+
+void TimeList::ClearBody()
+{
+    ClearBranchCount();                 // clears all branches from counter
+
+    Branchiter brit;
+    for(brit = FirstTip(); brit != EndBranch(); brit = NextTip(brit))
+    {
+        (*brit)->SetParent(0, Branch::NONBRANCH);
+        (*brit)->SetParent(1, Branch::NONBRANCH);
+        CountBranch((*brit)->Event());  // re-introduces tips only
+    }
+
+    m_branches.remove_if(IsBodyGroup());
+    m_ncuttable = GetNTips();
+
+    // reset stored locations into body
+    m_firstbody = m_branches.end();
+    m_firstcoal = m_branches.end();
+
+} // TimeList::ClearBody
+
+//------------------------------------------------------------------------------------
+
+void TimeList::ClearPartialBody(Branchiter firstinvalid)
+{
+    double cuttime = (*firstinvalid)->m_eventTime;
+
+    Branchiter brit;
+    for (brit = BeginBranch(); brit != firstinvalid; ++brit)
+    {
+        Branch_ptr branch = *brit;
+        long pa(0L);
+        for (pa = 0; pa < NELEM; ++pa)
+        {
+            // WARNING warning -- does this comparison fail for tied non-tip branches?
+            if (branch->Parent(pa))
+            {
+                // I believe this assert checks for tie-related failure--Mary
+                assert(!(branch->Parent(pa) == *firstinvalid && branch->Parent(pa)->m_eventTime != cuttime));
+                if (branch->Parent(pa)->m_eventTime > cuttime)
+                {
+                    branch->SetParent(pa, Branch::NONBRANCH);
+                }
+            }
+        }
+    }
+
+    // Uncount the dying branches.
+    for (brit = firstinvalid; brit != m_branches.end(); brit = NextBody(brit))
+    {
+        m_ncuttable -= (*brit)->Cuttable();
+        UncountBranch((*brit)->Event());
+    }
+
+    bool neednewbody(cuttime <= (*m_firstbody)->m_eventTime);
+    bool neednewcoal(cuttime <= (*m_firstcoal)->m_eventTime);
+
+    // Erase unwanted entries.
+    m_branches.erase(remove_if(firstinvalid, EndBranch(), IsBodyGroup()), EndBranch());
+
+    if (neednewbody)
+        m_firstbody = find_if(m_branches.begin(), m_branches.end(), IsBodyGroup());
+    if (neednewcoal)
+        m_firstcoal = find_if(m_branches.begin(), m_branches.end(), IsCoalGroup());
+
+} // ClearPartialBody
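+
+// The erase-remove idiom used in ClearPartialBody, as a minimal standalone sketch
+// (disabled with "#if 0"): std::remove_if shifts the elements to keep toward the
+// front of the range and returns the new logical end, and erase() then trims the
+// tail.  IsNegativeSketch and EraseRemoveSketch are illustrative names only.
+#if 0
+#include <algorithm>
+#include <list>
+
+static bool IsNegativeSketch(int x) { return x < 0; }
+
+static void EraseRemoveSketch(std::list<int> & values)
+{
+    // Afterwards 'values' holds only the non-negative entries, in their original order.
+    values.erase(std::remove_if(values.begin(), values.end(), &IsNegativeSketch),
+                 values.end());
+}
+#endif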
+
+//------------------------------------------------------------------------------------
+
+long TimeList::GetNBodyBranches() const
+{
+    return count_if(BeginBranch(), EndBranch(), IsBodyGroup());
+} // TimeList::GetNBodyBranches
+
+//------------------------------------------------------------------------------------
+
+TBranch_ptr TimeList::CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                                const vector<LocusCell> & movingcells, long nsites, rangeset diseasesites)
+{
+    // used in sample only case
+    TBranch_ptr ptip = TBranch_ptr(new TBranch(tipdata, nsites, diseasesites));
+    ptip->SetDLCells(cells);
+    ptip->SetMovingDLCells(movingcells);
+
+    // Tips are added via Collate(), just like every other branch, so they end up in the right order.
+    Collate(ptip);
+
+    ++m_ntips;
+
+    return ptip;
+
+} // TimeList::CreateTip
+
+//------------------------------------------------------------------------------------
+
+TBranch_ptr TimeList::CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                                const vector<LocusCell> & movingcells, long nsites, rangeset diseasesites,
+                                const vector<Locus> & loci)
+{
+    // used in panel case
+    TBranch_ptr ptip = TBranch_ptr(new TBranch(tipdata, nsites, diseasesites));
+    ptip->SetDLCells(cells);
+    ptip->SetMovingDLCells(movingcells);
+
+    // Tips are added via Collate(), just like every other branch, so they end up in the right order.
+    Collate(ptip);
+
+    ++m_ntips;
+
+    // create invariant mask for samples
+    DoubleVec1d unkmask(INVARIANTS,1);
+
+    // loop over loci because different loci can have different models
+    for (unsigned int i = 0; i<loci.size(); i++)
+    {
+        // fill the tip invariant cells
+        Cell_ptr pdlcell = ptip->GetDLCell(loci[i].GetIndex(), invariantCell, false);
+
+        if (ptip->m_isSample == 0)
+        {
+            // panel tip masks - use DatatoLikes to accommodate error
+            // seems like this could all happen in the SetAllCategories calls, but I couldn't get the types to get along - JRMfix
+            DoubleVec1d amask = loci[i].GetDataModel()->DataToLikes(SINGLEBASES[baseA]);
+            DoubleVec1d cmask = loci[i].GetDataModel()->DataToLikes(SINGLEBASES[baseC]);
+            DoubleVec1d gmask = loci[i].GetDataModel()->DataToLikes(SINGLEBASES[baseG]);
+            DoubleVec1d tmask = loci[i].GetDataModel()->DataToLikes(SINGLEBASES[baseT]);
+
+            pdlcell->SetAllCategoriesTo(amask, baseA);
+            pdlcell->SetAllCategoriesTo(cmask, baseC);
+            pdlcell->SetAllCategoriesTo(gmask, baseG);
+            pdlcell->SetAllCategoriesTo(tmask, baseT);
+        }
+        else
+        {
+            // sample tip - set everything to unknown
+            for (int inv = 0; inv < INVARIANTS; inv++)
+            {
+                pdlcell->SetAllCategoriesTo(unkmask, inv);
+            }
+        }
+    }
+    return ptip;
+} // TimeList::CreateTip
+
+//------------------------------------------------------------------------------------
+
+TBranch_ptr TimeList::GetTip(const string & tipname) const
+{
+    TBranch_ptr ptip;
+    string loctipname = tipname;
+    boost::trim(loctipname);
+
+    Branchconstiter brit;
+    for(brit = FirstTip(); brit != m_branches.end(); brit = NextTip(brit))
+    {
+        ptip = boost::dynamic_pointer_cast<TBranch>(*brit);
+        assert(ptip);
+        string label = ptip->m_label;
+        boost::trim(label);
+        if (label == loctipname) return ptip;
+    }
+
+    throw data_error("Unable to find tip:  " + tipname);
+
+} // TimeList::GetTip
+
+//------------------------------------------------------------------------------------
+
+void TimeList::CopyTips(const TimeList & src)
+{
+    // Clear out a timelist, then copy in tips from another timelist.
+    Clear();
+
+    Branchconstiter brit;
+    for (brit = src.FirstTip(); brit != src.EndBranch(); brit = src.NextTip(brit))
+    {
+        CollateAndSetCrossrefs((*brit)->Clone(), *brit);
+        CountBranch((*brit)->Event());
+    }
+
+    m_ntips = src.m_ntips;
+    m_firsttip = find_if(m_branches.begin(), m_branches.end(), IsTipGroup());
+
+} // TimeList::CopyTips
+
+//------------------------------------------------------------------------------------
+// Assuming that by "equivalent" we mean, has the same event, partition info,
+// eventtime, Link weight, recombination point, and label (if any).
+
+Branchiter TimeList::FindEquivBranch(const Branch_ptr target)
+{
+    Branchiter myit;
+    assert(target);
+
+    for (myit = BeginBranch(); myit != EndBranch(); ++myit)
+        if (target->IsEquivalentTo(*myit)) return myit;
+
+    assert(false);
+
+    return myit;
+
+} // TimeList::FindEquivBranch
+
+//------------------------------------------------------------------------------------
+// CopyBody() assumes that the tips of src and *this are identical!  It also assumes
+// that no internal *node* of the tree has both 2 parents and 2 children.
+
+void TimeList::CopyBody(const TimeList & src)
+{
+    ClearBody();
+
+    Branch_ptr newbranch;
+    Branchconstiter brit = src.FirstBody();
+
+    // If the src tree has no body, we will not attempt to copy it
+    // nor to hook up the Base.  Given an incomplete tree, we return
+    // an incomplete tree.  Mary August 2001
+    if (brit != src.EndBranch())
+    {
+        for( ; brit != src.EndBranch(); brit = src.NextBody(brit))
+        {
+            newbranch = (*brit)->Clone();
+            Branch_ptr srcchild = (*brit)->Child(0);
+            Branch_ptr newchild = srcchild->GetEquivBranch().lock();
+            assert(newchild);
+            newbranch->SetChild(0, newchild);
+
+            if (srcchild->Parent(0) == *brit)
+            {
+                newchild->SetParent(0, newbranch);
+                srcchild = (*brit)->Child(1);
+                if (srcchild)
+                {
+                    newchild = srcchild->GetEquivBranch().lock();
+                    assert(newchild);
+                    newbranch->SetChild(1, newchild);
+                    newchild->SetParent(0, newbranch);
+                }
+            }
+            else
+            {
+                newchild->SetParent(1, newbranch);
+            }
+
+            CollateAndSetCrossrefs(newbranch, *brit);
+        }
+
+        m_base->SetChild(0, newbranch);
+        newbranch->SetParent(0, m_base);
+    }
+
+    m_ncuttable = src.m_ncuttable;
+    m_branchmap = src.m_branchmap;
+
+} // TimeList::CopyBody
+
+//------------------------------------------------------------------------------------
+
+void TimeList::CopyPartialBody(const TimeList & src, const Branchiter & srcstart, Branchiter & mystart)
+{
+    // This differs from CopyBody in that it only copies the part of the
+    // TimeList that differs between this and src, as an optimization.
+
+    // Possible speedup involving moving the functionality of
+    // ClearPartialBody inside the loop over branches below!?
+    ClearPartialBody(mystart);
+
+    Branch_ptr newbranch;
+    Branchconstiter brit = src.FirstBody();
+
+    // If the src tree has no body, we will not attempt to copy it nor to hook up the Base.
+    // Given an incomplete tree, we return an incomplete tree.  Mary August 2001
+    if (brit != src.EndBranch())
+    {
+        for(brit = srcstart; brit != src.EndBranch(); brit = src.NextBody(brit))
+        {
+            newbranch = (*brit)->Clone();
+            Branch_ptr srcchild = (*brit)->Child(0);
+            Branch_ptr newchild = srcchild->GetEquivBranch().lock();
+            assert(newchild);
+            newbranch->SetChild(0, newchild);
+
+            if (srcchild->Parent(0) == *brit)
+            {
+                newchild->SetParent(0, newbranch);
+                srcchild = (*brit)->Child(1);
+                if (srcchild)
+                {
+                    newchild = srcchild->GetEquivBranch().lock();
+                    assert(newchild);
+                    newbranch->SetChild(1, newchild);
+                    newchild->SetParent(0, newbranch);
+                }
+            }
+            else
+            {
+                newchild->SetParent(1, newbranch);
+            }
+
+            CollateAndSetCrossrefs(newbranch, *brit);
+        }
+
+        m_base->SetChild(0, newbranch);
+        newbranch->SetParent(0, m_base);
+    }
+    assert(is_sorted(m_branches.begin(), m_branches.end()));
+
+    m_ncuttable = src.m_ncuttable;
+    m_branchmap = src.m_branchmap;
+
+} // TimeList::CopyPartialBody
+
+//------------------------------------------------------------------------------------
+
+bool TimeIsLessThan(Branch_ptr b1, Branch_ptr b2)
+{
+    // This could also check IDs, but it was already too slow.  We would
+    // use it with lower_bound if we found a way to make that faster.
+    return (b1->GetTime() < b2->GetTime());
+}
+
+//------------------------------------------------------------------------------------
+
+Branchiter TimeList::Collate(Branch_ptr newbranch)
+{
+    Branchiter brit;
+
+    // Degenerate collate into an empty list.  This might not be necessary at all,
+    // except that it sets 'm_ncuttable' to 0 and calls 'ClearBranchCount'.
+    // Do we need this?  At least it doesn't hurt anything.
+    if (m_branches.empty())
+    {
+        m_branches.push_front(newbranch);
+        brit = m_branches.begin();  // since we used push_front
+        m_ncuttable = 0;
+        ClearBranchCount();
+        UpdateBookkeeping(brit);
+        return brit;
+    }
+
+    //LS NOTE:  This is one way to find where to insert the new branch.  It ended
+    // up being slightly slower and it's a bit more obscure, so below is the
+    // slightly-faster, more-plodding version.  I've left it here as a template
+    // if anyone wants to try using lower_bound or something like it later.
+#if 0
+    Branchiter lowerbound = lower_bound(m_branches.begin(), m_branches.end(), newbranch, &TimeIsLessThan);
+
+    while (lowerbound != m_branches.end() &&
+           (*lowerbound)->m_eventTime == newbranch->m_eventTime &&
+           (*lowerbound)->GetID() < newbranch->GetID())
+    {
+        lowerbound++;
+    }
+
+    if (lowerbound != m_branches.begin())
+    {
+        Branchiter onelower = lowerbound;
+        onelower--;
+        while (onelower != m_branches.begin() &&
+               (*onelower)->m_eventTime == newbranch->m_eventTime &&
+               (*onelower)->GetID() > newbranch->GetID())
+        {
+            onelower--;
+            lowerbound--;
+        }
+    }
+#endif
+
+    //LS NOTE:  This routine can account for a quarter of the time spent in
+    // rearrangement (for particularly high values of recombination), so if you
+    // see a way to speed it up, take it.  Do note that a version that used
+    // 'lower_bound' ended up being a bit slower, sadly.
+    Branchiter lowerbound = m_branches.begin();
+
+    // Iterate to the first branch whose time is greater than or equal to the new branch's time.
+    while (lowerbound != m_branches.end() &&
+           (*lowerbound)->m_eventTime < newbranch->m_eventTime)
+    {
+        ++lowerbound;
+    }
+
+    // Iterate to the first branch whose ID is greater than the current branch's ID,
+    // assuming we don't go beyond the current time.
+    while (lowerbound != m_branches.end() &&
+           (*lowerbound)->m_eventTime == newbranch->m_eventTime &&
+           (*lowerbound)->GetID() < newbranch->GetID())
+    {
+        ++lowerbound;
+    }
+
+    brit = m_branches.insert(lowerbound, newbranch);
+    UpdateBookkeeping(brit);
+
+    return brit;
+
+} // TimeList::Collate
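+
+// The ordered insertion that Collate performs, as a minimal standalone sketch
+// (disabled with "#if 0"): walk a list kept sorted by (time, id) to the first
+// element that should follow the newcomer, then insert in front of it.
+// NodeSketch and CollateSketch are illustrative names only.
+#if 0
+#include <list>
+
+struct NodeSketch { double time; long id; };
+
+static std::list<NodeSketch>::iterator
+CollateSketch(std::list<NodeSketch> & nodes, const NodeSketch & newnode)
+{
+    std::list<NodeSketch>::iterator pos = nodes.begin();
+    // Skip everything strictly earlier in time.
+    while (pos != nodes.end() && pos->time < newnode.time) ++pos;
+    // Among time ties, keep ascending ID order.
+    while (pos != nodes.end() && pos->time == newnode.time && pos->id < newnode.id) ++pos;
+    return nodes.insert(pos, newnode);  // std::list::insert places the new element before 'pos'
+}
+#endif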
+
+//------------------------------------------------------------------------------------
+
+Branchiter TimeList::Collate(Branch_ptr newbranch, Branch_ptr spouse)
+{
+    // Used only to add recombination events, which are tied for their times.
+    // This is done purely as a time-saving measure for highly recombinant trees.
+    // Currently the return value is discarded, but that could change--it
+    // points to the iterator where 'newbranch' was inserted (and not, say, the
+    // earlier of the two possibilities).
+    Branchiter spousepoint = Collate(spouse);
+
+    if (newbranch->GetID() > spouse->GetID())
+    {
+        // We should add 'newbranch' after 'spouse', not before.
+        ++spousepoint;
+    }
+
+    Branchiter brit = m_branches.insert(spousepoint, newbranch);
+    UpdateBookkeeping(brit);
+
+    return brit;
+}
+
+//------------------------------------------------------------------------------------
+
+Branchiter TimeList::CollateAndSetCrossrefs(Branch_ptr newbranch, Branch_ptr oldbranch)
+{
+    assert (newbranch != Branch::NONBRANCH);
+    assert (oldbranch != Branch::NONBRANCH);
+    newbranch->SetEquivBranch(oldbranch);
+    oldbranch->SetEquivBranch(newbranch);
+    return Collate(newbranch);
+}
+
+//------------------------------------------------------------------------------------
+
+void TimeList::UpdateBookkeeping(Branchiter newbranch)
+{
+    m_ncuttable += (*newbranch)->Cuttable();
+    CountBranch((*newbranch)->Event());
+    UpdateFirstsIfNecessary(newbranch);
+}
+
+//------------------------------------------------------------------------------------
+
+void TimeList::Remove(Branch_ptr badbranch)
+{
+    switch(badbranch->BranchGroup())
+    {
+        case bgroupTip:
+            if (badbranch == *m_firsttip) m_firsttip = NextTip(m_firsttip);
+            break;
+        case bgroupBody:
+            if (badbranch == *m_firstbody) m_firstbody = NextBody(m_firstbody);
+            if (badbranch == *m_firstcoal) m_firstcoal = NextCoal(m_firstcoal);
+            break;
+        default:
+            throw implementation_error("TimeList::Unknown branch group.");
+    }
+
+    m_branches.remove(badbranch);
+    m_ncuttable -= badbranch->Cuttable();
+    UncountBranch(badbranch->Event());
+
+} // TimeList::Remove
+
+//------------------------------------------------------------------------------------
+
+Branchiter TimeList::NextTip(Branchiter & it)
+{
+    Branchiter mit(it);
+    ++mit;
+    return ((mit == EndBranch()) ? EndBranch() : find_if(mit, EndBranch(), IsTipGroup()));
+
+} // TimeList::NextTip
+
+//------------------------------------------------------------------------------------
+
+Branchconstiter TimeList::NextTip(Branchconstiter & it) const
+{
+    Branchconstiter mit(it);
+    ++mit;
+    return ((mit == EndBranch()) ? EndBranch() : find_if(mit, EndBranch(), IsTipGroup()));
+
+} // TimeList::NextTip
+
+//------------------------------------------------------------------------------------
+
+Branchiter TimeList::NextBody(Branchiter & it)
+{
+    Branchiter mit(it);
+    ++mit;
+    return ((mit == EndBranch()) ? EndBranch() : find_if(mit, EndBranch(), IsBodyGroup()));
+
+} // TimeList::NextBody
+
+//------------------------------------------------------------------------------------
+
+Branchconstiter TimeList::NextBody(Branchconstiter & it) const
+{
+    Branchconstiter mit(it);
+    ++mit;
+    return ((mit == EndBranch()) ? EndBranch() : find_if(mit, EndBranch(), IsBodyGroup()));
+
+} // TimeList::NextBody
+
+//------------------------------------------------------------------------------------
+
+Branchiter TimeList::NextCoal(Branchiter & it)
+{
+    Branchiter mit(it);
+    ++mit;
+    return ((mit == EndBranch()) ? EndBranch() : find_if(mit, EndBranch(), IsCoalGroup()));
+
+} // TimeList::NextCoal
+
+//------------------------------------------------------------------------------------
+
+Branchconstiter TimeList::NextCoal(Branchconstiter & it) const
+{
+    Branchconstiter mit(it);
+    ++mit;
+    return ((mit == EndBranch()) ? EndBranch() : find_if(mit, EndBranch(), IsCoalGroup()));
+
+} // TimeList::NextCoal
+
+//------------------------------------------------------------------------------------
+
+Branchiter TimeList::PrevBodyOrTip(Branchiter & it)
+{
+    Branchiter mit(it);
+    --mit;
+    IsBodyGroup bodycheck;
+    IsTipGroup tipcheck;
+    while(!bodycheck(*mit) && !tipcheck(*mit)) --mit;
+    return mit;
+
+} // TimeList::PrevBodyOrTip
+
+//------------------------------------------------------------------------------------
+
+Branchiter TimeList::NextNonTimeTiedBranch(Branchiter & it)
+{
+    Branchiter mit(it);
+    ++mit;
+    double badtyme((*it)->m_eventTime);
+    while ((*mit)->m_eventTime == badtyme) ++mit;
+    return mit;
+
+} // TimeList::NextNonTimeTiedBranch
+
+//------------------------------------------------------------------------------------
+
+Branchiter TimeList::FindIter(Branch_ptr branch)
+{
+    return ((m_branches.empty()) ? EndBranch() : find(BeginBranch(), EndBranch(), branch));
+} // TimeList::FindIter
+
+//------------------------------------------------------------------------------------
+
+Branchconstiter TimeList::FindIter(Branch_ptr branch) const
+{
+    Branchconstiter brit;
+
+    for(brit = BeginBranch(); brit != EndBranch(); ++brit)
+        if (*brit == branch) break;  // yes, we compare pointers!
+
+    return brit;
+
+} // TimeList::FindIter
+
+//------------------------------------------------------------------------------------
+
+void TimeList::Prune()
+{
+    // Excise marked branches.
+    Branchiter brit;
+    for(brit = FirstBody() ; brit != EndBranch() ; )
+    {
+        Branch_ptr pbranch = *brit;
+        if (pbranch->m_marked)
+        {
+            Branch_ptr pparent = pbranch->Parent(0);
+            pparent->ReplaceChild(pbranch, pbranch->Child(0));
+            SetUpdateDLs(pparent);
+
+            pparent = pbranch->Parent(1);
+            if (pparent)
+            {
+                pparent->ReplaceChild(pbranch, pbranch->Child(0));
+                SetUpdateDLs(pparent);
+            }
+            brit = NextBody(brit);
+            Remove(pbranch);            // deletes what pbranch points to
+            continue;
+        }
+        brit = NextBody(brit);
+    }
+    assert(IsValidTimeList());
+
+    // Check for loops in the root.
+    long nbranches = 2 * m_ntips;
+    for(brit = FirstBody() ; ; brit = NextBody(brit))
+    {
+        nbranches += (*brit)->CountDown();
+        if (nbranches <= 2) break;
+    }
+
+    if (*brit != Root())
+    {
+        m_base->SetChild(0, *brit);
+        (*brit)->SetParent(0, m_base);
+        (*brit)->SetParent(1, Branch::NONBRANCH);
+
+        // We could use std::remove_if here, but we would still need this hand-written
+        // loop for bookkeeping such as Uncount and updating the m_first* cached iterators.
+        for(brit = NextBody(brit) ; brit != m_branches.end() ; )
+        {
+            Branch_ptr pbranch = *brit;
+            brit = NextBody(brit);
+            Remove(pbranch);
+        }
+    }
+
+    assert(IsValidTimeList());
+
+} // TimeList::Prune
+
+//------------------------------------------------------------------------------------
+
+void TimeList::SetUpdateDLs(Branch_ptr pBranch)
+{
+    if (pBranch->Event() == btypeBase) return; // The base terminates recursion.
+
+    if (!pBranch->GetUpdateDL())
+    {
+        pBranch->SetUpdateDL();
+        SetUpdateDLs(pBranch->Parent(0));
+
+        if (pBranch->Parent(1))
+            SetUpdateDLs(pBranch->Parent(1));
+    }
+
+} // TimeList::SetUpdateDLs
+
+//------------------------------------------------------------------------------------
+// Marks the entire tree as needing updating--a debug function.
+
+void TimeList::SetAllUpdateDLs()
+{
+    Branchiter brit;
+    for (brit = m_branches.begin(); brit != m_branches.end(); ++brit)
+        (*brit)->SetUpdateDL();
+
+} // TimeList::SetAllUpdateDLs
+
+//------------------------------------------------------------------------------------
+
+void TimeList::ClearUpdateDLs()
+{
+    Branchiter brit;
+    for(brit = m_branches.begin(); brit != m_branches.end(); ++brit)
+        (*brit)->ClearUpdateDL();
+
+} // TimeList::ClearUpdateDLs
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeList::PrintTimeList() const
+{
+    PrintTimeList(cerr);
+}
+
+//------------------------------------------------------------------------------------
+
+void TimeList::PrintTimeList(ostream& os) const
+{
+    os << endl;
+    os << " New tree start " << endl;
+    Branchconstiter branch;
+
+    for(branch = BeginBranch(); branch != EndBranch(); ++branch)
+    {
+        os << "**Branch " << (*branch)->GetID() << endl;
+        (*branch)->PrintInfo();
+    }
+
+    vector<Branch_ptr> endbranch;
+    vector<Branch_ptr>::iterator br;
+    //Branchiter branch;
+    double tyme = 0.0;
+    bool newinterval = true;
+    for(branch = BeginBranch() ; branch != EndBranch() ; )
+    {
+        if (newinterval) os << Pretty(tyme) << ": branch(s) ";
+        if ((*branch)->m_eventTime == tyme)
+        {
+            os << (*branch)->GetID();
+            os << (*branch)->Event() << endl;
+            (*branch)->GetRangePtr()->PrintLive();
+            (*branch)->GetRangePtr()->PrintNewTargetLinks();
+            os << (*branch)->GetUpdateDL();
+            os << " ";
+            long site;
+            os << endl;
+            if ((*branch)->Event() == btypeTip)
+            {
+                for(site = 0; site < 4; ++site)
+                {
+                    DNACell * pcell = dynamic_cast<DNACell *>((*branch)->GetDLCell(0, markerCell, false).get());
+                    os << "   " << Pretty(pcell->GetSiteDLs(site)[0][baseA]);
+                    os << "   " << Pretty(pcell->GetSiteDLs(site)[0][baseC]);
+                    os << "   " << Pretty(pcell->GetSiteDLs(site)[0][baseG]);
+                    os << "   " << Pretty(pcell->GetSiteDLs(site)[0][baseT]);
+                    os << endl;
+                }
+            }
+            newinterval = false;
+        }
+        else
+        {
+            os << "start" << endl;
+            newinterval = true;
+            if (!endbranch.empty())
+            {
+                os << "      branch(s) ";
+                for(br = endbranch.begin(); br != endbranch.end(); ++br)
+                {
+                    if (*br != Branch::NONBRANCH)
+                    {
+                        os << (*br)->GetID();
+                        os << (*br)->Event() << endl;
+                        (*br)->GetRangePtr()->PrintLive();
+                        (*br)->GetRangePtr()->PrintNewTargetLinks();
+                        os << (*br)->GetUpdateDL();
+                        os << " ";
+                    }
+                }
+                os << " end" << endl;
+            }
+            endbranch = (*branch)->GetBranchChildren();
+            tyme = (*branch)->m_eventTime;
+            continue;
+        }
+        ++branch;
+    }
+
+    // Catch the branches that end at the root.
+    os << endl << "      branch(s) ";
+    for(br = endbranch.begin(); br != endbranch.end(); ++br)
+    {
+        if (*br != Branch::NONBRANCH)
+        {
+            os << (*br)->GetID();
+            os << (*br)->Event();
+            (*br)->GetRangePtr()->PrintLive();
+            (*br)->GetRangePtr()->PrintNewTargetLinks();
+            os << (*br)->GetUpdateDL();
+            os << " ";
+        }
+    }
+
+    os << " end at the root" << endl;
+
+} // TimeList::PrintTimeList
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+// Prints in "internal" units, not "user" units.
+
+void TimeList::PrintTreeList() const
+{
+    PrintTreeList(cerr);
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+// Prints in "internal" units, not "user" units.
+
+void TimeList::PrintTreeList(ostream & of) const
+{
+    set<string> branches;
+
+    Branchconstiter branch = FirstTip();
+    of << endl;
+    of << ToString((*branch)->GetID()) << " ";
+    of << ToString((*branch)->Event()) << "/" << (*branch)->m_eventTime << ":";
+    for( ; branch != EndBranch() ; branch = NextTip(branch))
+    {
+        string id = ToString((*branch)->GetID());
+        branches.insert(id);
+        of << " " << id;
+    }
+    of << endl;
+
+    for(branch = FirstBody() ; branch != EndBranch() ; branch = NextBody(branch))
+    {
+        string id = ToString((*branch)->GetID());
+        branch_type event = (*branch)->Event();
+        //of << id << ToString(event) << "/" << (*branch)->m_eventTime;
+        of << id << " " << ToString(event) << ":" << ToString((*branch)->GetUpdateDL())
+           << "/" << (*branch)->m_eventTime;
+
+        string chid = ToString((*branch)->Child(0)->GetID());
+        branches.erase(chid);
+        branches.insert(id);
+
+        switch(event)
+        {
+            case btypeCoal:
+                of << ":";
+                if ((*branch)->Child(1) != NULL) // defends against 1-leg coal
+                {
+                    chid = ToString((*branch)->Child(1)->GetID());
+                    branches.erase(chid);
+                }
+                break;
+            case btypeRec:
+                ++branch;
+                id = ToString((*branch)->GetID());
+                event = (*branch)->Event();
+                of << "/" << id << event << ":";
+                branches.insert(id);
+                break;
+            case btypeMig:
+            case btypeDivMig:
+            case btypeDisease:
+            case btypeEpoch:
+                of << ":";
+                break;
+            case btypeBase:
+            case btypeTip:
+                assert(false);          // tips and the base should never appear in the body list
+        }
+        set<string>::iterator br;
+        for(br = branches.begin(); br != branches.end(); ++br)
+            of << " " << *br;
+        if (!(*branch)->m_partitions.empty())
+            of << " " << "status = " << (*branch)->m_partitions[0];
+        of << endl;
+    }
+
+    of << "End of Tree" << endl;
+
+} // TimeList::PrintTreeList()
+
+//------------------------------------------------------------------------------------
+
+TiXmlDocument * TimeList::AssembleGraphML() const
+{
+
+    TiXmlDocument * docP = new TiXmlDocument();
+
+    TiXmlDeclaration * decl = new TiXmlDeclaration("1.0", "", "");
+    docP->LinkEndChild( decl );
+
+    TiXmlElement * graphml = new TiXmlElement("graphml");
+    docP->LinkEndChild( graphml );
+
+    /////////////////////////////////////////////////////////////////////////
+    // stuff at the top of graphML; used to specify what in-house tags and
+    // attributes we are using
+
+    // partitions
+    TiXmlElement * ptype = new TiXmlElement("key");
+    ptype->SetAttribute("id", "partitions");
+    ptype->SetAttribute("for", "edge");
+    ptype->SetAttribute("attr.name", "ptype");
+    ptype->SetAttribute("attr.type", "string");
+    graphml->LinkEndChild(ptype);
+
+    // recombination range for edge
+    TiXmlElement * rtype = new TiXmlElement("key");
+    rtype->SetAttribute("id", "live_sites");
+    rtype->SetAttribute("for", "edge");
+    rtype->SetAttribute("attr.name", "asites");
+    rtype->SetAttribute("attr.type", "string");
+    graphml->LinkEndChild(rtype);
+
+    // location of recombination event
+    TiXmlElement * rltype = new TiXmlElement("key");
+    rltype->SetAttribute("id", "rec_location");
+    rltype->SetAttribute("for", "node");
+    rltype->SetAttribute("attr.name", "rloc");
+    rltype->SetAttribute("attr.type", "long");
+    graphml->LinkEndChild(rltype);
+
+    // type of node (coal, rec, divmig, etc)
+    TiXmlElement * ntype = new TiXmlElement("key");
+    ntype->SetAttribute("id", "node_type");
+    ntype->SetAttribute("for", "node");
+    ntype->SetAttribute("attr.name", "ntype");
+    ntype->SetAttribute("attr.type", "string");
+    graphml->LinkEndChild(ntype);
+
+    // integer ID for node
+    TiXmlElement * ntime = new TiXmlElement("key");
+    ntime->SetAttribute("id", "node_time");
+    ntime->SetAttribute("for", "node");
+    ntime->SetAttribute("attr.name", "ntime");
+    ntime->SetAttribute("attr.type", "double");
+    graphml->LinkEndChild(ntime);
+
+    // node label -- should only be for tips
+    TiXmlElement * nlabel = new TiXmlElement("key");
+    nlabel->SetAttribute("id", "node_label");
+    nlabel->SetAttribute("for", "node");
+    nlabel->SetAttribute("attr.name", "nlabel");
+    nlabel->SetAttribute("attr.type", "string");
+    graphml->LinkEndChild(nlabel);
+
+    /////////////////////////////////////////////////////////////////////////
+    // now we start the actual graph
+
+    TiXmlElement * graph = new TiXmlElement("graph");
+    graph->SetAttribute("id", "myGraph");
+    graph->SetAttribute("edgedefault", "directed");
+    graphml->LinkEndChild( graph );
+
+    for(Branchconstiter branch = BeginBranch(); branch != EndBranch(); ++branch)
+    {
+        (*branch)->AddGraphML(graph);
+    }
+
+    return docP;
+
+}
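+
+// The GraphML pattern above in miniature, as a standalone sketch (disabled with
+// "#if 0"): declare a <key> describing a node attribute, then emit a <graph>
+// whose <node> elements carry that attribute in a <data> child.  Only TinyXML
+// calls already used in this file, plus TiXmlText for the data payload, appear;
+// the element values are illustrative.
+#if 0
+static TiXmlDocument * TinyGraphMLSketch()
+{
+    TiXmlDocument * doc = new TiXmlDocument();
+    doc->LinkEndChild(new TiXmlDeclaration("1.0", "", ""));
+
+    TiXmlElement * graphml = new TiXmlElement("graphml");
+    doc->LinkEndChild(graphml);
+
+    TiXmlElement * key = new TiXmlElement("key");
+    key->SetAttribute("id", "node_time");
+    key->SetAttribute("for", "node");
+    key->SetAttribute("attr.name", "ntime");
+    key->SetAttribute("attr.type", "double");
+    graphml->LinkEndChild(key);
+
+    TiXmlElement * graph = new TiXmlElement("graph");
+    graph->SetAttribute("id", "sketchGraph");
+    graph->SetAttribute("edgedefault", "directed");
+    graphml->LinkEndChild(graph);
+
+    TiXmlElement * node = new TiXmlElement("node");
+    node->SetAttribute("id", "n0");
+    graph->LinkEndChild(node);
+
+    TiXmlElement * data = new TiXmlElement("data");
+    data->SetAttribute("key", "node_time");
+    data->LinkEndChild(new TiXmlText("0.0"));
+    node->LinkEndChild(data);
+
+    return doc;                         // the caller owns and deletes the document
+}
+#endif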
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeList::PrintIDs() const
+{
+    for (Branchconstiter brit = m_branches.begin(); brit != m_branches.end(); brit++)
+    {
+        cerr << (*brit)->GetID() << "\t"
+             << (*brit)->GetTime() << "\t"
+             << ToString((*brit)->Event()) << endl;
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeList::PrintTips()
+{
+    cerr << endl << " The tips are " << endl;
+    Branchiter branch;
+
+    for(branch = FirstTip(); branch != EndBranch(); branch = NextTip(branch))
+    {
+        TBranch_ptr tip = boost::dynamic_pointer_cast<TBranch>(*branch);
+        assert(tip);
+        cerr << "   " << tip->m_label << " with id#";
+        cerr << tip->GetID();
+        if (tip->m_partitions.size())
+        {
+            cerr << " " << tip->m_partitions[0];
+        }
+        cerr << endl;
+    }
+
+    cerr << "End tips" << endl;
+
+} // TimeList::PrintTips
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeList::PrintDirectionalMutationEventCountsToFile(ofstream & of) const
+{
+    if (!registry.GetForceSummary().CheckForce(force_DISEASE)) return;
+
+    LongVec1d::size_type nparts(registry.GetDataPack().GetNPartitionsByForceType(force_DISEASE));
+
+    LongVec1d footemp(nparts, 0L);
+    LongVec2d nevents(nparts, footemp);
+    Branchconstiter brit;
+    for (brit = FirstBody(); brit != m_branches.end(); ++brit)
+    {
+        if ((*brit)->Event() == btypeDisease)
+        {
+            nevents[(*brit)->GetPartition(force_DISEASE)][(*brit)->Child(0)->GetPartition(force_DISEASE)]++;
+        }
+    }
+
+    of << "DiseaseEventCounts: ";
+    LongVec2d::size_type startdis;
+    for(startdis = 0; startdis < nparts; ++startdis)
+    {
+        LongVec2d::size_type enddis;
+        for(enddis = 0; enddis < nparts; ++enddis)
+        {
+            if (startdis == enddis) continue;
+            of << startdis << "->" << enddis << "=";
+            of << nevents[startdis][enddis] << "; ";
+        }
+    }
+    of << endl;
+
+} // PrintDirectionalMutationEventCountsToFile
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeList::PrintTimeTilFirstEventToFile(ofstream & of) const
+{
+    of << "FirstEvent at " << (*FirstBody())->m_eventTime;
+} // PrintTimeTilFirstEventToFile
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeList::PrintTraitPhenotypeAtLastCoalescence(ofstream & of) const
+{
+    // we assume the last branch will be a coalescence
+    of << "Last coalescence in " << m_branches.back()->GetPartition(force_DISEASE);
+
+} // PrintTraitPhenotypeAtLastCoalescence
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeList::MakeCoalescent(double theta)
+{
+    assert (HowMany(btypeMig) == 0 && HowMany(btypeRec) == 0);
+
+    long nbranches = GetNTips();
+    Branchiter branch;
+
+    for(branch = FirstBody(); branch != EndBranch(); branch = NextBody(branch))
+    {
+        assert(nbranches > 0);
+
+        (*branch)->m_eventTime = theta / (nbranches * (nbranches - 1));
+        --nbranches;
+    }
+
+} // TimeList::MakeCoalescent
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+string TimeList::DLCheck(const TimeList & other) const
+{
+    // This code should be commented in if DLCheck is being used to test for strict identity,
+    // and commented out if you know the trees are not in fact identical but want to find out where.
+
+#if 1  // Conditional compilation based on comment above.
+    long ncoals = HowMany(btypeCoal), otherncoals = other.HowMany(btypeCoal);
+    if (ncoals != otherncoals)
+    {
+        return string("The trees differ in number of coalescences!\n");
+    }
+#endif
+
+    string problems;
+    Branchconstiter branch, otherbranch;
+
+    for(branch = BeginBranch(), otherbranch = other.BeginBranch();
+        branch != EndBranch() && otherbranch != other.EndBranch();
+        ++branch, ++otherbranch)
+    {
+        problems += (*branch)->DLCheck(**otherbranch);
+    }
+
+    return problems;
+
+} // TimeList::DLCheck
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeList::CloneCheck() const
+{
+    vector<Branch_ptr> newbranches;
+    Branchconstiter branch;
+
+    for(branch = BeginBranch() ; branch != EndBranch() ; ++branch)
+    {
+        Branch_ptr newbranch = Branch::NONBRANCH;
+        newbranch = (*branch)->Clone();
+        if (!newbranch)
+        {
+            cerr << "Failed to clone branch " << (*branch)->GetID() << endl;
+        }
+        newbranches.push_back(newbranch);
+    }
+
+} // TimeList::CloneCheck
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool TimeList::IsPresent(const Branchiter & branch) const
+{
+    bool foundequiv = false;
+    Branchconstiter brit;
+
+    for(brit = BeginBranch() ; brit != EndBranch() ; ++brit)
+    {
+        if (*brit == *branch)
+            return true;
+        foundequiv = (**brit == **branch);
+    }
+
+    if (foundequiv)
+        cerr << "Found equivalent but not same pointer!" << endl;
+
+    return false;
+
+} // TimeList::IsPresent()
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool TimeList::IsSameExceptForTimes(const TimeList & other) const
+{
+    Branchconstiter brit, otherbrit;
+    for(brit = BeginBranch(), otherbrit = other.BeginBranch(); brit != EndBranch();
+        ++brit, ++otherbrit)
+    {
+        if (!(*brit)->IsSameExceptForTimes(*otherbrit)) return false;
+    }
+
+    return true;
+
+} // TimeList::IsSameExceptForTimes
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeList::PrintIntervalLengthsToFile() const
+{
+    ofstream fs;
+    fs.open("timeints.out", ios::app);
+
+    long interval;
+    Branchconstiter top, bottom;
+    for(top = BeginBranch(), bottom = FirstBody(), interval = 0;
+        bottom != EndBranch();
+        top = bottom, bottom = NextBody(bottom), ++interval)
+    {
+        if ((*top)->Event() == btypeRec)
+        {
+            top = bottom;
+            bottom = NextBody(bottom);
+        }
+        double length((*bottom)->m_eventTime - (*top)->m_eventTime);
+        fs << "int" << interval << " " << length << endl;
+    }
+
+    fs.close();
+
+} // TimeList::PrintIntervalLengthsToFile
+
+//------------------------------------------------------------------------------------
+// The following functions manage the counters for branches of each type.
+//------------------------------------------------------------------------------------
+
+void TimeList::CountBranch(const branch_type tag)
+{
+    BranchMap::iterator it = m_branchmap.find(tag);
+
+    if (it == m_branchmap.end())
+    {   // entry not found
+        m_branchmap.insert(make_pair(tag, 1L));
+    }
+    else
+    {
+        it->second++;
+    }
+
+} // CountBranch
+
+//------------------------------------------------------------------------------------
+
+void TimeList::UncountBranch(const branch_type tag)
+{
+    BranchMap::iterator it = m_branchmap.find(tag);
+    assert(it != m_branchmap.end());    // why are we trying to remove a branch type that isn't there?
+    it->second--;
+    assert(it->second >= 0);            // why are we trying to remove a branch that isn't there?
+
+} // UncountBranch
+
+//------------------------------------------------------------------------------------
+
+void TimeList::ClearBranchCount()
+{
+    // We don't get rid of entries, since they may be useful later when the same kind of branch comes round again.
+
+    BranchMap::iterator it = m_branchmap.begin();
+    BranchMap::iterator end = m_branchmap.end();
+
+    for ( ; it != end; ++it)
+    {
+        it->second = 0L;
+    }
+
+} // ClearBranchCount
+
+//------------------------------------------------------------------------------------
+
+void TimeList::UpdateFirstsIfNecessary(Branchiter & branch)
+{
+    switch((*branch)->BranchGroup())
+    {
+        case bgroupTip:
+            m_firsttip = find_if(m_branches.begin(), m_branches.end(), IsTipGroup());
+            return;
+        case bgroupBody:
+            if (m_firstbody == m_branches.end() ||
+                (*m_firstbody)->m_eventTime > (*branch)->m_eventTime)
+            {
+                m_firstbody = branch;
+            }
+            else
+            {
+                if ((*m_firstbody)->m_eventTime == (*branch)->m_eventTime)
+                {
+                    m_firstbody = find_if(m_branches.begin(), m_branches.end(), IsBodyGroup());
+                }
+            }
+            if ((*branch)->Event() == btypeCoal)
+            {
+                if (m_firstcoal == m_branches.end() || (*m_firstcoal)->m_eventTime > (*branch)->m_eventTime)
+                {
+                    m_firstcoal = branch;
+                }
+                else
+                {
+                    if ((*m_firstcoal)->m_eventTime == (*branch)->m_eventTime)
+                    {
+                        m_firstcoal = find_if(m_branches.begin(), m_branches.end(), IsCoalGroup());
+                    }
+                }
+            }
+            return;
+    }
+
+    throw implementation_error("TimeList::Unknown branch group.");
+
+} // UpdateFirstsIfNecessary
+
+//------------------------------------------------------------------------------------
+
+long TimeList::HowMany(branch_type tag) const
+{
+    BranchMap::const_iterator it = m_branchmap.find(tag);
+    if (it == m_branchmap.end())        // entry not found
+        return 0L;
+    else return it->second;
+
+} // HowMany
+
+//------------------------------------------------------------------------------------
+
+bool TimeList::ContainsOnlyTipsAndCoals() const
+{
+    // Trees must always contain tips and coalescences...
+    if (m_branchmap.size() > 2) return false;
+
+    return true;
+
+} // ContainsOnlyTipsAndCoals
+
+//------------------------------------------------------------------------------------
+
+bool TimeList::ContainsOnlyTipsCoalsAndRecombs() const
+{
+    // Trees must always contain tips and coalescences...
+    // so if there are 3 types and one is btypeRec...
+    if(m_branchmap.size() == 3 && m_branchmap.find(btypeRec) != m_branchmap.end())
+        return true;
+
+    return false;
+} // ContainsOnlyTipsCoalsAndRecombs
+
+//------------------------------------------------------------------------------------
+
+bool TimeList::operator==(const TimeList & src) const
+{
+    // We do not check for equivalence of m_firstbody, m_firstcoal, and m_firsttip.
+
+    if (m_branchmap != src.m_branchmap) return false;
+    if (m_ntips != src.m_ntips) return false;
+    if (m_ncuttable != src.m_ncuttable) return false;
+
+    if (m_branches.size() != src.m_branches.size()) return false;
+
+    Branchconstiter mybr = m_branches.begin();
+    Branchconstiter srcbr = src.m_branches.begin();
+
+    for ( ; mybr != m_branches.end(); ++mybr, ++srcbr)
+    {
+        if (**mybr != **srcbr) return false;
+    }
+
+    return true;
+
+} // operator==
+
+//------------------------------------------------------------------------------------
+
+Branchiter TimeList::ResolveTiedTimes(Branchconstiter firstinvalid)
+{
+    // the general case
+    double firsttime((*firstinvalid)->m_eventTime);
+    Branchiter br;
+    for(br = FirstBody(); br != EndBranch(); br = NextBody(br))
+    {
+        if ((*br)->m_eventTime == firsttime) return br;
+    }
+
+    assert(false);                      // ResolveTiedTimes asked for a time that's not here!
+
+    return EndBranch();
+
+} // ResolveTiedTimes
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool TimeList::IsValidTimeList() const
+{
+    Branchconstiter it;
+    long i = 0;
+    bool okay = true;
+
+    for (it = BeginBranch(); it != EndBranch(); ++it, ++i)
+    {
+        if (*it == Branch::NONBRANCH)
+        {
+            okay = false;               // Are there any null branches in this tree?
+        }
+        else
+        {
+            if ((*it)->GetRangePtr()->NoLiveAndNoTransmittedDiseaseSites())
+            {
+                // JDEBUG -- there could be a more rigorous test involving FC here.
+                //
+                // This branch of the IF handles only recombinant branches, guaranteed because
+                // NoLiveAndNoTransmittedDiseaseSites() returns FALSE for non-recombinant branches.
+                // The issue is the type of Range objects the branch contains, not the current event type.
+                //
+                cerr << "A " << ToString((*it)->Event()) << " branch with ";
+                (*it)->GetRangePtr()->PrintLive();
+                if ((*it)->Event() == btypeRec)
+                {
+                    // RBranch::GetRecpoint() returns a Littlelink (Biglink midpoint).
+                    cerr << "  and recombination point at " << (*it)->GetRecpoint();
+                }
+                cerr << endl;
+                const Branch_ptr br((*it)->Child(0));
+                cerr << "  Active Sites in child: ";
+                br->GetRangePtr()->PrintLive();
+                cerr << "  Current Target Link Weight in child: " << br->GetRangePtr()->GetCurTargetLinkweight() << endl << endl;
+                okay = false;
+                cerr << "  Branch printout before ASSERT:" << endl << endl;
+                (*it)->PrintInfo();
+                assert(false);
+            }
+
+            branch_type event = ((*it)->Event());
+            switch (event)
+            {
+                case btypeTip:
+                    if (i >= m_ntips) okay = false;
+                    break;
+                default:
+                    if (i < m_ntips) okay = false;
+            }
+            assert(okay);
+
+            Branch_ptr parent1 = (*it)->Parent(0);
+            Branch_ptr parent2 = (*it)->Parent(1);
+            if (parent2 && !parent1) okay = false;
+            if (parent1 && !parent1->ConnectedTo(*it)) okay = false;
+            if (parent2 && !parent2->ConnectedTo(*it)) okay = false;
+
+            Branch_ptr child1 = (*it)->Child(0);
+            Branch_ptr child2 = (*it)->Child(1);
+            if (child2 && !child1) okay = false;
+            if (child1 && !child1->ConnectedTo(*it)) okay = false;
+            if (child2 && !child2->ConnectedTo(*it)) okay = false;
+            switch (event)
+            {
+                case btypeCoal:
+                    if ((!(*it)->HasSamePartitionsAs(child1)) || (!(*it)->HasSamePartitionsAs(child2)))
+                    {
+                        okay = false;
+                        assert(false);
+                    }
+                    break;
+                case btypeMig:
+                case btypeDivMig:
+                case btypeDisease:
+                case btypeEpoch:
+                    if ((*it)->HasSamePartitionsAs(child1))
+                    {
+                        okay = false;
+                        assert(false);
+                    }
+                    break;
+                case btypeRec:
+                    // Value returned by Branch::GetRecpoint() (FLAGLONG) is actually a code for NO RECOMBINATION.
+                    if ((*it)->GetRecpoint() == FLAGLONG) okay = false;
+                    if (!((*it)->PartitionsConsistentWith(child1))) okay = false;
+                    DebugAssert2(((*it)->GetRecpoint() != FLAGLONG) && (*it)->PartitionsConsistentWith(child1),
+                                 (*it)->GetRecpoint(),
+                                 (*it)->PartitionsConsistentWith(child1));
+#if 0 // Equivalent to DebugAssert2 above, in case it is removed later.
+                    assert(okay);
+#endif
+                    break;
+                case btypeBase:
+                case btypeTip:
+                    //Check anything here?
+                    break;
+            }
+        }
+    }
+
+    assert(m_firsttip == find_if(m_branches.begin(), m_branches.end(), IsTipGroup()));
+    assert(m_firstbody == find_if(m_branches.begin(), m_branches.end(), IsBodyGroup()));
+    assert(m_firstcoal == find_if(m_branches.begin(), m_branches.end(), IsCoalGroup()));
+    assert(okay);
+
+    return okay;
+
+} // IsValidTimeList
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeList::PrintNodeInfo(long nodenumber) const
+{
+    Branchconstiter br;
+    for (br = BeginBranch(); br != EndBranch(); ++br)
+    {
+        long id = (*br)->GetID();
+        if (id == nodenumber)
+        {
+            //cerr << "Node " << id << endl;
+            (*br)->PrintInfo();
+            return;
+        }
+    }
+
+} // PrintNodeInfo
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool TimeList::RevalidateAllRanges() const
+{
+    Branchconstiter br;
+    FC_Status fcstatus;
+
+#if FINAL_COALESCENCE_ON
+    for(br = FirstTip(); br != EndBranch(); br = NextTip(br))
+    {
+        fcstatus.Increment_FC_Counts((*br)->GetLiveSites());
+    }
+#endif
+
+    int nbr = 0;
+    for (br = BeginBranch(); br != EndBranch(); ++br)
+    {
+        // cerr << endl << "in TimeList::RevalidateAllRanges checking branch: " << nbr << ", ID: " << (*br)->GetID() << endl;
+        // (*br)->GetRangePtr()->PrintInfo();
+        if (!(*br)->RevalidateRange(fcstatus)) return false;
+        ++nbr;
+    }
+
+    return true;
+
+} // RevalidateAllRanges
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void TimeList::CorrectAllRanges()
+{
+    Branchiter br;
+    bool usefc = false;
+    FC_Status fcstatus;
+
+#if FINAL_COALESCENCE_ON
+    usefc = true;
+
+    for(br = FirstTip(); br != EndBranch(); br = NextTip(br))
+    {
+        fcstatus.Increment_FC_Counts((*br)->GetLiveSites());
+    }
+#endif
+
+    for (br = BeginBranch(); br != EndBranch(); ++br)
+    {
+        // A hack:  this call makes fcstatus right; we ignore the return value.
+        (*br)->RevalidateRange(fcstatus);
+        (*br)->UpdateBranchRange(fcstatus.Coalesced_Sites(), usefc);
+    }
+
+} // CorrectAllRanges
+
+//____________________________________________________________________________________
diff --git a/src/tree/timelist.h b/src/tree/timelist.h
new file mode 100644
index 0000000..8ddb2b3
--- /dev/null
+++ b/src/tree/timelist.h
@@ -0,0 +1,187 @@
+// $Id: timelist.h,v 1.30 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// This file defines TimeList, a time-ordered container of Branch_ptr.
+// A TimeList is a wrapped std::list of all Branch-derived objects
+// associated with a particular tree.
+// Because shared_ptr is used to hold Branch objects,
+// no explicit deletes should ever be needed.
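+//
+// Typical read-only traversal (an illustrative sketch only; 'timelist' is a
+// hypothetical TimeList instance, and the accessors used are declared below):
+//
+//     Branchconstiter br;
+//     for (br = timelist.FirstTip(); br != timelist.EndBranch(); br = timelist.NextTip(br))
+//         (*br)->PrintInfo();    // visit each tip branch in time order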
+
+#ifndef TIMELIST_H
+#define TIMELIST_H
+
+#include <map>
+#include <vector>
+
+#include "vectorx.h"
+#include "defaults.h"
+#include "definitions.h"
+#include "types.h"
+#include "branch.h"
+#include "locus.h"      // for DataModel
+#include "dlmodel.h"    // for DataModel
+
+// #include "branch.h"--to create the "base" branch, TimeList constructor
+//                      to create a Tbranch, CreateTip
+//                      to initialize branch innards, both of the above
+//                      to maintain tree child and parent relationships
+//                         in CopyBody()
+//                      to maintain tree child and parent relationships
+//                         in CopyPartialBody()
+//                      to track ncuttable via branch.Cuttable()
+//                      to track marked status via branch.marked,
+//                         branch.SetUpdateDL()
+
+//------------------------------------------------------------------------------------
+
+class TipData;
+class DataPack;
+
+class TiXmlDocument;
+
+typedef std::map<branch_type, long> BranchMap;
+typedef Branchlist::const_reverse_iterator       Branchconstriter;
+
+//------------------------------------------------------------------------------------
+
+class TimeList
+{
+  private:
+    Branch_ptr m_base;
+    Branchlist m_branches;                 // TimeList owns the branches!!!
+    BranchMap m_branchmap;                 // counts branches of each type
+
+    // cached start-points, for speed
+    Branchiter m_firsttip;
+    Branchiter m_firstbody;
+    Branchiter m_firstcoal;
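+    // (Presumably kept in step with m_branches by UpdateFirstsIfNecessary() below.)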
+
+    long m_ntips, m_ncuttable;
+
+    void CountBranch(const branch_type tag);
+    void UncountBranch(const branch_type tag);
+    void ClearBranchCount();
+
+    // the argument is non-const since it may be assigned to a non-const member
+    void UpdateFirstsIfNecessary(Branchiter & brit);
+
+    Branchiter CollateAndSetCrossrefs(Branch_ptr newbranch, Branch_ptr oldbranch);
+    void UpdateBookkeeping(Branchiter newbranch);
+
+  public:
+
+    TimeList();
+    TimeList(const TimeList & src);
+    TimeList & operator=(const TimeList & src);
+    ~TimeList();
+    void Clear();
+    void ClearBody();
+    void ClearPartialBody(Branchiter firstinvalid);
+
+    // "Copies" tree linkage as well; destroys previous elements.
+    void CopyTips(const TimeList & src);
+    void CopyBody(const TimeList & src);  // Destroys previous elements.
+    // Destroys previous elements from start on; "copies" tree linkage as well;
+    // assumes tips are already identical.
+    void CopyPartialBody(const TimeList & src, const Branchiter & srcstart, Branchiter & mystart);
+
+    Branchiter FindEquivBranch(const Branch_ptr target);
+
+    long HowMany(const branch_type tag) const; // How many branches of type 'tag'?
+
+    // The following functions are used to prevent Newick output of non-Newick-legal trees.
+    bool ContainsOnlyTipsAndCoals() const; // any branches other than tips & coals?
+    bool ContainsOnlyTipsCoalsAndRecombs() const; // any branches other than tips, coals &
+                                                  //   recombinations?
+    Branchiter Collate(Branch_ptr newbranch);
+    Branchiter Collate(Branch_ptr newbranch, Branch_ptr spouse);
+    void Remove(Branch_ptr badbranch);
+
+    Branchiter       BeginBranch()        { return m_branches.begin(); };
+    Branchconstiter  BeginBranch()  const { return m_branches.begin(); };
+    Branchiter       EndBranch()          { return m_branches.end(); };
+    Branchconstiter  EndBranch()    const { return m_branches.end(); };
+    Branchconstriter RBeginBranch() const { return m_branches.rbegin(); };
+    Branchconstriter REndBranch()   const { return m_branches.rend(); };
+
+    // Return the first branch of a specific sort.
+    Branchiter       FirstTip() { return m_firsttip; };
+    Branchconstiter  FirstTip() const { return m_firsttip; };
+    Branchiter       FirstBody() { return m_firstbody; };
+    Branchconstiter  FirstBody() const { return m_firstbody; };
+    Branchiter       FirstCoal() { return m_firstcoal; };
+    Branchconstiter  FirstCoal() const { return m_firstcoal; };
+
+    // Return the next branch of a specific sort.
+    Branchiter       NextTip(Branchiter & it);
+    Branchconstiter  NextTip(Branchconstiter & it) const;
+    Branchiter       NextBody(Branchiter & it);
+    Branchconstiter  NextBody(Branchconstiter & it) const;
+    Branchiter       NextCoal(Branchiter & it);
+    Branchconstiter  NextCoal(Branchconstiter & it) const;
+    Branchiter       PrevBodyOrTip(Branchiter & it);
+    Branchiter       NextNonTimeTiedBranch(Branchiter & it);
+    Branchconstiter  NextNonTimeTiedBranch(Branchconstiter & it) const;
+
+    Branch_ptr       Base()            { return m_base; };
+    const Branch_ptr Base()      const { return m_base; };
+    Branch_ptr       Root()      const { return m_base->Child(0); };
+    double           RootTime()  const { return Root()->m_eventTime; };
+    Branchiter       FindIter(Branch_ptr br);
+    Branchconstiter  FindIter(Branch_ptr br) const;
+
+    long          GetNTips()     const { return m_ntips; };
+    long          GetNCuttable() const { return m_ncuttable; };
+    long          GetNBranches() const { return m_branches.size(); };
+    long          GetNBodyBranches() const;
+    TBranch_ptr   CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                            const vector<LocusCell> & movingcells, long nsites, rangeset diseasesites);
+    TBranch_ptr   CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                            const vector<LocusCell> & movingcells, long nsites, rangeset diseasesites,
+                            const vector<Locus> & loci);
+
+    TBranch_ptr   GetTip(const string & name) const;
+    void          Prune();
+    void          SetUpdateDLs(Branch_ptr);
+    void          ClearUpdateDLs();
+    void          SetAllUpdateDLs();
+
+    bool operator==(const TimeList & src) const;
+    bool operator!=(const TimeList & src) const { return !(*this == src); };
+
+    Branchiter ResolveTiedTimes(Branchconstiter firstinvalid);
+
+    // Debugging functions.
+    void PrintTimeList() const;
+    void PrintTimeList(std::ostream & of) const;
+    void PrintTreeList() const;
+    void PrintTreeList(std::ostream & of) const;
+    TiXmlDocument * AssembleGraphML() const;
+    void PrintIDs() const;
+    void PrintTips();
+    bool IsValidTimeList() const;
+    void MakeCoalescent(double theta);
+    string DLCheck(const TimeList & other) const;
+    void CloneCheck() const;
+    bool IsPresent(const Branchiter & branch) const;
+    bool IsSameExceptForTimes(const TimeList & other) const;
+    void PrintIntervalLengthsToFile() const;
+    void PrintDirectionalMutationEventCountsToFile(std::ofstream & of) const;
+    void PrintTimeTilFirstEventToFile(std::ofstream & of) const;
+    void PrintTraitPhenotypeAtLastCoalescence(std::ofstream & of) const;
+    void PrintNodeInfo(long nodenumber) const;
+    bool RevalidateAllRanges() const;
+    void CorrectAllRanges();
+};
+
+#endif // TIMELIST_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/tree.cpp b/src/tree/tree.cpp
new file mode 100644
index 0000000..49a6fb0
--- /dev/null
+++ b/src/tree/tree.cpp
@@ -0,0 +1,1565 @@
+// $Id: tree.cpp,v 1.115 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <algorithm>                    // for std::max and min
+#include <cassert>
+#include <iostream>                     // debugging
+#include <fstream>
+#include <limits>                       // for numeric_limits<double>.epsilon()
+
+#include "local_build.h"
+
+#include "branchbuffer.h"               // for Tree::Groom
+#include "datapack.h"
+#include "dlcell.h"
+#include "errhandling.h"                // for datalikenorm_error in Tree::CalculateDataLikes
+#include "locus.h"
+#include "mathx.h"                      // for exp()
+#include "registry.h"
+#include "runreport.h"
+#include "timemanager.h"
+#include "tree.h"
+#include "treesum.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+//#define TEST // erynes test
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+Tree::Tree()
+    : m_overallDL(FLAGDOUBLE),
+      m_randomSource(&(registry.GetRandom())),
+      m_totalSites(0),
+      m_timeManager(NULL)
+{
+    // Deliberately blank.
+}
+
+//------------------------------------------------------------------------------------
+
+Tree::~Tree()
+{
+    // Branches clean themselves up via shared_ptr; the TimeManager is the only
+    // resource we delete explicitly.
+    delete m_timeManager;
+
+} // Tree destructor
+
+//------------------------------------------------------------------------------------
+
+// MCHECK Can we do this now using Lucian's new mechanism?  NB This copy constructor does *not* copy
+// m_firstInvalid (branch equivalence problem needs to be solved).
+
+Tree::Tree(const Tree & tree, bool makestump)
+    : m_protoCells(tree.m_protoCells),
+      m_individuals(tree.m_individuals),
+      m_overallDL(tree.m_overallDL),
+      m_randomSource(tree.m_randomSource),
+      m_pLocusVec(tree.m_pLocusVec),
+      m_totalSites(tree.m_totalSites)
+{
+    const ForceSummary & fs(registry.GetForceSummary());
+    m_timeManager = fs.CreateTimeManager();
+
+    if (!makestump)
+    {
+        *m_timeManager = *(tree.m_timeManager); // Make sure the TimeManager state is the same in both trees.
+        m_timeList = tree.m_timeList;   // Deep copy the branchlist.
+
+        // Now setup the tip pointers in the individuals correctly, since the
+        // individual copy ctor only makes empty containers of tips.
+        unsigned long indiv;
+        for(indiv = 0; indiv < m_individuals.size(); ++indiv)
+        {
+            StringVec1d tipnames = tree.m_individuals[indiv].GetAllTipNames();
+            vector<Branch_ptr> itips = GetTips(tipnames);
+            m_individuals[indiv].SetTips(itips);
+        }
+    }
+
+}  // Tree copy ctor
+
+//------------------------------------------------------------------------------------
+
+void Tree::Clear()
+{
+    m_timeList.Clear();
+}
+
+//------------------------------------------------------------------------------------
+
+const vector<LocusCell> & Tree::CollectCells()
+{
+    if (m_protoCells.empty())
+    {
+        unsigned long i;
+        for (i = 0; i < m_pLocusVec->size(); ++i)
+        {
+            m_protoCells.push_back((*m_pLocusVec)[i].GetProtoCell());
+        }
+    }
+    return m_protoCells;
+
+} // CollectCells
+
+//------------------------------------------------------------------------------------
+
+void Tree::CopyTips(const Tree * tree)
+{
+    m_timeList.CopyTips(tree->m_timeList);
+    m_firstInvalid = m_timeList.BeginBranch();
+    m_totalSites = tree->m_totalSites;
+
+    m_overallDL = FLAGDOUBLE;           // It won't be valid anymore.
+    m_individuals = tree->m_individuals;
+
+    // Now setup the tip pointers in the individuals correctly.
+    unsigned long indiv;
+    for(indiv = 0; indiv < m_individuals.size(); ++indiv)
+    {
+        StringVec1d tipnames = tree->m_individuals[indiv].GetAllTipNames();
+        vector<Branch_ptr> itips = GetTips(tipnames);
+        m_individuals[indiv].SetTips(itips);
+    }
+
+    vector<Locus>::const_iterator locus;
+    m_aliases.clear();
+
+    for(locus = m_pLocusVec->begin(); locus != m_pLocusVec->end(); ++locus)
+        m_aliases.push_back(locus->GetDLCalc()->RecalculateAliases(*this, *locus));
+}
+
+//------------------------------------------------------------------------------------
+
+void Tree::CopyBody(const Tree * tree)
+{
+    m_timeList.CopyBody(tree->m_timeList);
+    m_overallDL = tree->m_overallDL;
+}
+
+//------------------------------------------------------------------------------------
+
+void Tree::CopyStick(const Tree * tree)
+{
+    m_timeManager->CopyStick(*(tree->m_timeManager));
+}
+
+//------------------------------------------------------------------------------------
+
+void Tree::CopyPartialBody(const Tree * tree)
+{
+    // It's wrong to call CopyPartialBody if you don't have a starting point.
+    assert((*m_firstInvalid) != Branch::NONBRANCH);
+
+    // We can't do a partial copy if the tree was changed too high up.
+    if ((*m_firstInvalid)->BranchGroup() == bgroupTip || (*tree->m_firstInvalid)->BranchGroup() == bgroupTip)
+    {
+        m_timeList.CopyBody(tree->m_timeList);
+    }
+    else
+    {
+        m_timeList.CopyPartialBody(tree->m_timeList, tree->m_firstInvalid, m_firstInvalid);
+    }
+
+    m_overallDL = tree->m_overallDL;
+}
+
+//------------------------------------------------------------------------------------
+
+TBranch_ptr Tree::CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                            const vector<LocusCell> & movingcells, const rangeset & diseasesites)
+{
+    // used in sample only case
+    TBranch_ptr pTip = m_timeList.CreateTip(tipdata, cells, movingcells, m_totalSites, diseasesites);
+    return pTip;
+}
+
+//------------------------------------------------------------------------------------
+
+TBranch_ptr Tree::CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                            const vector<LocusCell> & movingcells, const rangeset & diseasesites,
+                            const vector<Locus> & loci)
+{
+    // used in panel case
+    TBranch_ptr pTip = m_timeList.CreateTip(tipdata, cells, movingcells, m_totalSites, diseasesites, loci);
+    return pTip;
+}
+//------------------------------------------------------------------------------------
+
+void Tree::SetTreeTimeManager(TimeManager * tm)
+{
+    m_timeManager = tm;
+}
+
+//------------------------------------------------------------------------------------
+
+void Tree::SetIndividualsWithTips(const vector<Individual> & indvec)
+{
+    m_individuals = indvec;
+} // SetIndividualsWithTips
+
+//------------------------------------------------------------------------------------
+// RSGFIXUP:  Tree::m_totalSites is computed here.  Seems to be same value as Range::s_numRegionSites.
+// Either merge the two variables or guarantee they track each other (or test with ASSERT that they do).
+
+void Tree::SetLocusVec(vector<Locus> * loc)
+{
+    m_pLocusVec = loc;
+    m_totalSites = 0;
+    vector<Locus>::const_iterator locit;
+    for(locit = loc->begin(); locit != loc->end(); ++locit)
+    {
+        if (locit->GetSiteSpan().second > m_totalSites)
+            m_totalSites = locit->GetSiteSpan().second;
+    }
+
+} // SetLocusVec
+
+//------------------------------------------------------------------------------------
+
+TreeSummary * Tree::SummarizeTree() const
+{
+    TreeSummary * treesum = registry.GetProtoTreeSummary().Clone();
+    treesum->Summarize(*this);
+    return treesum;
+
+} // SummarizeTree
+
+//------------------------------------------------------------------------------------
+
+double Tree::CalculateDataLikesForFixedLoci()
+{
+    unsigned long loc;
+#ifndef NDEBUG
+    vector<double> likes;
+#endif
+    double likelihood, overalllike(0);
+
+    for (loc = 0; loc < m_pLocusVec->size(); ++loc)
+    {
+        const Locus & locus = (*m_pLocusVec)[loc];
+        try
+        {                               // Check for need to switch to normalization.
+            likelihood = locus.CalculateDataLikelihood(*this, false);
+        }
+        catch (datalikenorm_error & ex) // Normalization is set by thrower.
+        {
+            m_timeList.SetAllUpdateDLs(); // All subsequent loci will recalculate the entire tree.
+            RunReport & runreport = registry.GetRunReport();
+            runreport.ReportChat("\n", 0);
+            runreport.ReportChat("Subtree of likelihood 0.0 found:  Turning on normalization and re-calculating.");
+
+            likelihood = locus.CalculateDataLikelihood(*this, false);
+        }
+#ifndef NDEBUG
+        likes.push_back(likelihood);
+#endif
+        //cerr << "short loc: " << loc << " likelihood: " << likelihood << endl;
+        overalllike += likelihood;      // Add to accumulator.
+
+#if LIKETRACK
+        ofstream of;
+        of.open("likes1", ios::app);
+        of << GetDLValue() << " ";
+        of.close();
+#endif
+    }
+
+#ifndef NDEBUG
+    // This code tests a full likelihood recalculation against the time-saving partial recalculation.  Slow but sure.
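+    // The asserts below demand that each per-locus likelihood, and the overall sum,
+    // from this full recalculation match the incrementally updated values stored above
+    // to a relative tolerance of EPSILON (skipped in the single-denovos QA build,
+    // where likelihoods can legitimately be zero).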
+    double checkDL = 0.0;
+    m_timeList.SetAllUpdateDLs();
+    for (loc = 0; loc < m_pLocusVec->size(); ++loc)
+    {
+        const Locus & locus = (*m_pLocusVec)[loc];
+        double likelihood = locus.CalculateDataLikelihood(*this, false);
+        //cerr << "long loc: " << loc << " likelihood: " << likelihood << endl << endl ;
+        checkDL += likelihood;
+#ifndef LAMARC_QA_SINGLE_DENOVOS
+        // likelihood can be zero in single denovos test
+        assert(fabs(likelihood - likes[loc]) / likelihood < EPSILON);
+#endif // LAMARC_QA_SINGLE_DENOVOS
+    }
+#ifndef LAMARC_QA_SINGLE_DENOVOS
+    // overalllike can be zero in single denovos test
+    assert(fabs(overalllike - checkDL) / overalllike < EPSILON);
+#endif // LAMARC_QA_SINGLE_DENOVOS
+#endif // NDEBUG
+
+    return overalllike;
+
+} // CalculateDataLikesForFixedLoci
+
+//------------------------------------------------------------------------------------
+
+void Tree::SetupAliases(const vector<Locus> & loci)
+{
+    // Set up the aliases; this must be done after filling the tips with data!
+    m_aliases.clear();
+    vector<Locus>::const_iterator locus;
+
+    for (locus = loci.begin(); locus != loci.end(); ++locus)
+    {
+        m_aliases.push_back(locus->GetDLCalc()->RecalculateAliases(*this, *locus));
+    }
+
+} // SetupAliases
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> Tree::ActivateTips(Tree * othertree)
+{
+    vector<Branch_ptr> tips;
+    m_firstInvalid = m_timeList.BeginBranch();
+    othertree->SetFirstInvalid();
+
+    m_timeList.ClearBody();
+
+    Branchiter brit;
+    for (brit = m_timeList.FirstTip(); brit != m_timeList.EndBranch(); brit = m_timeList.NextTip(brit))
+    {
+        tips.push_back(*brit);
+    }
+
+    return tips;
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Tree::ActivateBranch(Tree * othertree)
+{
+    // NCuttable - 1 because the root branch is of a cuttable type but is not, in fact, a cuttable branch.
+    // + 1 because it's helpful to count cuttable branches from 1, not 0.
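+    // Worked example (illustrative sketch, assuming Long(n) returns a uniform value in [0, n)):
+    // with GetNCuttable() == 5, rn below is drawn from {1, 2, 3, 4}, so real cuttable
+    // branches are counted from 1 and the root's cuttable slot is never selected.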
+    long rn = m_randomSource->Long(m_timeList.GetNCuttable() - 1) + 1;
+
+    Branchiter brit;
+    Branch_ptr pActive = Branch::NONBRANCH;
+    assert(m_timeList.IsValidTimeList());
+
+    for (brit = m_timeList.BeginBranch(); rn > 0; ++brit)
+    {
+        m_firstInvalid = brit;
+        pActive = *brit;
+        rn -= pActive->Cuttable();
+    }
+
+    Branchconstiter constfirstinvalid = m_firstInvalid;
+    othertree->SetFirstInvalidFrom(constfirstinvalid);
+
+    assert(pActive != Branch::NONBRANCH); // We didn't find any cuttable branches?
+
+    Break(pActive);
+    pActive->SetParent(0, Branch::NONBRANCH);
+    pActive->SetParent(1, Branch::NONBRANCH);
+    return pActive;
+}
+
+//------------------------------------------------------------------------------------
+
+void Tree::SetFirstInvalidFrom(Branchconstiter & target)
+{
+    m_firstInvalid = m_timeList.FindEquivBranch(*target);
+} // SetFirstInvalidFrom
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Tree::ActivateRoot(FC_Status & fcstatus)
+{
+    Branch_ptr pBase   = m_timeList.Base(); // Get the tree base and root.
+    Branch_ptr pRoot   = pBase->Child(0);
+    pBase->SetChild(0, Branch::NONBRANCH); // Disconnect them.
+    pRoot->SetParent(0, Branch::NONBRANCH);
+
+    // Update the fcstatus with root info.
+#if FINAL_COALESCENCE_ON
+    if (pRoot->Child(1))                // We have two children.
+    {
+        rangeset sites(Intersection(pRoot->Child(0)->GetLiveSites(), pRoot->Child(1)->GetLiveSites()));
+        fcstatus.Decrement_FC_Counts(sites);
+    }
+    pRoot->UpdateRootBranchRange(fcstatus.Coalesced_Sites(), true); // Update the root's range object.
+#else
+    rangeset emptyset;
+    pRoot->UpdateRootBranchRange(emptyset, false);
+#endif
+    return pRoot;
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Tree::ChoosePreferentiallyTowardsRoot(Tree * othertree)
+{
+    // Choose a random body branch, triangle weighted towards the root.
+    long nbranches = m_timeList.GetNBodyBranches();
+    long weightedbranches = (nbranches * (nbranches + 1)) / 2;
+    // weightedbranches = 0; //JRM remove
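+    // Triangle weighting (a sketch of the scheme used here): body branch i, counted
+    // 1..nbranches from the tips toward the root, gets weight i, so the total weight is
+    // nbranches*(nbranches+1)/2 and the most rootward body branch is nbranches times as
+    // likely to be chosen as the most tipward one.  The while loop below converts the
+    // drawn weight back into a branch index.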
+    long chosenweight = m_randomSource->Long(weightedbranches) + 1;
+    long chosenbranch(0L), weight(0L);
+    while(weight < chosenweight)
+    {
+        ++chosenbranch;
+        weight += chosenbranch;
+    }
+
+    long thisbranch(1L);
+    for(m_firstInvalid = m_timeList.FirstBody();
+        m_firstInvalid != m_timeList.EndBranch();
+        m_firstInvalid = m_timeList.NextBody(m_firstInvalid), ++thisbranch)
+    {
+        if (thisbranch == chosenbranch) break;
+    }
+    assert(m_firstInvalid != m_timeList.EndBranch() && thisbranch == chosenbranch);
+
+    // m_firstInvalid may be set incorrectly when there are multiple branches with the same eventtime.
+    m_firstInvalid = m_timeList.ResolveTiedTimes(m_firstInvalid);
+
+    Branchconstiter constfirstinvalid = m_firstInvalid;
+    othertree->SetFirstInvalidFrom(constfirstinvalid);
+
+    return *m_firstInvalid;
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Tree::ChooseFirstBranchInEpoch(double targettime, Tree * othertree)
+{
+    for(m_firstInvalid = m_timeList.FirstBody();
+        m_firstInvalid != m_timeList.EndBranch();
+        m_firstInvalid = m_timeList.NextBody(m_firstInvalid))
+    {
+        if ((*m_firstInvalid)->GetTime() > targettime) break;
+    }
+
+    assert(m_firstInvalid != m_timeList.EndBranch());
+
+    // m_firstInvalid may be set incorrectly when there are multiple branches with the same eventtime.
+    m_firstInvalid = m_timeList.ResolveTiedTimes(m_firstInvalid);
+
+    Branchconstiter constfirstinvalid = m_firstInvalid;
+    othertree->SetFirstInvalidFrom(constfirstinvalid);
+    return *m_firstInvalid;
+}
+
+//------------------------------------------------------------------------------------
+
+void Tree::AttachBase(Branch_ptr newroot)
+{
+    Branch_ptr pBase     = m_timeList.Base();  // Get the tree base.
+    pBase->SetChild(0, newroot);               // Link the root to the base.
+    newroot->SetParent(0, pBase);
+}
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> Tree::FindAllBranchesAtTime(double eventT)
+{
+    Branch_ptr pBranch;
+    vector<Branch_ptr> inactives;
+
+    Branchiter brit;
+    for (brit = m_timeList.BeginBranch(); brit != m_timeList.EndBranch(); ++brit)
+    {
+        pBranch = *brit;
+        if (eventT < pBranch->m_eventTime)
+        {
+            break;
+        }
+
+        if (pBranch->Parent(0))
+        {
+            if (eventT < pBranch->Parent(0)->m_eventTime)
+            {
+                inactives.push_back(pBranch);
+            }
+        }
+        else assert(!pBranch->Parent(1));
+    }
+
+    return inactives;
+}
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> Tree::FirstInterval(double eventT)
+{
+    return FindAllBranchesAtTime(eventT);
+}
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> Tree::FindBranchesImmediatelyTipwardOf(Branchiter startbr)
+{
+    assert((*startbr)->m_eventTime != 0.0);
+    Branchiter prevbr(startbr);
+    --prevbr;                           // Assumes a bidirectional (reversible) iterator; std::list provides one.
+    while((*prevbr)->m_eventTime == (*startbr)->m_eventTime)
+    {
+        assert(prevbr != m_timeList.BeginBranch()); // Too far!
+        --prevbr;
+    }
+    return FindAllBranchesAtTime((*prevbr)->m_eventTime);
+}
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> Tree::FindBranchesBetween(double startint, double endint)
+{
+    vector<Branch_ptr> branches;
+
+    Branchiter brit;
+    for (brit = m_timeList.BeginBranch(); brit != m_timeList.EndBranch(); ++brit)
+    {
+        Branch_ptr pBranch = *brit;
+        if (startint <= pBranch->m_eventTime && pBranch->m_eventTime < endint)
+        {
+            branches.push_back(pBranch);
+        }
+    }
+
+    return branches;
+}
+
+//------------------------------------------------------------------------------------
+
+void Tree::NextInterval(Branch_ptr branch)
+{
+    // Deliberately blank; needed only in rectree.
+}
+
+//------------------------------------------------------------------------------------
+
+void Tree::Prune()
+{
+    m_timeList.Prune();
+    // This is done last so that the information it overwrites can be used during
+    // pruning and for validation--do not move it into the main loop!
+    Branchiter brit;
+    for (brit = m_timeList.FirstBody(); brit != m_timeList.EndBranch(); brit = m_timeList.NextBody(brit))
+    {
+        (*brit)->ResetBuffersForNextRearrangement();
+    }
+    TrimStickToRoot();
+}
+
+//------------------------------------------------------------------------------------
+
+void Tree::TrimStickToRoot()
+{
+    m_timeManager->ChopOffStickAt(m_timeList.RootTime());
+} // TrimStickToRoot
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Tree::Coalesce(Branch_ptr child1, Branch_ptr child2, double tevent, const rangeset & fcsites)
+{
+    assert(child1->m_partitions == child2->m_partitions);
+
+    bool newbranchisinactive(false);
+    Branch_ptr parent = Branch_ptr(new CBranch(child1->GetRangePtr(), child2->GetRangePtr(), newbranchisinactive, fcsites));
+    parent->m_eventTime = tevent;
+    parent->CopyPartitionsFrom(child1);
+
+    parent->SetChild(0, child1);
+    parent->SetChild(1, child2);
+    child1->SetParent(0, parent);
+    child2->SetParent(0, parent);
+
+    parent->SetDLCells(CollectCells());
+    parent->SetUpdateDL();              // Mark this branch for updating.
+
+    m_timeList.Collate(parent);
+    assert(child1->m_partitions == parent->m_partitions);
+
+    return parent;
+
+} // Tree::Coalesce
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Tree::CoalesceActive(double eventT, Branch_ptr pActive1, Branch_ptr pActive2, const rangeset & fcsites)
+{
+    // Create the parent of the two branches.
+    Branch_ptr pParent = Coalesce(pActive1, pActive2, eventT, fcsites);
+    return pParent;
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Tree::CoalesceInactive(double eventT, Branch_ptr pActive, Branch_ptr pInactive, const rangeset & fcsites)
+{
+    // Create the parent of the two branches.
+    bool newbranchisinactive(true);
+    Branch_ptr pParent = Branch_ptr(new CBranch(pInactive->GetRangePtr(), pActive->GetRangePtr(), newbranchisinactive, fcsites));
+    pParent->m_eventTime  = eventT;
+    pParent->CopyPartitionsFrom(pActive);
+
+    // Hook children to parent.
+    pParent->SetChild(0, pInactive);
+    pParent->SetChild(1, pActive);
+
+    // Hook pInactive's old parent up to new parent.
+    pInactive->Parent(0)->ReplaceChild(pInactive, pParent);
+    pInactive->SetParent(0, pParent);
+    if (pInactive->Parent(1))
+    {
+        pInactive->Parent(1)->ReplaceChild(pInactive, pParent);
+        pInactive->SetParent(1, Branch::NONBRANCH);
+    }
+
+    pActive->SetParent(0, pParent);
+
+    pParent->SetDLCells(CollectCells());
+    m_timeList.SetUpdateDLs(pParent);   // Set the update data likelihood flag.
+
+    m_timeList.Collate(pParent);
+    return pParent;
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Tree::Migrate(double eventT, long pop, long maxEvents, Branch_ptr pActive)
+{
+    if (m_timeList.HowMany(btypeMig) >= maxEvents)
+    {
+        mig_overrun e;
+        throw e;
+    }
+
+    force_type force;
+    if (registry.GetForceSummary().CheckForce(force_MIG))
+    {
+        force = force_MIG;
+    }
+    else
+    {
+        force = force_DIVMIG;
+    }
+
+    Branch_ptr pParent;
+    if (force == force_MIG)
+    {
+        pParent = Branch_ptr(new MBranch(pActive->GetRangePtr()));
+    }
+    else
+    {
+        pParent = Branch_ptr(new DivMigBranch(pActive->GetRangePtr()));
+    }
+
+    pParent->m_eventTime  = eventT;
+    pParent->CopyPartitionsFrom(pActive);
+    pParent->SetPartition(force, pop);
+
+    pParent->SetChild(0, pActive);
+    pActive->SetParent(0, pParent);
+
+    m_timeList.Collate(pParent);
+    return pParent;
+}
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Tree::DiseaseMutate(double eventT, long endstatus, long maxevents, Branch_ptr pActive)
+{
+    if (m_timeList.HowMany(btypeDisease) >= maxevents)
+    {
+        dis_overrun e;
+        throw e;
+    }
+
+    Branch_ptr pParent = Branch_ptr(new DBranch(pActive->GetRangePtr()));
+    pParent->m_eventTime  = eventT;
+    pParent->CopyPartitionsFrom(pActive);
+    pParent->SetPartition(force_DISEASE, endstatus);
+
+    pParent->SetChild(0, pActive);
+    pActive->SetParent(0, pParent);
+
+    m_timeList.Collate(pParent);
+
+    return pParent;
+
+} // DiseaseMutate
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Tree::TransitionEpoch(double eventT, long newpop, long maxevents, Branch_ptr pActive)
+{
+    if (m_timeList.HowMany(btypeEpoch) >= maxevents)
+    {
+        epoch_overrun e;
+        throw e;
+    }
+
+    Branch_ptr pParent = Branch_ptr(new EBranch(pActive->GetRangePtr()));
+    pParent->m_eventTime = eventT;
+    pParent->CopyPartitionsFrom(pActive);
+    pParent->SetPartition(force_DIVMIG, newpop);
+
+    pParent->SetChild(0, pActive);
+    pActive->SetParent(0, pParent);
+
+    m_timeList.Collate(pParent);
+    return pParent;
+
+} // TransitionEpoch
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> Tree::FindBranchesStartingOnOpenInterval(
+    double starttime, double endtime)
+{
+    vector<Branch_ptr> branches;
+    Branchiter br;
+    for(br = m_timeList.BeginBranch(); br != m_timeList.EndBranch(); ++br)
+    {
+        if ((*br)->m_eventTime <= starttime) continue;
+        if ((*br)->m_eventTime >= endtime) break;
+        branches.push_back(*br);
+    }
+
+    return branches;
+
+} // FindBranchesStartingOnOpenInterval
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> Tree::FindEpochBranchesAt(double time)
+{
+    vector<Branch_ptr> branches;
+    Branchiter br;
+    for(br = m_timeList.BeginBranch(); br != m_timeList.EndBranch(); ++br)
+    {
+        if ((*br)->m_eventTime == time && (*br)->Event() == btypeEpoch)
+            branches.push_back(*br);
+    }
+
+    return branches;
+
+} // FindEpochBranchesAt
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> Tree::FindBranchesStartingRootwardOf(double time)
+{
+    vector<Branch_ptr> branches;
+    Branchiter br;
+    for(br = m_timeList.BeginBranch(); br != m_timeList.EndBranch(); ++br)
+    {
+        if ((*br)->m_eventTime > time)
+            branches.push_back(*br);
+    }
+
+    return branches;
+
+} // FindBranchesStartingRootwardOf
+
+//------------------------------------------------------------------------------------
+
+void Tree::SwapSiteDLs()
+{
+    Individual ind;
+
+    // Pick an individual with phase-unknown sites at random.
+    do {
+        ind  = m_individuals[m_randomSource->Long(m_individuals.size())];
+    } while (!ind.AnyPhaseUnknownSites());
+
+    // Pick a phase marker at random.
+    pair<long, long> locusmarker = ind.PickRandomPhaseMarker(*m_randomSource);
+    long locus = locusmarker.first;
+    long posn = locusmarker.second;
+
+    // Pick two tips in the individual at random.
+    vector<Branch_ptr> haps = ind.GetAllTips();
+    long size = haps.size();
+    long rnd1 = m_randomSource->Long(size);
+    long rnd2 = m_randomSource->Long(size - 1);
+    if (rnd1 <= rnd2)
+        rnd2++;
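+
+    // The rnd1/rnd2 adjustment above picks two distinct tips uniformly at random
+    // (a sketch of the standard trick, assuming Long(n) returns a uniform value in
+    // [0, n)): rnd2 is drawn from the size-1 remaining slots and bumped past rnd1
+    // whenever the two indices would otherwise collide.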
+
+    Branch_ptr pTip1   = haps[rnd1];
+    Branch_ptr pTip2   = haps[rnd2];
+
+    // NB: Swap first DLCell only!
+
+    assert(pTip1->GetNcells(0) == pTip2->GetNcells(0));
+
+    // Get the data likelihood arrays.
+    Cell_ptr dlcell1 = pTip1->GetDLCell(locus, markerCell, false);
+    Cell_ptr dlcell2 = pTip2->GetDLCell(locus, markerCell, false);
+
+    // Swap the cells at the marker position.
+    dlcell1->SwapDLs(dlcell2, posn);
+
+    // Take care of needed data-likelihood corrections.
+    const Locus & loc = (*m_pLocusVec)[locus];
+    m_aliases[locus] = loc.GetDLCalc()->RecalculateAliases(*this, loc);
+
+    MarkForDLRecalc(pTip1);
+    MarkForDLRecalc(pTip2);
+
+} // Tree::SwapSiteDLs
+
+//------------------------------------------------------------------------------------
+// PickNewSiteDLs is used when haplotype arranging with non-50/50 haplotype probabilities;
+// otherwise, SwapSiteDLs is used.
+
+void Tree::PickNewSiteDLs()
+{
+    long ind;
+
+    // Pick an individual with phase-unknown sites at random.
+    do {
+        ind  = m_randomSource->Long(m_individuals.size());
+    } while (!m_individuals[ind].MultipleTraitHaplotypes());
+
+    // Pick a locus/marker at random.
+    pair<string, long> locusmarker = m_individuals[ind].PickRandomHaplotypeMarker();
+    string lname = locusmarker.first;
+    long marker  = locusmarker.second;
+
+    // Have the individual in question pick up a new set of haplotypes.
+    m_individuals[ind].ChooseNewHaplotypesFor(lname, marker);
+
+    // Finish this routine in a subroutine, because chances are good that the locus in question
+    // is a member of m_pMovingLocusVec, which we only have if we're a RecTree.
+
+    ReassignDLsFor(lname, marker, ind);
+    return;
+
+} // PickNewSiteDLs
+
+//------------------------------------------------------------------------------------
+
+void Tree::ReassignDLsFor(string lname, long marker, long ind)
+{
+    // Find out which locus we're dealing with.  We have its name.  If the locus in question
+    // is in m_pMovingLocusVec, we've already dealt with it in RecTree::ReassignDLsFor.
+    long locus = FLAGLONG;
+    for (unsigned long lnum = 0; lnum < m_pLocusVec->size(); lnum++)
+    {
+        if ((*m_pLocusVec)[lnum].GetName() == lname)
+        {
+            locus = lnum;
+        }
+    }
+    if (locus == FLAGLONG)
+    {
+        throw implementation_error("We have the segment name " + lname
+                                   + ", but no segment matches it.  Did something happen"
+                                   " to this tree?  To the segment in question?");
+    }
+
+    vector<Branch_ptr> haps = m_individuals[ind].GetAllTips();
+    vector<LocusCell> cells = m_individuals[ind].GetLocusCellsFor(lname, marker);
+
+    for (unsigned long tip = 0; tip < haps.size(); tip++)
+    {
+        Cell_ptr origcell = haps[tip]->GetDLCell(locus, markerCell, false);
+        Cell_ptr newcell  = cells[tip][0];
+        origcell->SetSiteDLs(marker, newcell->GetSiteDLs(marker));
+        MarkForDLRecalc(haps[tip]);
+    }
+
+    // take care of needed data-likelihood corrections
+    const Locus & loc = (*m_pLocusVec)[locus];
+    m_aliases[locus] = loc.GetDLCalc()->RecalculateAliases(*this, loc);
+
+} // Tree::ReassignDLsFor
+
+//------------------------------------------------------------------------------------
+
+void Tree::MarkForDLRecalc(Branch_ptr markbr)
+{
+    markbr->SetUpdateDL();
+    markbr->MarkParentsForDLCalc();
+
+} // Tree::MarkForDLRecalc
+
+//------------------------------------------------------------------------------------
+// This version is used to limit a multi-locus case to just the locus under consideration.
+
+rangevector Tree::GetLocusSubtrees(rangepair span) const
+{
+    rangevector subtrees;
+    subtrees.push_back(span);
+    return subtrees;
+
+} // GetLocusSubtrees
+
+//------------------------------------------------------------------------------------
+
+vector<Branch_ptr> Tree::GetTips(StringVec1d & tipnames) const
+{
+    vector<Branch_ptr> tips;
+    StringVec1d::iterator tname;
+    for(tname = tipnames.begin(); tname != tipnames.end(); ++tname)
+        tips.push_back(GetTip(*tname));
+
+    return tips;
+
+} // GetTips
+
+//------------------------------------------------------------------------------------
+
+Branch_ptr Tree::GetTip(const string & name) const
+{
+    return m_timeList.GetTip(name);
+} // GetTip
+
+//------------------------------------------------------------------------------------
+
+bool Tree::NoPhaseUnknownSites() const
+{
+    IndVec::const_iterator ind;
+    unsigned long locus;
+    for (locus = 0; locus < m_pLocusVec->size(); ++locus)
+    {
+        for(ind = m_individuals.begin(); ind != m_individuals.end(); ++ind)
+            if (ind->AnyPhaseUnknownSites()) return false;
+    }
+
+    return true;
+
+} // Tree::NoPhaseUnknownSites
+
+//------------------------------------------------------------------------------------
+
+long Tree::GetNsites() const
+{
+    long nsites(0L);
+    vector<Locus>::iterator locus;
+    for(locus = m_pLocusVec->begin() ; locus != m_pLocusVec->end(); ++locus)
+    {
+        nsites += locus->GetNsites();
+    }
+
+    return nsites;
+
+} // Tree::GetNsites
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+bool Tree::IsValidTree() const
+{
+    // NB:  This can only be called on a completed tree, not one in mid-rearrangement.
+    // Each Branch validates itself.
+    Branchconstiter brit = m_timeList.BeginBranch();
+    long ncuttable = 0;
+
+    for ( ; brit != m_timeList.EndBranch(); ++brit)
+    {
+        //LS TEST
+        //(*brit)->PrintInfo();
+        if (!(*brit)->CheckInvariant())
+        {
+            return false;
+        }
+        ncuttable += (*brit)->Cuttable();
+    }
+
+    if (ncuttable != m_timeList.GetNCuttable())
+    {
+        return false;
+    }
+
+    return m_timeList.IsValidTimeList();
+
+    //  if (FindContradictParts()) return false;
+    //  return true;
+
+} // IsValidTree
+
+//------------------------------------------------------------------------------------
+
+bool Tree::ConsistentWithParameters(const ForceParameters& fp) const
+{
+    Branchconstiter brit;
+    vector<double> epochtimes = fp.GetEpochTimes();
+
+    for (brit=m_timeList.FirstBody(); brit != m_timeList.EndBranch();
+         brit = m_timeList.NextBody(brit))
+    {
+        if ((*brit)->Event() == btypeEpoch)
+        {
+            if (find(epochtimes.begin(), epochtimes.end(), (*brit)->m_eventTime) == epochtimes.end())
+            {
+                return false; // tree inconsistent with parameters
+            }
+        }
+    }
+
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+bool Tree::operator==(const Tree & src) const
+{
+    if (m_pLocusVec->size() != src.m_pLocusVec->size()) return false;
+
+    unsigned long i;
+
+    for (i = 0; i < m_pLocusVec->size(); ++i)
+    {
+        if ((*m_pLocusVec)[i].GetNsites() != (*src.m_pLocusVec)[i].GetNsites()) return false;
+    }
+
+    if (m_overallDL != src.m_overallDL) return false;
+    if (m_timeList != src.m_timeList) return false;
+
+    // WARNING I don't guarantee these are the same tree, especially if
+    // we are in mid-rearrangement, but they are at least very similar. --Mary
+    return true;
+
+} // operator==
+
+//------------------------------------------------------------------------------------
+
+bool Tree::SimulateDataIfNeeded()
+{
+    bool simulated = false;
+    for (unsigned long loc = 0; loc < m_pLocusVec->size(); loc++)
+    {
+        Locus & locus = (*m_pLocusVec)[loc];
+        if (locus.GetShouldSimulate())
+        {
+            simulated = true;
+            locus.SimulateData(*this, m_totalSites);
+        }
+    }
+
+    // Redo the aliases.
+    SetupAliases(*m_pLocusVec);
+
+    // Note that we have to calculate all DLCells.
+    m_timeList.SetAllUpdateDLs();
+
+    return simulated;
+}
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Tree::DLCheck(const Tree & other) const
+{
+    cerr << m_timeList.DLCheck(other.GetTimeList()) << endl;
+} // DLCheck
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Tree::PrintStickThetasToFile(ofstream & of) const
+{
+    m_timeManager->PrintStickThetasToFile(of);
+} // PrintStickThetasToFile
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Tree::PrintStickFreqsToFile(ofstream & of) const
+{
+    m_timeManager->PrintStickFreqsToFile(of);
+} // PrintStickFreqsToFile
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Tree::PrintStickFreqsToFileAtTime(ofstream & of, double time) const
+{
+    m_timeManager->PrintStickFreqsToFileAtTime(of, time);
+} // PrintStickFreqsToFileAtTime
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Tree::PrintStickThetasToFileForJoint300(ofstream & of) const
+{
+    m_timeManager->PrintStickThetasToFileForJoint300(of);
+} // PrintStickThetasToFileForJoint300
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Tree::PrintStickToFile(ofstream & of) const
+{
+    m_timeManager->PrintStickToFile(of);
+} // PrintStickToFile
+
+//------------------------------------------------------------------------------------
+
+void Tree::PrintDirectionalMutationEventCountsToFile(ofstream & of) const
+{
+    m_timeList.PrintDirectionalMutationEventCountsToFile(of);
+} // PrintDirectionalMutationEventCountsToFile
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Tree::PrintTimeTilFirstEventToFile(ofstream & of) const
+{
+    m_timeList.PrintTimeTilFirstEventToFile(of);
+} // PrintTimeTilFirstEventToFile
+
+//------------------------------------------------------------------------------------
+// Debugging function.
+
+void Tree::PrintTraitPhenotypeAtLastCoalescence(ofstream & of) const
+{
+    m_timeList.PrintTraitPhenotypeAtLastCoalescence(of);
+} // PrintTraitPhenotypeAtLastCoalescence
+
+//------------------------------------------------------------------------------------
+
+void Tree::DestroyStick()
+{
+    m_timeManager->ClearStick();
+} // DestroyStick
+
+//------------------------------------------------------------------------------------
+
+void Tree::SetStickParams(const ForceParameters & fp)
+{
+    m_timeManager->SetStickParameters(fp);
+} // SetStickParams
+
+//------------------------------------------------------------------------------------
+
+bool Tree::UsingStick() const
+{
+    return m_timeManager->UsingStick();
+} // UsingStick
+
+//------------------------------------------------------------------------------------
+
+void Tree::ScoreStick(TreeSummary & treesum) const
+{
+    m_timeManager->ScoreStick(treesum);
+} // ScoreStick
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d Tree::XpartThetasAtT(double time, const ForceParameters & fp) const
+{
+    return m_timeManager->XpartThetasAtT(time, fp);
+} // XpartThetasAtT
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d Tree::PartitionThetasAtT(double time, const force_type force, const ForceParameters & fp) const
+{
+    return m_timeManager->PartitionThetasAtT(time, force, fp);
+} // PartitionThetasAtT
+
+//------------------------------------------------------------------------------------
+
+void Tree::SetNewTimesFrom(Branchiter startpoint, const DoubleVec1d & newtimes)
+{
+    assert(IsValidTree());
+    assert(!newtimes.empty());
+
+    bool updatefirstinvalid(startpoint == m_firstInvalid);
+
+    // First retime and remove all the branches from startpoint on...
+    vector<Branch_ptr> newbranches;
+    Branchiter branch(startpoint);
+    DoubleVec1d::const_iterator newtime(newtimes.begin());
+
+    for( ; newtime != newtimes.end(); ++newtime)
+    {
+        assert(branch != m_timeList.EndBranch());
+        // No special handling for recombinations needed; assumed handled in calling code that sets up "newtimes".
+        (*branch)->m_eventTime = *newtime;
+        Branch_ptr newbr(*branch);
+        branch = m_timeList.NextBody(branch);
+        m_timeList.Remove(newbr);
+        newbranches.push_back(newbr);
+    }
+
+    // Now put them back in, setting datalikelihood update flags as needed.
+    vector<Branch_ptr>::iterator newbranch;
+    for(newbranch = newbranches.begin(); newbranch != newbranches.end(); ++newbranch)
+    {
+        Branchiter brit(m_timeList.Collate(*newbranch));
+        if (newbranch == newbranches.begin() && updatefirstinvalid)
+        {
+            m_firstInvalid = brit;
+        }
+        m_timeList.SetUpdateDLs(*newbranch);
+    }
+
+    assert(IsValidTree());
+
+} // SetNewTimesFrom
+
+//------------------------------------------------------------------------------------
+// Method to, if necessary, shrink interval lengths, usually at the root, in an attempt to prevent explosions in the
+// next maximization.  See the extensive comment in ChainManager::DoChain(), which is where this method is called.
+//
+// TEMPERATURE arg used only for debugging.
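+//
+// In outline (a sketch of what the code below does): walk the timelist from the tips
+// toward the root, tracking per-population lineage counts with a BranchBuffer; for each
+// interval, compare its length against the smallest expected length under the proposed
+// thetas and growths; shrink any interval that is at least 100 times its expectation,
+// and shift all later event times earlier by the accumulated amount.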
+
+bool Tree::GroomForGrowth(const DoubleVec1d & thetas, const DoubleVec1d & growths, double temperature)
+{
+    if (thetas.empty() || growths.empty() || thetas.size() != growths.size())
+        throw implementation_error("Tree::GroomForGrowth() received an invalid theta and/or growth vector");
+
+    bool treeWasModified = false;
+    double starttime(0.0), cumulativeShift(0.0), prevIntervalLength(DBL_BIG);
+    Branchiter brit;
+    BranchBuffer lineages(registry.GetDataPack());
+    for (brit = m_timeList.FirstTip(); brit != m_timeList.EndBranch(); brit = m_timeList.NextTip(brit))
+        lineages.UpdateBranchCounts((*brit)->m_partitions, true);
+
+    // Iterate through the timelist, looking for fatal intervals.
+    for (brit = m_timeList.FirstBody(); brit != m_timeList.EndBranch(); brit = m_timeList.NextBody(brit))
+    {
+        Branch_ptr pBranch = *brit;
+        unsigned long nchildren(0UL), worst_pop(0UL), npops = thetas.size();
+        double smallestExpectedLength(DBL_BIG); // Smallest E[dt] among the pops.
+        double expectedLength;
+        LongVec1d xpartitions = lineages.GetBranchXParts();
+        string msg;
+
+        // Once we shrink an interval, we need to shift all intervals below it by this amount.
+        // The amount accumulates.
+        if (cumulativeShift > 0.0)
+        {
+            pBranch->m_eventTime -= cumulativeShift;
+            pBranch->SetUpdateDL();
+        }
+
+        // Determine how many child branches we will need to update.
+        switch(pBranch->Event())
+        {
+            case btypeCoal:
+                nchildren = static_cast<unsigned long>(NELEM);
+                break;
+            case btypeMig:
+            case btypeDivMig:
+            case btypeDisease:
+            case btypeEpoch:
+                nchildren = 1UL;
+                break;
+            case btypeRec:
+                nchildren = 0UL;        // Not literally true; this is a "magic value".
+                break;
+            case btypeBase:
+            case btypeTip:
+            {
+                // MDEBUG We assume that we won't find a Tip; this will need to be fixed when serial sampling
+                // is added.  Unknown event type; we don't know how many children or parents this event type has.
+                assert(false);
+                string msg = " Unrecognized branch type (\""
+                    +ToString(pBranch->Event())
+                    + "\") received by Tree::Groom().";
+                throw implementation_error(msg);
+                break;
+            }
+        }
+        double intervalLength = pBranch->m_eventTime - starttime;
+        if (intervalLength <= 0.0)
+        {
+            if (0.0 == intervalLength && btypeRec == pBranch->Event() &&
+                prevIntervalLength > 0.0)
+            {
+                // This is perfectly okay--rec branches come in pairs w/equal timestamps.
+                lineages.UpdateBranchCounts(pBranch->m_partitions, true);
+                // Recombination:  The opposite of a coalescence--it has one child and two parents.
+                // Each parent branch comes through separately and gets updated.  We need to avoid
+                // updating their shared child twice, so we choose to update the child when the
+                // "left" parent comes through.  (nchildren was set to the "magic value" of 0.)
+                if (pBranch->Child(0)->Parent(0) == pBranch)
+                {
+                    lineages.UpdateBranchCounts(pBranch->Child(0)->m_partitions, false);
+                }
+                starttime = pBranch->m_eventTime;
+                prevIntervalLength = intervalLength;
+                continue;
+            }
+
+            if (0.0 == intervalLength && btypeEpoch == pBranch->Event())
+            {
+                lineages.UpdateBranchCounts(pBranch->m_partitions, true);
+                lineages.UpdateBranchCounts(pBranch->Child(0)->m_partitions, false);
+                starttime = pBranch->m_eventTime;
+                prevIntervalLength = intervalLength;
+                continue;
+            }
+
+            string msg = "Tree::GroomForGrowth(), encountered a time interval length of ";
+            msg += ToString(intervalLength) + ", followed by an event of type ";
+            msg += ToString(pBranch->Event()) + ".";
+
+            throw impossible_error(msg);
+        }
+
+        // Calculate and store E[dt] for each population.  Calculate E[dt] for coalescence + growth,
+        // regardless of which event occurs at the bottom of the interval, because coalescence
+        // + growth has the most extreme/sensitive values for lnWait() and DlnWait().
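+        // In symbols, the loop below computes (a sketch of what the code does):
+        //     E[dt] = (1/g) * ExpE1( (k*(k-1) / (g*theta)) * exp(g*t0) )
+        // for each population with k > 1 lineages and growth g > 0, where t0 is the
+        // interval's start time; SafeProductWithExp() presumably forms the
+        // product-with-exponential argument without overflowing.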
+        for (unsigned long pop = 0; pop < npops; pop++)
+        {
+            const unsigned long k = xpartitions[pop];
+            if (k > 1 && growths[pop] > 0.0)
+            {
+                expectedLength =
+                    (1.0 / growths[pop])
+                    *
+                    ExpE1(SafeProductWithExp(k * (k - 1) / (growths[pop] * thetas[pop]), growths[pop] * starttime));
+            }
+            else
+            {
+                continue;
+            }
+
+            if (expectedLength < smallestExpectedLength)
+            {
+                smallestExpectedLength = expectedLength;
+                worst_pop = pop;        // For debugging message only.
+            }
+        }
+
+        // Note:  Calling an interval fatal if it's 100 times its new expectation value is handwaving. So far,
+        // 100 seems to be OK, but this value might need to be reduced to 20 or 10 if growth is very large.
+        if (smallestExpectedLength < DBL_BIG && intervalLength >= 100.0 * smallestExpectedLength)
+        {
+            msg = "\nTemperature " + ToString(temperature) + ", population "
+                + ToString(worst_pop) + ", k = " + ToString(xpartitions[worst_pop])
+                + ", interval length is " + ToString(intervalLength)
+                + ", expected value is " + ToString(smallestExpectedLength)
+                + ".  Interval ends with a " + ToString(pBranch->Event())
+                + " event.  Time stamps are " + ToString(starttime) + " and "
+                + ToString(pBranch->m_eventTime) + "; theta = " + ToString(thetas[worst_pop])
+                + " and g = " + ToString(growths[worst_pop]) + ".  ";
+
+            // Shrink this overlong interval to a value that is between 1/2 and 3/2 times the
+            // expectation value under the new parameter values for the "worst" population.
+            double factor = 0.5 + m_randomSource->Float();
+            double newIntervalLength = factor * smallestExpectedLength;
+
+            if (newIntervalLength < starttime * numeric_limits<double>::epsilon())
+            {
+                msg += "Tried to shrink this interval to a length of "
+                    + ToString(newIntervalLength) + ", but this is smaller than the "
+                    + "minimum length of "
+                    + ToString(starttime * numeric_limits<double>::epsilon())
+                    + " that can be added to " + ToString(starttime)
+                    + " without being lost to rounding error.  Giving up and "
+                    + "copying the cold tree into the tree for temperature "
+                    + ToString(temperature) + "....\n";
+                registry.GetRunReport().ReportDebug(msg);
+                return false;
+            }
+
+            pBranch->m_eventTime = starttime + newIntervalLength;
+            pBranch->SetUpdateDL();
+            treeWasModified = true;
+
+            msg += "Shrinking this interval to a length of "
+                + ToString(newIntervalLength) + ", with a new ending time "
+                + "stamp of " + ToString(pBranch->m_eventTime) + ".\n";
+            registry.GetRunReport().ReportDebug(msg);
+
+            // Once we shrink an interval, we need to shift all intervals below it to reflect this.
+            cumulativeShift += intervalLength - newIntervalLength;
+
+        }
+
+        // Prepare for next loop iteration (next branch in the timelist).
+        for (unsigned long i = 0; i < nchildren; i++)
+            lineages.UpdateBranchCounts(pBranch->Child(i)->m_partitions, false);
+        lineages.UpdateBranchCounts(pBranch->m_partitions, true);
+
+        // Recombination:  The opposite of a coalescence--it has one child and two parents.
+        // Each parent branch comes through separately and gets updated.  We need to avoid updating
+        // their shared child twice, so we choose to update the child when the "left" parent comes
+        // through.  (nchildren was set to the "magic value" of 0.)
+        if (btypeRec == pBranch->Event() && pBranch->Child(0)->Parent(0) == pBranch)
+        {
+            lineages.UpdateBranchCounts(pBranch->Child(0)->m_partitions, false);
+        }
+
+        starttime = pBranch->m_eventTime;
+        prevIntervalLength = intervalLength; // Used to detect successive zero-lengths, if any.
+    }
+
+    if (treeWasModified)
+        CalculateDataLikes();
+
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+// Method to, if necessary, shrink interval lengths, usually at the root, in an attempt to prevent
+// explosions in the next maximization. See the extensive comment in ChainManager::DoChain(),
+// which is where this method is called.
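+//
+// In outline (a sketch of what the code below does): compute the largest event time
+// compatible with EXPMAX, find the first event beyond that bound, then rescale every
+// interval from there rootward by a single shrinkage factor so the final event lands
+// at the bound, shifting the later events accordingly.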
+
+bool Tree::GroomForLogisticSelection(const DoubleVec1d & thetas,
+                                     double s,           // logistic selection coefficient
+                                     double temperature) // temperature used only for debugging
+
+{
+    if (0.0 == s)
+        return true;
+    if (2 != thetas.size() || 0.0 == thetas[0] || 0.0 == thetas[1])
+    {
+        string msg = "Tree::GroomForLogisticSelection(), received an invalid Theta vector.";
+        throw implementation_error(msg);
+    }
+
+    bool treeWasModified = false;
+    double starttime(0.0), cumulativeShift(0.0), shrinkageFactor(1.0);
+    double max_allowed_starttime(DBL_BIG);
+    Branchiter brit, firstBadInterval, lastBadInterval;
+    string msg;
+
+    if (s > 0.0)
+        max_allowed_starttime = (EXPMAX - log(thetas[1])) / s;
+    else
+        max_allowed_starttime = (EXPMAX - log(thetas[0])) / (-s);
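+
+    // This is the largest event time t for which |s| * t + log(theta) stays at or below
+    // EXPMAX (theta being thetas[1] for s > 0, thetas[0] otherwise) -- presumably the
+    // latest time at which the selection-scaled Theta, theta * exp(|s| * t), can still be
+    // evaluated without overflowing exp().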
+
+    firstBadInterval = lastBadInterval = m_timeList.EndBranch();
+
+    // Iterate through the timelist, looking for fatal intervals.
+    for (brit = m_timeList.FirstBody(); brit != m_timeList.EndBranch();
+         brit = m_timeList.NextBody(brit))
+    {
+        if ((*brit)->m_eventTime >= max_allowed_starttime)
+        {
+            Branchiter brit2 = brit;
+            firstBadInterval = brit;
+            while (brit2 != m_timeList.EndBranch())
+            {
+                lastBadInterval = brit2;
+                brit2 = m_timeList.NextBody(brit2);
+            }
+            break;
+        }
+        starttime = (*brit)->m_eventTime;
+    }
+
+    if (firstBadInterval == m_timeList.EndBranch())
+        return true;                    // nothing to shrink
+
+    if ((*lastBadInterval)->m_eventTime - starttime <= 0.0)
+    {
+        msg += "Tree::GroomForLogisticSelection(), encountered an interval ";
+        msg += "(or sum of intervals) of length ";
+        msg += ToString((*lastBadInterval)->m_eventTime - starttime) + ".";
+        throw implementation_error(msg);
+    }
+
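+    // Compress the fatal region linearly: scaling every interval from the first bad one
+    // down to the root by this factor makes the final event time land exactly at
+    // max_allowed_starttime.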
+    shrinkageFactor = (max_allowed_starttime - starttime) /
+        ((*lastBadInterval)->m_eventTime - starttime);
+
+    if (shrinkageFactor <= 0.0)
+    {
+        msg += "Tree::GroomForLogisticSelection(), computed an invalid shrinkage ";
+        msg += "factor of " + ToString(shrinkageFactor) + ".";
+        throw implementation_error(msg);
+    }
+
+    // If there are any fatal intervals, iterate through these, and shrink them.
+    for (brit = firstBadInterval; brit != m_timeList.EndBranch();
+         brit = m_timeList.NextBody(brit))
+    {
+        Branch_ptr pBranch = *brit;
+
+        // Once we shrink an interval, we need to shift all intervals below it by this amount.
+        // The amount accumulates.
+        if (cumulativeShift > 0.0)
+        {
+            pBranch->m_eventTime -= cumulativeShift;
+            pBranch->SetUpdateDL();
+        }
+
+        double intervalLength = pBranch->m_eventTime - starttime;
+        pBranch->m_eventTime = starttime + intervalLength * shrinkageFactor;
+        pBranch->SetUpdateDL();
+        treeWasModified = true;
+
+        // Once we shrink an interval, we need to shift all intervals below it to reflect this.
+        cumulativeShift += intervalLength - intervalLength * shrinkageFactor;
+
+        starttime = pBranch->m_eventTime;
+    }
+
+    if (treeWasModified)
+        CalculateDataLikes();
+
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+long Tree::CountNodesBetween(double toptime, double bottomtime) const
+{
+    long nodecount(0);
+    Branchconstiter br;
+
+    for (br = m_timeList.FirstBody(); br != m_timeList.EndBranch();
+         br = m_timeList.NextBody(br))
+    {
+        // Walk down the timelist to just past toptime.
+        if ((*br)->m_eventTime <= toptime) continue;
+        // Keep walking and counting nodes down to bottomtime; don't count that one.
+        if ((*br)->m_eventTime >= bottomtime) break;
+        ++nodecount;
+    }
+    return nodecount;
+} // CountNodesBetween
+
+//------------------------------------------------------------------------------------
+
+Tree * PlainTree::Clone() const
+{
+    return new PlainTree(*this, false);
+} // PlainTree::Clone
+
+//------------------------------------------------------------------------------------
+
+Tree * PlainTree::MakeStump() const
+{
+    return new PlainTree(*this, true);
+} // PlainTree stump maker
+
+//------------------------------------------------------------------------------------
+
+void PlainTree::CalculateDataLikes()
+{
+    m_overallDL = CalculateDataLikesForFixedLoci();
+    m_timeList.ClearUpdateDLs();        // Reset the updating flags.
+}
+
+//------------------------------------------------------------------------------------
+
+void PlainTree::Break(Branch_ptr pBranch)
+{
+    Branch_ptr pParent = pBranch->Parent(0);
+
+    if (pParent->CanRemove(pBranch))
+    {
+        Break(pParent);                 // Recursion
+        m_timeList.Remove(pParent);
+
+        pParent = pBranch->Parent(1);
+        if (pParent)
+        {
+            Break(pParent);             // Recursion
+            m_timeList.Remove(pParent);
+        }
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/tree/tree.h b/src/tree/tree.h
new file mode 100644
index 0000000..6b4c13a
--- /dev/null
+++ b/src/tree/tree.h
@@ -0,0 +1,362 @@
+// $Id: tree.h,v 1.83 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+/*******************************************************************
+ Class Tree represents a genealogy (not necessarily a "tree" in the recombinant cases).  It has two subtypes,
+ a PlainTree (no recombination) and a RecTree (with recombination) because the recombination machinery is too
+ expensive to carry if you don't need it.
+
+ Normally all Trees used during rearrangement come from either copying an existing tree
+ or copying the prototype tree in Registry.
+
+ DEBUG:  This class is a monster.  Anything to make it simpler would be good.
+
+ Written by Jim Sloan, heavily revised by Mary Kuhner
+
+ 02/08/05 Pulled out locus specific stuff into LocusLike class -- Mary
+ 2004/9/15 Killed the LocusLike class, merging into Locus
+ 2005/3/1 Moved alias info, reluctantly, from DLCalc to Tree (because
+    DLCalc is a shared object and the alias is not shareable!) -- Mary
+ 2006/02/13 Begun to add jointed stick
+*******************************************************************/
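+
+/*
+  A minimal usage sketch (not code from this project; "prototype" is a placeholder for
+  whatever Tree* the caller already holds, e.g. the prototype tree kept in the Registry):
+
+      Tree * working = prototype->Clone();      // deep copy used during rearrangement
+      Tree * stump   = prototype->MakeStump();  // copies tips only, discarding the body
+
+  Clone() and MakeStump() are declared below; both are implemented by the subclasses.
+*/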
+
+#ifndef TREE_H
+#define TREE_H
+
+#include <cmath>
+#include <string>
+#include <vector>
+
+#include "constants.h"
+#include "definitions.h"
+#include "individual.h"                 // for IndVec member
+#include "locus.h"                      // for DataModel
+#include "range.h"
+#include "rangex.h"
+#include "timelist.h"                   // for TimeList member
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+// typedef also used by Event and Arranger
+typedef std::pair<Branch_ptr, Branch_ptr> branchpair;
+
+class TreeSummary;                      // return type of SummarizeTree()
+class TipData;
+class Random;
+class ForceParameters;
+class TimeManager;
+
+//------------------------------------------------------------------------------------
+
+class Tree
+{
+  private:
+    Tree(const Tree &);                 // undefined
+    Tree & operator=(const Tree &);     // undefined
+
+    vector<LocusCell> m_protoCells;     // cache of locuscells for all loci
+
+    vector<Branch_ptr> FindAllBranchesAtTime(double eventT);
+
+  protected:
+    Tree();
+    Tree(const Tree & tree, bool makestump);
+    IndVec          m_individuals;        // associates tips from same individual
+    double          m_overallDL;          // data log-likelihood
+    LongVec2d       m_aliases;            // dim: loci x markers
+    Random *        m_randomSource;       // non-owning pointer
+    TimeList        m_timeList;           // all lineages
+    vector<Locus> * m_pLocusVec;          // not owning.  Also, not const because of simulation
+    long int        m_totalSites;         // span of entire region, including gaps
+    TimeManager *   m_timeManager;        // our timemanager....
+    bool            m_hasSnpPanel;        // the region that created this tree includes SNP panel data
+
+    // A "timelist" iterator to the branch after the cut during rearrangement. Set by ActivateBranch().
+    // Used mid-rearrangement so that CopyPartialBody() and RecTree::Prune() can run more quickly.
+    // This member is not copied!
+    Branchiter    m_firstInvalid;
+
+    // Protected rearrangement primitives.
+    virtual void    Break(Branch_ptr pBranch) = 0;
+    void    SetFirstInvalidFrom(Branchconstiter & target);
+    void    SetFirstInvalid() { m_firstInvalid = m_timeList.BeginBranch(); };
+    void    MarkForDLRecalc(Branch_ptr br);
+
+    // Protected Branch-management primitives.
+    const vector<LocusCell> & CollectCells(); // Get locuscells for all loci.
+    vector<Branch_ptr>  GetTips(StringVec1d & names)  const;
+
+  public:
+    // Creation and destruction.
+    virtual           ~Tree();
+    virtual Tree     *Clone()                 const = 0;
+    virtual Tree     *MakeStump()             const = 0;
+    virtual void      Clear();
+    virtual void      CopyTips(const Tree * tree);
+    virtual void      CopyBody(const Tree * tree);
+    virtual void      CopyStick(const Tree * tree);
+    virtual void      CopyPartialBody(const Tree * tree);
+    virtual TBranch_ptr CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                                  const vector<LocusCell> & movingcells, const rangeset & diseasesites);
+    virtual TBranch_ptr CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                                  const vector<LocusCell> & movingcells, const rangeset & diseasesites,
+                                  const vector<Locus> & loci);
+    void      SetTreeTimeManager(TimeManager * tm);
+
+    // Getters.
+    TimeList & GetTimeList()                 { return m_timeList; };
+    const TimeList & GetTimeList()           const { return m_timeList; };
+    virtual rangevector GetLocusSubtrees(rangepair span) const;
+    Branch_ptr     GetTip(const std::string & name) const;
+    TimeManager * GetTimeManager()           { return m_timeManager; };
+    const TimeManager * GetTimeManager()     const { return m_timeManager; };
+    double      RootTime()                  const { return m_timeList.RootTime(); };
+    bool        NoPhaseUnknownSites()       const;
+    long int    GetNsites()                 const;
+    long int    GetTotalSites()             { return m_totalSites; };
+
+    bool GroomForGrowth(const DoubleVec1d & thetas, const DoubleVec1d & growths, double temperature);
+    bool GroomForLogisticSelection(const DoubleVec1d & thetas,
+                                   double s, // logistic selection coefficient or zero
+                                   double temperature);
+    bool GetSnpPanelFlag() { return m_hasSnpPanel; };
+
+    // Setters.
+    void SetIndividualsWithTips(const vector<Individual> & indvec);
+    void SetLocusVec(vector<Locus> * loc);
+    void SetSnpPanelFlag(bool flag) { m_hasSnpPanel = flag; };
+
+    // Likelihood manipulation.
+    virtual void      CalculateDataLikes() = 0;
+    virtual double    CalculateDataLikesForFixedLoci();
+    double    GetDLValue()            const { return m_overallDL; };
+    const LongVec1d & GetAliases(long int loc) const { return m_aliases[loc]; };
+    void      SetupAliases(const std::vector<Locus> & loci);
+
+    // Rearrangement primitives.
+    virtual vector<Branch_ptr> ActivateTips(Tree * othertree);
+    virtual Branch_ptr   ActivateBranch(Tree * othertree);
+    virtual Branch_ptr   ActivateRoot(FC_Status & fcstatus);
+    long CountNodesBetween(double tipwards_time, double rootwards_time) const;
+
+    // ChoosePreferentiallyTowardsRoot does not break the tree, but does set m_firstInvalid
+    // to the interval containing the return value.  So does ChooseFirstBranchInEpoch.
+    Branch_ptr   ChoosePreferentiallyTowardsRoot(Tree * othertree);
+    Branch_ptr   ChooseFirstBranchInEpoch(double targettime, Tree * othertree);
+
+    virtual void      AttachBase(Branch_ptr newroot);
+    virtual vector<Branch_ptr> FindBranchesImmediatelyTipwardOf(Branchiter start);
+    vector<Branch_ptr> FindBranchesBetween(double startinterval, double stopinterval);
+    virtual vector<Branch_ptr> FirstInterval(double eventT);
+    virtual void      NextInterval(Branch_ptr branch );
+    virtual void      Prune();
+    void      TrimStickToRoot();
+    void      SwapSiteDLs();
+    virtual void      PickNewSiteDLs();
+    virtual void      ReassignDLsFor(std::string lname, long int marker, long int ind);
+    void      SetNewTimesFrom(Branchiter start, const DoubleVec1d & newtimes);
+    Branch_ptr TransitionEpoch(double eventT, long int newpop, long int maxevents, Branch_ptr pActive);
+    vector<Branch_ptr> FindBranchesStartingOnOpenInterval(double starttime, double endtime);
+    vector<Branch_ptr> FindEpochBranchesAt(double time);
+    vector<Branch_ptr> FindBranchesStartingRootwardOf(double time);
+
+    // Force-specific rearrangement primitives.
+    Branch_ptr   Coalesce(Branch_ptr child1, Branch_ptr child2, double tevent, const rangeset & fcsites);
+
+    virtual Branch_ptr   CoalesceActive(double eventT, Branch_ptr active1,
+                                        Branch_ptr active2, const rangeset & fcsites);
+    virtual Branch_ptr   CoalesceInactive(double eventT, Branch_ptr active,
+                                          Branch_ptr inactive, const rangeset & fcsites);
+    virtual Branch_ptr   Migrate(double eventT, long int topop, long int maxEvents, Branch_ptr active);
+    virtual Branch_ptr   DiseaseMutate(double eventT, long int endstatus, long int maxEvents, Branch_ptr active);
+
+    // TreeSummaryFactory.
+    TreeSummary * SummarizeTree() const;
+
+    // Invariant checking.
+    bool      IsValidTree()               const;  // check invariants (debugging function)
+    bool      ConsistentWithParameters(const ForceParameters& fp) const;  // debugging function
+    bool      operator==(const Tree & src) const; // compare trees
+    bool      operator!=(const Tree & src) const { return !(*this == src); };
+
+    // TimeManager call-throughs (we provide a public front for TimeManager).  CopyStick() is also in this category.
+    void DestroyStick();
+    void SetStickParams(const ForceParameters & fp);
+    bool UsingStick() const;
+    void ScoreStick(TreeSummary & treesum) const;
+    DoubleVec1d XpartThetasAtT(double time, const ForceParameters & fp) const;
+    DoubleVec1d PartitionThetasAtT(double time, force_type force, const ForceParameters & fp) const;
+
+    // TreeSizeArranger.
+    virtual void  SetCurTargetLinkweightFrom(const BranchBuffer & brbuffer) { }; // no-op
+    virtual void  ClearCurTargetLinkweight() { };                                // no-op
+
+    // Simulation.
+    virtual bool     SimulateDataIfNeeded();
+    virtual long int NumberOfRecombinations() = 0;
+
+    // Debugging functions.
+    // MakeCoalescent strips all migration nodes from the tree and forces all remaining
+    // coalescences to their expectation times.  DO NOT use with migration!
+    void MakeCoalescent(double theta) { m_timeList.MakeCoalescent(theta); };
+    //
+    // DLCheck returns an error message with the first marker at which each branch pair differs;
+    // unmentioned branches don't differ.
+    void DLCheck(const Tree & other) const;
+    //
+    // Call through debugging code.
+    void PrintStickThetasToFile(std::ofstream & of) const;
+    void PrintStickFreqsToFile(std::ofstream & of) const;
+    void PrintStickFreqsToFileAtTime(std::ofstream & of, double time) const;
+    void PrintStickThetasToFileForJoint300(std::ofstream & of) const;
+    void PrintStickToFile(std::ofstream & of) const;
+    void PrintDirectionalMutationEventCountsToFile(std::ofstream & of) const;
+    void PrintTimeTilFirstEventToFile(std::ofstream & of) const;
+    void PrintTraitPhenotypeAtLastCoalescence(std::ofstream & of) const;
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+class PlainTree : public Tree
+{
+  public:
+    // Creation and destruction.
+    PlainTree() : Tree()  {};
+    PlainTree(const Tree & tree, bool makestump) :
+        Tree(tree, makestump)       {};
+    virtual          ~PlainTree()        {};
+    virtual Tree     *Clone()                 const;
+    virtual Tree     *MakeStump()             const;
+    virtual void      CalculateDataLikes();
+    virtual void      Break(Branch_ptr pBranch);
+    virtual long int  NumberOfRecombinations() { return 0L; };
+
+    // Yes, everything else is the same as Tree.
+};
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// To keep track of final coalescences:
+typedef std::map<long int, rangeset> rangesetcount;
+typedef std::map<long int, rangeset>::iterator RSCIter;
+typedef std::map<long int, rangeset>::const_iterator RSCcIter;
+typedef std::list<std::pair<double, rangesetcount> > sitecountlist;
+
+//------------------------------------------------------------------------------------
+
+class RecTree : public Tree
+{
+  private:
+    Linkweight m_curTargetLinkweight;
+    Linkweight m_newTargetLinkweight;
+    std::set<long int> GetIntervalTreeStartSites() const;
+
+  protected:
+    vector<Locus> *    m_pMovingLocusVec; // Not owning.  But we can change 'em!
+    vector<LocusCell> m_protoMovingCells;
+
+    virtual  void     Break(Branch_ptr branch);
+    const vector<LocusCell> & CollectMovingCells();
+
+  public:
+    // Creation and destruction.
+    RecTree();
+    RecTree(const RecTree & tree, bool makestump);
+    ~RecTree() {};
+
+    virtual  Tree    *Clone()     const;
+    virtual  Tree    *MakeStump() const;
+    virtual  void     Clear();
+    virtual  void     CopyTips(const Tree * tree);
+    virtual  void     CopyBody(const Tree * tree);
+    virtual  void     CopyPartialBody(const Tree * tree);
+
+    // Getters.
+    virtual  rangevector GetLocusSubtrees(rangepair span) const;
+    Linkweight GetCurTargetLinkweight() const { return m_curTargetLinkweight; };
+    Linkweight GetNewTargetLinkweight() const { return m_newTargetLinkweight; };
+    unsigned long int GetNumMovingLoci() { return m_pMovingLocusVec->size(); };
+
+    // Testers.
+    virtual  bool DoesThisLocusJump(long int mloc) const;
+    virtual  bool AnyRelativeHaplotypes() const;
+
+    // Setters.
+    void      SetMovingLocusVec(vector<Locus> * locs);
+    void      SetMovingMapPosition(long int mloc, long int site);
+
+    virtual TBranch_ptr CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                                  const vector<LocusCell> & movingcells, const rangeset & diseasesites);
+
+    virtual TBranch_ptr CreateTip(const TipData & tipdata, const vector<LocusCell> & cells,
+                                  const vector<LocusCell> & movingcells, const rangeset & diseasesites,
+                                  const vector<Locus> & loci);
+
+    // Likelihood manipulation.
+    virtual void        CalculateDataLikes();
+    virtual double      CalculateDataLikesForMovingLocus(long int mloc);
+    virtual DoubleVec1d CalculateDataLikesForFloatingLocus(long int mloc);
+    virtual DoubleVec1d CalculateDataLikesWithRandomHaplotypesForFloatingLocus(long int mloc);
+    virtual void CalculateDataLikesForAllHaplotypesForFloatingLocus(long int mloc, DoubleVec1d & mlikes);
+    virtual bool UpdateDataLikesForIndividualsFrom(long int ind, long int mloc, DoubleVec1d & mlikes);
+
+    // Rearrangement primitives.
+    virtual  vector<Branch_ptr> ActivateTips(Tree * othertree);
+    virtual  Branch_ptr  ActivateBranch(Tree * othertree);
+    virtual  Branch_ptr  ActivateRoot(FC_Status & fcstatus);
+    virtual  void     AttachBase(Branch_ptr newroot);
+    virtual  vector<Branch_ptr> FirstInterval(double eventT);
+    virtual  void     NextInterval(Branch_ptr branch);
+    virtual  void     Prune();
+    virtual  void     ReassignDLsFor(std::string lname, long int marker, long int ind);
+
+    // Force-specific rearrangement primitives.
+    virtual Branch_ptr   CoalesceActive(double eventT, Branch_ptr active1,
+                                        Branch_ptr active2, const rangeset & fcsites);
+    virtual Branch_ptr   CoalesceInactive(double eventT, Branch_ptr active,
+                                          Branch_ptr inactive, const rangeset & fcsites);
+    virtual Branch_ptr   Migrate(double eventT, long int topop, long int maxEvents, Branch_ptr active);
+    virtual Branch_ptr   DiseaseMutate (double eventT, long int endstatus, long int maxEvents, Branch_ptr active);
+    branchpair RecombineActive(double eventT, long int maxEvents, FPartMap fparts,
+                               Branch_ptr active, long int recpoint, const rangeset & fcsites, bool lowSitesOnLeft);
+    branchpair RecombineInactive(double eventT, long int maxEvents, FPartMap fparts,
+                                 Branch_ptr active, long int recpoint, const rangeset & fcsites);
+
+    // Map Summary.
+    DoubleVec2d  GetMapSummary();
+
+    // "startsites" is a set of Interval tree start SITEs, each just to the right of a
+    // Littlelink recombination recpoint, or it is an endmarker (first or last site).
+    std::set<long int> IgnoreDisallowedSubTrees(std::set<long int> startsites, rangeset allowedranges);
+
+    DoubleVec1d ZeroDisallowedSites(DoubleVec1d datalikes, rangeset allowedranges);
+    void RandomizeMovingHaplotypes(long int mlocus);
+
+    // Simulation.
+    virtual bool SimulateDataIfNeeded();
+    virtual long int NumberOfRecombinations();
+
+    // TreeSizeArranger.
+    virtual void SetCurTargetLinkweightFrom(const BranchBuffer & brbuffer);
+    virtual void ClearCurTargetLinkweight() { m_curTargetLinkweight = ZERO; };
+
+    // Debugging functions.
+    virtual void SetNewTargetLinkweightFrom(const BranchBuffer & brbuffer);
+    virtual void PrintTipData(long int mloc, long int marker);
+    virtual void PrintRangeSetCount(const rangesetcount & rsc);
+    virtual rangesetcount RemoveEmpty(const rangesetcount & rsc);
+};
+
+#endif // TREE_H
+
+//____________________________________________________________________________________
diff --git a/src/tree/treesum.cpp b/src/tree/treesum.cpp
new file mode 100644
index 0000000..52f89d8
--- /dev/null
+++ b/src/tree/treesum.cpp
@@ -0,0 +1,930 @@
+// $Id: treesum.cpp,v 1.116 2013/11/08 21:46:21 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <fstream>
+#include <iostream>
+#include <iterator>
+#include <map>
+#include <sstream>
+#include <string>
+
+#include "local_build.h"
+
+#include "chainmanager.h"
+#include "constants.h"
+#include "enable_shared_from_this.hpp"  // For shared_from_this in RecTreeSummary handling of
+                                        // recombination events for partnerpicks logic.
+#include "force.h"                      // For setting up the recsbypartition vector of intervals
+                                        // in a RecTreeSummary.
+#include "range.h"                      // For Link-related typedefs and constants.
+#include "registry.h"
+#include "summary.h"
+#include "tree.h"
+#include "treesum.h"
+#include "xmlsum_strings.h"             // For xml sumfile strings.
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+// This turns on the detailed print out of the creation and analysis of each coalescence tree.
+// JRM 4/10
+//#define PRINT_TREE_DETAILS
+
+// Dumps all coalescences to rdata if defined; if not, only the final coalescence is written.
+// JRM 4/10
+//#define SAVE_ALL_COAL
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+typedef map<string, double> Forcemap;
+typedef map<string, double>::iterator Forcemapiter;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+// This constructor is used only to make the initial TreeSummary prototype.
+
+TreeSummary::TreeSummary()
+{
+    // Ensure that the pointers are NULL; later code
+    // expects them to be NULL if they have not been set.
+
+    m_coalsummary = NULL;
+    m_migsummary = NULL;
+    m_recsummary = NULL;
+    m_growsummary = NULL;
+    m_diseasesummary = NULL;
+    m_epochsummary = NULL;
+    m_divmigsummary = NULL;
+
+} // TreeSummary ctor
+
+//------------------------------------------------------------------------------------
+// This copy constructor produces a usable EMPTY TreeSummary (no tree in it).
+// No method is provided to copy the contents of a TreeSummary; it is considered too
+// expensive.  (They can't be put in STL containers anyway as they are polymorphic.)
+
+TreeSummary::TreeSummary(const TreeSummary & src)
+    : m_intervalData(),
+      m_nCopies(0)
+{
+    Sumconst_iter it = src.m_summaries.begin();
+    for ( ; it != src.m_summaries.end(); ++it)
+    {
+        Summary * sum = it->second->Clone(m_intervalData);
+        m_summaries.insert(make_pair(it->first, sum));
+    }
+    CacheSummaries();
+} // Copy ctor
+
+//------------------------------------------------------------------------------------
+
+TreeSummary * TreeSummary::Clone() const
+{
+    return new TreeSummary(*this);
+} // TreeSummary::Clone
+
+//------------------------------------------------------------------------------------
+
+TreeSummary::~TreeSummary()
+{
+    Sumiter it = m_summaries.begin();
+    for ( ; it != m_summaries.end(); ++it)
+    {
+        delete it->second;
+    }
+} // dtor
+
+//------------------------------------------------------------------------------------
+
+void TreeSummary::Compress()
+{
+    Summap::iterator it = m_summaries.begin();
+    bool allshort = true;
+
+    for ( ; it != m_summaries.end(); ++it)
+    {
+        allshort = it->second->Compress() && allshort;
+    }
+    if (allshort) m_intervalData.clear();
+
+} // Compress
+
+//------------------------------------------------------------------------------------
+// MDEBUG Currently the tree is summarized if either the tree or the stick has changed.
+// This may be inefficient if tree changes are rare and stick changes are common.
+// Revisit this decision if maximization slows down.
+
+void TreeSummary::Summarize(const Tree & tree)
+{
+    m_nCopies = 1;
+
+    const TimeList & timeList = tree.GetTimeList();
+#ifdef PRINT_TREE_DETAILS
+    cerr << "in TreeSummary::Summarize" << endl;
+    timeList.PrintTreeList();           // JRM debug
+    cerr << "RootTime: " << timeList.RootTime() << endl;
+#endif
+    // Tally up populations of tips into lineages vector.
+
+    BranchBuffer lineages(registry.GetDataPack());
+    Branchconstiter brit;
+#ifdef DUMP_TREE_COAL_RDATA
+    //DUMP_TREE_COAL_RDATA is defined in chainmanager.h
+    //rdata is opened and closed in chainmanager.cpp
+    double roottime = 0;
+    for (brit = timeList.FirstBody(); brit != timeList.EndBranch();
+         brit = timeList.NextBody(brit))
+    {
+
+        roottime = (*brit)->m_eventTime;
+
+#ifdef SAVE_ALL_COAL
+        rdata << (*brit)->m_eventTime << ", "; // write every coal time // JRM debug
+#endif
+    }
+
+#ifndef SAVE_ALL_COAL
+    rdata << roottime; // write root time // JRM debug
+#endif
+
+    rdata << endl; // end coal time line // JRM debug
+#endif
+
+
+    // Update the lineages.
+    for (brit = timeList.FirstTip(); brit != timeList.EndBranch(); brit = timeList.NextTip(brit))
+    {
+
+        lineages.UpdateBranchCounts((*brit)->m_partitions, true);
+#ifdef PRINT_TREE_DETAILS
+        cerr << " updated branch count to add tip " << *brit
+             << " ID: " << (*brit)->GetID()
+             << " to partitions"
+             << endl; // JRM print
+#endif
+    }
+
+    // Traverse body of tree collecting information.
+
+#ifdef PRINT_TREE_DETAILS
+    cerr << " before scoreEvent loop" << endl;
+    lineages.PrintXParts();
+#endif
+    double epochtime(0.0);
+    for (brit = timeList.FirstBody(); brit != timeList.EndBranch();
+         brit = timeList.NextBody(brit))
+    {
+        // Score the branch.
+        Branch_ptr pBranch = *brit;
+
+        // We do not score EpochBranches for an already encountered epoch time.
+        // This call adjusts lineages as a side effect.
+        if (pBranch->GetTime() != epochtime)
+        {
+            pBranch->ScoreEvent(*this, lineages);
+        }
+
+        if (pBranch->Event() == btypeEpoch)
+        {
+            if (pBranch->GetTime() == epochtime)
+            {
+                // No ScoreEvent for this branch; it's later in a set.
+                lineages.UpdateBranchCounts(pBranch->m_partitions);
+                lineages.UpdateBranchCounts(pBranch->Child(0)->m_partitions, false);
+            }
+            else
+            {
+                // This branch did ScoreEvent above; it defines the new epoch time.
+                epochtime = pBranch->GetTime();
+            }
+        }
+#ifdef PRINT_TREE_DETAILS
+        cerr << " after scoreEvent call ";
+        lineages.PrintXParts();
+#endif
+
+    }
+#ifdef PRINT_TREE_DETAILS
+    cerr << " after scoreEvent loop" << endl;
+    lineages.PrintXParts();
+#endif
+
+    // now summarize the stick
+    if (tree.UsingStick())
+    {
+        tree.ScoreStick(*this);
+    }
+
+    Compress();
+    CacheSummaries(); // JCHECK--this isn't called in RecTreeSummary::Summarize
+
+} // Summarize
+
+//------------------------------------------------------------------------------------
+
+#ifdef STATIONARIES
+
+void TreeSummary::DumpStationariesData(const Tree & tree, const ForceParameters & fp) const
+{
+    ofstream of,migdir;
+    of.open(INTERVALFILE.c_str(), ios::app);
+
+    const TimeList & timeList = tree.GetTimeList();
+    Branchconstiter brit;
+
+    long int interval = 0;
+    double topTime = 0.0;
+
+#ifdef DUMP_TREE_COAL_RDATA
+    // MDEBUG:  following code is a braindead way to do this!
+    //DUMP_TREE_COAL_RDATA is defined in chainmanager.h
+    //rdata is opened and closed in chainmanager.cpp
+    double roottime = 0.0;
+    for (brit = timeList.FirstBody(); brit != timeList.EndBranch();
+         brit = timeList.NextBody(brit))
+    {
+        roottime = (*brit)->m_eventTime;
+    }
+    rdata << roottime << endl; // dump root time 
+#endif
+
+    migdir.open("migdir.out", ios::app);
+    double lastepochtime(0.0);
+
+    for (brit = timeList.FirstBody(); brit != timeList.EndBranch();
+         brit = timeList.NextBody(brit))
+    {
+        Branch_ptr pBranch = *brit;
+
+        if (pBranch->Event() == btypeEpoch)
+        {
+            lastepochtime = pBranch->GetTime();
+            continue;
+        }
+
+        double bottomTime = pBranch->m_eventTime;
+        of << "int" << interval << " " << bottomTime - topTime << endl;
+        ++interval;
+        topTime = bottomTime;
+
+        if (pBranch->Event() == btypeDivMig)
+        {
+            if (pBranch->GetTime() <= lastepochtime)
+                throw(1);
+            migdir << "root" << pBranch->GetPartition(force_DIVMIG);
+            migdir << "tip";
+            migdir << pBranch->Child(0)->GetPartition(force_DIVMIG) << endl;
+        }
+    }
+    migdir.close();
+
+    of.close();
+
+    interval = 0;
+    topTime = 0.0;
+    ofstream coalint;
+    coalint.open("coalint.out", ios::app);
+    for (brit = timeList.FirstCoal(); brit != timeList.EndBranch();
+         brit = timeList.NextCoal(brit))
+    {
+        Branch_ptr pBranch = *brit;
+
+        double bottomTime = pBranch->m_eventTime;
+        coalint << "cint" << interval << " " << bottomTime - topTime;
+        coalint << endl;
+        ++interval;
+        topTime = bottomTime;
+    }
+    coalint.close();
+
+    of.open(MIGFILE.c_str(), ios::app);
+    long int migs = timeList.HowMany(btypeMig);
+    migs += timeList.HowMany(btypeDivMig);
+    of << migs << " " << endl;
+    of.close();
+
+    of.open(DISFILE.c_str(), ios::app);
+    long int ndisevents = timeList.HowMany(btypeDisease);
+    of << ndisevents << " " << endl;
+    of.close();
+
+    of.open(RECFILE.c_str(), ios::app);
+    long int recs = timeList.HowMany(btypeRec);
+    // divide by two because HowMany gives the number
+    // of branches, not events
+    recs = recs / 2;
+    of << recs  << " " << endl;
+    of.close();
+
+    if (fp.GetEpochTimes().size() > 0)
+    {
+        of.open(EPOCHFILE.c_str(), ios::app);
+        vector<double> etimes(fp.GetEpochTimes());
+        vector<double>::const_iterator et;
+        for(et = etimes.begin(); et != etimes.end(); ++et)
+           of << (*et) << " ";
+        of << endl;
+        of.close();
+    }
+
+} // DumpStationariesData
+
+#endif // STATIONARIES
+
+//------------------------------------------------------------------------------------
+// This is OPTIMIZATION code which supports the specialized GetMigSummary
+// and so forth functions.  It's ugly and force-specific but a significant
+// speedup over calling GetSummary(lamarcstrings::MIG).
+
+void TreeSummary::CacheSummaries()
+{
+    m_coalsummary = GetSummary(force_COAL);
+    m_migsummary = GetSummary(force_MIG);
+    m_diseasesummary = GetSummary(force_DISEASE);
+    m_recsummary = GetSummary(force_REC);
+    m_growsummary = GetSummary(force_GROW);
+    m_epochsummary = GetSummary(force_DIVERGENCE);
+    m_divmigsummary = GetSummary(force_DIVMIG);
+
+} // CacheSummaries
+
+//------------------------------------------------------------------------------------
+
+map<force_type, DoubleVec1d> TreeSummary::InspectSummary() const
+{
+    // for each force, get a vector containing the number of events
+    // for each parameter; return them in a map indexed by force
+
+    Summap::const_iterator summary;
+    map<force_type, DoubleVec1d> counts;
+
+    for (summary = m_summaries.begin(); summary != m_summaries.end(); ++summary)
+    {
+        counts.insert(make_pair(summary->first, summary->second->GetShortPoint()));
+    }
+
+    return counts;
+} // InspectSummary
+
+//------------------------------------------------------------------------------------
+
+void TreeSummary::AdjustSummary(const map<force_type, DoubleVec1d> & counts, long int region)
+{
+    map<force_type, DoubleVec1d>::const_iterator fit;
+
+    for (fit = counts.begin(); fit != counts.end(); ++fit)
+    {
+        Summary * sum = GetSummary(fit->first);
+        assert(sum);  // no summary?!
+        sum->AdjustSummary(fit->second, region);
+    }
+
+} // AdjustSummary
+
+//------------------------------------------------------------------------------------
+
+Summary * TreeSummary::GetSummary(force_type type)
+{
+    // Returning NULL is not necessarily indicative of an error
+    // state--some forces, notably Growth, do not have corresponding
+    // Summaries.
+
+    Sumiter it = m_summaries.find(type);
+    if (it == m_summaries.end()) return NULL;
+    else return it->second;
+
+} // GetSummary
+
+//------------------------------------------------------------------------------------
+
+Summary const* TreeSummary::GetSummary(force_type type) const
+{
+    // Returning NULL is not necessarily indicative of an error
+    // state--some forces, notably Growth, do not have corresponding
+    // Summaries.
+
+    Sumconst_iter it = m_summaries.find(type);
+    if (it == m_summaries.end()) return NULL;
+    else return it->second;
+
+} // GetSummary const
+
+//------------------------------------------------------------------------------------
+
+void TreeSummary::AddSummary(force_type type, Summary * sum)
+{
+    // we should not be adding an already present type!
+    assert(m_summaries.find(type) == m_summaries.end());
+
+    m_summaries.insert(make_pair(type, sum));
+
+} // AddSummary
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+RecTreeSummary::RecTreeSummary()
+    : TreeSummary()
+{
+    const ForceVec partforces(registry.GetForceSummary().GetPartitionForces());
+    ForceVec::const_iterator pforce;
+    for(pforce = partforces.begin(); pforce != partforces.end(); ++pforce)
+    {
+        vector<Interval*> sums;
+        vector<vector<Interval*> > recs((*pforce)->GetNPartitions(), sums);
+        m_recsbypart.push_back(recs);
+    }
+    m_diseasepresent = registry.GetForceSummary().CheckForce(force_DISEASE);
+} // RecTreeSummary::ctor
+
+//------------------------------------------------------------------------------------
+
+RecTreeSummary::RecTreeSummary(const RecTreeSummary & src)
+    : TreeSummary(src),
+      m_recsbypart(src.m_recsbypart)
+{
+    m_diseasepresent = src.m_diseasepresent;
+} // RecTreeSummary::copy ctor
+
+//------------------------------------------------------------------------------------
+
+TreeSummary * RecTreeSummary::Clone() const
+{
+    return new RecTreeSummary(*this);
+} // RecTreeSummary::Clone
+
+//------------------------------------------------------------------------------------
+
+void RecTreeSummary::Summarize(const Tree & tree)
+{
+    m_nCopies = 1;
+    BranchBuffer lineages(registry.GetDataPack());
+
+    const TimeList & timeList = tree.GetTimeList();
+
+    // Tally up populations of tips into lineages vector
+
+    Branchconstiter brit;
+    Linkweight recweight(ZERO);
+
+    for (brit = timeList.FirstTip(); brit != timeList.EndBranch(); brit = timeList.NextTip(brit))
+    {
+        lineages.UpdateBranchCounts((*brit)->m_partitions);
+        recweight += (*brit)->GetRangePtr()->GetCurTargetLinkweight();
+    }
+
+    // Traverse tree collecting information
+
+    bool scorethisrecbranch(true);
+    double epochtime(0.0);
+    for (brit = timeList.FirstBody(); brit != timeList.EndBranch(); brit = timeList.NextBody(brit))
+    {
+        Branch_ptr pBranch = *brit;
+        // We do not score EpochBranches for an already encountered epoch time.
+        if (pBranch->GetTime() != epochtime)
+        {
+            // ScoreEvent returns information via reference variable "recweight" here.
+            pBranch->ScoreEvent(*this, lineages, recweight);
+        }
+
+        if (pBranch->Event() == btypeEpoch)
+        {
+            if (pBranch->GetTime() == epochtime)
+            {
+                // No ScoreEvent for this branch; it's later in a set.
+                lineages.UpdateBranchCounts(pBranch->m_partitions);
+                lineages.UpdateBranchCounts(pBranch->Child(0)->m_partitions, false);
+            }
+            else
+            {
+                // This branch did ScoreEvent above; it defines the new epoch time.
+                epochtime = pBranch->GetTime();
+            }
+        }
+
+        // We assume that recombination events are adjacent in the timelist.
+        // MDEBUG MFIX!  Is this right?  It seems arbitrary about which branch is scored.
+        // Is that justifiable in the presence of disease?
+        // I don't think so!  And this code should be in RBranch::ScoreEvent to fix it.
+        if (m_diseasepresent && pBranch->Event() == btypeRec)
+        {
+            if (scorethisrecbranch)
+            {
+                dynamic_cast<RecSummary *>(m_recsummary)->AddToRecombinationCounts(pBranch->m_partitions);
+                AddRecToRecsByPart(pBranch->m_partitions, m_recsummary->GetLastAdded());
+            }
+            scorethisrecbranch = !scorethisrecbranch;
+        }
+    }
+
+    // now summarize the stick
+    if (tree.UsingStick())
+    {
+        tree.ScoreStick(*this);
+    }
+
+    Compress();
+
+} // Summarize
+
+//------------------------------------------------------------------------------------
+
+void RecTreeSummary::AddRecToRecsByPart(const LongVec1d & membership, Interval* newrec)
+{
+    LongVec1d::size_type partforce;
+    for(partforce = 0; partforce < membership.size(); ++partforce)
+    {
+        m_recsbypart[partforce][membership[partforce]].push_back(newrec);
+    }
+} // RecTreeSummary::AddRecToRecsByPart
+
+//------------------------------------------------------------------------------------
+
+const vector<vector<vector<Interval*> > > & RecTreeSummary::GetRecsByPart() const
+{
+    return m_recsbypart;
+} // GetRecsByPart
+
+//------------------------------------------------------------------------------------
+// Function: Writes tree summaries to the sumfile.
+// Notes:  Does not assume that the cached summary pointers are NULL for forces that are
+//   not in use, since a cached pointer could look valid even when its force is off.
+//   Test for that condition before optimizing away the map lookup (the call to
+//   GetSummary).
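+//
+// Output layout, schematically (the literal tag text comes from the xmlsum:: constants,
+// so only the nesting order is shown here):
+//   TREESUM_START
+//     NCOPY_START  <number of identical trees>  NCOPY_END
+//     one SHORTFORCE_START <force> <short|long> ... SHORTFORCE_END block per force that
+//       has a Summary, containing the points, waits, and picks written by WriteShorts()
+//     INTERVALS_START ... INTERVALS_END   (only if some force required the long form)
+//   TREESUM_END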
+
+void TreeSummary::WriteTreeSummary (ofstream & out)
+{
+    out << "\t" << xmlsum::TREESUM_START << endl;
+    out << "\t\t" << xmlsum::NCOPY_START << " " << GetNCopies()
+        << " " << xmlsum::NCOPY_END << endl;
+
+    CacheSummaries(); //just in case.
+
+    // If growth is on, we probably have to write out full intervals,
+    // not just points and waits.
+    // However, some forces may only come in a short form anyway, if they never
+    // happened, or only happened once (like recombination, for example).
+    // But IntervalData has this information stored in it now, so we're OK.
+
+    string form = "short";  //Either 'short' or 'long'.
+    DoubleVec1d sp;
+    DoubleVec1d sw;
+    LongVec2d shortpick;
+
+    vector<force_type> fvec;
+    fvec.push_back(force_COAL);
+    fvec.push_back(force_MIG);
+    fvec.push_back(force_DISEASE);
+    fvec.push_back(force_REC);
+    fvec.push_back(force_DIVERGENCE);
+    fvec.push_back(force_DIVMIG);
+
+    bool somelong = false;
+
+    for (unsigned long int fvi = 0; fvi < fvec.size(); fvi++)
+    {
+        Summary * currsum = GetSummary(fvec[fvi]);
+        if (currsum)
+        {
+            if (!(currsum->GetShortness()))
+            {
+                form = "long";
+                somelong = true;
+            }
+            sp = currsum->GetShortPoint();
+            sw = currsum->GetShortWait();
+            shortpick = currsum->GetShortPicks();
+
+            out << "\t\t" << xmlsum::SHORTFORCE_START << " "
+                << ToShortString(fvec[fvi])
+                << " " << form << endl;
+            WriteShorts( out, sp, sw, shortpick);
+            out << "\t\t" << xmlsum::SHORTFORCE_END << endl;
+        }
+        form = "short";
+    }
+
+    if (somelong)
+    {
+        out << "\t\t" << xmlsum::INTERVALS_START << endl;
+        m_intervalData.WriteIntervalData(out);
+        out << "\t\t" << xmlsum::INTERVALS_END << endl;
+    }
+    out << "\t" << xmlsum::TREESUM_END << endl;
+} // TreeSummary::WriteTreeSummary
+
+//------------------------------------------------------------------------------------
+// Writes a summary's shortwait and shortpoint to sumfile.
+
+void TreeSummary::WriteShorts (ofstream & out, DoubleVec1d & shortpoint,
+                               DoubleVec1d & shortwait, LongVec2d & shortpick) const
+{
+    vector<double>::iterator itstart;
+    vector<double>::iterator itend;
+
+    itstart = shortpoint.begin();
+    itend = shortpoint.end();
+
+    out << "\t\t\t" << xmlsum::SHORTPOINT_START << " ";
+    for( ; itstart!= itend; ++itstart )
+        out << *itstart << " ";
+    out << xmlsum::SHORTPOINT_END << endl;
+
+    itstart = shortwait.begin();
+    itend = shortwait.end();
+
+    out << "\t\t\t" << xmlsum::SHORTWAIT_START << " ";
+    for( ; itstart!= itend; ++itstart )
+        out << *itstart << " ";
+    out << xmlsum::SHORTWAIT_END << endl;
+
+    if (!shortpick.empty())
+    {
+        LongVec2d::iterator fitstart = shortpick.begin();
+        LongVec2d::iterator fitend = shortpick.end();
+        out << "\t\t\t" << xmlsum::SHORTPICK_START << endl;
+        for( ; fitstart!= fitend; ++fitstart )
+        {
+            out << "\t\t\t\t" << xmlsum::SHORTPICK_FORCE_START << " ";
+            LongVec1d::iterator istart = fitstart->begin();
+            LongVec1d::iterator iend = fitstart->end();
+            for( ; istart!= iend; ++istart )
+                out << *istart << " ";
+            out << xmlsum::SHORTPICK_FORCE_END << endl;
+        }
+        out << "\t\t\t" << xmlsum::SHORTPICK_END << endl;
+    }
+}
+
+//------------------------------------------------------------------------------------
+// Fills an individual tree summary with the appropriate summaries.
+// precondition:  last str read in was xmlsum::TREESUM_START
+// postcondition: last str read in should be xmlsum::TREESUM_END
+
+void TreeSummary::ReadInTreeSummary ( ifstream & in )
+{
+    string tag;
+    in >> tag;
+
+    SumFileHandler::ReadInCheckFileFormat("TreeSummary::ReadInTreeSummary",
+                                          xmlsum::NCOPY_START, tag);
+    long int tt;
+    in >> tt;
+    m_nCopies = tt;
+    in >> tag;
+    SumFileHandler::ReadInCheckFileFormat("TreeSummary::ReadInTreeSummary",
+                                          xmlsum::NCOPY_END, tag);
+
+    in >> tag;
+    force_type type;
+    string typeString;
+    string form;
+    while ( !in.eof() && tag != xmlsum::TREESUM_END )
+    {
+        if ( tag == xmlsum::SHORTFORCE_START )
+        {
+            in >> typeString;
+            type = ProduceForceTypeOrBarf(typeString);
+            in >> form;
+            Summary * current_sum = GetSummary(type);
+            if (form=="short")
+                current_sum->SetShortness(1);
+            else
+                current_sum->SetShortness(0);
+
+            ReadInSummary(type, tag, in);
+            in >> tag;
+            SumFileHandler::ReadInCheckFileFormat("TreeSummary::ReadInTreeSummary",
+                                                  xmlsum::SHORTFORCE_END, tag );
+        }
+        else if ( tag == xmlsum::INTERVALS_START )
+        {
+            ReadInIntervals(tag, in);
+            SumFileHandler::ReadInCheckFileFormat("TreeSummary::ReadInTreeSummary",
+                                                  xmlsum::INTERVALS_END, tag );
+        }
+        in >> tag;
+    }
+} // TreeSummary::ReadInTreeSummary
+
+//------------------------------------------------------------------------------------
+// Fill a vector of doubles for one force (it doesn't know which one it does).
+// precondition: next string read in should be xmlsum::SHORTPOINT_START
+// postcondition: last string read in should be xmlsum::SHORTWAIT_END
+//                next string to be read should be an end force tag
+
+void TreeSummary::ReadInSummary ( force_type type, string & tag, ifstream & in )
+{
+    Summary * current_sum = GetSummary(type);
+
+    in >> tag;
+    SumFileHandler::ReadInCheckFileFormat( "TreeSummary::ReadInSummary", xmlsum::SHORTPOINT_START, tag );
+    in >> tag;
+    while ( tag != xmlsum::SHORTPOINT_END )
+    {
+        current_sum->AddShortPoint( tag );
+        in >> tag;
+    }
+
+    in >> tag;
+    SumFileHandler::ReadInCheckFileFormat( "TreeSummary::ReadInSummary", xmlsum::SHORTWAIT_START, tag );
+    in >> tag;
+    while ( tag != xmlsum::SHORTWAIT_END )
+    {
+        current_sum->AddShortWait( tag );
+        in >> tag;
+    }
+
+    ForceSummary & fs = registry.GetForceSummary();
+    if (fs.GetNLocalPartitionForces() > 0 && fs.CheckForce(force_REC) && type == force_COAL)
+    {
+        // Called only when underlying data structures (trees, branches, ranges)
+        // are potentially recombinant (ie, contain RecRanges, not Ranges).
+        in >> tag;
+        SumFileHandler::ReadInCheckFileFormat( "TreeSummary::ReadInSummary", xmlsum::SHORTPICK_START, tag );
+        in >> tag;
+        while ( tag != xmlsum::SHORTPICK_END )
+        {
+            SumFileHandler::ReadInCheckFileFormat( "TreeSummary::ReadInSummary", xmlsum::SHORTPICK_FORCE_START, tag );
+            in >> tag;
+            StringVec1d newpicks;
+            while ( tag != xmlsum::SHORTPICK_FORCE_END)
+            {
+                newpicks.push_back(tag);
+                in >> tag;
+            }
+            current_sum->AddShortPicks( newpicks );
+            in >> tag;
+        }
+    }
+
+} // TreeSummary::ReadInSummary
+
+//------------------------------------------------------------------------------------
+
+void TreeSummary::ReadInIntervals( string & tag, ifstream & in )
+{
+    in >> tag;
+    while (tag == xmlsum::FORCE_START)
+    {
+        string typeString;
+        force_type type;
+        in >> typeString;
+        type = ProduceForceTypeOrBarf(typeString);
+        in >> tag;
+        SumFileHandler::ReadInCheckFileFormat("TreeSummary::ReadInIntervals", xmlsum::FORCE_END, tag );
+        in >> tag;
+        SumFileHandler::ReadInCheckFileFormat("TreeSummary::ReadInIntervals", xmlsum::ENDTIME_START, tag );
+        in >> tag;
+        double endtime = atof(tag.c_str());
+        in >> tag;
+        SumFileHandler::ReadInCheckFileFormat( "TreeSummary::ReadInInterval", xmlsum::ENDTIME_END, tag );
+        in >> tag;
+        long int oldstatus = 0;
+        if (tag == xmlsum::OLDSTATUS_START)
+        {
+            in >> tag;
+            oldstatus = atol(tag.c_str());
+            in >> tag;
+            SumFileHandler::ReadInCheckFileFormat( "TreeSummary::ReadInInterval", xmlsum::OLDSTATUS_END, tag );
+            in >> tag;
+        }
+
+        long int newstatus = 0;
+        if (tag == xmlsum::NEWSTATUS_START)
+        {
+            in >> tag;
+            newstatus = atol(tag.c_str());
+            in >> tag;
+            SumFileHandler::ReadInCheckFileFormat( "TreeSummary::ReadInInterval", xmlsum::NEWSTATUS_END, tag );
+            in >> tag;
+        }
+
+        // "recweight" is a Link recombination weight (Biglink weight or number of Littlelinks).
+        Linkweight recweight(ZERO);
+        if (tag == xmlsum::RECWEIGHT_START)
+        {
+            in >> tag;
+#ifdef RUN_BIGLINKS
+            recweight = atof(tag.c_str());
+#else
+            recweight = atol(tag.c_str());
+#endif
+            in >> tag;
+            SumFileHandler::ReadInCheckFileFormat( "TreeSummary::ReadInInterval", xmlsum::RECWEIGHT_END, tag );
+            in >> tag;
+        }
+
+        // "recpoint" is a recombination Littlelink (middle of target Biglink in Biglink version).
+        long int recpoint = 0;
+        if (tag == xmlsum::RECPOINT_START)
+        {
+            in >> tag;
+            recpoint = atol(tag.c_str());
+            in >> tag;
+            SumFileHandler::ReadInCheckFileFormat( "TreeSummary::ReadInInterval", xmlsum::RECPOINT_END, tag );
+            in >> tag;
+        }
+
+        SumFileHandler::ReadInCheckFileFormat( "TreeSummary::ReadInInterval", xmlsum::XPARTLINES_START, tag );
+        in >> tag;
+        LongVec1d xpartlines;
+        while (tag != xmlsum::XPARTLINES_END)
+        {
+            xpartlines.push_back(atol(tag.c_str()));
+            in >> tag;
+        }
+
+        in >> tag;
+        LongVec2d partlines;
+
+        if (tag == xmlsum::PARTLINES_START)
+        {
+            in >> tag;
+            LongVec1d part_temp;
+            while (tag != xmlsum::PARTLINES_END)
+            {
+                while (tag != ".")
+                {
+                    part_temp.push_back(atol(tag.c_str()));
+                    in >> tag;
+                }
+                partlines.push_back(part_temp);
+                part_temp.clear();
+                in >> tag;
+            }
+            in >> tag;
+        }
+
+        LongVec1d partnerpicks;
+        if (tag == xmlsum::PARTNERPICKS_START)
+        {
+            in >> tag;
+            while (tag != xmlsum::PARTNERPICKS_END)
+            {
+                while (tag != ".")
+                {
+                    partnerpicks.push_back(atol(tag.c_str()));
+                    in >> tag;
+                }
+                in >> tag;
+            }
+            in >> tag;
+        }
+
+        //The Summary function 'AddInterval' adds the interval to the interval
+        // list while also setting up the appropriate pointers for that force
+        // type's path through the intervals.
+        Summary * sumptr = GetSummary(type);
+        if (sumptr)
+            sumptr->AddInterval(endtime, partlines, xpartlines, recweight,
+                                oldstatus, newstatus, recpoint, partnerpicks, type);
+        else
+        {
+            //Assume the interval is free-floating, and add the interval directly.
+            Interval* fakeint = NULL;
+            m_intervalData.AddInterval(fakeint, endtime, partlines, xpartlines, recweight,
+                                       oldstatus, newstatus, recpoint, partnerpicks, type);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void TreeSummary::PrintIntervalData() const
+{
+    m_intervalData.PrintIntervalData();
+}
+
+//------------------------------------------------------------------------------------
+
+void TreeSummary::PrintStickSummaryToFile(ofstream & of) const
+{
+    DoubleVec2d::size_type njoints(m_stickData.freqs.size());
+    assert(m_stickData.lengths.size() == njoints);
+    assert(m_stickData.lnfreqs.size() == njoints);
+
+    // we add a cap of 50 joints to save file space...
+    if (njoints > 50) njoints = 50;
+
+    DoubleVec2d::size_type joint;
+    for(joint = 0; joint < njoints; ++joint)
+    {
+        of << "joint#" << joint << " ";
+        of << "freqA=" << m_stickData.freqs[joint][0] << " :lnfreqA=";
+        of << m_stickData.lnfreqs[joint][0] << ";" << " with length=";
+        of << m_stickData.lengths[joint] << endl;
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/tree/treesum.h b/src/tree/treesum.h
new file mode 100644
index 0000000..3796b26
--- /dev/null
+++ b/src/tree/treesum.h
@@ -0,0 +1,182 @@
+// $Id: treesum.h,v 1.15 2002/10/29 22:01:19 ewalkup Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef TREESUM_H
+#define TREESUM_H
+
+#include <fstream>
+#include <iostream>                     // debug function include
+#include <map>
+#include <string>
+#include <vector>
+
+#include "vectorx.h"
+#include "intervaldata.h"               // for IntervalData member
+#include "stringx.h"                    // debug function include
+#include "forceparam.h"                 // for STATIONARIES
+
+// #include "tree.h" in .cpp for generalized access to trees in
+//     TreeSummary::Summarize(), RecTreeSummary::Summarize, and
+//     TreeSummary::SummarizeTree();
+
+//------------------------------------------------------------------------------------
+
+class Tree;
+class ForceSummary;
+class Registry;
+class Summary;
+
+//------------------------------------------------------------------------------------
+
+typedef std::map<force_type, Summary *> Summap;
+typedef std::map<force_type, Summary *>::iterator Sumiter;
+typedef std::map<force_type, Summary *>::const_iterator Sumconst_iter;
+
+//------------------------------------------------------------------------------------
+
+struct StickSummary
+{
+    DoubleVec2d freqs;                  // dim by number of joints/stairs by xpart
+    DoubleVec1d lengths;                // dim by number of joints/stairs
+    DoubleVec2d lnfreqs;                // dim by number of joints/stairs by xpart
+};
+
+//------------------------------------------------------------------------------------
+
+/*********************************************************************
+Class TreeSummary.
+Summarizes the information from a single tree or run of identical
+trees for the use of the maximizer.  There is a polymorphic form of
+TreeSummary for recombinant trees and there will need to be
+a more drastically polymorphic form for trees with growth.
+
+Written by Jim Sloan, revised by Mary Kuhner
+   added debug printers Jon Yamato 2001/04/23
+   added AdjustSummary Mary Kuhner 2001/07/05
+   massively refactored Mary Kuhner 2002/04/18 (and hey, it's snowing today!)
+   ChainSummary moved to own file Mary Kuhner 2002/04/19
+   ChainSummary refactored out of existence by Mary again, 2004/08/01
+   adding Stick summarization and struct Jon 2007/03/19
+   adding EpochSummary Mary 2010/09/16
+**********************************************************************/
+
+class TreeSummary
+{
+  private:
+    TreeSummary & operator=(const TreeSummary & src);     // undefined
+
+  protected:
+
+    // speed optimization caches
+    Summary * m_coalsummary;
+    Summary * m_migsummary;
+    Summary * m_recsummary;
+    Summary * m_growsummary;
+    Summary * m_diseasesummary;
+    Summary * m_epochsummary;
+    Summary * m_divmigsummary;
+
+    IntervalData m_intervalData;        // the actual stored stuff; we own this
+    Summap       m_summaries;           // helper objects for interpreting m_intervalData
+    long         m_nCopies;             // number of identical trees
+
+    StickSummary m_stickData;           // the stored stick stuff; we own this
+
+    void Compress();                    // reduce storage space, if possible
+    void CacheSummaries();              // cache summary locations
+
+  public:
+
+    TreeSummary();  // used only to prototype
+    TreeSummary(const TreeSummary & src);
+    virtual TreeSummary * Clone() const;
+    virtual              ~TreeSummary();
+
+    virtual void Summarize(const Tree & tree);
+
+#ifdef STATIONARIES
+    virtual void DumpStationariesData(const Tree & tree, const ForceParameters & fp) const;
+#endif // STATIONARIES
+
+    void AddCopy() { ++m_nCopies; };
+    long GetNCopies() const { return m_nCopies; };
+
+    std::map<force_type, DoubleVec1d> InspectSummary() const;
+    virtual void AdjustSummary(const std::map<force_type, DoubleVec1d> & counts, long region);
+
+    Summary *      GetSummary(force_type type);
+    Summary const* GetSummary(force_type type) const;
+    void           AddSummary(force_type type, Summary * sum);
+
+    // Read and instantiate TreeSummary objs
+    void ReadInTreeSummary  ( std::ifstream & in );
+    void ReadInSummary      ( force_type type, string & tag, std::ifstream & in );
+    void ReadInIntervals    ( string & tag, std::ifstream & in );
+
+    // Write the short summary files
+    // (whole intervals are written in tree/intervaldata.cpp)
+    void WriteTreeSummary ( std::ofstream & out );
+    void WriteShorts      ( std::ofstream & out, DoubleVec1d & shortpoint,
+                            DoubleVec1d & shortwait, LongVec2d & shortpick ) const;
+
+    IntervalData& GetIntervalData() { return m_intervalData; };
+
+    // Speed-optimized routines that duplicate GetSummary() for
+    // the given force, but run much more quickly.
+
+    Summary const* GetCoalSummary() const { return m_coalsummary; };
+    Summary const* GetMigSummary() const { return m_migsummary; };
+    Summary const* GetDiseaseSummary() const { return m_diseasesummary; };
+    Summary const* GetRecSummary() const { return m_recsummary; };
+    Summary const* GetGrowSummary() const { return m_growsummary; };
+    Summary const* GetEpochSummary() const { return m_epochsummary; };
+    Summary const* GetDivMigSummary() const { return m_divmigsummary; };
+
+    void SetStickSummary(const DoubleVec2d & freqs,
+                         const DoubleVec1d & lengths,
+                         const DoubleVec2d & lnfreqs)
+    {
+        m_stickData.freqs = freqs;
+        m_stickData.lengths = lengths;
+        m_stickData.lnfreqs = lnfreqs;
+    };
+
+    const StickSummary & GetStickSummary() const { return m_stickData; };
+
+    // debug functions
+    void PrintIntervalData() const;
+    void PrintStickSummaryToFile(std::ofstream & of) const;
+};
+
+//------------------------------------------------------------------------------------
+
+class RecTreeSummary : public TreeSummary
+{
+  protected:
+    // more speed optimization, dim: partforce X partition X recs_in_part
+    vector<vector<vector<Interval*> > > m_recsbypart;
+    bool m_diseasepresent;
+
+  public:
+    RecTreeSummary();
+    RecTreeSummary(const RecTreeSummary & src);
+    virtual TreeSummary * Clone() const;
+    virtual              ~RecTreeSummary()  {};
+
+    virtual void Summarize(const Tree & tree);
+
+    void AddRecToRecsByPart(const LongVec1d & membership, Interval* newrec);
+    const vector<vector<vector<Interval*> > > & GetRecsByPart() const;
+
+};
+
+#endif // TREESUM_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/chainparam_interface.cpp b/src/ui_interface/chainparam_interface.cpp
new file mode 100644
index 0000000..6dda761
--- /dev/null
+++ b/src/ui_interface/chainparam_interface.cpp
@@ -0,0 +1,538 @@
+// $Id: chainparam_interface.cpp,v 1.36 2011/01/12 17:27:20 mkkuhner Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <iostream>
+#include "arranger.h"
+#include "chainparam.h"
+#include "chainparam_interface.h"
+#include "constants.h"
+#include "stringx.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+
+using namespace std;
+
+/// uiFinalChains
+
+uiFinalChains::uiFinalChains()
+    : SetGetLong(uistr::finalChains)
+{
+}
+
+uiFinalChains::~uiFinalChains()
+{
+}
+
+long uiFinalChains::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetFinalNumberOfChains();
+}
+
+void uiFinalChains::Set(UIVars& vars, UIId id, long val)
+{
+    vars.chains.SetFinalNumberOfChains(val);
+}
+
+/// uiFinalDiscard
+
+uiFinalDiscard::uiFinalDiscard()
+    : SetGetLong(uistr::finalDiscard)
+{
+}
+
+uiFinalDiscard::~uiFinalDiscard()
+{
+}
+
+long uiFinalDiscard::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetFinalNumberOfChainsToDiscard();
+}
+
+void uiFinalDiscard::Set(UIVars& vars, UIId id, long val)
+{
+    vars.chains.SetFinalNumberOfChainsToDiscard(val);
+}
+
+/// uiFinalSamples
+
+uiFinalSamples::uiFinalSamples()
+    : SetGetLong(uistr::finalSamples)
+{
+}
+
+uiFinalSamples::~uiFinalSamples()
+{
+}
+
+long uiFinalSamples::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetFinalNumberOfSamples();
+}
+
+void uiFinalSamples::Set(UIVars& vars, UIId id, long val)
+{
+    vars.chains.SetFinalNumberOfSamples(val);
+}
+
+/// uiFinalInterval
+
+uiFinalInterval::uiFinalInterval()
+    : SetGetLong(uistr::finalInterval)
+{
+}
+
+uiFinalInterval::~uiFinalInterval()
+{
+}
+
+long uiFinalInterval::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetFinalChainSamplingInterval();
+}
+
+void uiFinalInterval::Set(UIVars& vars, UIId id, long val)
+{
+    vars.chains.SetFinalChainSamplingInterval(val);
+}
+
+/// uiInitialChains
+
+uiInitialChains::uiInitialChains()
+    : SetGetLong(uistr::initialChains)
+{
+}
+
+uiInitialChains::~uiInitialChains()
+{
+}
+
+long uiInitialChains::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetInitialNumberOfChains();
+}
+
+void uiInitialChains::Set(UIVars& vars, UIId id, long val)
+{
+    vars.chains.SetInitialNumberOfChains(val);
+}
+
+/// uiInitialDiscard
+
+uiInitialDiscard::uiInitialDiscard()
+    : SetGetLong(uistr::initialDiscard)
+{
+}
+
+uiInitialDiscard::~uiInitialDiscard()
+{
+}
+
+long uiInitialDiscard::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetInitialNumberOfChainsToDiscard();
+}
+
+void uiInitialDiscard::Set(UIVars& vars, UIId id, long val)
+{
+    vars.chains.SetInitialNumberOfChainsToDiscard(val);
+}
+
+/// uiInitialSamples
+
+uiInitialSamples::uiInitialSamples()
+    : SetGetLong(uistr::initialSamples)
+{
+}
+
+uiInitialSamples::~uiInitialSamples()
+{
+}
+
+long uiInitialSamples::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetInitialNumberOfSamples();
+}
+
+void uiInitialSamples::Set(UIVars& vars, UIId id, long val)
+{
+    vars.chains.SetInitialNumberOfSamples(val);
+}
+
+/// uiInitialInterval
+
+uiInitialInterval::uiInitialInterval()
+    : SetGetLong(uistr::initialInterval)
+{
+}
+
+uiInitialInterval::~uiInitialInterval()
+{
+}
+
+long uiInitialInterval::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetInitialChainSamplingInterval();
+}
+
+void uiInitialInterval::Set(UIVars& vars, UIId id, long val)
+{
+    vars.chains.SetInitialChainSamplingInterval(val);
+}
+
+/// uiHeatedChain
+
+uiHeatedChain::uiHeatedChain()
+    : SetGetDouble(uistr::heatedChain)
+{
+}
+
+uiHeatedChain::~uiHeatedChain()
+{
+}
+
+double uiHeatedChain::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetChainTemperature(id.GetIndex1());
+}
+
+void uiHeatedChain::Set(UIVars& vars, UIId id, double val)
+{
+    vars.chains.SetChainTemperature(val,id.GetIndex1());
+}
+
+/// uiHeatedChains
+
+uiHeatedChains::uiHeatedChains()
+    : GetDoubleVec1d(uistr::heatedChains)
+{
+}
+
+uiHeatedChains::~uiHeatedChains()
+{
+}
+
+DoubleVec1d uiHeatedChains::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetChainTemperatures();
+}
+
+/// uiHeatedChainCount
+
+uiHeatedChainCount::uiHeatedChainCount()
+    : SetGetLong(uistr::heatedChainCount)
+{
+}
+
+uiHeatedChainCount::~uiHeatedChainCount()
+{
+}
+
+long uiHeatedChainCount::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetChainCount();
+}
+
+void uiHeatedChainCount::Set(UIVars& vars, UIId id, long val)
+{
+    vars.chains.SetChainCount(val);
+}
+
+/// uiTempInterval
+
+uiTempInterval::uiTempInterval()
+    : SetGetLong(uistr::tempInterval)
+{
+}
+
+uiTempInterval::~uiTempInterval()
+{
+}
+
+long uiTempInterval::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetTemperatureInterval();
+}
+
+void uiTempInterval::Set(UIVars& vars, UIId id, long val)
+{
+    vars.chains.SetTemperatureInterval(val);
+}
+
+/// uiAdaptiveTemp
+
+uiAdaptiveTemp::uiAdaptiveTemp()
+    : SetGetBool(uistr::tempAdapt)
+{
+}
+
+uiAdaptiveTemp::~uiAdaptiveTemp()
+{
+}
+
+bool uiAdaptiveTemp::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetAdaptiveTemperatures();
+}
+
+void uiAdaptiveTemp::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.chains.SetAdaptiveTemperatures(val);
+}
+
+/// uiNumReps
+
+uiNumReps::uiNumReps()
+    : SetGetLong(uistr::replicates)
+{
+}
+
+uiNumReps::~uiNumReps()
+{
+}
+
+long uiNumReps::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetNumberOfReplicates();
+}
+
+void uiNumReps::Set(UIVars& vars, UIId id, long val)
+{
+    vars.chains.SetNumberOfReplicates(val);
+}
+
+/// uiDropArranger
+
+uiDropArranger::uiDropArranger()
+    : SetGetDouble(uistr::dropArranger)
+{
+}
+
+uiDropArranger::~uiDropArranger()
+{
+}
+
+double uiDropArranger::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetDropArrangerRelativeTiming();
+}
+
+void uiDropArranger::Set(UIVars& vars, UIId id, double val)
+{
+    vars.chains.SetDropArrangerRelativeTiming(val);
+}
+
+/// uiSizeArranger
+
+uiSizeArranger::uiSizeArranger()
+    : SetGetDouble(uistr::sizeArranger)
+{
+}
+
+uiSizeArranger::~uiSizeArranger()
+{
+}
+
+double uiSizeArranger::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetSizeArrangerRelativeTiming();
+}
+
+void uiSizeArranger::Set(UIVars& vars, UIId id, double val)
+{
+    vars.chains.SetSizeArrangerRelativeTiming(val);
+}
+
+/// uiBayesArranger
+
+uiBayesArranger::uiBayesArranger()
+    : SetGetDouble(uistr::bayesArranger)
+{
+}
+
+uiBayesArranger::~uiBayesArranger()
+{
+}
+
+double uiBayesArranger::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetBayesianArrangerRelativeTiming();
+}
+
+void uiBayesArranger::Set(UIVars& vars, UIId id, double val)
+{
+    vars.chains.SetBayesianArrangerRelativeTiming(val);
+}
+
+/// uiLocusArranger
+
+uiLocusArranger::uiLocusArranger()
+    : SetGetDouble(uistr::locusArranger)
+{
+}
+
+uiLocusArranger::~uiLocusArranger()
+{
+}
+
+double uiLocusArranger::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetLocusArrangerRelativeTiming();
+}
+
+void uiLocusArranger::Set(UIVars& vars, UIId id, double val)
+{
+    vars.chains.SetLocusArrangerRelativeTiming(val);
+}
+
+/// uiHapArranger
+
+uiHapArranger::uiHapArranger()
+    : SetGetDouble(uistr::hapArranger)
+{
+}
+
+uiHapArranger::~uiHapArranger()
+{
+}
+
+double uiHapArranger::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetHaplotypeArrangerRelativeTiming();
+}
+
+void uiHapArranger::Set(UIVars& vars, UIId id, double val)
+{
+    vars.chains.SetHaplotypeArrangerRelativeTiming(val);
+}
+
+/// uiProbHapArranger
+
+uiProbHapArranger::uiProbHapArranger()
+    : SetGetDouble(uistr::probhapArranger)
+{
+}
+
+uiProbHapArranger::~uiProbHapArranger()
+{
+}
+
+double uiProbHapArranger::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetProbHapArrangerRelativeTiming();
+}
+
+void uiProbHapArranger::Set(UIVars& vars, UIId id, double val)
+{
+    vars.chains.SetProbHapArrangerRelativeTiming(val);
+}
+
+/// uiZilchArranger
+
+uiZilchArranger::uiZilchArranger()
+    : SetGetDouble(uistr::zilchArranger)
+{
+}
+
+uiZilchArranger::~uiZilchArranger()
+{
+}
+
+double uiZilchArranger::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetZilchArrangerRelativeTiming();
+}
+
+void uiZilchArranger::Set(UIVars& vars, UIId id, double val)
+{
+    vars.chains.SetZilchArrangerRelativeTiming(val);
+}
+
+/// uiStairArranger
+
+uiStairArranger::uiStairArranger()
+    : SetGetDouble(uistr::stairArranger)
+{
+}
+
+uiStairArranger::~uiStairArranger()
+{
+}
+
+double uiStairArranger::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetStairArrangerRelativeTiming();
+}
+
+void uiStairArranger::Set(UIVars& vars, UIId id, double val)
+{
+    vars.chains.SetStairArrangerRelativeTiming(val);
+}
+
+/// uiEpochSizeArranger
+
+uiEpochSizeArranger::uiEpochSizeArranger()
+    : SetGetDouble(uistr::epochSizeArranger)
+{
+}
+
+uiEpochSizeArranger::~uiEpochSizeArranger()
+{
+}
+
+double uiEpochSizeArranger::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetEpochSizeArrangerRelativeTiming();
+}
+
+void uiEpochSizeArranger::Set(UIVars& vars, UIId id, double val)
+{
+    vars.chains.SetEpochSizeArrangerRelativeTiming(val);
+}
+
+/// uiCanHapArrange
+
+uiCanHapArrange::uiCanHapArrange()
+    : GetBool(uistr::canHapArrange)
+{
+}
+
+uiCanHapArrange::~uiCanHapArrange()
+{
+}
+
+bool uiCanHapArrange::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetHaplotypeArrangerPossible();
+}
+
+/// uiBayesianAnalysis
+
+uiBayesianAnalysis::uiBayesianAnalysis()
+    : SetGetBoolBayesLike(uistr::bayesian)
+{
+}
+
+uiBayesianAnalysis::~uiBayesianAnalysis()
+{
+}
+
+bool uiBayesianAnalysis::Get(UIVars& vars, UIId id)
+{
+    return vars.chains.GetDoBayesianAnalysis();
+}
+
+void uiBayesianAnalysis::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.chains.SetDoBayesianAnalysis(val);
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/chainparam_interface.h b/src/ui_interface/chainparam_interface.h
new file mode 100644
index 0000000..cde96d1
--- /dev/null
+++ b/src/ui_interface/chainparam_interface.h
@@ -0,0 +1,260 @@
+// $Id: chainparam_interface.h,v 1.35 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef CHAINPARAM_INTERFACE_H
+#define CHAINPARAM_INTERFACE_H
+
+#include <string>
+#include "setget.h"
+
+class UIVars;
+
+class uiInitialChains : public SetGetLong
+{
+  public:
+    uiInitialChains();
+    virtual ~uiInitialChains();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val);
+};
+
+class uiInitialDiscard : public SetGetLong
+{
+  public:
+    uiInitialDiscard();
+    virtual ~uiInitialDiscard();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val) ;
+};
+
+class uiInitialSamples : public SetGetLong
+{
+  public:
+    uiInitialSamples();
+    virtual ~uiInitialSamples();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val);
+};
+
+class uiInitialInterval : public SetGetLong
+{
+  public:
+    uiInitialInterval();
+    virtual ~uiInitialInterval();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val);
+};
+
+class uiFinalChains : public SetGetLong
+{
+  public:
+    uiFinalChains();
+    virtual ~uiFinalChains();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val);
+};
+
+class uiFinalDiscard : public SetGetLong
+{
+  public:
+    uiFinalDiscard();
+    virtual ~uiFinalDiscard();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val) ;
+};
+
+class uiFinalSamples : public SetGetLong
+{
+  public:
+    uiFinalSamples();
+    virtual ~uiFinalSamples();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val);
+};
+
+class uiFinalInterval : public SetGetLong
+{
+  public:
+    uiFinalInterval();
+    virtual ~uiFinalInterval();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val);
+};
+
+class uiHeatedChain: public SetGetDouble
+{
+  public:
+    uiHeatedChain();
+    virtual ~uiHeatedChain();
+    double Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiHeatedChains : public GetDoubleVec1d
+{
+  public:
+    uiHeatedChains();
+    virtual ~uiHeatedChains();
+    DoubleVec1d Get(UIVars& vars, UIId id);
+};
+
+class uiHeatedChainCount : public SetGetLong
+{
+  public:
+    uiHeatedChainCount();
+    virtual ~uiHeatedChainCount();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id, long val);
+};
+
+class uiTempInterval : public SetGetLong
+{
+  public:
+    uiTempInterval();
+    virtual ~uiTempInterval();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val) ;
+};
+
+class uiAdaptiveTemp : public SetGetBool
+{
+  public:
+    uiAdaptiveTemp();
+    virtual ~uiAdaptiveTemp();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id, bool val);
+};
+
+class uiNumReps : public SetGetLong
+{
+  public:
+    uiNumReps();
+    virtual ~uiNumReps();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val) ;
+};
+
+class uiDropArranger : public SetGetDouble
+{
+  public:
+    uiDropArranger();
+    virtual ~uiDropArranger();
+    double Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,double val);
+};
+
+class uiSizeArranger : public SetGetDouble
+{
+  public:
+    uiSizeArranger();
+    virtual ~uiSizeArranger();
+    double Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,double val);
+};
+
+class uiBayesArranger : public SetGetDouble
+{
+  public:
+    uiBayesArranger();
+    virtual ~uiBayesArranger();
+    double Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiLocusArranger : public SetGetDouble
+{
+  public:
+    uiLocusArranger();
+    virtual ~uiLocusArranger();
+    double Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiHapArranger : public SetGetDouble
+{
+  public:
+    uiHapArranger();
+    virtual ~uiHapArranger();
+    double Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,double val);
+};
+
+class uiProbHapArranger : public SetGetDouble
+{
+  public:
+    uiProbHapArranger();
+    virtual ~uiProbHapArranger();
+    double Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,double val);
+};
+
+class uiZilchArranger : public SetGetDouble
+{
+  public:
+    uiZilchArranger();
+    virtual ~uiZilchArranger();
+    double Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,double val);
+};
+
+class uiStairArranger : public SetGetDouble
+{
+  public:
+    uiStairArranger();
+    virtual ~uiStairArranger();
+    double Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,double val);
+};
+
+class uiEpochSizeArranger : public SetGetDouble
+{
+  public:
+    uiEpochSizeArranger();
+    virtual ~uiEpochSizeArranger();
+    double Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,double val);
+};
+
+class uiCanHapArrange : public GetBool
+{
+  public:
+    uiCanHapArrange();
+    virtual ~uiCanHapArrange();
+    bool Get(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetBool which prints "bayesian" for
+// a true value and "likelihood" for a false one
+class SetGetBoolBayesLike : public SetGetBool
+{
+  public:
+    SetGetBoolBayesLike(const std::string & key) : SetGetBool(key) {};
+    virtual ~SetGetBoolBayesLike() {};
+    virtual std::string MakePrintString(UIVars& vars, bool val)
+    {
+        if(val) return "bayesian";
+        return "likelihood";
+    };
+};
+
+class uiBayesianAnalysis : public SetGetBoolBayesLike
+{
+  public:
+    uiBayesianAnalysis();
+    virtual ~uiBayesianAnalysis();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id, bool val);
+};
+
+#endif  // CHAINPARAM_INTERFACE_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/constraint_interface.cpp b/src/ui_interface/constraint_interface.cpp
new file mode 100644
index 0000000..efb39cc
--- /dev/null
+++ b/src/ui_interface/constraint_interface.cpp
@@ -0,0 +1,206 @@
+// $Id: constraint_interface.cpp,v 1.9 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <algorithm>    // for std::min()
+#include "constraint_interface.h"
+#include "defaults.h"
+#include "force.h"
+#include "ui_constants.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+#include "paramstat.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+uiParameterStatus::uiParameterStatus()
+    : SetGetIndividualParamstatus(uistr::constraintType)
+{
+}
+
+uiParameterStatus::~uiParameterStatus()
+{
+}
+
+ParamStatus uiParameterStatus::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetParamstatus(id.GetForceType(), id.GetIndex1()).Status();
+}
+
+void uiParameterStatus::Set(UIVars& vars, UIId id, ParamStatus val)
+{
+    vars.forces.SetParamstatus(val, id.GetForceType(), id.GetIndex1());
+}
+
+string uiParameterStatus::Description(UIVars& vars, UIId id)
+{
+    // EWFIX.P5 DIMENSIONS -- will change if we divide up 2-d params into 2-D storage
+    return vars.datapackplus.GetParamName(id.GetForceType(),id.GetIndex1());
+}
+
+//------------------------------------------------------------------------------------
+
+uiAddParamToGroup::uiAddParamToGroup()
+    : SetGetNoval(uistr::addParamToGroup)
+{
+}
+
+uiAddParamToGroup::~uiAddParamToGroup()
+{
+}
+
+void uiAddParamToGroup::Set(UIVars& vars, UIId id, noval val)
+{
+    vars.forces.AddParamToGroup(id.GetForceType(), id.GetIndex2(), id.GetIndex1());
+}
+
+//------------------------------------------------------------------------------------
+
+uiAddParamToNewGroup::uiAddParamToNewGroup()
+    : SetGetNoval(uistr::addParamToNewGroup)
+{
+}
+
+uiAddParamToNewGroup::~uiAddParamToNewGroup()
+{
+}
+
+void uiAddParamToNewGroup::Set(UIVars& vars, UIId id, noval val)
+{
+    vars.forces.AddParamToNewGroup(id.GetForceType(), id.GetIndex2());
+}
+
+//------------------------------------------------------------------------------------
+
+uiRemoveParamFromGroup::uiRemoveParamFromGroup()
+    : SetGetNoval(uistr::removeParamFromGroup)
+{
+}
+
+uiRemoveParamFromGroup::~uiRemoveParamFromGroup()
+{
+}
+
+void uiRemoveParamFromGroup::Set(UIVars& vars, UIId id, noval val)
+{
+    vars.forces.RemoveParamFromGroup(id.GetForceType(), id.GetIndex1());
+}
+
+//------------------------------------------------------------------------------------
+
+uiGroupParameterStatus::uiGroupParameterStatus()
+    : SetGetGroupParamstatus(uistr::groupConstraintType)
+{
+}
+
+uiGroupParameterStatus::~uiGroupParameterStatus()
+{
+}
+
+ParamStatus uiGroupParameterStatus::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetGroupParamstatus(id.GetForceType(), id.GetIndex1());
+    //Index1 is the group index.
+}
+
+void uiGroupParameterStatus::Set(UIVars& vars, UIId id, ParamStatus val)
+{
+    vars.forces.SetGroupParamstatus(val, id.GetForceType(), id.GetIndex1());
+    //Index1 is the group index.
+}
+
+string uiGroupParameterStatus::Description(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetParamName(id.GetForceType(),id.GetIndex2());
+    //Index2 is the parameter index.
+}
+
+//------------------------------------------------------------------------------------
+
+uiGroupParameterList::uiGroupParameterList()
+    : SetGetLongVec1d(uistr::groupParamList)
+{
+}
+
+uiGroupParameterList::~uiGroupParameterList()
+{
+}
+
+LongVec1d uiGroupParameterList::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetGroupParamList(id.GetForceType(), id.GetIndex1());
+}
+
+void uiGroupParameterList::Set(UIVars& vars, UIId id, LongVec1d params)
+{
+    vars.forces.AddGroup(params, id.GetForceType(), id.GetIndex1());
+}
+
+//------------------------------------------------------------------------------------
+
+uiUngroupedParamsForOneForce::uiUngroupedParamsForOneForce()
+    : GetUIIdVec1d(uistr::ungroupedParamsForForce)
+{
+}
+
+uiUngroupedParamsForOneForce::~uiUngroupedParamsForOneForce()
+{
+}
+
+UIIdVec1d uiUngroupedParamsForOneForce::Get(UIVars& vars, UIId id)
+{
+    force_type thisForce = id.GetForceType();
+    long numPossibleParams = vars.forces.GetNumParameters(thisForce);
+    UIIdVec1d ungroupedParams;
+    for(long localId=0; localId < numPossibleParams; localId++)
+    {
+        long gindex = vars.forces.ParamInGroup(thisForce,localId);
+        if (gindex == FLAGLONG)
+        {
+            if (vars.forces.GetParamValid(thisForce,localId))
+            {
+                ungroupedParams.push_back(UIId(thisForce,localId));
+            }
+        }
+    }
+    return ungroupedParams;
+}
+
+//------------------------------------------------------------------------------------
+
+uiGroupedParamsForOneForce::uiGroupedParamsForOneForce()
+    : GetUIIdVec2d(uistr::groupedParamsForForce)
+{
+}
+
+uiGroupedParamsForOneForce::~uiGroupedParamsForOneForce()
+{
+}
+
+UIIdVec2d uiGroupedParamsForOneForce::Get(UIVars& vars, UIId id)
+{
+    force_type thisForce = id.GetForceType();
+    long numPossibleParams = vars.forces.GetNumParameters(thisForce);
+    UIIdVec2d groupedParams(vars.forces.GetNumGroups(thisForce));
+    for(long localId=0; localId < numPossibleParams; localId++)
+    {
+        long gindex = vars.forces.ParamInGroup(thisForce,localId);
+        if (gindex != FLAGLONG)
+        {
+            groupedParams[gindex].push_back(UIId(thisForce,gindex,localId));
+        }
+    }
+    return groupedParams;
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/constraint_interface.h b/src/ui_interface/constraint_interface.h
new file mode 100644
index 0000000..69d302a
--- /dev/null
+++ b/src/ui_interface/constraint_interface.h
@@ -0,0 +1,91 @@
+// $Id: constraint_interface.h,v 1.5 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef CONSTRAINT_INTERFACE_H
+#define CONSTRAINT_INTERFACE_H
+
+#include <string>
+#include "setget.h"
+
+class UIVars;
+
+class uiParameterStatus : public SetGetIndividualParamstatus
+{
+  public:
+    uiParameterStatus();
+    virtual ~uiParameterStatus();
+    virtual ParamStatus Get(UIVars& vars, UIId id);
+    virtual void        Set(UIVars& vars, UIId id, ParamStatus val);
+    virtual string Description(UIVars& vars, UIId id);
+};
+
+class uiAddParamToGroup : public SetGetNoval
+{
+  public:
+    uiAddParamToGroup();
+    virtual ~uiAddParamToGroup();
+    virtual void   Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiAddParamToNewGroup : public SetGetNoval
+{
+  public:
+    uiAddParamToNewGroup();
+    virtual ~uiAddParamToNewGroup();
+    virtual void   Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiRemoveParamFromGroup : public SetGetNoval
+{
+  public:
+    uiRemoveParamFromGroup();
+    virtual ~uiRemoveParamFromGroup();
+    virtual void   Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiGroupParameterStatus : public SetGetGroupParamstatus
+{
+  public:
+    uiGroupParameterStatus();
+    virtual ~uiGroupParameterStatus();
+    virtual ParamStatus Get(UIVars& vars, UIId id);
+    virtual void        Set(UIVars& vars, UIId id, ParamStatus val);
+    virtual string Description(UIVars& vars, UIId id);
+};
+
+class uiGroupParameterList : public SetGetLongVec1d
+{
+  public:
+    uiGroupParameterList();
+    virtual ~uiGroupParameterList();
+    virtual LongVec1d   Get(UIVars& vars, UIId id);
+    virtual void        Set(UIVars& vars, UIId id, LongVec1d val);
+};
+
+class uiUngroupedParamsForOneForce : public GetUIIdVec1d
+{
+  public:
+    uiUngroupedParamsForOneForce();
+    virtual ~uiUngroupedParamsForOneForce();
+    virtual UIIdVec1d Get(UIVars& vars, UIId id);
+};
+
+class uiGroupedParamsForOneForce : public GetUIIdVec2d
+{
+  public:
+    uiGroupedParamsForOneForce();
+    virtual ~uiGroupedParamsForOneForce();
+    virtual UIIdVec2d Get(UIVars& vars, UIId id);
+};
+
+#endif  // CONSTRAINT_INTERFACE_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/data_interface.cpp b/src/ui_interface/data_interface.cpp
new file mode 100644
index 0000000..cd01fb1
--- /dev/null
+++ b/src/ui_interface/data_interface.cpp
@@ -0,0 +1,218 @@
+// $Id: data_interface.cpp,v 1.33 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <iostream>
+
+#include "data_interface.h"
+#include "setget.h"
+#include "ui_vars.h"
+#include "ui_strings.h"
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+long uiCrossPartitionCount::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetNCrossPartitions();
+}
+
+uiCrossPartitionCount::uiCrossPartitionCount()
+    : GetLong(uistr::crossPartitionCount)
+{
+}
+
+uiCrossPartitionCount::~uiCrossPartitionCount()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+long uiDivMigPartitionCount::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetNPartitionsByForceType(force_DIVMIG);
+}
+
+uiDivMigPartitionCount::uiDivMigPartitionCount()
+    : GetLong(uistr::divmigrationPartitionCount)
+{
+}
+
+uiDivMigPartitionCount::~uiDivMigPartitionCount()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+string uiDivMigPartitionName::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetForcePartitionName(force_DIVMIG,id.GetIndex1());
+}
+
+uiDivMigPartitionName::uiDivMigPartitionName()
+    : GetString(uistr::divmigrationPartitionName)
+{
+}
+
+uiDivMigPartitionName::~uiDivMigPartitionName()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+long uiMigPartitionCount::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetNPartitionsByForceType(force_MIG);
+}
+
+uiMigPartitionCount::uiMigPartitionCount()
+    : GetLong(uistr::migrationPartitionCount)
+{
+}
+
+uiMigPartitionCount::~uiMigPartitionCount()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+string uiMigPartitionName::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetForcePartitionName(force_MIG,id.GetIndex1());
+}
+
+uiMigPartitionName::uiMigPartitionName()
+    : GetString(uistr::migrationPartitionName)
+{
+}
+
+uiMigPartitionName::~uiMigPartitionName()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+long uiDiseasePartitionCount::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetNPartitionsByForceType(force_DISEASE);
+}
+
+uiDiseasePartitionCount::uiDiseasePartitionCount()
+    : GetLong(uistr::diseasePartitionCount)
+{
+}
+
+uiDiseasePartitionCount::~uiDiseasePartitionCount()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+string uiDiseasePartitionName::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetForcePartitionName(force_DISEASE,id.GetIndex1());
+}
+
+uiDiseasePartitionName::uiDiseasePartitionName()
+    : GetString(uistr::diseasePartitionName)
+{
+}
+
+uiDiseasePartitionName::~uiDiseasePartitionName()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+LongVec1d uiLociNumbers::Get(UIVars& vars, UIId id)
+{
+    long count = vars.datapackplus.GetNumLoci(id.GetIndex1());
+    LongVec1d longVec;
+    for(long i= 0; i< count; i++)
+    {
+        longVec.push_back(i);
+    }
+    return longVec;
+}
+
+uiLociNumbers::uiLociNumbers()
+    : GetLongVec1d(uistr::lociNumbers)
+{
+}
+
+uiLociNumbers::~uiLociNumbers()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+LongVec1d uiRegionNumbers::Get(UIVars& vars, UIId id)
+{
+    long count = vars.datapackplus.GetNumRegions();
+    LongVec1d longVec;
+    for(long i= 0; i< count; i++)
+    {
+        longVec.push_back(i);
+    }
+    return longVec;
+}
+
+uiRegionNumbers::uiRegionNumbers()
+    : GetLongVec1d(uistr::regionNumbers)
+{
+}
+
+uiRegionNumbers::~uiRegionNumbers()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiRegionEffectivePopSize::uiRegionEffectivePopSize()
+    : SetGetDouble(uistr::effectivePopSize)
+{
+}
+
+uiRegionEffectivePopSize::~uiRegionEffectivePopSize()
+{
+}
+
+double uiRegionEffectivePopSize::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetEffectivePopSize(id.GetIndex1());
+}
+
+void uiRegionEffectivePopSize::Set(UIVars& vars, UIId id, double size)
+{
+    vars.datapackplus.SetEffectivePopSize(id.GetIndex1(), size);
+}
+
+//------------------------------------------------------------------------------------
+
+uiSimulateData::uiSimulateData()
+    : SetGetBool(uistr::simulateData)
+{
+}
+
+uiSimulateData::~uiSimulateData()
+{
+}
+
+bool uiSimulateData::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetSimulateData(id.GetIndex1(), id.GetIndex2());
+}
+
+void uiSimulateData::Set(UIVars& vars, UIId id, bool sim)
+{
+    vars.datapackplus.SetSimulateData(id.GetIndex1(), id.GetIndex2(), sim);
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/data_interface.h b/src/ui_interface/data_interface.h
new file mode 100644
index 0000000..8c87bbf
--- /dev/null
+++ b/src/ui_interface/data_interface.h
@@ -0,0 +1,118 @@
+// $Id: data_interface.h,v 1.23 2012/02/15 18:13:42 jmcgill Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef DATA_INTERFACE_H
+#define DATA_INTERFACE_H
+
+#include "setget.h"
+#include "vectorx.h"
+
+class UIVars;
+
+class uiCrossPartitionCount : public GetLong
+{
+  public:
+    uiCrossPartitionCount();
+    virtual ~uiCrossPartitionCount();
+    virtual long Get(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiDivMigPartitionCount : public GetLong
+{
+  public:
+    uiDivMigPartitionCount();
+    virtual ~uiDivMigPartitionCount();
+    virtual long Get(UIVars& vars, UIId id);
+};
+
+class uiDivMigPartitionName : public GetString
+{
+  public:
+    uiDivMigPartitionName();
+    virtual ~uiDivMigPartitionName();
+    virtual std::string Get(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiMigPartitionCount : public GetLong
+{
+  public:
+    uiMigPartitionCount();
+    virtual ~uiMigPartitionCount();
+    virtual long Get(UIVars& vars, UIId id);
+};
+
+class uiMigPartitionName : public GetString
+{
+  public:
+    uiMigPartitionName();
+    virtual ~uiMigPartitionName();
+    virtual std::string Get(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiDiseasePartitionCount : public GetLong
+{
+  public:
+    uiDiseasePartitionCount();
+    virtual ~uiDiseasePartitionCount();
+    virtual long Get(UIVars& vars, UIId id);
+};
+
+class uiDiseasePartitionName : public GetString
+{
+  public:
+    uiDiseasePartitionName();
+    virtual ~uiDiseasePartitionName();
+    virtual std::string Get(UIVars& vars, UIId id);
+};
+
+class uiLociNumbers : public GetLongVec1d
+{
+  public:
+    uiLociNumbers();
+    virtual ~uiLociNumbers();
+    virtual LongVec1d Get(UIVars& vars, UIId id);
+};
+
+class uiRegionNumbers : public GetLongVec1d
+{
+  public:
+    uiRegionNumbers();
+    virtual ~uiRegionNumbers();
+    virtual LongVec1d Get(UIVars& vars, UIId id);
+};
+
+class uiRegionEffectivePopSize : public SetGetDouble
+{
+  public:
+    uiRegionEffectivePopSize();
+    virtual ~uiRegionEffectivePopSize();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void   Set(UIVars& vars, UIId id, double size);
+};
+
+class uiSimulateData : public SetGetBool
+{
+  public:
+    uiSimulateData();
+    virtual ~uiSimulateData();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, bool sim);
+};
+
+#endif // DATA_INTERFACE_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/datamodel_interface.cpp b/src/ui_interface/datamodel_interface.cpp
new file mode 100644
index 0000000..ef93895
--- /dev/null
+++ b/src/ui_interface/datamodel_interface.cpp
@@ -0,0 +1,735 @@
+// $Id: datamodel_interface.cpp,v 1.39 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+#include <string>
+
+#include "datamodel_interface.h"
+#include "lamarc_strings.h"     // for lamarcmenu::calcPerLocus and ::calculated
+#include "ui_regid.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+#include "vectorx.h"
+#include "xml_strings.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+long uiLociCount::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetNumLoci(id.GetIndex1());
+}
+
+uiLociCount::uiLociCount()
+    : GetLong(uistr::lociCount)
+{
+}
+
+uiLociCount::~uiLociCount()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+string uiLocusName::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetLocusName(id.GetIndex1(),id.GetIndex2());
+
+}
+
+uiLocusName::uiLocusName()
+    : GetString(uistr::locusName)
+{
+}
+
+uiLocusName::~uiLocusName()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+string uiRegionName::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetRegionName(id.GetIndex1());
+
+}
+
+uiRegionName::uiRegionName()
+    : GetString(uistr::regionName)
+{
+}
+
+uiRegionName::~uiRegionName()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+bool uiFreqsFromData::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetCalcFreqsFromData(UIRegId(id, vars));
+}
+
+void uiFreqsFromData::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.datamodel.SetCalcFreqsFromData(val,UIRegId(id, vars));
+}
+
+uiFreqsFromData::uiFreqsFromData()
+    : SetGetBool(uistr::freqsFromData)
+{
+}
+
+uiFreqsFromData::~uiFreqsFromData()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiDataType::uiDataType()
+    : GetDataType(uistr::dataType)
+{
+}
+
+uiDataType::~uiDataType()
+{
+}
+
+data_type uiDataType::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetDataType(UIRegId(id,vars));
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d uiGTRRates::Get(UIVars& vars, UIId id)
+{
+    DoubleVec1d gtrs(6);
+    gtrs[0] = vars.datamodel.GetGTR_AC(UIRegId(id, vars));
+    gtrs[1] = vars.datamodel.GetGTR_AG(UIRegId(id, vars));
+    gtrs[2] = vars.datamodel.GetGTR_AT(UIRegId(id, vars));
+    gtrs[3] = vars.datamodel.GetGTR_CG(UIRegId(id, vars));
+    gtrs[4] = vars.datamodel.GetGTR_CT(UIRegId(id, vars));
+    gtrs[5] = vars.datamodel.GetGTR_GT(UIRegId(id, vars));
+    return gtrs;
+}
+
+uiGTRRates::uiGTRRates()
+    : GetDoubleVec1d(uistr::gtrRates)
+{
+}
+
+uiGTRRates::~uiGTRRates()
+{
+}
+
+string uiGTRRates::GetPrintString(UIVars& vars, UIId id)
+{
+    DoubleVec1d gtrs = Get(vars, id);
+    assert(gtrs.size() == 6);
+    string vals = Pretty(gtrs[0],6);
+    for (unsigned long i=1; i<gtrs.size(); i++)
+    {
+        vals += " " + Pretty(gtrs[i],6);
+    }
+    return vals;
+}
+
+//------------------------------------------------------------------------------------
+
+double uiGTRRateAC::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetGTR_AC(UIRegId(id, vars));
+}
+
+void uiGTRRateAC::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetGTR_AC(val,UIRegId(id, vars));
+}
+
+uiGTRRateAC::uiGTRRateAC()
+    : SetGetDouble(uistr::gtrRateAC)
+{
+}
+
+uiGTRRateAC::~uiGTRRateAC()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiGTRRateAG::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetGTR_AG(UIRegId(id, vars));
+}
+
+void uiGTRRateAG::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetGTR_AG(val,UIRegId(id, vars));
+}
+
+uiGTRRateAG::uiGTRRateAG()
+    : SetGetDouble(uistr::gtrRateAG)
+{
+}
+
+uiGTRRateAG::~uiGTRRateAG()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiGTRRateAT::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetGTR_AT(UIRegId(id, vars));
+}
+
+void uiGTRRateAT::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetGTR_AT(val,UIRegId(id, vars));
+}
+
+uiGTRRateAT::uiGTRRateAT()
+    : SetGetDouble(uistr::gtrRateAT)
+{
+}
+
+uiGTRRateAT::~uiGTRRateAT()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiGTRRateCG::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetGTR_CG(UIRegId(id, vars));
+}
+
+void uiGTRRateCG::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetGTR_CG(val,UIRegId(id, vars));
+}
+
+uiGTRRateCG::uiGTRRateCG()
+    : SetGetDouble(uistr::gtrRateCG)
+{
+}
+
+uiGTRRateCG::~uiGTRRateCG()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiGTRRateCT::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetGTR_CT(UIRegId(id, vars));
+}
+
+void uiGTRRateCT::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetGTR_CT(val,UIRegId(id, vars));
+}
+
+uiGTRRateCT::uiGTRRateCT()
+    : SetGetDouble(uistr::gtrRateCT)
+{
+}
+
+uiGTRRateCT::~uiGTRRateCT()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiGTRRateGT::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetGTR_GT(UIRegId(id, vars));
+}
+
+void uiGTRRateGT::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetGTR_GT(val,UIRegId(id, vars));
+}
+
+uiGTRRateGT::uiGTRRateGT()
+    : SetGetDouble(uistr::gtrRateGT)
+{
+}
+
+uiGTRRateGT::~uiGTRRateGT()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+DoubleVec1d uiBaseFrequencies::Get(UIVars& vars, UIId id)
+{
+    DoubleVec1d freqs(4);
+    freqs[0] = vars.datamodel.GetFrequencyA(UIRegId(id, vars));
+    freqs[1] = vars.datamodel.GetFrequencyC(UIRegId(id, vars));
+    freqs[2] = vars.datamodel.GetFrequencyG(UIRegId(id, vars));
+    freqs[3] = vars.datamodel.GetFrequencyT(UIRegId(id, vars));
+    return freqs;
+}
+
+uiBaseFrequencies::uiBaseFrequencies()
+    : GetDoubleVec1d(uistr::baseFrequencies)
+{
+}
+
+uiBaseFrequencies::~uiBaseFrequencies()
+{
+}
+
+string uiBaseFrequencies::GetPrintString(UIVars& vars, UIId id)
+{
+    UIRegId regId(id, vars);
+    bool calculated = vars.datamodel.GetCalcFreqsFromData(regId);
+    if(regId.GetRegion() == uiconst::GLOBAL_ID && calculated)
+    {
+        return lamarcmenu::calcPerLocus;
+    }
+    else
+    {
+        DoubleVec1d freqs = Get(vars, id);
+        assert(freqs.size() == 4);
+        string vals = Pretty(freqs[0], 6);
+        for (unsigned long i=1; i<freqs.size(); i++)
+        {
+            vals += " " + Pretty(freqs[i], 6);
+        }
+        if(calculated)
+        {
+            vals += lamarcmenu::calculated;
+        }
+        return vals;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+double uiBaseFrequencyA::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetFrequencyA(UIRegId(id, vars));
+}
+
+void uiBaseFrequencyA::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetFrequencyA(val,UIRegId(id, vars));
+}
+
+uiBaseFrequencyA::uiBaseFrequencyA()
+    : SetGetDouble(uistr::baseFrequencyA)
+{
+}
+
+uiBaseFrequencyA::~uiBaseFrequencyA()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiBaseFrequencyC::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetFrequencyC(UIRegId(id, vars));
+}
+
+void uiBaseFrequencyC::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetFrequencyC(val,UIRegId(id, vars));
+}
+
+uiBaseFrequencyC::uiBaseFrequencyC()
+    : SetGetDouble(uistr::baseFrequencyC)
+{
+}
+
+uiBaseFrequencyC::~uiBaseFrequencyC()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiBaseFrequencyG::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetFrequencyG(UIRegId(id, vars));
+}
+
+void uiBaseFrequencyG::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetFrequencyG(val,UIRegId(id, vars));
+}
+
+uiBaseFrequencyG::uiBaseFrequencyG()
+    : SetGetDouble(uistr::baseFrequencyG)
+{
+}
+
+uiBaseFrequencyG::~uiBaseFrequencyG()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiBaseFrequencyT::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetFrequencyT(UIRegId(id, vars));
+}
+
+void uiBaseFrequencyT::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetFrequencyT(val,UIRegId(id, vars));
+}
+
+uiBaseFrequencyT::uiBaseFrequencyT()
+    : SetGetDouble(uistr::baseFrequencyT)
+{
+}
+
+uiBaseFrequencyT::~uiBaseFrequencyT()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+model_type uiDataModel::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetDataModelType(UIRegId(id, vars));
+}
+
+void uiDataModel::Set(UIVars& vars, UIId id, model_type val)
+{
+    vars.datamodel.SetDataModelType(val,UIRegId(id, vars));
+}
+
+string
+uiDataModel::NextToggleValue(UIVars& vars, UIId id)
+{
+    ModelTypeVec1d legalModels = vars.datamodel.GetLegalDataModels(UIRegId(id, vars));
+    if(legalModels.empty())
+    {
+        implementation_error e("No legal model type available");
+        throw e;
+    }
+    model_type thisModel = vars.datamodel.GetDataModelType(UIRegId(id, vars));
+    long index;
+    long numModels = legalModels.size();
+    // find thisModel in the list and grab the next one
+    for(index = 0; index < numModels; index++)
+    {
+        if (legalModels[index] == thisModel)
+        {
+            return ToString(legalModels[(index + 1) % numModels]);
+        }
+    }
+    // didn't find this model, but there is a legal one, so grab the first
+    assert(false); //Why did we have an illegal data model to begin with?
+    return ToString(legalModels[0]);
+}
+
+uiDataModel::uiDataModel()
+    : SetGetModelType(uistr::dataModel)
+{
+}
+
+uiDataModel::~uiDataModel()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+StringVec1d uiDataModelReport::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetDataModelReport(UIRegId(id, vars));
+}
+
+uiDataModelReport::uiDataModelReport()
+    : GetStringVec1d(uistr::dataModelReport)
+{
+}
+
+uiDataModelReport::~uiDataModelReport()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+bool uiNormalization::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetNormalization(UIRegId(id, vars));
+}
+
+void uiNormalization::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.datamodel.SetNormalization(val,UIRegId(id, vars));
+}
+
+uiNormalization::uiNormalization()
+    : SetGetBool(uistr::normalization)
+{
+}
+
+uiNormalization::~uiNormalization()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiTTRatio::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetTTRatio(UIRegId(id, vars));
+}
+
+void uiTTRatio::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetTTRatio(val,UIRegId(id, vars));
+}
+
+uiTTRatio::uiTTRatio()
+    : SetGetDouble(uistr::TTRatio)
+{
+}
+
+uiTTRatio::~uiTTRatio()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiPerBaseErrorRate::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetPerBaseErrorRate(UIRegId(id, vars));
+}
+
+void uiPerBaseErrorRate::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetPerBaseErrorRate(val,UIRegId(id, vars));
+}
+
+uiPerBaseErrorRate::uiPerBaseErrorRate()
+    : SetGetDouble(uistr::perBaseErrorRate)
+{
+}
+
+uiPerBaseErrorRate::~uiPerBaseErrorRate()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiAlpha::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetAlpha(UIRegId(id, vars));
+}
+
+void uiAlpha::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetAlpha(val,UIRegId(id, vars));
+}
+
+string uiAlpha::Min(UIVars& vars, UIId id)
+{
+    return "zero";
+}
+
+string uiAlpha::Max(UIVars& vars, UIId id)
+{
+    return "one";
+}
+
+uiAlpha::uiAlpha()
+    : SetGetDouble(uistr::alpha)
+{
+}
+
+uiAlpha::~uiAlpha()
+{
+}
+
+bool uiOptimizeAlpha::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetOptimizeAlpha(UIRegId(id, vars));
+}
+
+void uiOptimizeAlpha::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.datamodel.SetOptimizeAlpha(val,UIRegId(id, vars));
+}
+
+uiOptimizeAlpha::uiOptimizeAlpha()
+    : SetGetBool(uistr::optimizeAlpha)
+{
+}
+
+uiOptimizeAlpha::~uiOptimizeAlpha()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiAutoCorrelation::Get(UIVars& vars, UIId id)
+{
+    // NB: This gets the user (uninverted) value
+    return vars.datamodel.GetAutoCorrelation(UIRegId(id, vars));
+}
+
+void uiAutoCorrelation::Set(UIVars& vars, UIId id, double val)
+{
+    // NB: This sets the user (uninverted) value
+    vars.datamodel.SetAutoCorrelation(val,UIRegId(id, vars));
+}
+
+uiAutoCorrelation::uiAutoCorrelation()
+    : SetGetDouble(uistr::autoCorrelation)
+{
+}
+
+uiAutoCorrelation::~uiAutoCorrelation()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+long uiCategoryCount::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetNumCategories(UIRegId(id, vars));
+}
+
+void uiCategoryCount::Set(UIVars& vars, UIId id, long val)
+{
+    vars.datamodel.SetNumCategories(val,UIRegId(id, vars));
+}
+
+string uiCategoryCount::Min(UIVars& vars, UIId id)
+{
+    return "1";
+}
+
+string uiCategoryCount::Max(UIVars& vars, UIId id)
+{
+    return ToString(defaults::maxNumCategories);
+}
+
+uiCategoryCount::uiCategoryCount()
+    : SetGetLong(uistr::categoryCount)
+{
+}
+
+uiCategoryCount::~uiCategoryCount()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiCategoryProbability::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetCategoryProbability(UIRegId(id, vars),id.GetIndex3());
+}
+
+void uiCategoryProbability::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetCategoryProbability(val,UIRegId(id, vars),id.GetIndex3());
+}
+
+uiCategoryProbability::uiCategoryProbability()
+    : SetGetDouble(uistr::categoryProbability)
+{
+}
+
+uiCategoryProbability::~uiCategoryProbability()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiCategoryRate::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetCategoryRate(UIRegId(id, vars),id.GetIndex3());
+}
+
+void uiCategoryRate::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetCategoryRate(val,UIRegId(id, vars),id.GetIndex3());
+}
+
+uiCategoryRate::uiCategoryRate()
+    : SetGetDouble(uistr::categoryRate)
+{
+}
+
+uiCategoryRate::~uiCategoryRate()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiRelativeMuRate::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetRelativeMuRate(UIRegId(id, vars));
+}
+
+void uiRelativeMuRate::Set(UIVars& vars, UIId id, double val)
+{
+    vars.datamodel.SetRelativeMuRate(val,UIRegId(id, vars));
+}
+
+uiRelativeMuRate::uiRelativeMuRate()
+    : SetGetDouble(uistr::relativeMuRate)
+{
+}
+
+uiRelativeMuRate::~uiRelativeMuRate()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void
+uiUseGlobalDataModelForAll::Set(UIVars& vars, UIId id, noval val)
+{
+    vars.datamodel.SetAllRegionsToGlobalModel();
+}
+
+uiUseGlobalDataModelForAll::uiUseGlobalDataModelForAll()
+    : SetGetNoval(uistr::useGlobalDataModelForAll)
+{
+}
+
+uiUseGlobalDataModelForAll::~uiUseGlobalDataModelForAll()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+bool
+uiUseGlobalDataModelForOne::Get(UIVars& vars, UIId id)
+{
+    return vars.datamodel.GetUseGlobalModel(UIRegId(id, vars));
+}
+
+void
+uiUseGlobalDataModelForOne::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.datamodel.SetUseGlobalModel(val,UIRegId(id, vars));
+}
+
+uiUseGlobalDataModelForOne::uiUseGlobalDataModelForOne()
+    : SetGetBool(uistr::useGlobalDataModelForOne)
+{
+}
+
+uiUseGlobalDataModelForOne::~uiUseGlobalDataModelForOne()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/datamodel_interface.h b/src/ui_interface/datamodel_interface.h
new file mode 100644
index 0000000..eced294
--- /dev/null
+++ b/src/ui_interface/datamodel_interface.h
@@ -0,0 +1,331 @@
+// $Id: datamodel_interface.h,v 1.29 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef DATAMODEL_INTERFACE_H
+#define DATAMODEL_INTERFACE_H
+
+#include <string>
+#include "setget.h"
+
+class UIVars;
+
+class uiLociCount : public GetLong
+{
+  public:
+    uiLociCount();
+    virtual ~uiLociCount();
+    virtual long Get(UIVars& vars, UIId id);
+};
+
+class uiLocusName : public GetString
+{
+  public:
+    uiLocusName();
+    virtual ~uiLocusName();
+    virtual std::string Get(UIVars& vars, UIId id);
+};
+
+class uiRegionName : public GetString
+{
+  public:
+    uiRegionName();
+    virtual ~uiRegionName();
+    virtual std::string Get(UIVars& vars, UIId id);
+};
+
+class uiFreqsFromData : public SetGetBool
+{
+  public:
+    uiFreqsFromData();
+    virtual ~uiFreqsFromData();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiDataType : public GetDataType
+{
+  public:
+    uiDataType();
+    virtual ~uiDataType();
+    virtual data_type Get(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiGTRRates : public GetDoubleVec1d
+{
+  public:
+    uiGTRRates();
+    virtual ~uiGTRRates();
+    virtual DoubleVec1d Get(UIVars& vars, UIId id);
+    virtual std::string GetPrintString(UIVars& vars, UIId id);
+};
+
+class uiGTRRateAC : public SetGetDouble
+{
+  public:
+    uiGTRRateAC();
+    virtual ~uiGTRRateAC();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiGTRRateAG : public SetGetDouble
+{
+  public:
+    uiGTRRateAG();
+    virtual ~uiGTRRateAG();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiGTRRateAT : public SetGetDouble
+{
+  public:
+    uiGTRRateAT();
+    virtual ~uiGTRRateAT();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiGTRRateCG : public SetGetDouble
+{
+  public:
+    uiGTRRateCG();
+    virtual ~uiGTRRateCG();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiGTRRateCT : public SetGetDouble
+{
+  public:
+    uiGTRRateCT();
+    virtual ~uiGTRRateCT();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiGTRRateGT : public SetGetDouble
+{
+  public:
+    uiGTRRateGT();
+    virtual ~uiGTRRateGT();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiBaseFrequencies : public GetDoubleVec1d
+{
+  public:
+    uiBaseFrequencies();
+    virtual ~uiBaseFrequencies();
+    virtual DoubleVec1d Get(UIVars& vars, UIId id);
+    virtual std::string GetPrintString(UIVars& vars, UIId id);
+};
+
+class uiBaseFrequencyA : public SetGetDouble
+{
+  public:
+    uiBaseFrequencyA();
+    virtual ~uiBaseFrequencyA();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiBaseFrequencyC : public SetGetDouble
+{
+  public:
+    uiBaseFrequencyC();
+    virtual ~uiBaseFrequencyC();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiBaseFrequencyG : public SetGetDouble
+{
+  public:
+    uiBaseFrequencyG();
+    virtual ~uiBaseFrequencyG();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiBaseFrequencyT : public SetGetDouble
+{
+  public:
+    uiBaseFrequencyT();
+    virtual ~uiBaseFrequencyT();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiDataModel : public SetGetModelType
+{
+  public:
+    uiDataModel();
+    virtual ~uiDataModel();
+    virtual model_type Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,model_type val);
+    virtual std::string NextToggleValue(UIVars& vars, UIId id);
+};
+
+class uiDataModelReport : public GetStringVec1d
+{
+  public:
+    uiDataModelReport();
+    virtual ~uiDataModelReport();
+    virtual StringVec1d Get(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+
+//LS NOTE:  Normalization is no longer in the menu, since it is turned on
+// automatically if needed.
+class uiNormalization : public SetGetBool
+{
+  public:
+    uiNormalization();
+    virtual ~uiNormalization();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, bool val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiTTRatio : public SetGetDouble
+{
+  public:
+    uiTTRatio();
+    virtual ~uiTTRatio();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiPerBaseErrorRate : public SetGetDouble
+{
+  public:
+    uiPerBaseErrorRate();
+    virtual ~uiPerBaseErrorRate();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiAlpha: public SetGetDouble
+{
+  public:
+    uiAlpha();
+    virtual ~uiAlpha();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,double val);
+    virtual string Min(UIVars& vars, UIId id);
+    virtual string Max(UIVars& vars, UIId id);
+};
+
+class uiOptimizeAlpha: public SetGetBool
+{
+  public:
+    uiOptimizeAlpha();
+    virtual ~uiOptimizeAlpha();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiAutoCorrelation : public SetGetDouble
+{
+  public:
+    uiAutoCorrelation();
+    virtual ~uiAutoCorrelation();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+//------------------------------------------------------------------------------------
+
+// The uiCategoryCount menu item displays and sets the number of rate categories
+class uiCategoryCount : public SetGetLong
+{
+  public:
+    uiCategoryCount();
+    virtual ~uiCategoryCount();
+    virtual long Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, long val);
+    virtual string Min(UIVars& vars, UIId id);
+    virtual string Max(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiCategoryProbability : public SetGetDouble
+{
+  public:
+    uiCategoryProbability();
+    virtual ~uiCategoryProbability();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiCategoryRate : public SetGetDouble
+{
+  public:
+    uiCategoryRate();
+    virtual ~uiCategoryRate();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiRelativeMuRate : public SetGetDouble
+{
+  public:
+    uiRelativeMuRate();
+    virtual ~uiRelativeMuRate();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiUseGlobalDataModelForAll : public SetGetNoval
+{
+  public:
+    uiUseGlobalDataModelForAll();
+    virtual ~uiUseGlobalDataModelForAll();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiUseGlobalDataModelForOne : public SetGetBool
+{
+  public:
+    uiUseGlobalDataModelForOne();
+    virtual ~uiUseGlobalDataModelForOne();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, bool val);
+};
+
+#endif  // DATAMODEL_INTERFACE_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/force_interface.cpp b/src/ui_interface/force_interface.cpp
new file mode 100644
index 0000000..a660431
--- /dev/null
+++ b/src/ui_interface/force_interface.cpp
@@ -0,0 +1,1538 @@
+// $Id: force_interface.cpp,v 1.63 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+#include <algorithm>    // for std::min()
+
+#include "force_interface.h"
+#include "defaults.h"
+#include "force.h"
+#include "ui_constants.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+bool uiForceLegal::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceLegal(id.GetForceType());
+}
+
+uiForceLegal::uiForceLegal()
+    : GetBool(uistr::forceLegal)
+{
+}
+
+uiForceLegal::~uiForceLegal()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+bool uiForceOnOff::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceOnOff(id.GetForceType());
+}
+
+void uiForceOnOff::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.forces.SetForceOnOff(val,id.GetForceType());
+}
+
+uiForceOnOff::uiForceOnOff()
+    : SetGetBool(uistr::forceOnOff)
+{
+}
+
+uiForceOnOff::~uiForceOnOff()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+long uiMaxEvents::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetMaxEvents(id.GetForceType());
+}
+
+void uiMaxEvents::Set(UIVars& vars, UIId id, long val)
+{
+    vars.forces.SetMaxEvents(val,id.GetForceType());
+}
+
+uiMaxEvents::uiMaxEvents()
+    : SetGetLong(uistr::maxEvents)
+{
+}
+
+uiMaxEvents::~uiMaxEvents()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiParameter::uiParameter(const string& whichForceClass)
+    : SetGetDouble(whichForceClass)
+{
+}
+
+uiParameter::~uiParameter()
+{
+}
+
+string uiParameter::Description(UIVars& vars, UIId id)
+{
+    if (id.GetIndex1() == uiconst::GLOBAL_ID)
+    {
+        return SetGetDouble::Description(vars, id);
+    }
+    return vars.GetParamNameWithConstraint(id.GetForceType(), id.GetIndex1());
+}
+
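+// Min and Max return suggested bounds (as strings) for a parameter's start value.
+// In a Bayesian run the bounds come from the parameter's prior (unless the
+// parameter is held constant); otherwise they fall back to the compiled-in
+// defaults for the parameter's force type.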
+string uiParameter::Min(UIVars& vars, UIId id)
+{
+    force_type ftype = id.GetForceType();
+    long pindex = id.GetIndex1();
+    if (vars.chains.GetDoBayesianAnalysis())
+    {
+        if (vars.forces.GetParamstatus(ftype, pindex).Status() != pstat_constant)
+        {
+            return ToString(vars.forces.GetPrior(ftype, pindex).GetLowerBound());
+        }
+        //LS NOTE:  More a note than a debug, but:  if pindex is GLOBAL_ID, this
+        // means that the user is setting all start values at once, using one of
+        // the 'Single <force> starting estimate for all data' menu options.
+        // So, what this routine will do is report back the range for the default
+        // prior for this force.  This may be different from the actual priors
+        // for particular parameters, either larger or smaller.  Since 'Min' and
+        // 'Max' are only suggestions, the actual setting code in ui_vars_forces
+        // will simply check the desired value against that parameter's prior (be
+        // it the default or unique), and will warn the user appropriately.
+        //
+        // The other option here is to have GetPrior(ftype, GLOBAL_ID) return
+        // a prior that is either the intersection or the union of the individual
+        // priors, each of which have their own foibles.  That seems a bit complex
+        // for no clear benefit (given the current warning system), so I think
+        // our current system is as good as we're going to get any time soon.
+    }
+    switch (ftype)
+    {
+        case force_COAL:
+            return ToString(defaults::minTheta);
+            break;
+        case force_MIG:
+            return ToString(defaults::minMigRate);
+            break;
+        case force_DIVMIG:
+            return ToString(defaults::minDivMigRate);
+            break;
+        case force_DIVERGENCE:
+            return ToString(defaults::minEpoch);
+            break;
+        case force_DISEASE:
+            return ToString(defaults::minDiseaseRate);
+            break;
+        case force_REC:
+            return ToString(defaults::minRecRate);
+            break;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            return ToString(defaults::minGrowRate);
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            return ToString(defaults::minLSelectCoeff);
+            break;
+        case force_REGION_GAMMA:
+            return ToString(defaults::minGammaOverRegions);
+            break;
+        case force_NONE:
+            assert(false);
+            return ToString(FLAGDOUBLE);
+            break;
+    }
+    assert(false); //Missing force type.
+    return ToString(FLAGDOUBLE);
+}
+
+string uiParameter::Max(UIVars& vars, UIId id)
+{
+    force_type ftype = id.GetForceType();
+    long pindex = id.GetIndex1();
+    if (vars.chains.GetDoBayesianAnalysis())
+    {
+        //LS NOTE:  (see Min, above)
+        if (vars.forces.GetParamstatus(ftype, pindex).Status() != pstat_constant)
+        {
+            return ToString(vars.forces.GetPrior(ftype, pindex).GetUpperBound());
+        }
+    }
+    switch (ftype)
+    {
+        case force_COAL:
+            return ToString(defaults::maxTheta);
+            break;
+        case force_MIG:
+            return ToString(defaults::maxMigRate);
+            break;
+        case force_DIVMIG:
+            return ToString(defaults::maxDivMigRate);
+            break;
+        case force_DIVERGENCE:
+            return ToString(defaults::maxEpoch);
+            break;
+        case force_DISEASE:
+            return ToString(defaults::maxDiseaseRate);
+            break;
+        case force_REC:
+            return ToString(defaults::maxRecRate);
+            break;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            return ToString(defaults::maxGrowRate);
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            return ToString(defaults::maxLSelectCoeff);
+            break;
+        case force_REGION_GAMMA:
+            return ToString(defaults::maxGammaOverRegions);
+            break;
+        case force_NONE:
+            assert(false);
+            return ToString(FLAGDOUBLE);
+            break;
+    }
+    assert(false); //Missing force type.
+    return ToString(FLAGDOUBLE);
+}
+
+//------------------------------------------------------------------------------------
+
+uiCoalescence::uiCoalescence()
+    : SetGetBoolEnabled(uistr::coalescence)
+{
+}
+
+uiCoalescence::~uiCoalescence()
+{
+}
+
+bool uiCoalescence::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceOnOff(force_COAL);
+}
+
+void uiCoalescence::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.forces.SetForceOnOff(val,force_COAL);
+}
+
+//------------------------------------------------------------------------------------
+
+uiCoalescenceLegal::uiCoalescenceLegal()
+    : GetBool(uistr::coalescenceLegal)
+{
+}
+
+uiCoalescenceLegal::~uiCoalescenceLegal()
+{
+}
+
+bool uiCoalescenceLegal::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceLegal(force_COAL);
+}
+
+//------------------------------------------------------------------------------------
+
+long uiCoalescenceMaxEvents::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetMaxEvents(force_COAL);
+}
+
+void uiCoalescenceMaxEvents::Set(UIVars& vars, UIId id, long val)
+{
+    vars.forces.SetMaxEvents(val,force_COAL);
+}
+
+uiCoalescenceMaxEvents::uiCoalescenceMaxEvents()
+    : SetGetLong(uistr::coalescenceMaxEvents)
+{
+}
+
+uiCoalescenceMaxEvents::~uiCoalescenceMaxEvents()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void uiFstTheta::Set(UIVars& vars, UIId id, noval val)
+{
+    vars.forces.SetAllThetaStartValuesFST();
+}
+
+uiFstTheta::uiFstTheta()
+    : SetGetNoval(uistr::fstSetTheta)
+{
+}
+
+uiFstTheta::~uiFstTheta()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiUserTheta::uiUserTheta()
+    : uiParameter(uistr::userSetTheta)
+{
+}
+
+uiUserTheta::~uiUserTheta()
+{
+}
+
+void uiUserTheta::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetThetaStartValue(val,id.GetIndex1());
+}
+
+double uiUserTheta::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetStartValue(force_COAL,id.GetIndex1());
+}
+
+//------------------------------------------------------------------------------------
+
+void uiWattersonTheta::Set(UIVars& vars, UIId id, noval val)
+{
+    vars.forces.SetAllThetaStartValuesWatterson();
+}
+
+uiWattersonTheta::uiWattersonTheta()
+    : SetGetNoval(uistr::wattersonSetTheta)
+{
+}
+
+uiWattersonTheta::~uiWattersonTheta()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiGlobalTheta::uiGlobalTheta()
+    : uiParameter(uistr::globalTheta)
+{
+}
+
+uiGlobalTheta::~uiGlobalTheta()
+{
+}
+
+void uiGlobalTheta::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetAllThetaStartValues(val);
+}
+
+double uiGlobalTheta::Get(UIVars& vars, UIId id)
+{
+    assert(false);
+    throw implementation_error("shouldn't get the global theta value, just set");
+}
+
+//------------------------------------------------------------------------------------
+
+uiGrowth::uiGrowth()
+    : SetGetBoolEnabled(uistr::growth)
+{
+}
+
+uiGrowth::~uiGrowth()
+{
+}
+
+bool uiGrowth::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceOnOff(force_GROW);
+}
+
+void uiGrowth::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.forces.SetForceOnOff(val,force_GROW);
+}
+
+//------------------------------------------------------------------------------------
+
+uiGrowthScheme::uiGrowthScheme()
+    : SetGetGrowthScheme(uistr::growthScheme)
+{
+}
+
+uiGrowthScheme::~uiGrowthScheme()
+{
+}
+
+growth_scheme uiGrowthScheme::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetGrowthScheme();
+}
+
+void uiGrowthScheme::Set(UIVars& vars, UIId id, growth_scheme val)
+{
+    vars.forces.SetGrowthScheme(val);
+}
+
+string uiGrowthScheme::NextToggleValue(UIVars& vars, UIId id)
+{
+    switch(Get(vars,id))
+    {
+        case growth_EXP:
+            return ToString(growth_STAIRSTEP);
+            break;
+        case growth_STAIRSTEP:
+            // this is unsupported, present for toggle demo purposes
+            return ToString(growth_EXP);
+            break;
+    }
+    throw implementation_error("uiGrowthScheme::NextToggleValue bad switch case");
+}
+
+//------------------------------------------------------------------------------------
+
+uiGrowthType::uiGrowthType()
+    : SetGetGrowthType(uistr::growthType)
+{
+}
+
+uiGrowthType::~uiGrowthType()
+{
+}
+
+growth_type uiGrowthType::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetGrowthType();
+}
+
+void uiGrowthType::Set(UIVars& vars, UIId id, growth_type val)
+{
+    vars.forces.SetGrowthType(val);
+}
+
+string uiGrowthType::NextToggleValue(UIVars& vars, UIId id)
+{
+    switch(Get(vars,id))
+    {
+        case growth_CURVE:
+            return ToString(growth_STICKEXP);
+            break;
+        case growth_STICK:
+            // not sure we're supporting this for now--disabled
+            // assert in debug, but fall through to recover for release
+            assert(false);
+        case growth_STICKEXP:
+            return ToString(growth_CURVE);
+            break;
+    }
+    throw implementation_error("uiGrowthType::NextToggleValue bad switch case");
+}
+
+string uiGrowthType::MakePrintString(UIVars& vars, growth_type val)
+{
+    switch(val)
+    {
+        case growth_CURVE:
+            return "Curve";
+            break;
+        case growth_STICK:
+            // not sure we're supporting this for now--disabled
+            // assert in debug, but fall through to recover for release
+            assert(false);
+        case growth_STICKEXP:
+            return "Stick";
+            break;
+    }
+    throw implementation_error("uiGrowthType::MakePrintString bad switch case");
+}
+
+//------------------------------------------------------------------------------------
+
+uiGrowthLegal::uiGrowthLegal()
+    : GetBool(uistr::growthLegal)
+{
+}
+
+uiGrowthLegal::~uiGrowthLegal()
+{
+}
+
+bool uiGrowthLegal::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceLegal(force_GROW);
+}
+
+//------------------------------------------------------------------------------------
+
+void uiGlobalGrowth::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetAllGrowthStartValues(val);
+}
+
+double uiGlobalGrowth::Get(UIVars& vars, UIId id)
+{
+    assert(false);
+    throw implementation_error("shouldn't get the global growth value, just set");
+}
+
+uiGlobalGrowth::uiGlobalGrowth()
+    : uiParameter(uistr::globalGrowth)
+{
+}
+
+uiGlobalGrowth::~uiGlobalGrowth()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void uiGrowthUser::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetGrowthStartValue(val,id.GetIndex1());
+}
+
+double uiGrowthUser::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetStartValue(force_GROW,id.GetIndex1());
+}
+
+uiGrowthUser::uiGrowthUser()
+    : uiParameter(uistr::growthByID)
+{
+}
+
+uiGrowthUser::~uiGrowthUser()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+long uiGrowthMaxEvents::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetMaxEvents(force_GROW);
+}
+
+void uiGrowthMaxEvents::Set(UIVars& vars, UIId id, long val)
+{
+    vars.forces.SetMaxEvents(val,force_GROW);
+}
+
+uiGrowthMaxEvents::uiGrowthMaxEvents()
+    : SetGetLong(uistr::growthMaxEvents)
+{
+}
+
+uiGrowthMaxEvents::~uiGrowthMaxEvents()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiLogisticSelection::uiLogisticSelection()
+    : SetGetBoolEnabled(uistr::logisticSelection)
+{
+}
+
+uiLogisticSelection::~uiLogisticSelection()
+{
+}
+
+bool uiLogisticSelection::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceOnOff(force_LOGISTICSELECTION);
+}
+
+void uiLogisticSelection::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.forces.SetForceOnOff(val,force_LOGISTICSELECTION);
+}
+
+uiLogisticSelectionLegal::uiLogisticSelectionLegal()
+    : GetBool(uistr::logisticSelectionLegal)
+{
+}
+
+uiLogisticSelectionLegal::~uiLogisticSelectionLegal()
+{
+}
+
+bool uiLogisticSelectionLegal::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceLegal(force_LOGISTICSELECTION);
+}
+
+void uiGlobalLogisticSelectionCoefficient::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetLogisticSelectionCoefficientStartValue(val);
+}
+
+double uiGlobalLogisticSelectionCoefficient::Get(UIVars& vars, UIId id)
+{
+    assert(false);
+    throw implementation_error("shouldn't get the global logistic selection value, just set");
+}
+
+uiGlobalLogisticSelectionCoefficient::uiGlobalLogisticSelectionCoefficient()
+    : uiParameter(uistr::globalLogisticSelectionCoefficient)
+{
+}
+
+uiGlobalLogisticSelectionCoefficient::~uiGlobalLogisticSelectionCoefficient()
+{
+}
+
+void uiLogisticSelectionCoefficientUser::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetLogisticSelectionCoefficientStartValue(val);
+}
+
+double uiLogisticSelectionCoefficientUser::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetStartValue(force_LOGISTICSELECTION,0);
+}
+
+uiLogisticSelectionCoefficientUser::uiLogisticSelectionCoefficientUser()
+    : uiParameter(uistr::logisticSelectionCoefficient)
+{
+}
+
+uiLogisticSelectionCoefficientUser::~uiLogisticSelectionCoefficientUser()
+{
+}
+
+long uiLogisticSelectionMaxEvents::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetMaxEvents(force_LOGISTICSELECTION);
+}
+
+void uiLogisticSelectionMaxEvents::Set(UIVars& vars, UIId id, long val)
+{
+    vars.forces.SetMaxEvents(val,force_LOGISTICSELECTION);
+}
+
+uiLogisticSelectionMaxEvents::uiLogisticSelectionMaxEvents()
+    : SetGetLong(uistr::logisticSelectionMaxEvents)
+{
+}
+
+uiLogisticSelectionMaxEvents::~uiLogisticSelectionMaxEvents()
+{
+}
+
+uiLogisticSelectionType::uiLogisticSelectionType()
+    : SetGetSelectionType(uistr::selectType)
+{
+}
+
+uiLogisticSelectionType::~uiLogisticSelectionType()
+{
+}
+
+selection_type uiLogisticSelectionType::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetSelectionType();
+}
+
+void uiLogisticSelectionType::Set(UIVars& vars, UIId id, selection_type val)
+{
+    vars.forces.SetSelectionType(val);
+}
+
+string uiLogisticSelectionType::NextToggleValue(UIVars& vars, UIId id)
+{
+    switch(Get(vars,id))
+    {
+        case selection_DETERMINISTIC:
+            return ToString(selection_STOCHASTIC);
+            break;
+        case selection_STOCHASTIC:
+            return ToString(selection_DETERMINISTIC);
+            break;
+    }
+    throw implementation_error("uiLogisticSelectionType::NextToggleValue bad switch case");
+}
+
+string uiLogisticSelectionType::MakePrintString(UIVars& vars, selection_type val)
+{
+    switch(val)
+    {
+        case selection_DETERMINISTIC:
+            return "Deterministic";
+            break;
+        case selection_STOCHASTIC:
+            return "Stochastic";
+            break;
+    }
+    throw implementation_error("uiLogisticSelectionType::MakePrintString bad switch case");
+}
+
+//------------------------------------------------------------------------------------
+
+bool uiMigration::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceOnOff(force_MIG);
+}
+
+void uiMigration::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.forces.SetForceOnOff(val,force_MIG);
+}
+
+uiMigration::uiMigration()
+    : SetGetBoolEnabled(uistr::migration)
+{
+}
+
+uiMigration::~uiMigration()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiMigrationLegal::uiMigrationLegal()
+    : GetBool(uistr::migrationLegal)
+{
+}
+
+uiMigrationLegal::~uiMigrationLegal()
+{
+}
+
+bool uiMigrationLegal::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceLegal(force_MIG);
+}
+
+//------------------------------------------------------------------------------------
+
+void uiMigrationUser::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetMigrationStartValue(val,id.GetIndex1());
+}
+
+double uiMigrationUser::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetStartValue(force_MIG,id.GetIndex1());
+}
+
+uiMigrationUser::uiMigrationUser()
+    : uiParameter(uistr::migrationUser)
+{
+}
+
+uiMigrationUser::~uiMigrationUser()
+{
+}
+
+//------------------------------------------------------------------------------------
+
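+// Builds one display row of the migration matrix: the start values for migration
+// into population id.GetIndex1() from each source population (a dash marks the
+// diagonal), truncated to uiconst::migColumns entries with "..." when more exist.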
+string uiMigrationInto::Get(UIVars& vars, UIId id)
+{
+    string row = "";
+    long toPop = id.GetIndex1();
+    long npops = vars.datapackplus.GetNPartitionsByForceType(force_MIG);
+    long rowsdisplayed = std::min(npops,uiconst::migColumns);
+    long rowindex;
+    for(rowindex = 0; rowindex < rowsdisplayed; rowindex++)
+    {
+        row += " ";
+        long fromPop = rowindex;
+        if(fromPop == toPop)
+        {
+            row += " - ";
+        }
+        else
+        {
+            long colId = fromPop + npops * toPop;
+            double migRate =
+                vars.forces.GetStartValue(force_MIG,colId);
+            row += ToString(migRate);
+        }
+    }
+    if(npops > uiconst::migColumns)
+    {
+        row += " ...";
+    }
+    return row;
+}
+
+uiMigrationInto::uiMigrationInto()
+    : GetString(uistr::migrationInto)
+{
+}
+
+uiMigrationInto::~uiMigrationInto()
+{
+}
+
+string uiMigrationInto::Description(UIVars& vars, UIId id)
+{
+    string returnVal(UIKey());
+    returnVal += vars.datapackplus.GetForcePartitionName(force_MIG,id.GetIndex1());
+    return returnVal;
+}
+
+//------------------------------------------------------------------------------------
+
+void uiMigrationFst::Set(UIVars& vars, UIId id, noval val)
+{
+    vars.forces.SetAllMigrationStartValuesFST();
+}
+
+uiMigrationFst::uiMigrationFst()
+    : SetGetNoval(uistr::fstSetMigration)
+{
+}
+
+uiMigrationFst::~uiMigrationFst()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiMigrationGlobal::Get(UIVars& vars, UIId id)
+{
+    assert(false);
+    throw implementation_error("shouldn't get the global migration value, just set");
+}
+
+void uiMigrationGlobal::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetAllMigrationStartValues(val);
+}
+
+uiMigrationGlobal::uiMigrationGlobal()
+    : uiParameter(uistr::globalMigration)
+{
+}
+
+uiMigrationGlobal::~uiMigrationGlobal()
+{
+}
+
+long uiMigrationMaxEvents::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetMaxEvents(force_MIG);
+}
+
+void uiMigrationMaxEvents::Set(UIVars& vars, UIId id, long val)
+{
+    vars.forces.SetMaxEvents(val,force_MIG);
+}
+
+uiMigrationMaxEvents::uiMigrationMaxEvents()
+    : SetGetLong(uistr::migrationMaxEvents)
+{
+}
+
+uiMigrationMaxEvents::~uiMigrationMaxEvents()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+bool uiDivMigration::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceOnOff(force_DIVMIG);
+}
+
+void uiDivMigration::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.forces.SetForceOnOff(val,force_DIVMIG);
+}
+
+uiDivMigration::uiDivMigration()
+    : SetGetBoolEnabled(uistr::divmigration)
+{
+}
+
+uiDivMigration::~uiDivMigration()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiDivMigrationLegal::uiDivMigrationLegal()
+    : GetBool(uistr::divmigrationLegal)
+{
+}
+
+uiDivMigrationLegal::~uiDivMigrationLegal()
+{
+}
+
+bool uiDivMigrationLegal::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceLegal(force_DIVMIG);
+}
+
+//------------------------------------------------------------------------------------
+
+void uiDivMigrationUser::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetDivMigrationStartValue(val,id.GetIndex1());
+}
+
+double uiDivMigrationUser::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetStartValue(force_DIVMIG,id.GetIndex1());
+}
+
+uiDivMigrationUser::uiDivMigrationUser()
+    : uiParameter(uistr::divmigrationUser)
+{
+}
+
+uiDivMigrationUser::~uiDivMigrationUser()
+{
+}
+
+//------------------------------------------------------------------------------------
+
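+// Same row-display logic as uiMigrationInto::Get, but for divergence migration
+// (force_DIVMIG).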
+string uiDivMigrationInto::Get(UIVars& vars, UIId id)
+{
+    string row = "";
+    long toPop = id.GetIndex1();
+    long npops = vars.datapackplus.GetNPartitionsByForceType(force_DIVMIG);
+    long rowsdisplayed = std::min(npops,uiconst::migColumns);
+    long rowindex;
+    for(rowindex = 0; rowindex < rowsdisplayed; rowindex++)
+    {
+        row += " ";
+        long fromPop = rowindex;
+        if(fromPop == toPop)
+        {
+            row += " - ";
+        }
+        else
+        {
+            long colId = fromPop + npops * toPop;
+            double migRate =
+                vars.forces.GetStartValue(force_DIVMIG,colId);
+            row += ToString(migRate);
+        }
+    }
+    if(npops > uiconst::migColumns)
+    {
+        row += " ...";
+    }
+    return row;
+}
+
+uiDivMigrationInto::uiDivMigrationInto()
+    : GetString(uistr::divmigrationInto)
+{
+}
+
+uiDivMigrationInto::~uiDivMigrationInto()
+{
+}
+
+string uiDivMigrationInto::Description(UIVars& vars, UIId id)
+{
+    string returnVal(UIKey());
+    returnVal += vars.datapackplus.GetForcePartitionName(force_DIVMIG,id.GetIndex1());
+    return returnVal;
+}
+
+//------------------------------------------------------------------------------------
+
+#if 0  // MREMOVE destroying this class as DIVMIG can't do FST
+void uiDivMigrationFst::Set(UIVars& vars, UIId id, noval val)
+{
+    vars.forces.SetAllMigrationStartValuesFST();
+}
+
+uiDivMigrationFst::uiDivMigrationFst()
+    : SetGetNoval(uistr::fstSetDivMigration)
+{
+}
+
+uiDivMigrationFst::~uiDivMigrationFst()
+{
+}
+#endif
+
+//------------------------------------------------------------------------------------
+
+double uiDivMigrationGlobal::Get(UIVars& vars, UIId id)
+{
+    assert(false);
+    throw implementation_error("shouldn't get the global migration value, just set");
+}
+
+void uiDivMigrationGlobal::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetAllDivMigrationStartValues(val);
+}
+
+uiDivMigrationGlobal::uiDivMigrationGlobal()
+    : uiParameter(uistr::globalDivMigration)
+{
+}
+
+uiDivMigrationGlobal::~uiDivMigrationGlobal()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+long uiDivMigrationMaxEvents::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetMaxEvents(force_DIVMIG);
+}
+
+void uiDivMigrationMaxEvents::Set(UIVars& vars, UIId id, long val)
+{
+    vars.forces.SetMaxEvents(val,force_DIVMIG);
+}
+
+uiDivMigrationMaxEvents::uiDivMigrationMaxEvents()
+    : SetGetLong(uistr::divmigrationMaxEvents)
+{
+}
+
+uiDivMigrationMaxEvents::~uiDivMigrationMaxEvents()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+bool uiDivergence::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceOnOff(force_DIVERGENCE);
+}
+
+void uiDivergence::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.forces.SetForceOnOff(val,force_DIVERGENCE);
+}
+
+uiDivergence::uiDivergence()
+    : SetGetBoolEnabled(uistr::divergence)
+{
+}
+
+uiDivergence::~uiDivergence()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+string uiDivergenceEpochAncestor::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetEpochAncestorName(id.GetIndex1());
+}
+
+uiDivergenceEpochAncestor::uiDivergenceEpochAncestor()
+    : GetString(uistr::divergenceEpochAncestor)
+{
+}
+
+uiDivergenceEpochAncestor::~uiDivergenceEpochAncestor()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+string uiDivergenceEpochDescendents::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetEpochDescendentNames(id.GetIndex1());
+}
+
+uiDivergenceEpochDescendents::uiDivergenceEpochDescendents()
+    : GetString(uistr::divergenceEpochDescendents)
+{
+}
+
+uiDivergenceEpochDescendents::~uiDivergenceEpochDescendents()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiDivergenceLegal::uiDivergenceLegal()
+    : GetBool(uistr::divergenceLegal)
+{
+}
+
+uiDivergenceLegal::~uiDivergenceLegal()
+{
+}
+
+bool uiDivergenceLegal::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceLegal(force_DIVERGENCE);
+}
+
+//------------------------------------------------------------------------------------
+
+long uiDivergenceEpochCount::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetNPartitionsByForceType(force_DIVERGENCE);
+}
+
+uiDivergenceEpochCount::uiDivergenceEpochCount()
+    : GetLong(uistr::divergenceEpochCount)
+{
+}
+
+uiDivergenceEpochCount::~uiDivergenceEpochCount()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+string uiDivergenceEpochName::Get(UIVars& vars, UIId id)
+{
+    return vars.datapackplus.GetForcePartitionName(force_DIVERGENCE,id.GetIndex1());
+}
+
+uiDivergenceEpochName::uiDivergenceEpochName()
+    : GetString(uistr::divergenceEpochName)
+{
+}
+
+uiDivergenceEpochName::~uiDivergenceEpochName()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiDivergenceEpochBoundaryTime::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetStartValue(force_DIVERGENCE,id.GetIndex1());
+}
+
+void uiDivergenceEpochBoundaryTime::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetDivergenceEpochStartTime(val,id.GetIndex1());
+}
+
+uiDivergenceEpochBoundaryTime::uiDivergenceEpochBoundaryTime()
+    : uiParameter(uistr::divergenceEpochBoundaryTime)
+{
+}
+
+uiDivergenceEpochBoundaryTime::~uiDivergenceEpochBoundaryTime()
+{
+}
+
+string uiDivergenceEpochBoundaryTime::Description(UIVars& vars, UIId id)
+{
+    //string returnVal = vars.datapackplus.GetAncestorName(id.GetIndex1());
+    string returnVal = uistr::divergenceEpoch;
+    returnVal += " ";
+    returnVal += ToString(id.GetIndex1() + 1);
+    returnVal += " ";
+    returnVal += UIKey();
+
+    //string txtstr(UIKey());
+    //returnVal += txtstr;
+    return returnVal;
+}
+
+//-----------------------------------------------------------------------
+
+bool uiRecombine::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceOnOff(force_REC);
+}
+
+void uiRecombine::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.forces.SetForceOnOff(val,force_REC);
+}
+
+uiRecombine::uiRecombine()
+    : SetGetBoolEnabled(uistr::recombination)
+{
+}
+
+uiRecombine::~uiRecombine()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiRecombineLegal::uiRecombineLegal()
+    : GetBool(uistr::recombinationLegal)
+{
+}
+
+uiRecombineLegal::~uiRecombineLegal()
+{
+}
+
+bool uiRecombineLegal::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceLegal(force_REC);
+}
+
+//------------------------------------------------------------------------------------
+
+double uiRecombinationRate::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetStartValue(force_REC,0);
+}
+
+void uiRecombinationRate::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetRecombinationStartValue(val);
+}
+
+uiRecombinationRate::uiRecombinationRate()
+    : uiParameter(uistr::recombinationRate)
+{
+}
+
+uiRecombinationRate::~uiRecombinationRate()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+long uiRecombinationMaxEvents::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetMaxEvents(force_REC);
+}
+
+void uiRecombinationMaxEvents::Set(UIVars& vars, UIId id, long val)
+{
+    vars.forces.SetMaxEvents(val,force_REC);
+}
+
+uiRecombinationMaxEvents::uiRecombinationMaxEvents()
+    : SetGetLong(uistr::recombinationMaxEvents)
+{
+}
+
+uiRecombinationMaxEvents::~uiRecombinationMaxEvents()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+bool uiRegionGamma::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceOnOff(force_REGION_GAMMA);
+}
+
+void uiRegionGamma::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.forces.SetForceOnOff(val,force_REGION_GAMMA);
+}
+
+uiRegionGamma::uiRegionGamma()
+    : SetGetBoolEnabled(uistr::regionGamma)
+{
+}
+
+uiRegionGamma::~uiRegionGamma()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiRegionGammaLegal::uiRegionGammaLegal()
+    : GetBool(uistr::regionGammaLegal)
+{
+}
+
+uiRegionGammaLegal::~uiRegionGammaLegal()
+{
+}
+
+bool uiRegionGammaLegal::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceLegal(force_REGION_GAMMA);
+}
+
+//------------------------------------------------------------------------------------
+
+double uiRegionGammaShape::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetStartValue(force_REGION_GAMMA,0);
+}
+
+void uiRegionGammaShape::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetRegionGammaStartValue(val);
+}
+
+uiRegionGammaShape::uiRegionGammaShape()
+    : uiParameter(uistr::regionGammaShape)
+{
+}
+
+uiRegionGammaShape::~uiRegionGammaShape()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiDisease::uiDisease()
+    : SetGetBoolEnabled(uistr::disease)
+{
+}
+
+uiDisease::~uiDisease()
+{
+}
+
+bool uiDisease::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceOnOff(force_DISEASE);
+}
+
+void uiDisease::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.forces.SetForceOnOff(val,force_DISEASE);
+}
+
+//------------------------------------------------------------------------------------
+
+uiDiseaseLegal::uiDiseaseLegal()
+    : GetBool(uistr::diseaseLegal)
+{
+}
+
+uiDiseaseLegal::~uiDiseaseLegal()
+{
+}
+
+bool uiDiseaseLegal::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetForceLegal(force_DISEASE);
+}
+
+//------------------------------------------------------------------------------------
+
+long uiDiseaseMaxEvents::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetMaxEvents(force_DISEASE);
+}
+
+void uiDiseaseMaxEvents::Set(UIVars& vars, UIId id, long val)
+{
+    vars.forces.SetMaxEvents(val,force_DISEASE);
+}
+
+uiDiseaseMaxEvents::uiDiseaseMaxEvents()
+    : SetGetLong(uistr::diseaseMaxEvents)
+{
+}
+
+uiDiseaseMaxEvents::~uiDiseaseMaxEvents()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double uiDiseaseGlobal::Get(UIVars& vars, UIId id)
+{
+    assert(false);
+    throw implementation_error("shouldn't get the global disease value, just set");
+}
+
+void uiDiseaseGlobal::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetAllDiseaseStartValues(val);
+}
+
+uiDiseaseGlobal::uiDiseaseGlobal()
+    : uiParameter(uistr::globalDisease)
+{
+}
+
+uiDiseaseGlobal::~uiDiseaseGlobal()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void uiDiseaseByID::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetDiseaseStartValue(val,id.GetIndex1());
+}
+
+double uiDiseaseByID::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetStartValue(force_DISEASE,id.GetIndex1());
+}
+
+uiDiseaseByID::uiDiseaseByID()
+    : uiParameter(uistr::diseaseByID)
+{
+}
+
+uiDiseaseByID::~uiDiseaseByID()
+{
+}
+
+// EWFIX.P5 REFACTOR -- use UIId to index these instead of having per-force versions
+
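+// Builds one display row of the disease-rate matrix over disease partitions
+// (force_DISEASE), analogous to uiMigrationInto::Get.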
+string uiDiseaseInto::Get(UIVars& vars, UIId id)
+{
+    string row = "";
+    long toPop = id.GetIndex1();
+    long nstati = vars.datapackplus.GetNPartitionsByForceType(force_DISEASE);
+    long rowsdisplayed = std::min(nstati,uiconst::diseaseColumns);
+    long rowindex;
+    for(rowindex = 0; rowindex < rowsdisplayed; rowindex++)
+    {
+        row += " ";
+        long fromPop = rowindex;
+        if(fromPop == toPop)
+        {
+            row += " - ";
+        }
+        else
+        {
+            long colId = fromPop + nstati * toPop;
+            double disRate = vars.forces.GetStartValue(force_DISEASE,colId);
+            row += ToString(disRate);
+        }
+    }
+    if(nstati > uiconst::diseaseColumns)
+    {
+        row += " ...";
+    }
+    return row;
+}
+
+uiDiseaseInto::uiDiseaseInto()
+    : GetString(uistr::diseaseInto)
+{
+}
+
+uiDiseaseInto::~uiDiseaseInto()
+{
+}
+
+string uiDiseaseInto::Description(UIVars& vars, UIId id)
+{
+    string returnVal(UIKey());
+    returnVal += vars.datapackplus.GetForcePartitionName(force_DISEASE,id.GetIndex1());
+    return returnVal;
+}
+
+//------------------------------------------------------------------------------------
+
+long uiDiseaseLocation::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetDiseaseLocation();
+}
+
+void uiDiseaseLocation::Set(UIVars& vars, UIId id, long val)
+{
+    vars.forces.SetDiseaseLocation(val);
+}
+
+uiDiseaseLocation::uiDiseaseLocation()
+    : SetGetLong(uistr::diseaseLocation)
+{
+}
+
+uiDiseaseLocation::~uiDiseaseLocation()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+// Honestly, this should be the default instead of all of the per-force uiUser<force> classes above.
+
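+// Generic start-value accessor: the UIId supplies both the force type and the
+// parameter index, so no per-force subclass is needed.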
+double
+uiStartValue::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetStartValue(id.GetForceType(),id.GetIndex1());
+}
+
+void
+uiStartValue::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetUserStartValue(val,id.GetForceType(),id.GetIndex1());
+}
+
+uiStartValue::uiStartValue()
+    : SetGetDouble(uistr::startValue)
+{
+}
+
+uiStartValue::~uiStartValue()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+method_type
+uiStartValueMethod::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetStartMethod(id.GetForceType(),id.GetIndex1());
+}
+
+void
+uiStartValueMethod::Set(UIVars& vars, UIId id, method_type mtype)
+{
+    if(mtype != method_USER)
+    {
+        // values for method_USER should already have been set via the
+        // uiStartValue object before we get here
+        vars.forces.SetStartMethod(mtype,id.GetForceType(),id.GetIndex1());
+    }
+}
+
+uiStartValueMethod::uiStartValueMethod()
+    : SetGetMethodType(uistr::startValueMethod)
+{
+}
+
+uiStartValueMethod::~uiStartValueMethod()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+double
+uiTrueValue::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetTrueValue(id.GetForceType(),id.GetIndex1());
+}
+
+void
+uiTrueValue::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetTrueValue(val,id.GetForceType(),id.GetIndex1());
+}
+
+uiTrueValue::uiTrueValue()
+    : SetGetDouble(uistr::trueValue)
+{
+}
+
+uiTrueValue::~uiTrueValue()
+{
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/force_interface.h b/src/ui_interface/force_interface.h
new file mode 100644
index 0000000..bab832f
--- /dev/null
+++ b/src/ui_interface/force_interface.h
@@ -0,0 +1,595 @@
+// $Id: force_interface.h,v 1.39 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef FORCE_INTERFACE_H
+#define FORCE_INTERFACE_H
+
+#include <string>
+#include "setget.h"
+
+class UIVars;
+
+class uiForceLegal : public GetBool
+{
+  public:
+    uiForceLegal();
+    virtual ~uiForceLegal();
+    virtual bool Get(UIVars& vars, UIId id);
+};
+
+class uiForceOnOff : public SetGetBool
+{
+  public:
+    uiForceOnOff();
+    virtual ~uiForceOnOff();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, bool val);
+};
+
+class uiMaxEvents : public SetGetLong
+{
+  public:
+    uiMaxEvents();
+    virtual ~uiMaxEvents();
+    virtual long Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, long val);
+};
+
+//LS DEBUG:  All the classes that inherit from uiParameter could be
+// collapsed into a single class that looked a lot like uiStartValue.  In
+// fact, the code in uiStartValue is essentially duplicated code that's used
+// in the XML reading, but nowhere else.
+class uiParameter : public SetGetDouble
+{
+  private:
+    uiParameter(); //undefined
+    force_type m_ftype;
+  public:
+    uiParameter(const std::string& whichForceClass);
+    virtual ~uiParameter();
+    virtual string Min(UIVars& vars, UIId id);
+    virtual string Max(UIVars& vars, UIId id);
+    virtual string Description(UIVars& vars, UIId id);
+};
+
+class uiCoalescence : public SetGetBoolEnabled
+{
+  public:
+    uiCoalescence();
+    virtual ~uiCoalescence();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiCoalescenceLegal : public GetBool
+{
+  public:
+    uiCoalescenceLegal();
+    virtual ~uiCoalescenceLegal();
+    virtual bool Get(UIVars& vars, UIId id);
+};
+
+class uiCoalescenceMaxEvents : public SetGetLong
+{
+  public:
+    uiCoalescenceMaxEvents();
+    virtual ~uiCoalescenceMaxEvents();
+    virtual long Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, long val);
+};
+
+class uiFstTheta : public SetGetNoval
+{
+  public:
+    uiFstTheta();
+    virtual ~uiFstTheta();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiUserTheta : public uiParameter
+{
+  public:
+    uiUserTheta();
+    virtual ~uiUserTheta();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiWattersonTheta : public SetGetNoval
+{
+  public:
+    uiWattersonTheta();
+    virtual ~uiWattersonTheta();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiGlobalTheta : public uiParameter
+{
+  public:
+    uiGlobalTheta();
+    virtual ~uiGlobalTheta();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiGrowth : public SetGetBoolEnabled
+{
+  public:
+    uiGrowth();
+    virtual ~uiGrowth();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiGrowthLegal : public GetBool
+{
+  public:
+    uiGrowthLegal();
+    virtual ~uiGrowthLegal();
+    virtual bool Get(UIVars& vars, UIId id);
+};
+
+class uiGrowthType : public SetGetGrowthType
+{
+  public:
+    uiGrowthType();
+    virtual ~uiGrowthType();
+    virtual growth_type Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, growth_type gType);
+    virtual std::string NextToggleValue(UIVars& vars, UIId id);
+    virtual std::string MakePrintString(UIVars& vars, growth_type gType);
+};
+
+class uiGrowthScheme : public SetGetGrowthScheme
+{
+  public:
+    uiGrowthScheme();
+    virtual ~uiGrowthScheme();
+    virtual growth_scheme Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, growth_scheme gScheme);
+    virtual std::string NextToggleValue(UIVars& vars, UIId id);
+};
+
+class uiGlobalGrowth : public uiParameter
+{
+  public:
+    uiGlobalGrowth();
+    virtual ~uiGlobalGrowth();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiGrowthUser : public uiParameter
+{
+  public:
+    uiGrowthUser();
+    virtual ~uiGrowthUser();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+
+};
+
+class uiGrowthMaxEvents : public SetGetLong
+{
+  public:
+    uiGrowthMaxEvents();
+    virtual ~uiGrowthMaxEvents();
+    virtual long Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, long val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiLogisticSelection : public SetGetBoolEnabled
+{
+  public:
+    uiLogisticSelection();
+    virtual ~uiLogisticSelection();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiLogisticSelectionLegal : public GetBool
+{
+  public:
+    uiLogisticSelectionLegal();
+    virtual ~uiLogisticSelectionLegal();
+    virtual bool Get(UIVars& vars, UIId id);
+};
+
+class uiGlobalLogisticSelectionCoefficient : public uiParameter
+{
+  public:
+    uiGlobalLogisticSelectionCoefficient();
+    virtual ~uiGlobalLogisticSelectionCoefficient();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiLogisticSelectionCoefficientUser : public uiParameter
+{
+  public:
+    uiLogisticSelectionCoefficientUser();
+    virtual ~uiLogisticSelectionCoefficientUser();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+
+};
+
+class uiLogisticSelectionMaxEvents : public SetGetLong
+{
+  public:
+    uiLogisticSelectionMaxEvents();
+    virtual ~uiLogisticSelectionMaxEvents();
+    virtual long Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, long val);
+};
+
+class uiLogisticSelectionType : public SetGetSelectionType
+{
+  public:
+    uiLogisticSelectionType();
+    virtual ~uiLogisticSelectionType();
+    virtual selection_type Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, selection_type sType);
+    virtual std::string NextToggleValue(UIVars& vars, UIId id);
+    virtual std::string MakePrintString(UIVars& vars, selection_type sType);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiMigration : public SetGetBoolEnabled
+{
+  public:
+    uiMigration();
+    virtual ~uiMigration();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiMigrationLegal : public GetBool
+{
+  public:
+    uiMigrationLegal();
+    virtual ~uiMigrationLegal();
+    virtual bool Get(UIVars& vars, UIId id);
+};
+
+class uiMigrationFst : public SetGetNoval
+{
+  public:
+    uiMigrationFst();
+    virtual ~uiMigrationFst();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiMigrationGlobal : public uiParameter
+{
+  public:
+    uiMigrationGlobal();
+    virtual ~uiMigrationGlobal();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiMigrationUser : public uiParameter
+{
+  public:
+    uiMigrationUser();
+    virtual ~uiMigrationUser();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiMigrationMaxEvents : public SetGetLong
+{
+  public:
+    uiMigrationMaxEvents();
+    virtual ~uiMigrationMaxEvents();
+    virtual long Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, long val);
+};
+
+class uiMigrationInto: public GetString
+{
+  public:
+    uiMigrationInto();
+    virtual ~uiMigrationInto();
+    virtual std::string Get(UIVars& vars, UIId id);
+    virtual string Description(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiDivMigration : public SetGetBoolEnabled
+{
+  public:
+    uiDivMigration();
+    virtual ~uiDivMigration();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiDivMigrationLegal : public GetBool
+{
+  public:
+    uiDivMigrationLegal();
+    virtual ~uiDivMigrationLegal();
+    virtual bool Get(UIVars& vars, UIId id);
+};
+
+class uiDivMigrationFst : public SetGetNoval
+{
+  public:
+    uiDivMigrationFst();
+    virtual ~uiDivMigrationFst();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiDivMigrationGlobal : public uiParameter
+{
+  public:
+    uiDivMigrationGlobal();
+    virtual ~uiDivMigrationGlobal();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiDivMigrationUser : public uiParameter
+{
+  public:
+    uiDivMigrationUser();
+    virtual ~uiDivMigrationUser();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiDivMigrationMaxEvents : public SetGetLong
+{
+  public:
+    uiDivMigrationMaxEvents();
+    virtual ~uiDivMigrationMaxEvents();
+    virtual long Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, long val);
+};
+
+class uiDivMigrationInto: public GetString
+{
+  public:
+    uiDivMigrationInto();
+    virtual ~uiDivMigrationInto();
+    virtual std::string Get(UIVars& vars, UIId id);
+    virtual string Description(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiDivergence : public SetGetBoolEnabled
+{
+  public:
+    uiDivergence();
+    virtual ~uiDivergence();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiDivergenceEpochAncestor : public GetString
+{
+  public:
+    uiDivergenceEpochAncestor();
+    virtual ~uiDivergenceEpochAncestor();
+    virtual std::string Get(UIVars& vars, UIId id);
+};
+
+class uiDivergenceEpochDescendents : public GetString
+{
+  public:
+    uiDivergenceEpochDescendents();
+    virtual ~uiDivergenceEpochDescendents();
+    virtual std::string Get(UIVars& vars, UIId id);
+};
+
+class uiDivergenceLegal : public GetBool
+{
+  public:
+    uiDivergenceLegal();
+    virtual ~uiDivergenceLegal();
+    virtual bool Get(UIVars& vars, UIId id);
+};
+
+class uiDivergenceEpochCount : public GetLong
+{
+  public:
+    uiDivergenceEpochCount();
+    virtual ~uiDivergenceEpochCount();
+    virtual long Get(UIVars& vars, UIId id);
+};
+
+class uiDivergenceEpochName : public GetString
+{
+  public:
+    uiDivergenceEpochName();
+    virtual ~uiDivergenceEpochName();
+    virtual std::string Get(UIVars& vars, UIId id);
+};
+
+
+class uiDivergenceEpochBoundaryTime :  public uiParameter
+{
+  public:
+    uiDivergenceEpochBoundaryTime();
+    virtual ~uiDivergenceEpochBoundaryTime();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+    virtual string Description(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiRecombine : public SetGetBoolEnabled
+{
+  public:
+    uiRecombine();
+    virtual ~uiRecombine();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiRecombineLegal : public GetBool
+{
+  public:
+    uiRecombineLegal();
+    virtual ~uiRecombineLegal();
+    virtual bool Get(UIVars& vars, UIId id);
+};
+
+class uiRecombinationRate : public uiParameter
+{
+  public:
+    uiRecombinationRate();
+    virtual ~uiRecombinationRate();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiRecombinationMaxEvents : public SetGetLong
+{
+  public:
+    uiRecombinationMaxEvents();
+    virtual ~uiRecombinationMaxEvents();
+    virtual long Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, long val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiRegionGamma : public SetGetBoolEnabled
+{
+  public:
+    uiRegionGamma();
+    virtual ~uiRegionGamma();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiRegionGammaLegal : public GetBool
+{
+  public:
+    uiRegionGammaLegal();
+    virtual ~uiRegionGammaLegal();
+    virtual bool Get(UIVars& vars, UIId id);
+};
+
+class uiRegionGammaShape : public uiParameter
+{
+  public:
+    uiRegionGammaShape();
+    virtual ~uiRegionGammaShape();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiDisease : public SetGetBoolEnabled
+{
+  public:
+    uiDisease();
+    virtual ~uiDisease();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiDiseaseLegal : public GetBool
+{
+  public:
+    uiDiseaseLegal();
+    virtual ~uiDiseaseLegal();
+    virtual bool Get(UIVars& vars, UIId id);
+};
+
+class uiDiseaseMaxEvents : public SetGetLong
+{
+  public:
+    uiDiseaseMaxEvents();
+    virtual ~uiDiseaseMaxEvents();
+    virtual long Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, long val);
+};
+
+class uiDiseaseGlobal : public uiParameter
+{
+  public:
+    uiDiseaseGlobal();
+    virtual ~uiDiseaseGlobal();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiDiseaseByID : public uiParameter
+{
+  public:
+    uiDiseaseByID();
+    virtual ~uiDiseaseByID();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiDiseaseInto: public GetString
+{
+  public:
+    uiDiseaseInto();
+    virtual ~uiDiseaseInto();
+    virtual std::string Get(UIVars& vars, UIId id);
+    virtual string Description(UIVars& vars, UIId id);
+};
+
+class uiDiseaseLocation : public SetGetLong
+{
+  public:
+    uiDiseaseLocation();
+    virtual ~uiDiseaseLocation();
+    virtual long Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, long val);
+};
+
+class uiStartValue : public SetGetDouble
+{
+  public:
+    uiStartValue();
+    virtual ~uiStartValue();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+class uiStartValueMethod : public SetGetMethodType
+{
+  public:
+    uiStartValueMethod();
+    virtual ~uiStartValueMethod();
+    virtual method_type Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, method_type val);
+};
+
+class uiTrueValue : public SetGetDouble
+{
+  public:
+    uiTrueValue();
+    virtual ~uiTrueValue();
+    virtual double Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, double val);
+};
+
+#endif  // FORCE_INTERFACE_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/front_end_warnings.cpp b/src/ui_interface/front_end_warnings.cpp
new file mode 100644
index 0000000..81c559b
--- /dev/null
+++ b/src/ui_interface/front_end_warnings.cpp
@@ -0,0 +1,40 @@
+// $Id: front_end_warnings.cpp,v 1.3 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "front_end_warnings.h"
+#include "vectorx.h"
+
+FrontEndWarnings::FrontEndWarnings()
+{
+}
+
+FrontEndWarnings::~FrontEndWarnings()
+{
+}
+
+StringVec1d
+FrontEndWarnings::GetAndClearWarnings()
+{
+    StringVec1d warnings = m_warnings;
+    m_warnings.clear();
+    return warnings;
+}
+
+void
+FrontEndWarnings::AddWarning(std::string warnmsg)
+{
+    for (unsigned long wnum=0; wnum<m_warnings.size(); wnum++)
+    {
+        if (m_warnings[wnum]==warnmsg) return;
+    }
+    m_warnings.push_back(warnmsg);
+}
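+
+// Usage sketch (illustrative only; the message text is made up):
+//
+//   FrontEndWarnings warnings;
+//   warnings.AddWarning("example warning");   // stored
+//   warnings.AddWarning("example warning");   // duplicate -- silently dropped
+//   StringVec1d msgs = warnings.GetAndClearWarnings();
+//   // msgs holds the unique messages; the internal list is now empty.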
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/front_end_warnings.h b/src/ui_interface/front_end_warnings.h
new file mode 100644
index 0000000..97fe3a6
--- /dev/null
+++ b/src/ui_interface/front_end_warnings.h
@@ -0,0 +1,33 @@
+// $Id: front_end_warnings.h,v 1.3 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef FRONT_END_WARNINGS_H
+#define FRONT_END_WARNINGS_H
+
+#include <string>
+#include <vector>
+
+class FrontEndWarnings
+{
+  private:
+    std::vector<std::string>    m_warnings;
+
+  public:
+    FrontEndWarnings();
+    virtual ~FrontEndWarnings();
+    std::vector<std::string> GetAndClearWarnings();
+    void      AddWarning(std::string warnmsg);
+
+};
+
+#endif  // FRONT_END_WARNINGS_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/prior_interface.cpp b/src/ui_interface/prior_interface.cpp
new file mode 100644
index 0000000..7761a0e
--- /dev/null
+++ b/src/ui_interface/prior_interface.cpp
@@ -0,0 +1,303 @@
+// $Id: prior_interface.cpp,v 1.13 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+
+#include "prior_interface.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+uiParameterUseDefaultPrior::uiParameterUseDefaultPrior()
+    : SetGetBool(uistr::priorUseDefault)
+{
+}
+
+uiParameterUseDefaultPrior::~uiParameterUseDefaultPrior()
+{
+}
+
+bool uiParameterUseDefaultPrior::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetUseDefaultPrior(id.GetForceType(), id.GetIndex1());
+}
+
+void uiParameterUseDefaultPrior::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.forces.SetUseDefaultPrior(val, id.GetForceType(), id.GetIndex1());
+}
+
+//------------------------------------------------------------------------------------
+
+uiParameterPriorType::uiParameterPriorType()
+    : SetGetPriorType(uistr::priorType)
+{
+}
+
+uiParameterPriorType::~uiParameterPriorType()
+{
+}
+
+priortype uiParameterPriorType::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetPriorType(id.GetForceType(), id.GetIndex1());
+}
+
+void uiParameterPriorType::Set(UIVars& vars, UIId id, priortype val)
+{
+    vars.forces.SetPriorType(val, id.GetForceType(), id.GetIndex1());
+}
+
+//------------------------------------------------------------------------------------
+
+uiParameterLowerBound::uiParameterLowerBound()
+    : SetGetDouble(uistr::priorLowerBound)
+{
+}
+
+uiParameterLowerBound::~uiParameterLowerBound()
+{
+}
+
+double uiParameterLowerBound::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetLowerBound(id.GetForceType(), id.GetIndex1());
+}
+
+void uiParameterLowerBound::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetLowerBound(val, id.GetForceType(), id.GetIndex1());
+}
+
+string uiParameterLowerBound::Min(UIVars& vars, UIId id)
+{
+    switch(id.GetForceType())
+    {
+        case force_COAL:
+            return ToString(defaults::minboundTheta);
+            break;
+        case force_MIG:
+            return ToString(defaults::minboundMig);
+            break;
+        case force_DIVMIG:
+            return ToString(defaults::minboundDivMig);
+            break;
+        case force_DIVERGENCE:
+            return ToString(defaults::minboundEpoch);
+            break;
+        case force_DISEASE:
+            return ToString(defaults::minboundDisease);
+            break;
+        case force_REC:
+            return ToString(defaults::minboundRec);
+            break;
+        case force_GROW:
+        case force_EXPGROWSTICK:
+            return ToString(defaults::minboundGrowth);
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            return ToString(defaults::minboundLSelect);
+            break;
+        case force_REGION_GAMMA:
+        {
+            string msg = "uiParameterLowerBound::Min() attempted to set the lower ";
+            msg += "bound for the prior on the alpha parameter of the gamma \"force.\"";
+            msg += "Unfortunately the gamma can\'t be used for Bayesian analyses, ";
+            msg += "only likelihood analyses.";
+            throw implementation_error(msg);
+        }
+        break;
+        case force_NONE:
+            assert(false);
+            return ToString(defaults::minboundTheta);
+            break;
+    }
+    assert(false); //uncaught force type;
+    return ToString(defaults::minboundTheta);
+}
+
+string uiParameterLowerBound::Max(UIVars& vars, UIId id)
+{
+    return ToString(vars.forces.GetUpperBound(id.GetForceType(), id.GetIndex1()));
+}
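+
+// Note: Min() above reports a force-specific floor (e.g. minboundTheta for
+// coalescence) while Max() reports the parameter's current upper bound;
+// uiParameterUpperBound below mirrors this, presumably so the two bounds
+// cannot be set to cross each other.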
+
+//------------------------------------------------------------------------------------
+
+uiParameterUpperBound::uiParameterUpperBound()
+    : SetGetDouble(uistr::priorUpperBound)
+{
+}
+
+uiParameterUpperBound::~uiParameterUpperBound()
+{
+}
+
+double uiParameterUpperBound::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetUpperBound(id.GetForceType(), id.GetIndex1());
+}
+
+void uiParameterUpperBound::Set(UIVars& vars, UIId id, double val)
+{
+    vars.forces.SetUpperBound(val, id.GetForceType(), id.GetIndex1());
+}
+
+string uiParameterUpperBound::Min(UIVars& vars, UIId id)
+{
+    return ToString(vars.forces.GetLowerBound(id.GetForceType(), id.GetIndex1()));
+}
+
+string uiParameterUpperBound::Max(UIVars& vars, UIId id)
+{
+    switch(id.GetForceType())
+    {
+        case force_COAL:
+            return ToString(defaults::maxboundTheta);
+            break;
+        case force_DIVMIG:
+            return ToString(defaults::maxboundDivMig);
+            break;
+        case force_DIVERGENCE:
+            return ToString(defaults::maxboundEpoch);
+            break;
+        case force_MIG:
+            return ToString(defaults::maxboundMig);
+            break;
+        case force_DISEASE:
+            return ToString(defaults::maxboundDisease);
+            break;
+        case force_REC:
+            return ToString(defaults::maxboundRec);
+            break;
+        case force_GROW:
+        case force_EXPGROWSTICK:
+            return ToString(defaults::maxboundGrowth);
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            return ToString(defaults::maxboundLSelect);
+            break;
+        case force_REGION_GAMMA:
+        {
+            string msg = "uiParameterUpperBound::Max() attempted to set the upper ";
+            msg += "bound for the prior on the alpha parameter of the gamma \"force.\"";
+            msg += "Unfortunately the gamma can\'t be used for Bayesian analyses, ";
+            msg += "only likelihood analyses.";
+            throw implementation_error(msg);
+        }
+        break;
+        case force_NONE:
+            assert(false);
+            return ToString(defaults::maxboundTheta);
+            break;
+    }
+    assert(false); //uncaught force type;
+    return ToString(defaults::maxboundTheta);
+}
+
+//------------------------------------------------------------------------------------
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+uiParameterRelativeSampling::uiParameterRelativeSampling()
+    : SetGetLong(uistr::relativeSampleRate)
+{
+}
+
+uiParameterRelativeSampling::~uiParameterRelativeSampling()
+{
+}
+
+long uiParameterRelativeSampling::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetRelativeSampling(id.GetForceType(), id.GetIndex1());
+}
+
+void uiParameterRelativeSampling::Set(UIVars& vars, UIId id, long val)
+{
+    vars.forces.SetRelativeSampling(val, id.GetForceType(), id.GetIndex1());
+}
+#endif
+
+//------------------------------------------------------------------------------------
+
+uiPriorByForce::uiPriorByForce()
+    : SetGetNoval(uistr::priorByForce)
+{
+}
+
+uiPriorByForce::~uiPriorByForce()
+{
+}
+
+string uiPriorByForce::Description(UIVars& vars, UIId id)
+{
+    string retVal = SetGetNoval::Description(vars,id);
+    retVal += vars.datapackplus.GetParamNameOfForce(id.GetForceType());
+    return retVal;
+}
+
+string uiPriorByForce::GetPrintString(UIVars& vars, UIId id)
+{
+    return vars.forces.GetPriorTypeSummaryDescription(id.GetForceType());
+}
+
+//------------------------------------------------------------------------------------
+
+uiPriorById::uiPriorById()
+    : SetGetNoval(uistr::priorByID)
+{
+}
+
+uiPriorById::~uiPriorById()
+{
+}
+
+string uiPriorById::Description(UIVars& vars, UIId id)
+{
+    return vars.GetParamNameWithConstraint(id.GetForceType(),id.GetIndex1());
+}
+
+string uiPriorById::GetPrintString(UIVars& vars, UIId id)
+{
+    return vars.forces.GetPriorTypeSummaryDescription(id.GetForceType(), id.GetIndex1());
+}
+
+//------------------------------------------------------------------------------------
+
+uiUseDefaultPriorsForForce::uiUseDefaultPriorsForForce()
+    : SetGetNoval(uistr::useDefaultPriorsForForce)
+{
+}
+
+uiUseDefaultPriorsForForce::~uiUseDefaultPriorsForForce()
+{
+}
+
+void uiUseDefaultPriorsForForce::Set(UIVars& vars, UIId id, noval val)
+{
+    force_type ft = id.GetForceType();
+    vars.forces.SetUseDefaultPriorsForForce(ft);
+}
+
+string uiUseDefaultPriorsForForce::Description(UIVars& vars, UIId id)
+{
+    string retVal = SetGetNoval::Description(vars,id);
+    retVal += vars.datapackplus.GetParamNameOfForce(id.GetForceType());
+    return retVal;
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/prior_interface.h b/src/ui_interface/prior_interface.h
new file mode 100644
index 0000000..07cdedb
--- /dev/null
+++ b/src/ui_interface/prior_interface.h
@@ -0,0 +1,110 @@
+// $Id: prior_interface.h,v 1.8 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef PRIOR_INTERFACE_H
+#define PRIOR_INTERFACE_H
+
+#include <string>
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+#include "setget.h"
+
+class UIVars;
+
+class uiParameterUseDefaultPrior : public SetGetBool
+{
+  public:
+    uiParameterUseDefaultPrior();
+    virtual ~uiParameterUseDefaultPrior();
+    virtual bool     Get(UIVars& vars, UIId id);
+    virtual void     Set(UIVars& vars, UIId id, bool val);
+};
+
+class uiParameterPriorType : public SetGetPriorType
+{
+  public:
+    uiParameterPriorType();
+    virtual ~uiParameterPriorType();
+    virtual priortype   Get(UIVars& vars, UIId id);
+    virtual void        Set(UIVars& vars, UIId id, priortype val);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiParameterLowerBound : public SetGetDouble
+{
+  public:
+    uiParameterLowerBound();
+    virtual ~uiParameterLowerBound();
+    virtual double      Get(UIVars& vars, UIId id);
+    virtual void        Set(UIVars& vars, UIId id, double val);
+    virtual string      Min(UIVars& vars, UIId id);
+    virtual string      Max(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+
+class uiParameterUpperBound : public SetGetDouble
+{
+  public:
+    uiParameterUpperBound();
+    virtual ~uiParameterUpperBound();
+    virtual double      Get(UIVars& vars, UIId id);
+    virtual void        Set(UIVars& vars, UIId id, double val);
+    virtual string      Min(UIVars& vars, UIId id);
+    virtual string      Max(UIVars& vars, UIId id);
+};
+
+//------------------------------------------------------------------------------------
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+class uiParameterRelativeSampling : public SetGetLong
+{
+  public:
+    uiParameterRelativeSampling();
+    virtual ~uiParameterRelativeSampling();
+    virtual long        Get(UIVars& vars, UIId id);
+    virtual void        Set(UIVars& vars, UIId id, long val);
+};
+#endif
+
+//------------------------------------------------------------------------------------
+
+class uiPriorByForce : public SetGetNoval
+{
+  public:
+    uiPriorByForce();
+    virtual ~uiPriorByForce();
+    string Description(UIVars& vars, UIId id);
+    string GetPrintString(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id, noval val) {};
+};
+
+class uiPriorById : public SetGetNoval
+{
+  public:
+    uiPriorById();
+    virtual ~uiPriorById();
+    string Description(UIVars& vars, UIId id);
+    string GetPrintString(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id, noval val) {};
+};
+
+class uiUseDefaultPriorsForForce : public SetGetNoval
+{
+  public:
+    uiUseDefaultPriorsForForce();
+    virtual ~uiUseDefaultPriorsForForce();
+    void Set(UIVars& vars, UIId id, noval val);
+    string Description(UIVars& vars, UIId id);
+};
+
+#endif  // PRIOR_INTERFACE_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/profile_interface.cpp b/src/ui_interface/profile_interface.cpp
new file mode 100644
index 0000000..a3c82ac
--- /dev/null
+++ b/src/ui_interface/profile_interface.cpp
@@ -0,0 +1,315 @@
+// $Id: profile_interface.cpp,v 1.27 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+#include <iostream>
+
+#include "constants.h"
+#include "profile_interface.h"
+#include "stringx.h"
+#include "ui_id.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+#include "ui_vars_forces.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+uiGlobalProfileOn::uiGlobalProfileOn()
+    : SetGetNoval(uistr::allProfilesOn)
+{
+}
+
+uiGlobalProfileOn::~uiGlobalProfileOn()
+{
+}
+
+void uiGlobalProfileOn::Set(UIVars& vars, UIId id,noval val)
+{
+    vars.forces.SetDoProfile(true);
+}
+
+//------------------------------------------------------------------------------------
+
+uiGlobalProfileOff::uiGlobalProfileOff()
+    : SetGetNoval(uistr::allProfilesOff)
+{
+}
+
+uiGlobalProfileOff::~uiGlobalProfileOff()
+{
+}
+
+void uiGlobalProfileOff::Set(UIVars& vars, UIId id,noval val)
+{
+    vars.forces.SetDoProfile(false);
+}
+
+//------------------------------------------------------------------------------------
+
+uiGlobalProfilePercentile::uiGlobalProfilePercentile()
+    : SetGetNoval(uistr::allProfilesPercentile)
+{
+}
+
+uiGlobalProfilePercentile::~uiGlobalProfilePercentile()
+{
+}
+
+void uiGlobalProfilePercentile::Set(UIVars& vars, UIId id,noval val)
+{
+    vars.forces.SetProfileType(profile_PERCENTILE);
+}
+
+//------------------------------------------------------------------------------------
+
+uiGlobalProfileFixed::uiGlobalProfileFixed()
+    : SetGetNoval(uistr::allProfilesFixed)
+{
+}
+
+uiGlobalProfileFixed::~uiGlobalProfileFixed()
+{
+}
+
+void uiGlobalProfileFixed::Set(UIVars& vars, UIId id,noval val)
+{
+    vars.forces.SetProfileType(profile_FIX);
+}
+
+//------------------------------------------------------------------------------------
+
+uiProfileByID::uiProfileByID()
+    : SetGetBool(uistr::profileByID)
+{
+}
+
+uiProfileByID::~uiProfileByID()
+{
+}
+
+string uiProfileByID::Description(UIVars& vars, UIId id)
+{
+    // EWFIX.P5 DIMENSIONS -- will change if we divide up 2-d params into 2-D storage
+    return vars.GetParamNameWithConstraint(id.GetForceType(),id.GetIndex1());
+}
+
+bool uiProfileByID::Get(UIVars& vars, UIId id)
+{
+    force_type thisForce = id.GetForceType();
+    long thisIndex = id.GetIndex1();
+    return vars.forces.GetDoProfile(thisForce,thisIndex);
+}
+
+void uiProfileByID::Set(UIVars& vars, UIId id, bool val)
+{
+    force_type thisForce = id.GetForceType();
+    long thisIndex = id.GetIndex1();
+    vars.forces.SetDoProfile(val,thisForce,thisIndex);
+}
+
+string uiProfileByID::MakePrintString(UIVars& vars, bool val)
+{
+    if(val) return "Enabled";
+    return "Disabled";
+};
+
+//------------------------------------------------------------------------------------
+
+uiProfileByForce::uiProfileByForce()
+    : SetGetProftype(uistr::profileByForce)
+{
+}
+
+uiProfileByForce::~uiProfileByForce()
+{
+}
+
+proftype uiProfileByForce::Get(UIVars& vars, UIId id)
+{
+    return vars.forces.GetProfileType(id.GetForceType());
+}
+
+void uiProfileByForce::Set(UIVars& vars, UIId id, proftype val)
+{
+    vars.forces.SetProfileType(val,id.GetForceType());
+}
+
+string uiProfileByForce::Description(UIVars& vars, UIId id)
+{
+    return uistr::profileByForce
+        + vars.datapackplus.GetParamNameOfForce(id.GetForceType());
+}
+
+string uiProfileByForce::GetPrintString(UIVars& vars, UIId id)
+{
+    return vars.forces.GetProfileTypeSummaryDescription(id.GetForceType());
+}
+
+//------------------------------------------------------------------------------------
+
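+// Gathers the UIIds of every parameter of the given force that the backend
+// reports as both valid and unique (GetParamValid / GetParamUnique), giving
+// callers one entry per usable parameter.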
+UIIdVec1d uiValidParamsForOneForce::Get(UIVars& vars, UIId id)
+{
+    force_type thisForce = id.GetForceType();
+    long numPossibleParams = vars.forces.GetNumParameters(thisForce);
+    UIIdVec1d validParams;
+    for(long localId=0; localId < numPossibleParams; localId++)
+    {
+        if(vars.forces.GetParamValid(thisForce,localId))
+        {
+            if (vars.forces.GetParamUnique(thisForce, localId))
+            {
+                validParams.push_back(UIId(thisForce,localId));
+            }
+        }
+    }
+    return validParams;
+}
+
+uiValidParamsForOneForce::uiValidParamsForOneForce()
+    : GetUIIdVec1d(uistr::validParamsForForce)
+{
+}
+
+uiValidParamsForOneForce::~uiValidParamsForOneForce()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+UIIdVec1d
+uiValidForces::Get(UIVars& vars, UIId id)
+{
+    ForceTypeVec1d activeForces = vars.forces.GetActiveForces();
+    ForceTypeVec1d::iterator iter;
+    UIIdVec1d uiids;
+
+    for(iter=activeForces.begin(); iter != activeForces.end(); iter++)
+    {
+        force_type ft = *iter;
+        uiids.push_back(UIId(ft));
+    }
+    return uiids;
+}
+
+uiValidForces::uiValidForces()
+    : GetUIIdVec1d(uistr::validForces)
+{
+}
+
+uiValidForces::~uiValidForces()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void
+uiForceProfilesOff::Set(UIVars& vars, UIId id, noval val)
+{
+    force_type ft = id.GetForceType();
+    vars.forces.SetDoProfile(false,ft);
+}
+
+uiForceProfilesOff::uiForceProfilesOff()
+    : SetGetNoval(uistr::oneForceProfilesOff)
+{
+}
+
+uiForceProfilesOff::~uiForceProfilesOff()
+{
+}
+
+string
+uiForceProfilesOff::Description(UIVars& vars, UIId id)
+{
+    string retVal = SetGetNoval::Description(vars,id);
+    retVal += vars.datapackplus.GetParamNameOfForce(id.GetForceType());
+    return retVal;
+}
+
+void
+uiForceProfilesOn::Set(UIVars& vars, UIId id, noval val)
+{
+    force_type ft = id.GetForceType();
+    vars.forces.SetDoProfile(true,ft);
+}
+
+uiForceProfilesOn::uiForceProfilesOn()
+    : SetGetNoval(uistr::oneForceProfilesOn)
+{
+}
+
+uiForceProfilesOn::~uiForceProfilesOn()
+{
+}
+
+string
+uiForceProfilesOn::Description(UIVars& vars, UIId id)
+{
+    string retVal = SetGetNoval::Description(vars,id);
+    retVal += vars.datapackplus.GetParamNameOfForce(id.GetForceType());
+    return retVal;
+}
+
+proftype
+uiForceProfileType::Get(UIVars& vars, UIId id)
+{
+    force_type ft = id.GetForceType();
+    return vars.forces.GetProfileType(ft);
+}
+
+void
+uiForceProfileType::Set(UIVars& vars, UIId id, proftype ptype)
+{
+    force_type ft = id.GetForceType();
+    vars.forces.SetProfileType(ptype,ft);
+}
+
+uiForceProfileType::uiForceProfileType()
+    : SetGetProftype(uistr::oneForceProfileType)
+{
+}
+
+uiForceProfileType::~uiForceProfileType()
+{
+}
+
+string
+uiForceProfileType::Description(UIVars& vars, UIId id)
+{
+    string retVal = SetGetProftype::Description(vars,id);
+    retVal += vars.datapackplus.GetParamNameOfForce(id.GetForceType());
+    return retVal;
+}
+
+string
+uiForceProfileType::NextToggleValue(UIVars& vars, UIId id)
+{
+    switch(Get(vars,id))
+    {
+        case profile_NONE:
+            // Should never happen, but we could recover.
+            // Let's assert in the debug case, but
+            // fall through to recover for release
+            assert(false);
+        case profile_PERCENTILE:
+            return ToString(profile_FIX);
+            break;
+        case profile_FIX:
+            return ToString(profile_PERCENTILE);
+            break;
+    }
+    throw implementation_error("uiForceProfileType::NextToggleValue bad switch case");
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/profile_interface.h b/src/ui_interface/profile_interface.h
new file mode 100644
index 0000000..2f99c8f
--- /dev/null
+++ b/src/ui_interface/profile_interface.h
@@ -0,0 +1,130 @@
+// $Id: profile_interface.h,v 1.21 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef PROFILE_INTERFACE_H
+#define PROFILE_INTERFACE_H
+
+#include <string>
+#include "setget.h"
+
+class UIVars;
+
+class uiGlobalProfileOn : public SetGetNoval
+{
+  public:
+    uiGlobalProfileOn();
+    virtual ~uiGlobalProfileOn();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiGlobalProfileOff : public SetGetNoval
+{
+  public:
+    uiGlobalProfileOff();
+    virtual ~uiGlobalProfileOff();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiGlobalProfilePercentile : public SetGetNoval
+{
+  public:
+    uiGlobalProfilePercentile();
+    virtual ~uiGlobalProfilePercentile();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiGlobalProfileFixed : public SetGetNoval
+{
+  public:
+    uiGlobalProfileFixed();
+    virtual ~uiGlobalProfileFixed();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiProfileByForce : public SetGetProftype
+{
+  public:
+    uiProfileByForce();
+    virtual ~uiProfileByForce();
+    virtual proftype Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,proftype val);
+    virtual string Description(UIVars& vars, UIId id);
+    virtual std::string GetPrintString(UIVars& vars, UIId id);
+};
+
+class uiProfileByID : public SetGetBool
+{
+  public:
+    uiProfileByID();
+    virtual ~uiProfileByID();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,bool val);
+    virtual string Description(UIVars& vars, UIId id);
+    virtual std::string MakePrintString(UIVars& vars, bool val);
+};
+
+class uiValidParamsForOneForce : public GetUIIdVec1d
+{
+  public:
+    uiValidParamsForOneForce();
+    virtual ~uiValidParamsForOneForce();
+    virtual UIIdVec1d Get(UIVars& vars, UIId id);
+};
+
+class uiValidForces : public GetUIIdVec1d
+{
+  public:
+    uiValidForces();
+    virtual ~uiValidForces();
+    virtual UIIdVec1d Get(UIVars& vars, UIId id);
+};
+
+class uiProfileOnOffByID : public SetGetBool
+{
+  public:
+    uiProfileOnOffByID();
+    virtual ~uiProfileOnOffByID();
+    virtual bool Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id, bool val);
+};
+
+class uiForceProfilesOn : public SetGetNoval
+{
+  public:
+    uiForceProfilesOn();
+    virtual ~uiForceProfilesOn();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+    virtual string Description(UIVars& vars, UIId id);
+};
+
+class uiForceProfilesOff : public SetGetNoval
+{
+  public:
+    uiForceProfilesOff();
+    virtual ~uiForceProfilesOff();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+    virtual string Description(UIVars& vars, UIId id);
+};
+
+class uiForceProfileType : public SetGetProftype
+{
+  public:
+    uiForceProfileType();
+    virtual ~uiForceProfileType();
+    virtual string Description(UIVars& vars, UIId id);
+    virtual proftype Get(UIVars& vars, UIId id);
+    virtual void Set(UIVars& vars, UIId id,proftype val);
+    virtual std::string NextToggleValue(UIVars& vars, UIId id);
+};
+
+#endif  // PROFILE_INTERFACE_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/setget.h b/src/ui_interface/setget.h
new file mode 100644
index 0000000..1e9c328
--- /dev/null
+++ b/src/ui_interface/setget.h
@@ -0,0 +1,617 @@
+// $Id: setget.h,v 1.45 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef SETGET_H
+#define SETGET_H
+
+#include <cassert>
+#include <string>
+
+#include "constants.h"
+#include "datatype.h"
+#include "errhandling.h"
+#include "parameter.h"
+#include "stringx.h"
+#include "rangex.h"
+#include "ui_constants.h"
+#include "ui_id.h"
+#include "paramstat.h"
+
+//------------------------------------------------------------------------------------
+
+class UIVars;
+
+// Classes defined here are for setting and getting values
+// from the "backend" code.
+
+// very basic virtual class. The parent of all SetGetTemplate<> objects
+class SetGet
+{
+  protected:
+    const std::string & uikey;
+  public:
+    SetGet(const std::string & key) : uikey(key) {};
+    virtual ~SetGet() {};
+    // UIKey should be unique -- it is used to create and
+    // access a single instance of this Getter/Setter,
+    // and should describe what this getter/setter gets/sets.
+    // Each key should be defined as a constant string in the
+    // uistr class.
+    virtual const string & UIKey()
+    { return uikey;};
+    // Get a string describing what will be set/gotten
+    virtual string Description(UIVars& vars, UIId id)
+    { return UIKey();};
+    //Get a string describing the min/max values possible.
+    virtual string Min(UIVars& vars, UIId id) { return ToString(FLAGDOUBLE);};
+    virtual string Max(UIVars& vars, UIId id) { return ToString(FLAGDOUBLE);};
+    // Get a string that represents the value stored in the backend
+    virtual std::string GetPrintString(UIVars& vars, UIId id) = 0;
+    virtual std::string NextToggleValue(UIVars& vars, UIId id)
+    { throw implementation_error("no toggle possible");};
+    // returns true if the values associated with id for this
+    // menu item are internally consistent -- used to verify
+    // that two or more values are mutually OK.
+    virtual bool IsConsistent(UIVars& vars, UIId id)
+    {
+        return true;
+    };
+    virtual void SetFromString(UIVars& vars, UIId id, string stringVal)
+    { throw implementation_error("no SetFromString possible");};
+};
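+
+// Typical use from a front end (sketch only; how the SetGet object is looked
+// up by its uistr key is not shown here -- see SetGetMachine):
+//
+//   SetGet& sg = ...;                              // one instance per uistr key
+//   string label = sg.Description(vars, id);       // menu label
+//   string shown = sg.GetPrintString(vars, id);    // current backend value
+//   sg.SetFromString(vars, id, userInput);         // push the user's input back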
+
+//------------------------------------------------------------------------------------
+// Basic methods instructing how to get and set a single
+// value.
+template<class ValType>
+class GetTemplate : public SetGet
+{
+  public:
+    GetTemplate(const std::string & key): SetGet(key) {};
+    virtual ~GetTemplate() {};
+    // reaches into the backend code to get a single value
+    virtual ValType Get(UIVars& vars, UIId id) = 0;
+    // default implementation -- makes a menu-printable string
+    // out of the id'th associated variable
+    virtual std::string GetPrintString(UIVars& vars, UIId id)
+    { return MakePrintString(vars,Get(vars,id));};
+    // how these values should be printed out
+    virtual std::string MakePrintString(UIVars& vars,ValType v)
+    { return ToString(v);};
+};
+
+template<class ValType>
+class SetGetTemplate : public GetTemplate<ValType>
+{
+  public:
+    SetGetTemplate(const std::string & key): GetTemplate<ValType>(key) {};
+    virtual ~SetGetTemplate() {};
+    // reaches into the backend code to set a single value
+    virtual void Set(UIVars& vars,UIId id,ValType value) = 0;
+    // converts stringVal to ValType and uses ::Set
+    virtual void SetFromString(UIVars& vars, UIId id, string stringVal)
+    { Set(vars,id,GetValFromString(vars,stringVal));};
+    // GetValFromString converts a string to ValType
+    virtual ValType GetValFromString(UIVars&,string) = 0;
+
+};
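+
+// In practice the typed intermediates below (SetGetBool, SetGetDouble, ...)
+// supply GetValFromString, while concrete leaf classes elsewhere in
+// src/ui_interface (e.g. uiParameterLowerBound, uiProfileByID) override
+// Get()/Set() to reach into UIVars.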
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<bool> defines only how to get
+// a bool value from string input. All classes getting
+// a bool value should inherit from this one.
+class GetBool : public GetTemplate<bool>
+{
+  public:
+    GetBool(const std::string & key)
+        : GetTemplate<bool>(key) {};
+    virtual ~GetBool() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<data_type>
+class GetDataType : public GetTemplate<data_type>
+{
+  public:
+    GetDataType(const std::string & key)
+        : GetTemplate<data_type>(key) {};
+    virtual ~GetDataType() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<DoubleVec1d> defines only how to get
+// a DoubleVec1d value from string input. All classes getting
+// a DoubleVec1d value should inherit from this one.
+class GetDoubleVec1d : public GetTemplate<DoubleVec1d>
+{
+  public:
+    GetDoubleVec1d(const std::string & key)
+        : GetTemplate<DoubleVec1d>(key) {};
+    virtual ~GetDoubleVec1d() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<ForceTypeVec1d> defines only how to get
+// a ForceTypeVec1d value from string input. All classes getting
+// a ForceTypeVec1d value should inherit from this one.
+class GetForceTypeVec1d : public GetTemplate<ForceTypeVec1d>
+{
+  public:
+    GetForceTypeVec1d(const std::string & key)
+        : GetTemplate<ForceTypeVec1d>(key) {};
+    virtual ~GetForceTypeVec1d() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<long int> defines only how to get
+// a long int value from string input. All classes getting
+// a long int value should inherit from this one.
+class GetLong : public GetTemplate<long int>
+{
+  public:
+    GetLong(const std::string & key)
+        : GetTemplate<long int>(key) {};
+    virtual ~GetLong() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<LongVec1d> defines only how to get
+// a LongVec1d value from string input. All classes getting
+// a LongVec1d value should inherit from this one.
+class GetLongVec1d : public GetTemplate<LongVec1d>
+{
+  public:
+    GetLongVec1d(const std::string & key)
+        : GetTemplate<LongVec1d>(key) {};
+    virtual ~GetLongVec1d() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<MethodTypeVec1d> defines only how to get
+// a MethodTypeVec1d value from string input. All classes getting
+// a MethodTypeVec1d value should inherit from this one.
+class GetMethodTypeVec1d : public GetTemplate<MethodTypeVec1d>
+{
+  public:
+    GetMethodTypeVec1d(const std::string & key)
+        : GetTemplate<MethodTypeVec1d>(key) {};
+    virtual ~GetMethodTypeVec1d() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<paramlistcondition> defines only how to get
+// a paramlistcondition value from string input. All classes getting
+// a paramlistcondition value should inherit from this one.
+class GetParamlistcondition : public GetTemplate<paramlistcondition>
+{
+  public:
+    GetParamlistcondition(const std::string & key)
+        : GetTemplate<paramlistcondition>(key) {};
+    virtual ~GetParamlistcondition() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<std::string> defines only how to get
+// a std::string value from string input. All classes getting
+// a std::string value should inherit from this one.
+class GetString : public GetTemplate<std::string>
+{
+  public:
+    GetString(const std::string & key)
+        : GetTemplate<std::string>(key) {};
+    virtual ~GetString() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<StringVec1d> defines only how to get
+// a StringVec1d value from string input. All classes getting
+// a StringVec1d value should inherit from this one.
+class GetStringVec1d : public GetTemplate<StringVec1d>
+{
+  public:
+    GetStringVec1d(const std::string & key)
+        : GetTemplate<StringVec1d>(key) {};
+    virtual ~GetStringVec1d() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<UIIdVec1d> defines only how to get
+// a UIIdVec1d value from string input. All classes getting
+// a UIIdVec1d value should inherit from this one.
+class GetUIIdVec1d : public GetTemplate<UIIdVec1d>
+{
+  public:
+    GetUIIdVec1d(const std::string & key)
+        : GetTemplate<UIIdVec1d>(key) {};
+    virtual ~GetUIIdVec1d() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of GetTemplate<UIIdVec2d> defines only how to get
+// a UIIdVec2d value from string input. All classes getting
+// a UIIdVec2d value should inherit from this one.
+class GetUIIdVec2d : public GetTemplate<UIIdVec2d>
+{
+  public:
+    GetUIIdVec2d(const std::string & key)
+        : GetTemplate<UIIdVec2d>(key) {};
+    virtual ~GetUIIdVec2d() {};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<bool> defines only how to get
+// a bool value from string input. All classes setting/getting
+// a bool value should inherit from this one.
+class SetGetBool : public SetGetTemplate<bool>
+{
+  public:
+    SetGetBool(const std::string & key): SetGetTemplate<bool>(key) {};
+    virtual ~SetGetBool() {};
+    virtual bool GetValFromString(UIVars& vars,string input)
+    { return ProduceBoolOrBarf(input);};
+    virtual std::string NextToggleValue(UIVars& vars,UIId id)
+    { bool v = Get(vars,id); return ToString(!v);};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<double> defines only how to get
+// a double value from string input. All classes setting/getting
+// a double value should inherit from this one.
+class SetGetDouble : public SetGetTemplate<double>
+{
+  public:
+    SetGetDouble(const std::string & key): SetGetTemplate<double>(key) {};
+    virtual ~SetGetDouble() {};
+    virtual double GetValFromString(UIVars& vars,string input)
+    { return ProduceDoubleOrBarf(input);};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<growth_scheme> defines only how to get
+// a growth_scheme value from string input. All classes setting/getting
+// a growth_scheme value should inherit from this one.
+class SetGetGrowthScheme : public SetGetTemplate<growth_scheme>
+{
+  public:
+    SetGetGrowthScheme(const std::string & key): SetGetTemplate<growth_scheme>(key) {};
+    virtual ~SetGetGrowthScheme() {};
+    virtual growth_scheme GetValFromString(UIVars& vars,string input)
+    { return ProduceGrowthSchemeOrBarf(input);};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<growth_type> defines only how to get
+// a growth_type value from string input. All classes setting/getting
+// a growth_type value should inherit from this one.
+class SetGetGrowthType : public SetGetTemplate<growth_type>
+{
+  public:
+    SetGetGrowthType(const std::string & key): SetGetTemplate<growth_type>(key) {};
+    virtual ~SetGetGrowthType() {};
+    virtual growth_type GetValFromString(UIVars& vars,string input)
+    { return ProduceGrowthTypeOrBarf(input);};
+};
+
+//-------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<selection_type> defines only how to get
+// a selection_type value from string input. All classes setting/getting
+// a selection_type value should inherit from this one.
+class SetGetSelectionType : public SetGetTemplate<selection_type>
+{
+  public:
+    SetGetSelectionType(const std::string & key): SetGetTemplate<selection_type>(key) {};
+    virtual ~SetGetSelectionType() {};
+    virtual selection_type GetValFromString(UIVars& vars,string input)
+    { return ProduceSelectionTypeOrBarf(input);};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<long int> defines only how to get
+// a long int value from string input. All classes setting/getting
+// a long int value should inherit from this one.
+class SetGetLong : public SetGetTemplate<long int>
+{
+  public:
+    SetGetLong(const std::string & key): SetGetTemplate<long int>(key) {};
+    virtual ~SetGetLong() {};
+    virtual long int GetValFromString(UIVars& vars,string input)
+    { return ProduceLongOrBarf(input);};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<method_type> defines only how to get
+// a method_type value from string input. All classes setting/getting
+// a method_type value should inherit from this one.
+class SetGetMethodType : public SetGetTemplate<method_type>
+{
+  public:
+    SetGetMethodType(const std::string & key): SetGetTemplate<method_type>(key) {};
+    virtual ~SetGetMethodType() {};
+    virtual method_type GetValFromString(UIVars& vars,string input)
+    { return ProduceMethodTypeOrBarf(input);};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<string> defines only how to get
+// a string value from string input. All classes setting/getting
+// a string value should inherit from this one.
+class SetGetString : public SetGetTemplate<std::string>
+{
+  public:
+    SetGetString(const std::string & key): SetGetTemplate<std::string>(key) {};
+    virtual ~SetGetString() {};
+    virtual string GetValFromString(UIVars& vars,string input)
+    { return input;};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<model_type> defines only how to get
+// a model_type value from string input. All classes setting/getting
+// a model_type value should inherit from this one.
+class SetGetModelType : public SetGetTemplate<model_type>
+{
+  public:
+    SetGetModelType(const std::string & key): SetGetTemplate<model_type>(key) {};
+    virtual ~SetGetModelType() {};
+    virtual model_type GetValFromString(UIVars& vars,string input)
+    { return ProduceModelTypeOrBarf(input);};
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<proftype> defines only how to get
+// a proftype value from string input. All classes setting/getting
+// a proftype value should inherit from this one.
+class SetGetProftype : public SetGetTemplate<proftype>
+{
+  public:
+    SetGetProftype(const std::string & key): SetGetTemplate<proftype>(key) {};
+    virtual ~SetGetProftype() {};
+    virtual proftype GetValFromString(UIVars& vars,string input)
+    { return ProduceProftypeOrBarf(input);};
+    virtual std::string NextToggleValue(UIVars& vars,UIId id)
+    {
+        switch(Get(vars,id))
+        {
+            case profile_NONE:
+                return ToString(profile_FIX);
+                break;
+            case profile_FIX:
+                return ToString(profile_PERCENTILE);
+                break;
+            case profile_PERCENTILE:
+                return ToString(profile_NONE);
+                break;
+        }
+        return "UNKNOWN";  // should not happen; this line was
+        // added to silence compiler warning
+    };
+};
+
+class SetGetIndividualParamstatus : public SetGetTemplate<ParamStatus>
+{
+  public:
+    SetGetIndividualParamstatus(const std::string & key): SetGetTemplate<ParamStatus>(key) {};
+    virtual ~SetGetIndividualParamstatus() {};
+    virtual ParamStatus GetValFromString(UIVars& vars,string input)
+    { return ProduceParamstatusOrBarf(input);};
+    virtual std::string NextToggleValue(UIVars& vars,UIId id)
+    {
+        return (Get(vars,id).ToggleIndividualStatus(id.GetForceType()));
+    };
+};
+
+class SetGetGroupParamstatus : public SetGetTemplate<ParamStatus>
+{
+  public:
+    SetGetGroupParamstatus(const std::string & key): SetGetTemplate<ParamStatus>(key) {};
+    virtual ~SetGetGroupParamstatus() {};
+    virtual ParamStatus GetValFromString(UIVars& vars,string input)
+    { return ProduceParamstatusOrBarf(input);};
+    virtual std::string NextToggleValue(UIVars& vars,UIId id)
+    {
+        return (Get(vars,id).ToggleGroupStatus(id.GetForceType()));
+    };
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<verbosity_type> defines only how to get
+// a verbosity_type value from string input. All classes setting/getting
+// a verbosity_type value should inherit from this one.
+class SetGetVerbosityType : public SetGetTemplate<verbosity_type>
+{
+  public:
+    SetGetVerbosityType(const std::string & key)
+        : SetGetTemplate<verbosity_type>(key) {};
+    virtual ~SetGetVerbosityType() {};
+    virtual verbosity_type GetValFromString(UIVars& vars,string input)
+    { return ProduceVerbosityTypeOrBarf(input);};
+    virtual std::string NextToggleValue(UIVars& vars, UIId id)
+    {
+        switch(Get(vars,id))
+        {
+            case NONE:
+                return ToString(CONCISE);
+                break;
+            case CONCISE:
+                return ToString(NORMAL);
+                break;
+            case NORMAL:
+                return ToString(VERBOSE);
+                break;
+            case VERBOSE:
+                return ToString(NONE);
+                break;
+        }
+        return "UNKNOWN";  // line should never be reached;
+        // it's here to silence compiler warnings
+    };
+};
+
+// Since we want to exclude 'none' as an option for output-file verbosity,
+// this class overrides NextToggleValue to skip over it (but it can still
+// handle 'none' as input if, say, it was read in from the input file).
+
+class SetGetVerbosityTypeNoNone : public SetGetTemplate<verbosity_type>
+{
+  public:
+    SetGetVerbosityTypeNoNone(const std::string & key)
+        : SetGetTemplate<verbosity_type>(key) {};
+    virtual ~SetGetVerbosityTypeNoNone() {};
+    virtual verbosity_type GetValFromString(UIVars& vars, string input)
+    { return ProduceVerbosityTypeOrBarf(input);};
+    virtual std::string NextToggleValue(UIVars& vars, UIId id)
+    {
+        switch(Get(vars,id))
+        {
+            case NONE:
+                return ToString(CONCISE);
+                break;
+            case CONCISE:
+                return ToString(NORMAL);
+                break;
+            case NORMAL:
+                return ToString(VERBOSE);
+                break;
+            case VERBOSE:
+                return ToString(CONCISE);
+                break;
+        }
+        return "UNKNOWN";  // line should never be reached;
+        // it's here to silence compiler warnings
+    };
+};
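+
+// For reference, SetGetVerbosityType cycles NONE -> CONCISE -> NORMAL ->
+// VERBOSE -> NONE, while the NoNone variant above wraps VERBOSE back to
+// CONCISE so that 'none' is never offered as the next value.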
+
+//For now, there are only two possible prior types, but there may be more in
+// the future.  If that changes, the bounds may need more information, too.
+class SetGetPriorType : public SetGetTemplate<priortype>
+{
+  public:
+    SetGetPriorType(const std::string & key)
+        : SetGetTemplate<priortype>(key) {};
+    virtual ~SetGetPriorType() {};
+    virtual priortype GetValFromString(UIVars& vars, string input)
+    { return ProducePriorTypeOrBarf(input);};
+    virtual std::string NextToggleValue(UIVars& vars, UIId id)
+    {
+        switch(Get(vars,id))
+        {
+            case LINEAR:
+                if (id.GetForceType()==force_GROW) {
+                    return ToString(LINEAR);
+                }
+                return ToString(LOGARITHMIC);
+                break;
+            case LOGARITHMIC:
+                return ToString(LINEAR);
+                break;
+        }
+        return "UNKNOWN";  // line should never be reached;
+        // it's here to silence compiler warnings
+    };
+};
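+
+// I.e. toggling flips LINEAR <-> LOGARITHMIC, except that growth (force_GROW)
+// stays LINEAR.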
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<LongVec1d> defines only how to get
+// a LongVec1d value from string input. All classes setting/getting
+// a LongVec1d value should inherit from this one.
+class SetGetLongVec1d : public SetGetTemplate<LongVec1d>
+{
+  public:
+    SetGetLongVec1d(const std::string & key)
+        : SetGetTemplate<LongVec1d>(key) {};
+    virtual ~SetGetLongVec1d() {};
+    virtual LongVec1d  GetValFromString(UIVars& vars, string input)
+    {   return ProduceLongVec1dOrBarf(input); };
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetTemplate<ProftypeVec1d> defines only how to get
+// a ProftypeVec1d value from string input. All classes setting/getting
+// a ProftypeVec1d value should inherit from this one.
+class SetGetProftypeVec1d : public SetGetTemplate<ProftypeVec1d>
+{
+  public:
+    SetGetProftypeVec1d(const std::string & key)
+        : SetGetTemplate<ProftypeVec1d>(key) {};
+    virtual ~SetGetProftypeVec1d() {};
+    virtual ProftypeVec1d  GetValFromString(UIVars& vars, string input)
+    {   return ProduceProftypeVec1dOrBarf(input); };
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetBool which prints "Enabled" for
+// a true value and "Disabled" for a false one
+class SetGetBoolEnabled : public SetGetBool
+{
+  public:
+    SetGetBoolEnabled(const std::string & key) : SetGetBool(key) {};
+    virtual ~SetGetBoolEnabled() {};
+    virtual std::string MakePrintString(UIVars& vars, bool val)
+    {
+        if(val) return "Enabled";
+        return "Disabled";
+    };
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetBool which prints "On" for
+// a true value and "Off" for a false one
+class SetGetBoolOnOff : public SetGetBool
+{
+  public:
+    SetGetBoolOnOff(const std::string & key) : SetGetBool(key) {};
+    virtual ~SetGetBoolOnOff() {};
+    virtual std::string MakePrintString(UIVars& vars, bool val)
+    {
+        if(val) return "On";
+        return "Off";
+    };
+};
+
+//------------------------------------------------------------------------------------
+// Refinement of SetGetBool which prints "Write" for
+// a true value and "Read" for a false one
+class SetGetBoolReadWrite : public SetGetBool
+{
+  public:
+    SetGetBoolReadWrite(const std::string & key) : SetGetBool(key) {};
+    virtual ~SetGetBoolReadWrite() {};
+    virtual std::string MakePrintString(UIVars& vars, bool val)
+    {
+        if(val) return "Write";
+        return "Read";
+    };
+};
+
+class SetGetNoval : public SetGetTemplate<noval>
+{
+  public:
+    SetGetNoval(const std::string & key)
+        : SetGetTemplate<noval>(key) {};
+    virtual ~SetGetNoval() {};
+    virtual noval Get(UIVars& vars, UIId id) { return noval_none;};
+    virtual noval GetValFromString(UIVars& vars, string input)
+    { return noval_none;};
+    virtual std::string NextToggleValue(UIVars& vars, UIId id)
+    { return ToString(noval_none);};
+};
+
+class SetGetProftypeSeries : public SetGetProftype
+{
+  public:
+    SetGetProftypeSeries(const std::string & key)
+        : SetGetProftype(key) {};
+    virtual ~SetGetProftypeSeries() {};
+};
+
+#endif  // SETGET_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/setgetmachine.cpp b/src/ui_interface/setgetmachine.cpp
new file mode 100644
index 0000000..e244a17
--- /dev/null
+++ b/src/ui_interface/setgetmachine.cpp
@@ -0,0 +1,385 @@
+// $Id: setgetmachine.cpp,v 1.95 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+#include <string>
+
+#include "local_build.h"
+
+#include "chainparam_interface.h"
+#include "constraint_interface.h"
+#include "data_interface.h"
+#include "datamodel_interface.h"
+#include "errhandling.h"
+#include "force_interface.h"
+#include "profile_interface.h"
+#include "prior_interface.h"
+#include "setgetmachine.h"
+#include "traitmodel_interface.h"
+#include "userparam_interface.h"
+#include "vectorx.h"
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+SetGetMachine::SetGetMachine()
+{
+    Init();
+}
+
+SetGetMachine::~SetGetMachine()
+{
+    map<string,SetGet*>::iterator mapiter;
+    for(mapiter=setters.begin(); mapiter != setters.end(); mapiter++)
+    {
+        delete (*mapiter).second;
+    }
+
+}
+
+void SetGetMachine::Init()
+{
+
+    addSetter               (new uiAdaptiveTemp());
+    addSetter               (new uiAddParamToGroup());
+    addSetter               (new uiAddParamToNewGroup());
+    addSetter               (new uiAddRangeForTraitModel());
+    addSetter               (new uiAlpha());
+    addSetter               (new uiAutoCorrelation());
+    addSetter               (new uiBaseFrequencies());
+    addSetter               (new uiBaseFrequencyA());
+    addSetter               (new uiBaseFrequencyC());
+    addSetter               (new uiBaseFrequencyG());
+    addSetter               (new uiBaseFrequencyT());
+    addSetter               (new uiBayesArranger());
+    addSetter               (new uiBayesianAnalysis());
+    addSetter               (new uiCanHapArrange());
+    addSetter               (new uiCategoryCount());
+    addSetter               (new uiCategoryProbability());
+    addSetter               (new uiCategoryRate());
+    addSetter               (new uiCoalescence());
+    addSetter               (new uiCoalescenceLegal());
+    addSetter               (new uiCoalescenceMaxEvents());
+    addSetter               (new uiCrossPartitionCount());
+    addSetter               (new uiCurveFileEnabled());
+    addSetter               (new uiNewickTreeFileEnabled());
+#ifdef LAMARC_QA_TREE_DUMP
+    addSetter               (new uiArgFileEnabled());
+    addSetter               (new uiManyArgFiles());
+#endif // LAMARC_QA_TREE_DUMP
+    addSetter               (new uiCurveFilePrefix());
+    addSetter               (new uiMapFilePrefix());
+    addSetter               (new uiReclocFilePrefix());
+    addSetter               (new uiTraceFilePrefix());
+    addSetter               (new uiNewickTreeFilePrefix());
+#ifdef LAMARC_QA_TREE_DUMP
+    addSetter               (new uiArgFilePrefix());
+#endif // LAMARC_QA_TREE_DUMP
+    addSetter               (new uiDataFileName());
+    addSetter               (new uiDataModel());
+    addSetter               (new uiDataModelReport());
+    addSetter               (new uiDataType());
+    addSetter               (new uiDisease());
+    addSetter               (new uiDiseaseByID());
+    addSetter               (new uiDiseaseGlobal());
+    addSetter               (new uiDiseaseInto());
+    addSetter               (new uiDiseaseLegal());
+    addSetter               (new uiDiseaseLocation());
+    addSetter               (new uiDiseaseMaxEvents());
+    addSetter               (new uiDiseasePartitionCount());
+    addSetter               (new uiDiseasePartitionName());
+    addSetter               (new uiDivergence());
+    addSetter               (new uiDivergenceLegal());
+    addSetter               (new uiDivergenceEpochAncestor());
+    addSetter               (new uiDivergenceEpochDescendents());
+    addSetter               (new uiDivergenceEpochBoundaryTime());
+    addSetter               (new uiDivergenceEpochCount());
+    addSetter               (new uiDivergenceEpochName());
+    addSetter               (new uiDivMigPartitionCount());
+    addSetter               (new uiDivMigPartitionName());
+    addSetter               (new uiDivMigration());
+    addSetter               (new uiDivMigrationGlobal());
+    addSetter               (new uiDivMigrationInto());
+    addSetter               (new uiDivMigrationLegal());
+    addSetter               (new uiDivMigrationMaxEvents());
+    addSetter               (new uiDivMigrationUser());
+    addSetter               (new uiDropArranger());
+    addSetter               (new uiEpochSizeArranger());
+    addSetter               (new uiFinalChains());
+    addSetter               (new uiFinalDiscard());
+    addSetter               (new uiFinalInterval());
+    addSetter               (new uiFinalSamples());
+    addSetter               (new uiForceLegal());
+    addSetter               (new uiForceOnOff());
+    addSetter               (new uiForceProfileType());
+    addSetter               (new uiForceProfilesOff());
+    addSetter               (new uiForceProfilesOn());
+    addSetter               (new uiFreqsFromData());
+    addSetter               (new uiFstTheta());
+    addSetter               (new uiGTRRateAC());
+    addSetter               (new uiGTRRateAG());
+    addSetter               (new uiGTRRateAT());
+    addSetter               (new uiGTRRateCG());
+    addSetter               (new uiGTRRateCT());
+    addSetter               (new uiGTRRateGT());
+    addSetter               (new uiGTRRates());
+    addSetter               (new uiGlobalGrowth());
+    addSetter               (new uiGlobalProfileFixed());
+    addSetter               (new uiGlobalProfileOff());
+    addSetter               (new uiGlobalProfileOn());
+    addSetter               (new uiGlobalProfilePercentile());
+    addSetter               (new uiGlobalTheta());
+    addSetter               (new uiGroupParameterList());
+    addSetter               (new uiGroupParameterStatus());
+    addSetter               (new uiGroupedParamsForOneForce());
+    addSetter               (new uiGrowth());
+    addSetter               (new uiGrowthLegal());
+    addSetter               (new uiGrowthMaxEvents());
+    addSetter               (new uiGrowthType());
+    addSetter               (new uiGrowthScheme());
+    addSetter               (new uiGrowthUser());
+    addSetter               (new uiHapArranger());
+    addSetter               (new uiHeatedChain());
+    addSetter               (new uiHeatedChainCount());
+    addSetter               (new uiHeatedChains());
+    addSetter               (new uiInitialChains());
+    addSetter               (new uiInitialDiscard());
+    addSetter               (new uiInitialInterval());
+    addSetter               (new uiInitialSamples());
+    addSetter               (new uiLociCount());
+    addSetter               (new uiLociNumbers());
+    addSetter               (new uiLocusArranger());
+    addSetter               (new uiLocusName());
+    addSetter               (new uiLogisticSelection());
+    addSetter               (new uiLogisticSelectionCoefficientUser());
+    addSetter               (new uiLogisticSelectionLegal());
+    addSetter               (new uiLogisticSelectionMaxEvents());
+    addSetter               (new uiLogisticSelectionType());
+    addSetter               (new uiMaxEvents());
+    addSetter               (new uiMigPartitionCount());
+    addSetter               (new uiMigPartitionName());
+    addSetter               (new uiMigration());
+    addSetter               (new uiMigrationFst());
+    addSetter               (new uiMigrationGlobal());
+    addSetter               (new uiMigrationInto());
+    addSetter               (new uiMigrationLegal());
+    addSetter               (new uiMigrationMaxEvents());
+    addSetter               (new uiMigrationUser());
+    addSetter               (new uiNormalization());
+    addSetter               (new uiNumReps());
+    addSetter               (new uiOptimizeAlpha());
+    addSetter               (new uiParameterLowerBound());
+    addSetter               (new uiParameterPriorType());
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    addSetter               (new uiParameterRelativeSampling());
+#endif
+    addSetter               (new uiParameterStatus());
+    addSetter               (new uiParameterUpperBound());
+    addSetter               (new uiParameterUseDefaultPrior());
+    addSetter               (new uiPerBaseErrorRate());
+    addSetter               (new uiPlotPost());
+    addSetter               (new uiPriorByForce());
+    addSetter               (new uiPriorById());
+    addSetter               (new uiProbHapArranger());
+    addSetter               (new uiProfileByForce());
+    addSetter               (new uiProfileByID());
+    addSetter               (new uiProfilePrefix());
+    addSetter               (new uiProgress());
+    addSetter               (new uiRandomSeed());
+    addSetter               (new uiRecombinationMaxEvents());
+    addSetter               (new uiRecombinationRate());
+    addSetter               (new uiRecombine());
+    addSetter               (new uiRecombineLegal());
+    addSetter               (new uiRegionEffectivePopSize());
+    addSetter               (new uiRegionName());
+    addSetter               (new uiRegionGamma());
+    addSetter               (new uiRegionGammaLegal());
+    addSetter               (new uiRegionGammaShape());
+    addSetter               (new uiRegionNumbers());
+    addSetter               (new uiRelativeMuRate());
+    addSetter               (new uiRemoveParamFromGroup());
+    addSetter               (new uiRemoveRangeForTraitModel());
+    addSetter               (new uiResultsFileName());
+    addSetter               (new uiSetOldClockSeed());
+    addSetter               (new uiSetTraitModelRangeToPoint());
+    addSetter               (new uiSimulateData());
+    addSetter               (new uiSizeArranger());
+    addSetter               (new uiStairArranger());
+    addSetter               (new uiStartValue());
+    addSetter               (new uiStartValueMethod());
+    addSetter               (new uiSystemClock());
+    addSetter               (new uiTTRatio());
+    addSetter               (new uiTempInterval());
+    addSetter               (new uiReclocFileEnabled());
+    addSetter               (new uiTraceFileEnabled());
+    addSetter               (new uiTraitModelData());
+    addSetter               (new uiTraitModelFloat());
+    addSetter               (new uiTraitModelJump());
+    addSetter               (new uiTraitModelName());
+    addSetter               (new uiTraitModelPartition());
+    addSetter               (new uiTreeSumInFileEnabled());
+    addSetter               (new uiTreeSumInFileName());
+    addSetter               (new uiTreeSumOutFileEnabled());
+    addSetter               (new uiTreeSumOutFileName());
+    addSetter               (new uiTrueValue());
+    addSetter               (new uiUngroupedParamsForOneForce());
+    addSetter               (new uiUseDefaultPriorsForForce());
+    addSetter               (new uiUseGlobalDataModelForAll());
+    addSetter               (new uiUseGlobalDataModelForOne());
+    addSetter               (new uiUseOldClockSeed());
+    addSetter               (new uiUserTheta());
+    addSetter               (new uiValidForces());
+    addSetter               (new uiValidMovingLoci());
+    addSetter               (new uiValidParamsForOneForce());
+    addSetter               (new uiVerbosity());
+    addSetter               (new uiWattersonTheta());
+    addSetter               (new uiXMLOutFileName());
+    addSetter               (new uiXMLReportFileName());
+    addSetter               (new uiZilchArranger());
+}
+
+void SetGetMachine::addSetter(SetGet * setter)
+{
+    string key = setter->UIKey();
+    assert(setters.find(key) == setters.end()); // make sure we don't add it twice!
+    setters[key] = setter;
+}
+
+bool SetGetMachine::doGetBool(string variable, UIVars& vars, UIId id)
+{
+    return doGet<bool>(variable,vars,id,setters);
+}
+
+data_type SetGetMachine::doGetDataType(string variable, UIVars& vars, UIId id)
+{
+    return doGet<data_type>(variable,vars,id,setters);
+}
+
+double SetGetMachine::doGetDouble(string variable, UIVars& vars, UIId id)
+{
+    return doGet<double>(variable,vars,id,setters);
+}
+
+DoubleVec1d SetGetMachine::doGetDoubleVec1d(string variable, UIVars& vars, UIId id)
+{
+    return doGet<DoubleVec1d>(variable,vars,id,setters);
+}
+
+force_type SetGetMachine::doGetForceType(string variable, UIVars& vars, UIId id)
+{
+    return doGet<force_type>(variable,vars,id,setters);
+}
+
+ForceTypeVec1d SetGetMachine::doGetForceTypeVec1d(string variable, UIVars& vars, UIId id)
+{
+    return doGet<ForceTypeVec1d>(variable,vars,id,setters);
+}
+
+long SetGetMachine::doGetLong(string variable, UIVars& vars, UIId id)
+{
+    return doGet<long>(variable,vars,id,setters);
+}
+
+LongVec1d SetGetMachine::doGetLongVec1d(string variable, UIVars& vars, UIId id)
+{
+    return doGet<LongVec1d>(variable,vars,id,setters);
+}
+
+method_type SetGetMachine::doGetMethodType(string variable, UIVars& vars, UIId id)
+{
+    return doGet<method_type>(variable,vars,id,setters);
+}
+
+model_type SetGetMachine::doGetModelType(string variable, UIVars& vars, UIId id)
+{
+    return doGet<model_type>(variable,vars,id,setters);
+}
+
+proftype SetGetMachine::doGetProftype(string variable, UIVars& vars, UIId id)
+{
+    return doGet<proftype>(variable,vars,id,setters);
+}
+
+string SetGetMachine::doGetString(string variable, UIVars& vars, UIId id)
+{
+    return doGet<string>(variable,vars,id,setters);
+}
+
+StringVec1d SetGetMachine::doGetStringVec1d(string variable, UIVars& vars, UIId id)
+{
+    return doGet<StringVec1d>(variable,vars,id,setters);
+}
+
+verbosity_type SetGetMachine::doGetVerbosityType(string variable, UIVars& vars, UIId id)
+{
+    return doGet<verbosity_type>(variable,vars,id,setters);
+}
+
+UIIdVec1d
+SetGetMachine::doGetUIIdVec1d(string variable, UIVars& vars, UIId id)
+{
+    return doGet<UIIdVec1d>(variable,vars,id,setters);
+}
+
+UIIdVec2d
+SetGetMachine::doGetUIIdVec2d(string variable, UIVars& vars, UIId id)
+{
+    return doGet<UIIdVec2d>(variable,vars,id,setters);
+}
+
+string SetGetMachine::doGetPrintString(string variable, UIVars& vars, UIId id)
+{
+    SetGet * setter = setters[variable];
+    return setter->GetPrintString(vars,id);
+}
+
+string SetGetMachine::doGetDescription(string variable, UIVars& vars, UIId id)
+{
+    SetGet * setter = setters[variable];
+    return setter->Description(vars,id);
+}
+
+bool SetGetMachine::doGetConsistency(string variable, UIVars& vars, UIId id)
+{
+    SetGet * setter = setters[variable];
+    return setter->IsConsistent(vars,id);
+}
+
+string SetGetMachine::doGetMin(string variable, UIVars& vars, UIId id)
+{
+    SetGet * setter = setters[variable];
+    return setter->Min(vars,id);
+}
+
+string SetGetMachine::doGetMax(string variable, UIVars& vars, UIId id)
+{
+    SetGet * setter = setters[variable];
+    return setter->Max(vars,id);
+}
+
+void SetGetMachine::doSet(string variable, UIVars& vars, UIId id, string value)
+{
+    SetGet * setter = setters[variable];
+
+    // set variables
+    setter->SetFromString(vars,id,value);
+
+}
+
+void SetGetMachine::doToggle(string variable, UIVars& vars, UIId id)
+{
+    SetGet * setter = setters[variable];
+    string nextValue = setter->NextToggleValue(vars,id);
+    doSet(variable,vars,id,nextValue);
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/setgetmachine.h b/src/ui_interface/setgetmachine.h
new file mode 100644
index 0000000..d6c151f
--- /dev/null
+++ b/src/ui_interface/setgetmachine.h
@@ -0,0 +1,105 @@
+// $Id: setgetmachine.h,v 1.30 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef SETGETMACHINE_H
+#define SETGETMACHINE_H
+
+#include <string>
+#include <map>
+
+#include "constants.h"
+#include "datatype.h"
+#include "setget.h"
+#include "vectorx.h"
+
+class UIVars;
+
+using std::string;
+
+class SetGetMachine
+{
+  private:
+    // a map from strings in uistr class to SetGet objects that
+    // can set and get those variables
+    std::map<string,SetGet*> setters;
+
+  protected:
+    // encapsulates process of adding a SetGet* object to the
+    // map above
+    virtual void addSetter(SetGet *);
+    void Init();// set up data-dependent structures
+
+  public:
+    SetGetMachine();
+    virtual ~SetGetMachine();
+
+    // get the value of "variable", indexed by "id" where appropriate
+    bool        doGetBool           (string variable,UIVars& vars, UIId id);
+    data_type   doGetDataType       (string variable,UIVars& vars, UIId id);
+    double      doGetDouble         (string variable,UIVars& vars, UIId id);
+    DoubleVec1d doGetDoubleVec1d    (string variable,UIVars& vars, UIId id);
+    force_type  doGetForceType      (string variable,UIVars& vars, UIId id);
+    ForceTypeVec1d doGetForceTypeVec1d (string variable,UIVars& vars, UIId id);
+    long        doGetLong           (string variable,UIVars& vars, UIId id);
+    LongVec1d   doGetLongVec1d      (string variable,UIVars& vars, UIId id);
+    method_type doGetMethodType     (string variable,UIVars& vars, UIId id);
+    model_type  doGetModelType      (string variable,UIVars& vars, UIId id);
+    proftype    doGetProftype       (string variable,UIVars& vars, UIId id);
+    string      doGetString         (string variable,UIVars& vars, UIId id);
+    StringVec1d doGetStringVec1d    (string variable,UIVars& vars, UIId id);
+    verbosity_type doGetVerbosityType  (string variable,UIVars& vars, UIId id);
+    UIIdVec1d   doGetUIIdVec1d      (string variable,UIVars& vars, UIId id);
+    UIIdVec2d   doGetUIIdVec2d      (string variable,UIVars& vars, UIId id);
+
+    // set the id'th instance of variable to value "val"
+    void                doSet               (string variable,UIVars& vars, UIId id, string val);
+
+    // advance the id'th instance of variable to the next value
+    void        doToggle            (string variable,UIVars& vars, UIId id);
+
+    // get a string for printing
+    // the value of "variable", indexed by "id" where appropriate
+    string      doGetPrintString    (string variable,UIVars& vars, UIId id);
+
+    // get a string describing what a variable is
+    string      doGetDescription    (string variable,UIVars& vars, UIId id);
+
+    // is the associated value consistent with the model
+    bool        doGetConsistency    (string variable,UIVars& vars, UIId id);
+
+    //get a string for the min/max enterable values.
+    string      doGetMin       (std::string variable, UIVars& vars, UIId id);
+    string      doGetMax       (std::string variable, UIVars& vars, UIId id);
+
+};
+
+template<class T> T doGet(
+    string variable,
+    UIVars& vars,
+    UIId id,
+    std::map<string,SetGet*> & setters)
+{
+    SetGet * getter = setters[variable];
+    GetTemplate<T> * tGetter =
+        dynamic_cast<GetTemplate<T>*>(getter);
+    if(tGetter == NULL)
+    {
+        string err = "SetGetMachine found no match for variable\n\""
+            + variable + "\"\n"
+            + "\tDid you remember to add a setter object in SetGetMachine::Init()?\n";
+        implementation_error e(err);
+        throw e;
+    }
+    return tGetter->Get(vars,id);
+};
+
+#endif // SETGETMACHINE_H
+
+//____________________________________________________________________________________
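The free doGet template above is the working heart of SetGetMachine: it looks the key up in the string-to-SetGet* map, then dynamic_casts to the typed GetTemplate interface and throws if the registered object does not serve the requested type. A simplified standalone analogue of that dispatch (Getter, TypedGetter, LongGetter and the map contents are illustrative stand-ins, not LAMARC classes):

    #include <iostream>
    #include <map>
    #include <stdexcept>
    #include <string>

    // Untyped base, playing the role SetGet plays for the real machine.
    struct Getter { virtual ~Getter() {} };

    // Typed interface, analogous to GetTemplate<T>.
    template<class T> struct TypedGetter : Getter
    {
        virtual T Get() const = 0;
    };

    // One concrete getter registered under a string key.
    struct LongGetter : TypedGetter<long>
    {
        virtual long Get() const { return 42; }
    };

    // The dispatch: look up by key, then recover the typed interface.
    template<class T>
    T doGet(const std::string& key, std::map<std::string, Getter*>& table)
    {
        TypedGetter<T>* typed = dynamic_cast<TypedGetter<T>*>(table[key]);
        if (typed == NULL)
            throw std::runtime_error("no getter of the requested type for \"" + key + "\"");
        return typed->Get();
    }

    int main()
    {
        std::map<std::string, Getter*> table;
        table["answer"] = new LongGetter();
        std::cout << doGet<long>("answer", table) << "\n";   // prints 42
        // doGet<double>("answer", table) would throw: wrong type for this key.
        delete table["answer"];
        return 0;
    }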
diff --git a/src/ui_interface/traitmodel_interface.cpp b/src/ui_interface/traitmodel_interface.cpp
new file mode 100644
index 0000000..16d45b7
--- /dev/null
+++ b/src/ui_interface/traitmodel_interface.cpp
@@ -0,0 +1,286 @@
+// $Id: traitmodel_interface.cpp,v 1.7 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+
+#include "traitmodel_interface.h"
+#include "registry.h"
+#include "ui_strings.h"
+#include "ui_regid.h"
+#include "ui_vars.h"
+
+//------------------------------------------------------------------------------------
+
+uiTraitModelName::uiTraitModelName()
+    : GetString(uistr::traitModelName)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiTraitModelName::~uiTraitModelName()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+string uiTraitModelName::Get(UIVars& vars, UIId id)
+{
+    UIRegId regID(id, vars);
+    return vars.traitmodels.GetName(regID);
+}
+
+//------------------------------------------------------------------------------------
+
+SetRangepair::SetRangepair(const string& key)
+    : SetGetTemplate<rangepair>(key)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+SetRangepair::~SetRangepair()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+rangepair SetRangepair::Get(UIVars& vars, UIId id)
+{
+    assert(false);
+    throw implementation_error("Shouldn't be able to 'Get' anything from this interface");
+};
+
+//------------------------------------------------------------------------------------
+
+rangepair SetRangepair::GetValFromString(UIVars& vars, string val)
+{
+    string::size_type colonpos = val.rfind(":");
+    string firstnum = val;
+    if (colonpos != string::npos)
+    {
+        firstnum.erase(colonpos, firstnum.size()-colonpos);
+    }
+    long int first = ProduceLongOrBarf(firstnum);
+    long int second = first;
+    if (colonpos != string::npos)
+    {
+        string secondnum = val;
+        secondnum.erase(0,colonpos+1);
+        second = ProduceLongOrBarf(secondnum);
+    }
+    if (first > second)
+    {
+        throw data_error("The second value in the range must not be smaller than the first.");
+    }
+    if (registry.GetConvertOutputToEliminateZeroes() && ((first == 0) || (second == 0)))
+    {
+        throw data_error("We assume that because you had no '0's for any map positions, your data follows the"
+                         " traditional biologist convention of not having a site 0, and placing site -1 next"
+                         " to site 1.  If this is incorrect, you must edit your LAMARC input file to set the"
+                         " '<convert_output_to_eliminate_zeroes>' tag to 'false'.");
+    }
+    //Now make it open-ended.
+    second++;
+    return ToSequentialIfNeeded(std::make_pair(first, second));
+}
+
+//------------------------------------------------------------------------------------
+
+uiAddRangeForTraitModel::uiAddRangeForTraitModel()
+    : SetRangepair(uistr::addRangeForTraitModel)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiAddRangeForTraitModel::~uiAddRangeForTraitModel()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void uiAddRangeForTraitModel::Set(UIVars& vars, UIId id, rangepair val)
+{
+    UIRegId regID(id, vars);
+    vars.traitmodels.AddRange(regID, val);
+}
+
+//------------------------------------------------------------------------------------
+
+uiRemoveRangeForTraitModel::uiRemoveRangeForTraitModel()
+    : SetRangepair(uistr::removeRangeForTraitModel)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiRemoveRangeForTraitModel::~uiRemoveRangeForTraitModel()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void uiRemoveRangeForTraitModel::Set(UIVars& vars, UIId id, rangepair val)
+{
+    UIRegId regID(id, vars);
+    vars.traitmodels.RemoveRange(regID, val);
+}
+
+//------------------------------------------------------------------------------------
+
+uiSetTraitModelRangeToPoint::uiSetTraitModelRangeToPoint()
+    : SetGetLong(uistr::traitModelRangeToPoint)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiSetTraitModelRangeToPoint::~uiSetTraitModelRangeToPoint()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void uiSetTraitModelRangeToPoint::Set(UIVars& vars, UIId id, long int val)
+{
+    UIRegId regID(id, vars);
+    if (registry.GetConvertOutputToEliminateZeroes() && val == 0)
+    {
+        throw data_error("We assume that because you had no '0's for any map positions, your data follows"
+                         " the traditional biologist convention of not having a site 0, and placing site"
+                         " -1 next to site 1.  If this is incorrect, you must edit your LAMARC input file"
+                         " to set the 'convert_output_to_eliminate_zeroes' tag to 'false'.");
+    }
+    val = ToSequentialIfNeeded(val);
+    vars.traitmodels.SetRangeToPoint(regID, val);
+}
+
+//------------------------------------------------------------------------------------
+
+long int uiSetTraitModelRangeToPoint::Get(UIVars& vars, UIId id)
+{
+    assert(false);
+    throw implementation_error("Shouldn't be able to 'Get' anything from this interface");
+}
+
+//------------------------------------------------------------------------------------
+
+uiValidMovingLoci::uiValidMovingLoci()
+    : GetUIIdVec1d(uistr::validMovingLoci)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiValidMovingLoci::~uiValidMovingLoci()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+UIIdVec1d uiValidMovingLoci::Get(UIVars& vars, UIId id)
+{
+    vector<UIRegId> regIDs = vars.traitmodels.GetRegIDs();
+    UIIdVec1d uiIDs;
+    for (vector<UIRegId>::iterator regID = regIDs.begin(); regID != regIDs.end(); regID++)
+    {
+        UIId id(regID->GetRegion(), regID->GetLocus());
+        uiIDs.push_back(id);
+    }
+    return uiIDs;
+}
+
+//------------------------------------------------------------------------------------
+
+uiTraitModelFloat::uiTraitModelFloat()
+    : SetGetNoval(uistr::traitAnalysisFloat)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiTraitModelFloat::~uiTraitModelFloat()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void uiTraitModelFloat::Set(UIVars& vars, UIId id,noval val)
+{
+    UIRegId regID(id, vars);
+    vars.traitmodels.SetAnalysisType(regID, mloc_mapfloat);
+}
+
+//------------------------------------------------------------------------------------
+
+uiTraitModelJump::uiTraitModelJump()
+    : SetGetNoval(uistr::traitAnalysisJump)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiTraitModelJump::~uiTraitModelJump()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void uiTraitModelJump::Set(UIVars& vars, UIId id,noval val)
+{
+    UIRegId regID(id, vars);
+    vars.traitmodels.SetAnalysisType(regID, mloc_mapjump);
+}
+
+//------------------------------------------------------------------------------------
+
+uiTraitModelData::uiTraitModelData()
+    : SetGetNoval(uistr::traitAnalysisData)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiTraitModelData::~uiTraitModelData()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void uiTraitModelData::Set(UIVars& vars, UIId id,noval val)
+{
+    UIRegId regID(id, vars);
+    vars.traitmodels.SetAnalysisType(regID, mloc_data);
+}
+
+//------------------------------------------------------------------------------------
+
+uiTraitModelPartition::uiTraitModelPartition()
+    : SetGetNoval(uistr::traitAnalysisPartition)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+uiTraitModelPartition::~uiTraitModelPartition()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void uiTraitModelPartition::Set(UIVars& vars, UIId id,noval val)
+{
+    UIRegId regID(id, vars);
+    vars.traitmodels.SetAnalysisType(regID, mloc_partition);
+}
+
+//____________________________________________________________________________________
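SetRangepair::GetValFromString above accepts either a single site number or a colon-separated pair, rejects input whose first value exceeds the second, and returns a half-open range by bumping the upper bound by one. A minimal standalone sketch of just that parsing rule (parseRange is illustrative; the real code uses ProduceLongOrBarf and also applies the zero-elimination and sequential-conversion checks shown above):

    #include <cstdlib>
    #include <iostream>
    #include <stdexcept>
    #include <string>
    #include <utility>

    // Parse "7" or "5:10" into a half-open [first, second+1) pair of longs.
    std::pair<long, long> parseRange(const std::string& val)
    {
        std::string::size_type colonpos = val.rfind(":");
        std::string firstnum = val.substr(0, colonpos);      // whole string if no colon
        long first = std::atol(firstnum.c_str());
        long second = first;
        if (colonpos != std::string::npos)
            second = std::atol(val.substr(colonpos + 1).c_str());
        if (first > second)
            throw std::runtime_error("range start exceeds range end");
        return std::make_pair(first, second + 1);            // open-ended upper bound
    }

    int main()
    {
        std::pair<long, long> r = parseRange("5:10");
        std::cout << r.first << " " << r.second << "\n";   // prints "5 11"
        std::pair<long, long> p = parseRange("7");
        std::cout << p.first << " " << p.second << "\n";   // prints "7 8"
        return 0;
    }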
diff --git a/src/ui_interface/traitmodel_interface.h b/src/ui_interface/traitmodel_interface.h
new file mode 100644
index 0000000..16f1c95
--- /dev/null
+++ b/src/ui_interface/traitmodel_interface.h
@@ -0,0 +1,105 @@
+// $Id: traitmodel_interface.h,v 1.5 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef TRAITMODEL_INTERFACE_H
+#define TRAITMODEL_INTERFACE_H
+
+#include <string>
+#include "setget.h"
+#include "rangex.h"
+
+class UIVars;
+
+class uiTraitModelName : public GetString
+{
+  public:
+    uiTraitModelName();
+    virtual ~uiTraitModelName();
+    virtual std::string Get(UIVars& vars, UIId id);
+};
+
+class SetRangepair : public SetGetTemplate<rangepair>
+{
+  public:
+    SetRangepair(const string& key);
+    virtual ~SetRangepair();
+    virtual rangepair Get(UIVars& vars, UIId id); //throws
+    virtual rangepair GetValFromString(UIVars& vars, string val);
+};
+
+class uiAddRangeForTraitModel : public SetRangepair
+{
+  public:
+    uiAddRangeForTraitModel();
+    virtual ~uiAddRangeForTraitModel();
+    virtual void Set(UIVars& vars, UIId id, rangepair val);
+};
+
+class uiRemoveRangeForTraitModel : public SetRangepair
+{
+  public:
+    uiRemoveRangeForTraitModel();
+    virtual ~uiRemoveRangeForTraitModel();
+    virtual void Set(UIVars& vars, UIId id, rangepair val);
+};
+
+class uiSetTraitModelRangeToPoint : public SetGetLong
+{
+  public:
+    uiSetTraitModelRangeToPoint();
+    virtual ~uiSetTraitModelRangeToPoint();
+    virtual void Set(UIVars& vars, UIId id, long val);
+    virtual long Get(UIVars& vars, UIId id); //throws!
+};
+
+class uiValidMovingLoci : public GetUIIdVec1d
+{
+  public:
+    uiValidMovingLoci();
+    virtual ~uiValidMovingLoci();
+    virtual UIIdVec1d Get(UIVars& vars, UIId id);
+};
+
+class uiTraitModelFloat : public SetGetNoval
+{
+  public:
+    uiTraitModelFloat();
+    virtual ~uiTraitModelFloat();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiTraitModelJump : public SetGetNoval
+{
+  public:
+    uiTraitModelJump();
+    virtual ~uiTraitModelJump();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiTraitModelData : public SetGetNoval
+{
+  public:
+    uiTraitModelData();
+    virtual ~uiTraitModelData();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiTraitModelPartition : public SetGetNoval
+{
+  public:
+    uiTraitModelPartition();
+    virtual ~uiTraitModelPartition();
+    virtual void Set(UIVars& vars, UIId id, noval val);
+};
+
+#endif  // TRAITMODEL_INTERFACE_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/ui_constants.cpp b/src/ui_interface/ui_constants.cpp
new file mode 100644
index 0000000..b2439b4
--- /dev/null
+++ b/src/ui_interface/ui_constants.cpp
@@ -0,0 +1,22 @@
+// $Id: ui_constants.cpp,v 1.9 2010/03/02 23:12:31 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include "ui_constants.h"
+
+const long uiconst::NO_ID               = -1;
+const long uiconst::GLOBAL_ID           = -3;
+const long uiconst::GLOBAL_DATAMODEL_NUC_ID       = -30;
+const long uiconst::GLOBAL_DATAMODEL_MSAT_ID      = -31;
+const long uiconst::GLOBAL_DATAMODEL_KALLELE_ID   = -32;
+const long uiconst::diseaseColumns      = 3;
+const long uiconst::migColumns          = 3;
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/ui_constants.h b/src/ui_interface/ui_constants.h
new file mode 100644
index 0000000..c8eb456
--- /dev/null
+++ b/src/ui_interface/ui_constants.h
@@ -0,0 +1,31 @@
+// $Id: ui_constants.h,v 1.12 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_CONSTANTS_H
+#define UI_CONSTANTS_H
+
+enum ui_param_class { uipsingle, untouched, global };
+
+class uiconst
+{
+  public:
+    static const long NO_ID;
+    static const long GLOBAL_ID;
+    static const long GLOBAL_DATAMODEL_NUC_ID;
+    static const long GLOBAL_DATAMODEL_MSAT_ID;
+    static const long GLOBAL_DATAMODEL_KALLELE_ID;
+    static const long diseaseColumns;
+    static const long migColumns;
+};
+
+#endif // UI_CONSTANTS_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/ui_id.cpp b/src/ui_interface/ui_id.cpp
new file mode 100644
index 0000000..720a815
--- /dev/null
+++ b/src/ui_interface/ui_id.cpp
@@ -0,0 +1,147 @@
+// $Id: ui_id.cpp,v 1.11 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+
+#include "ui_id.h"
+#include "ui_constants.h"
+
+//------------------------------------------------------------------------------------
+
+// an ID expressing a doubly indexed quantity
+UIId::UIId(long index1, long index2, long index3)
+    :
+    m_hasForce(false),
+    m_forceType(force_COAL),        // any force_type will do here as
+    // long as it's the same value for
+    // every UIId with m_hasForce == false
+    // We picked force_COAL 'cause it's first
+    m_index1(index1),
+    m_index2(index2),
+    m_index3(index3)
+{
+    assert(IndexesAreOkay());
+}
+
+// an ID expressing a force and a doubly indexed quantity
+UIId::UIId(force_type force, long index1, long index2, long index3)
+    :
+    m_hasForce(true),
+    m_forceType(force),
+    m_index1(index1),
+    m_index2(index2),
+    m_index3(index3)
+{
+    assert(IndexesAreOkay());
+}
+
+bool UIId::IndexesAreOkay() const
+{
+    if (!(m_index1 >= 0 ||
+          m_index1 == uiconst::NO_ID ||
+          m_index1 == uiconst::GLOBAL_ID ||
+          m_index1 == uiconst::GLOBAL_DATAMODEL_NUC_ID ||
+          m_index1 == uiconst::GLOBAL_DATAMODEL_MSAT_ID ||
+          m_index1 == uiconst::GLOBAL_DATAMODEL_KALLELE_ID))
+    {
+        return false;
+    }
+    if (!(m_index2 >= 0 || m_index2 == uiconst::NO_ID))
+    {
+        return false;
+    }
+    if (!(m_index3 >= 0 || m_index3 == uiconst::NO_ID))
+    {
+        return false;
+    }
+    return true;
+}
+
+force_type
+UIId::GetForceType() const
+{
+    assert(m_hasForce);
+    return m_forceType;
+}
+
+long
+UIId::GetIndex1() const
+{
+    assert(HasIndex1());
+    return m_index1;
+}
+
+long
+UIId::GetIndex2() const
+{
+    assert(HasIndex1() && HasIndex2());
+    return m_index2;
+}
+
+long
+UIId::GetIndex3() const
+{
+    assert(HasIndex1() && HasIndex2() && HasIndex3());
+    return m_index3;
+}
+
+bool
+UIId::operator==(const UIId& id) const
+{
+    // note -- logically, the values of m_forceType shouldn't
+    // need to be identical when m_hasForce is false. BUT,
+    // we're always constructing UIId's to have m_forceType == force_COAL
+    // when m_hasForce == false
+    return (    m_hasForce  ==  id.m_hasForce
+                &&  m_index1    ==  id.m_index1
+                &&  m_index2    ==  id.m_index2
+                &&  m_index3    ==  id.m_index3
+                &&  m_forceType ==  id.m_forceType
+        );
+}
+
+bool
+UIId::HasForce() const
+{
+    return m_hasForce;
+}
+
+bool
+UIId::HasIndex1() const
+{
+    return (m_index1 != uiconst::NO_ID);
+}
+
+bool
+UIId::HasIndex2() const
+{
+    return (m_index2 != uiconst::NO_ID);
+}
+
+bool
+UIId::HasIndex3() const
+{
+    return (m_index3 != uiconst::NO_ID);
+}
+
+UIId & NO_ID()
+{
+    static UIId no_id = UIId();
+    return no_id;
+};
+
+UIId & GLOBAL_ID()
+{
+    static UIId global_id = UIId(uiconst::GLOBAL_ID);
+    return global_id;
+};
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/ui_id.h b/src/ui_interface/ui_id.h
new file mode 100644
index 0000000..15c6c97
--- /dev/null
+++ b/src/ui_interface/ui_id.h
@@ -0,0 +1,53 @@
+// $Id: ui_id.h,v 1.9 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UIID_H
+#define UIID_H
+
+#include "constants.h"       // for force_type
+#include "ui_constants.h"    // for uiconst::NO_ID
+
+class UIId
+{
+  private:
+    // note -- would like to make these const members as
+    // they really shouldn't change, but we're using
+    // vectors of them (not vectors of pointers) so
+    // we have to allow the assignment operator
+    bool          m_hasForce;
+    force_type    m_forceType;
+    long          m_index1;
+    long          m_index2;
+    long          m_index3;
+    bool IndexesAreOkay() const;
+
+  public:
+    // note -- we are accepting the default copy constructor
+    UIId(long index1=uiconst::NO_ID, long index2=uiconst::NO_ID, long index3=uiconst::NO_ID);
+    UIId(force_type force, long index1=uiconst::NO_ID, long index2=uiconst::NO_ID, long index3=uiconst::NO_ID);
+    force_type GetForceType() const;
+    long GetIndex1() const;
+    long GetIndex2() const;
+    long GetIndex3() const;
+
+    bool operator==(const UIId& id) const;
+    bool HasForce() const;
+    bool HasIndex1() const;
+    bool HasIndex2() const;
+    bool HasIndex3() const;
+};
+
+UIId & NO_ID();
+UIId & GLOBAL_ID();
+
+#endif  // UIID_H
+
+//____________________________________________________________________________________
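UIId carries up to three optional indexes, each defaulting to uiconst::NO_ID, plus an optional force tag; the Has*/Get* pairs let callers test before reading, with asserts guarding misuse. A brief usage sketch, assuming it is compiled inside the LAMARC tree so that constants.h and ui_id.h resolve:

    #include <cassert>
    #include "constants.h"   // force_type, force_COAL
    #include "ui_id.h"

    void uiid_sketch()
    {
        UIId nothing;                        // all three indexes left at uiconst::NO_ID
        assert(!nothing.HasIndex1());

        UIId regionLocus(2, 0);              // region 2, locus 0; index3 unused
        assert(regionLocus.HasIndex2() && !regionLocus.HasIndex3());

        UIId coalParam(force_COAL, 1);       // tagged with a force plus one index
        assert(coalParam.HasForce());
        assert(coalParam.GetForceType() == force_COAL);
    }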
diff --git a/src/ui_interface/ui_interface.cpp b/src/ui_interface/ui_interface.cpp
new file mode 100644
index 0000000..dcc6927
--- /dev/null
+++ b/src/ui_interface/ui_interface.cpp
@@ -0,0 +1,346 @@
+// $Id: ui_interface.cpp,v 1.51 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <string>
+#include <fstream>
+
+#include "constants.h"
+#include "front_end_warnings.h"
+#include "parameter.h"
+#include "setgetmachine.h"
+#include "stringx.h"
+#include "ui_constants.h"
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+#include "ui_regid.h"
+#include "undoredochain.h"
+#include "xml.h"
+
+UIInterface::UIInterface(FrontEndWarnings & warnings, DataPack& datapack, string fileName)
+    :
+    m_warnings(warnings),
+    undoRedoChain(new UndoRedoChain(datapack,fileName,this)),
+    undoRedoMode(undoRedoMode_FILE)
+{
+}
+
+UIInterface::~UIInterface()
+{
+    delete undoRedoChain;
+}
+
+UIVars & UIInterface::GetCurrentVars()
+{
+    return undoRedoChain->GetCurrentVars();
+}
+
+//------------------------------------------------------------------------------------
+// get, set and prints for variables that are indexed by id number
+//------------------------------------------------------------------------------------
+
+bool UIInterface::doGetBool(string varName, const UIId id)
+{
+    return setGetMachine.doGetBool(varName, GetCurrentVars(),id);
+}
+
+data_type UIInterface::doGetDataType(string varName, const UIId id)
+{
+    return setGetMachine.doGetDataType(varName, GetCurrentVars(),id);
+}
+
+double UIInterface::doGetDouble(string varName, const UIId id)
+{
+    return setGetMachine.doGetDouble(varName, GetCurrentVars(),id);
+}
+
+DoubleVec1d UIInterface::doGetDoubleVec1d(string varName, const UIId id)
+{
+    return setGetMachine.doGetDoubleVec1d(varName, GetCurrentVars(),id);
+}
+
+force_type
+UIInterface::doGetForceType(string varName, const UIId id)
+{
+    return setGetMachine.doGetForceType(varName,GetCurrentVars(),id);
+}
+
+ForceTypeVec1d UIInterface::doGetForceTypeVec1d(string variableName, const UIId id)
+{
+    return setGetMachine.doGetForceTypeVec1d(variableName,GetCurrentVars(),id);
+}
+
+long UIInterface::doGetLong(string varName, const UIId id)
+{
+    return setGetMachine.doGetLong(varName, GetCurrentVars(),id);
+}
+
+LongVec1d UIInterface::doGetLongVec1d(string variableName, const UIId id)
+{
+    return setGetMachine.doGetLongVec1d(variableName,GetCurrentVars(),id);
+}
+
+method_type UIInterface::doGetMethodType(string varName, const UIId id)
+{
+    return setGetMachine.doGetMethodType(varName, GetCurrentVars(),id);
+}
+
+model_type UIInterface::doGetModelType(string varName, const UIId id)
+{
+    return setGetMachine.doGetModelType(varName, GetCurrentVars(),id);
+}
+
+proftype UIInterface::doGetProftype(string varName, const UIId id)
+{
+    return setGetMachine.doGetProftype(varName, GetCurrentVars(),id);
+}
+
+string UIInterface::doGetString(string varName, const UIId id)
+{
+    return setGetMachine.doGetString(varName, GetCurrentVars(),id);
+}
+
+StringVec1d UIInterface::doGetStringVec1d(string varName, const UIId id)
+{
+    return setGetMachine.doGetStringVec1d(varName, GetCurrentVars(),id);
+}
+
+UIIdVec1d
+UIInterface::doGetUIIdVec1d(string varName, const UIId id)
+{
+    return setGetMachine.doGetUIIdVec1d(varName, GetCurrentVars(),id);
+}
+
+UIIdVec2d
+UIInterface::doGetUIIdVec2d(string varName, const UIId id)
+{
+    return setGetMachine.doGetUIIdVec2d(varName, GetCurrentVars(),id);
+}
+
+verbosity_type UIInterface::doGetVerbosityType(string varName, const UIId id)
+{
+    return setGetMachine.doGetVerbosityType(varName, GetCurrentVars(),id);
+}
+
+void UIInterface::doSet(string varName, string value, const UIId id)
+// EWFIX.P5 REFACTOR -- uiInterface::doSet and uiInterface::doToggle
+// have important duplicated code
+{
+    if(undoRedoMode == undoRedoMode_USER)
+    {
+        undoRedoChain->StartNewFrame();
+        try
+        {
+            setGetMachine.doSet(varName, GetCurrentVars(),id,value);
+        }
+        catch(const data_error &e)
+        {
+            undoRedoChain->RejectNewFrame();
+            throw;
+        }
+        undoRedoChain->AcceptNewFrame();
+    }
+    else
+    {
+        setGetMachine.doSet(varName, GetCurrentVars(),id,value);
+    }
+}
+
+void UIInterface::doToggle(string varName, const UIId id)
+// EWFIX.P5 REFACTOR -- uiInterface::doSet and uiInterface::doToggle
+// have important duplicated code
+{
+    if(undoRedoMode == undoRedoMode_USER)
+    {
+        undoRedoChain->StartNewFrame();
+        try
+        {
+            setGetMachine.doToggle(varName, GetCurrentVars(),id);
+        }
+        catch(const data_error &e)
+        {
+            undoRedoChain->RejectNewFrame();
+            throw;
+        }
+        undoRedoChain->AcceptNewFrame();
+    }
+    else
+    {
+        setGetMachine.doToggle(varName, GetCurrentVars(),id);
+    }
+}
+
+string UIInterface::doGetPrintString(string varName, const UIId id)
+{
+    return setGetMachine.doGetPrintString(varName, GetCurrentVars(),id);
+}
+
+string UIInterface::doGetDescription(string variableName, const UIId id)
+{
+    return setGetMachine.doGetDescription(variableName,GetCurrentVars(),id);
+}
+
+bool UIInterface::doGetConsistency(string variableName,const UIId id)
+{
+    return setGetMachine.doGetConsistency(variableName,GetCurrentVars(),id);
+}
+
+string UIInterface::doGetMin(string varName, const UIId id )
+{
+    return setGetMachine.doGetMin(varName, GetCurrentVars(), id);
+}
+
+string UIInterface::doGetMax(string varName, const UIId id )
+{
+    return setGetMachine.doGetMax(varName, GetCurrentVars(), id);
+}
+
+//------------------------------------------------------------------------------------
+
+void UIInterface::SetUndoRedoMode(undo_redo_mode umode)
+{
+    undoRedoMode = umode;
+}
+
+void UIInterface::Undo()
+{
+    undoRedoChain->Undo();
+}
+
+void UIInterface::Redo()
+{
+    undoRedoChain->Redo();
+}
+
+bool UIInterface::CanUndo()
+{
+    return undoRedoChain->CanUndo();
+}
+
+bool UIInterface::CanRedo()
+{
+    return undoRedoChain->CanRedo();
+}
+
+std::string UIInterface::GetUndoDescription()
+{
+    return undoRedoChain->GetUndoDescription();
+}
+
+std::string UIInterface::GetRedoDescription()
+{
+    return undoRedoChain->GetRedoDescription();
+}
+
+StringVec1d UIInterface::GetAndClearWarnings()
+{
+    return m_warnings.GetAndClearWarnings();
+}
+
+void UIInterface::AddWarning(std::string warnmsg)
+{
+    m_warnings.AddWarning(warnmsg);
+}
+
+bool UIInterface::IsReadyToRun()
+{
+    if (WhatIsWrong() == "")
+    {
+        return true;
+    }
+    return false;
+}
+
+std::string UIInterface::WhatIsWrong()
+{
+    //LS NOTE:  Any other tests we want can also go here.
+    //Test #1:  One or more forces have too many starting values set to zero.
+    vector<force_type> forces = GetCurrentVars().forces.GetActiveForces();
+    for (unsigned long fnum = 0; fnum<forces.size(); fnum++)
+    {
+        if (!GetCurrentVars().forces.AreZeroesValid(forces[fnum]))
+        {
+            string msg = "Invalid settings for force " + ToString(forces[fnum]) +
+                ".  Too many parameters are set invalid \n  or have a start value of " +
+                "zero.  To fix this, select 'A' from the main menu,\n  then select " +
+                "the appropriate force, then change the starting values or select\n" +
+                "  'C' and change some of the constraints.\n";
+            return msg;
+        }
+    }
+
+    //Test #2: Summary file reading is on, but we cannot read the file.
+    if (GetCurrentVars().userparams.GetReadSumFile())
+    {
+        string sumInName = GetCurrentVars().userparams.GetTreeSumInFileName();
+        std::ifstream testsum(sumInName.c_str(), std::ios::in);
+        if(!testsum)
+        {
+            string msg = "Cannot open or read file \""
+                + sumInName
+                + "\" for summary file reading.  Check that the name is valid and "
+                + "that the permissions are correct, or simply turn off summary file "
+                + "reading for this run.";
+            return msg;
+        }
+    }
+
+    //Test #3:  Bayesian analysis is on, but all parameters are set constant
+    // or invalid.
+    if (GetCurrentVars().chains.GetDoBayesianAnalysis())
+    {
+        if (!GetCurrentVars().forces.SomeVariableParams())
+        {
+            if (!(GetCurrentVars().traitmodels.GetNumMovableLoci() > 0))
+            {
+                return "All of the parameters are set constant or invalid, "
+                    "which means that this Bayesian run has nothing to do.  If you "
+                    "truly want no parameter to vary, set one parameter's prior to be "
+                    "very thin.";
+            }
+        }
+    }
+
+    //Test #4:  Some category rates are identical.
+    for(long regionId = 0; regionId < GetCurrentVars().datapackplus.GetNumRegions() ; regionId++)
+    {
+        for (long locusId = 0; locusId < GetCurrentVars().datapackplus.GetNumLoci(regionId); locusId++)
+        {
+            UIRegId regId(regionId,locusId,GetCurrentVars());
+            if (GetCurrentVars().datamodel.IdenticalCategoryRates(regId))
+            {
+                string msg;
+                if (GetCurrentVars().datamodel.GetUseGlobalModel(regId))
+                {
+                    msg = "The global data model for " + ToString(regId.GetDataType())
+                        + " data";
+                }
+                else
+                {
+                    msg = "The data model for region " + ToString(regId.GetRegion()+1);
+                    if (GetCurrentVars().datapackplus.GetNumLoci(regionId) > 1)
+                    {
+                        msg += ", segment " + ToString(regId.GetLocus()+1);
+                    }
+                }
+                msg += " has multiple categories with the same rate, which serves no "
+                    "purpose but to slow the program down considerably.  Please change this "
+                    "before running the program.";
+                return msg;
+            }
+        }
+    }
+
+    //Passed all the tests.
+    return "";
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/ui_interface.h b/src/ui_interface/ui_interface.h
new file mode 100644
index 0000000..88db694
--- /dev/null
+++ b/src/ui_interface/ui_interface.h
@@ -0,0 +1,97 @@
+// $Id: ui_interface.h,v 1.43 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_INTERFACE_H
+#define UI_INTERFACE_H
+
+#include <string>
+#include <deque>
+#include "constants.h"
+#include "setgetmachine.h"
+#include "vectorx.h"
+
+using std::string;
+
+class DataPack;
+class FrontEndWarnings;
+class UIVars;
+class UndoRedoChain;
+
+enum undo_redo_mode { undoRedoMode_FILE, undoRedoMode_USER };
+
+// This class should eventually provide the only interface to the back end
+// via methods that take only longs, doubles and strings as arguments.
+class UIInterface
+{
+  private:
+    FrontEndWarnings &  m_warnings;
+    UndoRedoChain * undoRedoChain;    // stores undo/redo
+    SetGetMachine   setGetMachine;
+    undo_redo_mode  undoRedoMode;   // cache each variable set, or
+    // run them together
+    UIInterface();                  // undefined
+
+  protected:
+
+  public:
+    UIInterface(FrontEndWarnings & warnings, DataPack&,std::string fileName);
+    virtual ~UIInterface();
+
+    UIVars& GetCurrentVars();
+
+    //////////////////////////////////////////////////
+    void SetUndoRedoMode(undo_redo_mode);
+    void Undo();
+    void Redo();
+    bool CanUndo();
+    bool CanRedo();
+    string GetUndoDescription();
+    string GetRedoDescription();
+
+    //////////////////////////////////////////////////
+    bool        doGetBool(string varName,          const UIId id = NO_ID());
+    data_type   doGetDataType(string varName,      const UIId id = NO_ID());
+    double      doGetDouble(string varName,        const UIId id = NO_ID());
+    DoubleVec1d doGetDoubleVec1d(string varName,   const UIId id = NO_ID());
+    force_type  doGetForceType(string varName,     const UIId id = NO_ID());
+    ForceTypeVec1d  doGetForceTypeVec1d(string varName, const UIId id = NO_ID());
+    long        doGetLong(string varName,          const UIId id = NO_ID());
+    LongVec1d   doGetLongVec1d(string varName,     const UIId id = NO_ID());
+    method_type doGetMethodType(string varName,    const UIId id = NO_ID());
+    model_type  doGetModelType(string varName,     const UIId id = NO_ID());
+    proftype    doGetProftype(string varName,      const UIId id = NO_ID());
+    string      doGetString(string varName,        const UIId id = NO_ID());
+    StringVec1d doGetStringVec1d(string varName,   const UIId id = NO_ID());
+    UIIdVec1d   doGetUIIdVec1d(string varName,     const UIId id = NO_ID());
+    UIIdVec2d   doGetUIIdVec2d(string varName,     const UIId id = NO_ID());
+    verbosity_type doGetVerbosityType(string varName, const UIId id = NO_ID());
+
+    void doSet(string varName, string value,    const UIId id = NO_ID());
+    void doToggle(string varName,               const UIId id = NO_ID());
+
+    string        doGetPrintString(string varName, const UIId id = NO_ID());
+    string        doGetDescription(string varName, const UIId id = NO_ID());
+    bool          doGetConsistency(string varName, const UIId id = NO_ID());
+
+    string        doGetMin(string varName, const UIId id = NO_ID());
+    string        doGetMax(string varName, const UIId id = NO_ID());
+
+    std::vector<std::string> GetAndClearWarnings();
+    void      AddWarning(std::string warnmsg);
+
+    bool          IsReadyToRun();
+    std::string   WhatIsWrong();
+
+};
+
+#endif  // UI_INTERFACE_H
+
+//____________________________________________________________________________________
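Front ends drive the back end through string keys from uistr; switching the interface into undoRedoMode_USER makes each change its own undoable frame, and a data_error rolls that frame back (see doSet and doToggle in ui_interface.cpp above). A brief usage sketch, assuming a UIInterface already constructed elsewhere in the LAMARC tree with its FrontEndWarnings, DataPack and file name, and using uistr::bayesian as a representative global key:

    #include <iostream>
    #include "ui_interface.h"
    #include "ui_strings.h"

    // "ui" is assumed to have been constructed elsewhere with its
    // FrontEndWarnings, DataPack, and input file name.
    void interface_sketch(UIInterface& ui)
    {
        ui.SetUndoRedoMode(undoRedoMode_USER);     // each change becomes an undoable frame

        ui.doToggle(uistr::bayesian);              // flip Bayesian/Likelihood analysis
        std::cout << ui.doGetPrintString(uistr::bayesian) << std::endl;

        if (ui.CanUndo())
            ui.Undo();                             // back to the previous setting

        if (!ui.IsReadyToRun())
            std::cout << ui.WhatIsWrong() << std::endl;
    }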
diff --git a/src/ui_interface/ui_regid.cpp b/src/ui_interface/ui_regid.cpp
new file mode 100644
index 0000000..deda3ae
--- /dev/null
+++ b/src/ui_interface/ui_regid.cpp
@@ -0,0 +1,79 @@
+// $Id: ui_regid.cpp,v 1.7 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+
+#include "ui_regid.h"
+#include "ui_vars.h"
+#include "ui_id.h"
+#include "ui_constants.h"
+
+//------------------------------------------------------------------------------------
+
+UIRegId::UIRegId(long reg, long loc, const UIVars& uivars)
+    : m_region(reg),
+      m_locus(loc),
+      m_dtype(uivars.datapackplus.GetDataType(m_region, m_locus))
+{
+}
+
+UIRegId::UIRegId(data_type dtype)
+    : m_region(uiconst::GLOBAL_ID),
+      m_locus(uiconst::GLOBAL_ID),
+      m_dtype(dtype)
+{
+}
+
+UIRegId::UIRegId(UIId id, const UIVars& uivars)
+{
+    if (id.GetIndex1()==uiconst::GLOBAL_ID)
+    {
+        assert(false); //We should be using the DATAMODEL flag values, below.
+        m_region = uiconst::GLOBAL_ID;
+        m_locus = uiconst::GLOBAL_ID;
+        m_dtype = dtype_DNA; //default value
+    }
+    else if (id.GetIndex1()==uiconst::GLOBAL_DATAMODEL_NUC_ID)
+    {
+        m_region = uiconst::GLOBAL_ID;
+        m_locus = uiconst::GLOBAL_ID;
+        m_dtype = dtype_DNA;
+    }
+    else if (id.GetIndex1()==uiconst::GLOBAL_DATAMODEL_MSAT_ID)
+    {
+        m_region = uiconst::GLOBAL_ID;
+        m_locus = uiconst::GLOBAL_ID;
+        m_dtype = dtype_msat;
+    }
+    else if (id.GetIndex1()==uiconst::GLOBAL_DATAMODEL_KALLELE_ID)
+    {
+        m_region = uiconst::GLOBAL_ID;
+        m_locus = uiconst::GLOBAL_ID;
+        m_dtype = dtype_kallele;
+    }
+    else
+    {
+        m_region = id.GetIndex1();
+        m_locus = id.GetIndex2();
+        m_dtype = uivars.datapackplus.GetDataType(m_region, m_locus);
+    }
+}
+
+bool UIRegId::operator<(const UIRegId other) const
+{
+    if (m_region == other.m_region)
+    {
+        return (m_locus < other.m_locus);
+    }
+    return (m_region < other.m_region);
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/ui_regid.h b/src/ui_interface/ui_regid.h
new file mode 100644
index 0000000..35481f3
--- /dev/null
+++ b/src/ui_interface/ui_regid.h
@@ -0,0 +1,51 @@
+// $Id: ui_regid.h,v 1.5 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+//This is not much more than a struct right now, but has some complexity in
+// the constructors.  If you give it a region, locus, and the UIVars, it uses
+// the UIVars to look up the data type for that region and locus.  If you just
+// give it a datatype, it uses the GLOBAL_ID value for the region and locus.
+// If you give it a UIId and the UIVars, it grabs the region and locus from
+// Index1 and Index2, then looks up the appropriate data type in UIVars.  If
+// Index1 is one of three global variables (one for each data type), it sets
+// the region and locus to GLOBAL_ID, and sets the appropriate datatype.
+//  --Lucian
+
+#ifndef UIREGID_H
+#define UIREGID_H
+
+#include "datatype.h"       // for data_type
+
+class UIVars;
+class UIId;
+
+class UIRegId
+{
+  private:
+    UIRegId(); //undefined
+    long      m_region;
+    long      m_locus;
+    data_type m_dtype;
+
+  public:
+    UIRegId(long reg, long loc, const UIVars& uivars);
+    UIRegId(data_type dtype);
+    UIRegId(UIId id, const UIVars& uivars);
+    ~UIRegId() {};
+    bool operator<(const UIRegId other) const;
+    long      GetRegion()   {return m_region;};
+    long      GetLocus()    {return m_locus;};
+    data_type GetDataType() {return m_dtype;};
+};
+
+#endif  // UIREGID_H
+
+//____________________________________________________________________________________
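Summarizing the constructor rules from the comment above: an explicit (region, locus) pair resolves its data type through UIVars, a bare data type collapses region and locus to GLOBAL_ID, and each of the three GLOBAL_DATAMODEL ids selects its matching data type directly. A rough sketch of that dispatch, using invented stand-ins for the uiconst flag values and the UIVars lookup (not the real API), follows.

    // Invented stand-ins; the real lookup goes through UIVars::datapackplus.
    enum data_type { dtype_DNA, dtype_msat, dtype_kallele };
    const long GLOBAL_ID                   = -1;
    const long GLOBAL_DATAMODEL_NUC_ID     = -2;
    const long GLOBAL_DATAMODEL_MSAT_ID    = -3;
    const long GLOBAL_DATAMODEL_KALLELE_ID = -4;

    struct RegIdSketch { long region; long locus; data_type dtype; };

    // Mirrors the UIId-based constructor: the global flags map straight to a
    // data type, anything else is a real (region, locus) whose type is looked up.
    RegIdSketch Resolve(long index1, long index2,
                        data_type (*lookup)(long region, long locus))
    {
        RegIdSketch r;
        if (index1 == GLOBAL_DATAMODEL_NUC_ID)
        { r.region = GLOBAL_ID; r.locus = GLOBAL_ID; r.dtype = dtype_DNA; }
        else if (index1 == GLOBAL_DATAMODEL_MSAT_ID)
        { r.region = GLOBAL_ID; r.locus = GLOBAL_ID; r.dtype = dtype_msat; }
        else if (index1 == GLOBAL_DATAMODEL_KALLELE_ID)
        { r.region = GLOBAL_ID; r.locus = GLOBAL_ID; r.dtype = dtype_kallele; }
        else
        { r.region = index1; r.locus = index2; r.dtype = lookup(index1, index2); }
        return r;
    }
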
diff --git a/src/ui_interface/ui_strings.cpp b/src/ui_interface/ui_strings.cpp
new file mode 100644
index 0000000..38968c0
--- /dev/null
+++ b/src/ui_interface/ui_strings.cpp
@@ -0,0 +1,248 @@
+// $Id: ui_strings.cpp,v 1.103 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+
+#include "local_build.h"
+
+#include "ui_strings.h"
+
+using std::string;
+
+// Strings assigned here should be defined in ui_strings.h as
+// public static const string members of class uistr.
+
+const string uistr::TTRatio         = "TT Ratio";
+const string uistr::addParamToGroup = "Add a parameter to an existing or new group";
+const string uistr::addParamToNewGroup = "Add a parameter to a new group";
+const string uistr::addRangeForTraitModel = "Allow a range of sites for this trait";
+const string uistr::allProfilesOff  = "Turn OFF Profiling for ALL parameters";
+const string uistr::allProfilesOn   = "Turn ON  Profiling for ALL parameters";
+const string uistr::allProfilesPercentile = "Set all profiling to Percentile";
+const string uistr::allProfilesFixed      = "Set all profiling to Fixed";
+const string uistr::allStr          = "all";
+const string uistr::alpha           = "Percentage stepwise mutations";
+const string uistr::argFilePrefix   = "Prefix to use for all ARG output files";
+const string uistr::autoCorrelation = "Auto-Correlation";
+const string uistr::baseFrequencies = "Base Frequencies (A C G T)";
+const string uistr::baseFrequencyA  = "Relative Base Frequency of Adenine ";
+const string uistr::baseFrequencyC  = "Relative Base Frequency of Cytosine";
+const string uistr::baseFrequencyG  = "Relative Base Frequency of Guanine ";
+const string uistr::baseFrequencyT  = "Relative Base Frequency of Thymine ";
+const string uistr::bayesArranger   = "Bayesian rearranger frequency";
+const string uistr::bayesian        = "Perform Bayesian or Likelihood analysis";
+const string uistr::canHapArrange   = "Haplotype Arrangement allowed";
+const string uistr::categoryCount   = "Number of Categories";
+const string uistr::categoryPair    = "Category (Probability, Rate)";
+const string uistr::categoryProbability     = "Relative Category Probability";
+const string uistr::categoryRate    = "Relative Category Rate";
+const string uistr::coalescence     = "Coalescence";
+const string uistr::coalescenceLegal  = "Coalescence legal for this data set ";
+const string uistr::coalescenceMaxEvents = "Maximum number of coalescence events";
+const string uistr::coalescenceStartMethodByID= "Method for calculating starting coalescence for";
+const string uistr::constraintByForce = "Constraint tables for ";
+const string uistr::constraintType = "Constraint for this parameter";
+const string uistr::crossPartitionCount= "Number of cross partitions";
+const string uistr::curveFilePrefix = "Prefix to use for all Bayesian curvefiles";
+const string uistr::dataFileName    = "Input data file name";
+const string uistr::dataModel       = "Data Model";
+const string uistr::dataModelReport = "Data Model Report";
+const string uistr::dataType        = "Datatype for this region";
+const string uistr::defaultStr      = "default";
+const string uistr::disease         = "Disease";
+const string uistr::diseaseByID     = "Disease mutation rate to ";
+const string uistr::diseaseByID2    = " from ";
+const string uistr::diseaseInto     = "Disease mutation rate into ";
+const string uistr::diseaseLegal    = "Disease legal for this data set ";
+const string uistr::diseaseLocation = "Disease site ";
+const string uistr::diseaseMaxEvents= "Maximum number of disease events";
+const string uistr::diseasePartitionCount= "Number of disease partitions ";
+const string uistr::diseasePartitionName= "Name of disease partition ";
+const string uistr::diseasePartitionNames= "Names of disease partitions ";
+const string uistr::divergence      = "Divergence";
+const string uistr::divergenceEpoch = "Epoch";
+const string uistr::divergenceEpochAncestor = "Ancestor";
+const string uistr::divergenceEpochDescendent = "Descendent";
+const string uistr::divergenceEpochDescendents = "Descendents";
+const string uistr::divergenceEpochCount= "Number of divergence epochs";
+const string uistr::divergenceEpochName= "Name of divergence epoch";
+const string uistr::divergenceEpochNames= "Names of divergence epochs";
+const string uistr::divergenceEpochBoundaryTime= "Boundary Time";
+const string uistr::divergenceLegal = "Divergence legal using this data set ";
+const string uistr::divmigration       = "Migration (in the presence of divergence)";
+const string uistr::divmigrationByID   = "Rate of migrants into ";
+const string uistr::divmigrationByID2  = " originating from ";
+const string uistr::divmigrationInto   = "Migrating into ";
+const string uistr::divmigrationLegal  = "Migration legal using this data set ";
+const string uistr::divmigrationMaxEvents = "Maximum number of migration events";
+const string uistr::divmigrationPartitionCount= "Number of migration groups";
+const string uistr::divmigrationPartitionName= "Name of migration group ";
+const string uistr::divmigrationPartitionNames= "Names of migration groups ";
+const string uistr::divmigrationUser="User never sees: uistr::divmigrationUser string";
+const string uistr::dropArranger    = "Topology rearranger frequency";
+const string uistr::effectivePopSize= "Effective population size";
+const string uistr::epochSizeArranger= "Epoch size rearranger frequency";
+const string uistr::expGrowStick    = "Exponential Growth via stick";
+const string uistr::finalChains     = "Number of chains (final)";
+const string uistr::finalDiscard    = "Number of samples to discard (final burn-in)";
+const string uistr::finalInterval   = "Interval between recorded items (final)";
+const string uistr::finalSamples    = "Number of recorded genealogies (final)";
+const string uistr::finalSamplesBayes = "Number of recorded parameter sets (final)";
+const string uistr::forceLegal      = "Is a force legal";
+const string uistr::forceName       = "Name of force";
+const string uistr::forceOnOff      = "Force turned on?";
+const string uistr::forceVector     = "Force vector";
+const string uistr::freqsFromData   = "Base frequencies computed from data";
+const string uistr::fstSetMigration = "Use FST estimate for migration rates";
+const string uistr::fstSetTheta     = "Use FST estimate for thetas";
+const string uistr::globalDisease   = "Single disease starting estimate for all data";
+const string uistr::globalGrowth    = "Single growth starting estimate for all data";
+const string uistr::globalLogisticSelectionCoefficient = "Single log. sel. coeff. starting estimate for all data";
+const string uistr::globalMigration = "Single migration starting estimate for all data";
+const string uistr::globalDivMigration = "Single migration starting value for all data";
+const string uistr::globalTheta     = "Single theta starting estimate for all data";
+const string uistr::groupConstraintType = "Constraint type for this group.";
+const string uistr::groupParamList  = "List of parameters to constrain together.";
+const string uistr::groupedParamsForForce = "Ids of valid grouped params for force.";
+const string uistr::growth          = "Growth estimation";
+const string uistr::growthByID      = "Growth rate for";
+const string uistr::growthLegal     = "Growth legal for this data set ";
+const string uistr::growthMaxEvents = "Maximum number of growth events";
+const string uistr::growthRate      = "Growth";
+const string uistr::growthScheme    = "Type of Growth";
+const string uistr::growthType      = "Growth implemented via";
+const string uistr::gtrRateAC       = "GTR rates AC";
+const string uistr::gtrRateAG       = "GTR rates AG";
+const string uistr::gtrRateAT       = "GTR rates AT";
+const string uistr::gtrRateCG       = "GTR rates CG";
+const string uistr::gtrRateCT       = "GTR rates CT";
+const string uistr::gtrRateGT       = "GTR rates GT";
+const string uistr::gtrRates        = "GTR rates [AC AG AT CG CT GT]";
+const string uistr::hapArranger     = "Haplotype rearranger frequency";
+const string uistr::heatedChain     = "Relative temperature for search ";
+const string uistr::heatedChainCount= "Number of Simultaneous Searches (Heating)";
+const string uistr::heatedChains    = "Relative Temperatures of Simultaneous Searches (Heating)";
+const string uistr::initialChains   = "Number of chains (initial)";
+const string uistr::initialDiscard  = "Number of samples to discard (initial burn-in) ";
+const string uistr::initialInterval = "Interval between recorded items (initial)";
+const string uistr::initialSamples  = "Number of recorded genealogies (initial)";
+const string uistr::initialSamplesBayes  = "Number of recorded parameter sets (initial)";
+const string uistr::lociCount       = "Number of segments for region";
+const string uistr::lociNumbers     = "List of segment numbers";
+const string uistr::locusArranger   = "Trait Location rearranger frequency (for mapping)";
+const string uistr::locusName       = "Segment name";
+const string uistr::logSelectStick  = "Stochastic selection.";
+const string uistr::logisticSelection = "Est. of logistic selection coeff.";
+const string uistr::logisticSelectionCoefficient = "Starting selection coeff. (\"s\")";
+const string uistr::logisticSelectionLegal = "Logistic selection legal for this data set";
+const string uistr::logisticSelectionMaxEvents = "Maximum number of logistic selection events";
+const string uistr::manyArgFiles    = "Write out ALL sampled ARGs from last chain";
+const string uistr::mapFilePrefix   = "Prefix to use for all Mapping information files";
+const string uistr::maxEvents       = "Maximum number of events";
+const string uistr::migration       = "Migration";
+const string uistr::migrationByID   = "Migration rate into ";
+const string uistr::migrationByID2  = " from ";
+const string uistr::migrationInto   = "Migration into ";
+const string uistr::migrationLegal  = "Migration legal for this data set ";
+const string uistr::migrationMaxEvents = "Maximum number of migration events";
+const string uistr::migrationPartitionCount= "Number of migration partitions";
+const string uistr::migrationPartitionName= "Name of migration partition ";
+const string uistr::migrationPartitionNames= "Names of migration partitions";
+const string uistr::migrationUser="User never sees: uistr::migrationUser string";
+const string uistr::muRate          = "MuRate";
+const string uistr::newickTreeFilePrefix = "Prefix to use for all Newick Tree files";
+const string uistr::normalization   = "Normalization";
+const string uistr::oneForceProfileType = "Profile type for ";
+const string uistr::oneForceProfilesOff = "Turn OFF profiles for ";
+const string uistr::oneForceProfilesOn  = "Turn ON  profiles for ";
+const string uistr::optimizeAlpha   = "Optimize the multi-step:single-step ratio after each chain";
+const string uistr::paramName       = "Name of parameter";
+const string uistr::paramVector     = "Parameter vector";
+const string uistr::perBaseErrorRate= "Per-base error rate";
+const string uistr::plotPost        = "Show plots";
+const string uistr::priorByForce    = "Bayesian priors for ";
+const string uistr::priorByID       = "Prior settings for ";
+const string uistr::priorLowerBound = "Lower bound of the prior";
+const string uistr::priorType       = "Shape of the prior";
+const string uistr::priorUpperBound = "Upper bound of the prior";
+const string uistr::priorUseDefault = "Use the default prior for this force";
+const string uistr::probhapArranger = "Trait haplotypes rearranger frequency";
+const string uistr::profileByForce  = "Profile tables for ";
+const string uistr::profileByID     = "Profile";
+const string uistr::profileprefix   = "prefix for profile output (beta test)";
+const string uistr::profiles        = "Calculate profile tables";
+const string uistr::progress        = "Verbosity of progress reports";
+const string uistr::randomSeed      = "Random seed (closest 4n+1)";
+const string uistr::rateCategories  = "Rate Categories";
+const string uistr::recRate         = "RecRate";
+const string uistr::reclocFilePrefix= "Prefix to use for all Recloc files";
+const string uistr::recombination   = "Recombination";
+const string uistr::recombinationLegal     = "Recombination legal for this data set ";
+const string uistr::recombinationMaxEvents = "Maximum number of recombination events";
+const string uistr::recombinationRate   = "Starting recombination rate";
+const string uistr::regGammaShape = "Scaled shape parameter (\"alpha\")";
+const string uistr::regionCount     = "Number of regions";
+const string uistr::regionGamma     = "Gamma (mu rate varies over regions)";
+const string uistr::regionGammaLegal = "Gamma over regions legal for this data set ";
+const string uistr::regionGammaShape= "Starting scaled shape parameter (\"alpha\")";
+const string uistr::regionName      = "Region name";
+const string uistr::regionNumbers   = "List of region numbers";
+const string uistr::relativeMuRate  = "Relative mutation rate";
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+const string uistr::relativeSampleRate  = "Relative parameter change proposal rate";
+#endif
+const string uistr::removeParamFromGroup = "Remove a parameter from its group";
+const string uistr::removeRangeForTraitModel = "Disallow a range of sites for this trait";
+const string uistr::replicates      = "Number of replicates";
+const string uistr::resultsFileName = "Name of output results file";
+const string uistr::selectType      = "Selection implemented via";
+const string uistr::setOldSeedFromClock= "Random seed from earlier run";
+const string uistr::simulateData    = "Simulate new data for this segment (destroying provided data!)";
+const string uistr::sizeArranger    = "Tree-size rearranger frequency";
+const string uistr::startValue      = "Initial starting value";
+const string uistr::startValueMethod = "Method for calculating starting value";
+const string uistr::stairArranger    = "Stair arranger frequency";
+const string uistr::systemClock     = "Use system clock to set random seed";
+const string uistr::tempAdapt       = "Adjust temperatures automatically during run";
+const string uistr::tempInterval    = "Swap interval for different temperatures";
+const string uistr::theta           = "Theta";
+const string uistr::traceFilePrefix = "Prefix to use for all Tracer files";
+const string uistr::traitAnalysisData = "Use this trait as data only";
+const string uistr::traitAnalysisFloat = "Map this trait after collecting trees ('float')";
+const string uistr::traitAnalysisJump = "Map this trait while collecting trees ('jump')";
+const string uistr::traitAnalysisPartition = "Partition your data into sub-populations using this trait";
+const string uistr::traitModelName  = "Trait name";
+const string uistr::traitModelRangeToPoint  = "Place this trait at a single site";
+const string uistr::treeSumInFileEnabled  = "Reading of tree summary file";
+const string uistr::treeSumInFileName     = "Name of input tree summary file";
+const string uistr::treeSumOutFileEnabled = "Writing of tree summary file";
+const string uistr::treeSumOutFileName    = "Name of output tree summary file";
+const string uistr::trueValue       = "The TRUTH, string never seen by users";
+const string uistr::ungroupedParamsForForce = "Ids of valid ungrouped params for force.";
+const string uistr::useArgFiles     = "Write out ARG at end of last chain";
+const string uistr::useCurveFiles   = "Write Bayesian results to individual files";
+const string uistr::useDefaultPriorsForForce = "Use default priors for all parameters for ";
+const string uistr::useGlobalDataModelForAll = "Use default data model for all regions/segments";
+const string uistr::useGlobalDataModelForOne = "Use default data model for this region/segment";
+const string uistr::useNewickTreeFiles = "Write the best Newick Trees to files";
+const string uistr::useOldSeedFromClock= "Restore random seed from earlier run";
+const string uistr::useReclocFiles  = "Write recombination locations to files";
+const string uistr::useTraceFiles   = "Write Tracer output to files";
+const string uistr::userSetTheta    = "Theta value for";
+const string uistr::validForces     = "Ids of valid forces";
+const string uistr::validMovingLoci = "Ids of valid moving segments";
+const string uistr::validParamsForForce  = "Ids of valid parameters for force ";
+const string uistr::verbosity       = "Verbosity level of output file";
+const string uistr::wattersonSetTheta = "Use Watterson's estimate for thetas";
+const string uistr::xmlOutFileName  = "Name of menu-modified version of input file";
+const string uistr::xmlReportFileName="Name for XML output file. (Beta test)";
+const string uistr::zilchArranger   = "Do-nothing rearranger frequency";
+
+//____________________________________________________________________________________
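The list above follows the usual pre-C++17 split for class-scope string constants: ui_strings.h declares each name as a static const member of class uistr, and this file supplies the single definition. A minimal self-contained sketch of the same idiom, with an invented class name, looks like this.

    // strings_demo.h (hypothetical) -- declare the constants inside the class...
    #include <string>

    class demostr
    {
      public:
        static const std::string theta;
        static const std::string migration;
    };

    // strings_demo.cpp (hypothetical) -- ...and give each its one definition here.
    const std::string demostr::theta     = "Theta";
    const std::string demostr::migration = "Migration";
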
diff --git a/src/ui_interface/ui_strings.h b/src/ui_interface/ui_strings.h
new file mode 100644
index 0000000..d418336
--- /dev/null
+++ b/src/ui_interface/ui_strings.h
@@ -0,0 +1,253 @@
+// $Id: ui_strings.h,v 1.89 2012/03/23 20:33:37 ewalkup Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_STRINGS_H
+#define UI_STRINGS_H
+
+#include <string>
+#include "local_build.h"
+
+using std::string;
+
+// values for the static const strings below are set in ui_strings.cpp
+class uistr
+{
+  public:
+    static const string TTRatio;
+    static const string addParamToGroup;
+    static const string addParamToNewGroup;
+    static const string addRangeForTraitModel;
+    static const string allProfilesFixed;
+    static const string allProfilesOff;
+    static const string allProfilesOn;
+    static const string allProfilesPercentile;
+    static const string allStr;
+    static const string alpha;
+    static const string argFilePrefix;
+    static const string autoCorrelation;
+    static const string baseFrequencies;
+    static const string baseFrequencyA;
+    static const string baseFrequencyC;
+    static const string baseFrequencyG;
+    static const string baseFrequencyT;
+    static const string bayesArranger;
+    static const string bayesian;
+    static const string canHapArrange;
+    static const string categoryCount;
+    static const string categoryPair;
+    static const string categoryProbability;
+    static const string categoryRate;
+    static const string coalescence;
+    static const string coalescenceLegal;
+    static const string coalescenceMaxEvents;
+    static const string coalescenceStartMethodByID;
+    static const string constraintByForce;
+    static const string constraintType;
+    static const string crossPartitionCount;
+    static const string curveFilePrefix;
+    static const string dataFileName;
+    static const string dataModel;
+    static const string dataModelReport;
+    static const string dataType;
+    static const string defaultStr;
+    static const string disease;
+    static const string diseaseByID2;
+    static const string diseaseByID;
+    static const string diseaseInto;
+    static const string diseaseLegal;
+    static const string diseaseLocation;
+    static const string diseaseMaxEvents;
+    static const string diseasePartitionCount;
+    static const string diseasePartitionName;
+    static const string diseasePartitionNames;
+    static const string divergence;
+    static const string divergenceEpoch;
+    static const string divergenceEpochAncestor;
+    static const string divergenceEpochDescendent;
+    static const string divergenceEpochDescendents;
+    static const string divergenceEpochCount;
+    static const string divergenceEpochName;
+    static const string divergenceEpochNames;
+    static const string divergenceEpochBoundaryTime;
+    static const string divergenceLegal;
+    static const string divmigration;
+    static const string divmigrationByID2;
+    static const string divmigrationByID;
+    static const string divmigrationInto;
+    static const string divmigrationLegal;
+    static const string divmigrationMaxEvents;
+    static const string divmigrationPartitionCount;
+    static const string divmigrationPartitionName;
+    static const string divmigrationPartitionNames;
+    static const string divmigrationUser;
+    static const string dropArranger;
+    static const string effectivePopSize;
+    static const string epochSizeArranger;
+    static const string expGrowStick;
+    static const string finalChains;
+    static const string finalDiscard;
+    static const string finalInterval;
+    static const string finalSamples;
+    static const string finalSamplesBayes;
+    static const string forceLegal;
+    static const string forceName;
+    static const string forceOnOff;
+    static const string forceVector;
+    static const string freqsFromData;
+    static const string fstSetMigration;
+    static const string fstSetTheta;
+    static const string globalDisease;
+    static const string globalGrowth;
+    static const string globalLogisticSelectionCoefficient;
+    static const string globalMigration;
+    static const string globalDivMigration;
+    static const string globalTheta;
+    static const string groupConstraintType;
+    static const string groupParamList;
+    static const string groupedParamsForForce;
+    static const string growth;
+    static const string growthByID;
+    static const string growthLegal;
+    static const string growthMaxEvents;
+    static const string growthRate;
+    static const string growthScheme;
+    static const string growthType;
+    static const string gtrRateAC;
+    static const string gtrRateAG;
+    static const string gtrRateAT;
+    static const string gtrRateCG;
+    static const string gtrRateCT;
+    static const string gtrRateGT;
+    static const string gtrRates;
+    static const string hapArranger;
+    static const string heatedChain;
+    static const string heatedChainCount;
+    static const string heatedChains;
+    static const string initialChains;
+    static const string initialDiscard;
+    static const string initialInterval;
+    static const string initialSamples;
+    static const string initialSamplesBayes;
+    static const string lociCount;
+    static const string lociNumbers;
+    static const string locusArranger;
+    static const string locusName;
+    static const string logSelectStick;
+    static const string logisticSelection;
+    static const string logisticSelectionCoefficient;
+    static const string logisticSelectionLegal;
+    static const string logisticSelectionMaxEvents;
+    static const string manyArgFiles;
+    static const string mapFilePrefix;
+    static const string maxEvents;
+    static const string migration;
+    static const string migrationByID2;
+    static const string migrationByID;
+    static const string migrationInto;
+    static const string migrationLegal;
+    static const string migrationMaxEvents;
+    static const string migrationPartitionCount;
+    static const string migrationPartitionName;
+    static const string migrationPartitionNames;
+    static const string migrationUser;
+    static const string muRate;
+    static const string newickTreeFilePrefix;
+    static const string normalization;
+    static const string oneForceProfileType;
+    static const string oneForceProfilesOff;
+    static const string oneForceProfilesOn;
+    static const string optimizeAlpha;
+    static const string paramName;
+    static const string paramVector;
+    static const string perBaseErrorRate;
+    static const string plotPost;
+    static const string priorByForce;
+    static const string priorByID;
+    static const string priorLowerBound;
+    static const string priorType;
+    static const string priorUpperBound;
+    static const string priorUseDefault;
+    static const string probhapArranger;
+    static const string profileByForce;
+    static const string profileByID;
+    static const string profileprefix;
+    static const string profiles;
+    static const string progress;
+    static const string randomSeed;
+    static const string rateCategories;
+    static const string recRate;
+    static const string reclocFilePrefix;
+    static const string recombination;
+    static const string recombinationLegal;
+    static const string recombinationMaxEvents;
+    static const string recombinationRate;
+    static const string regGammaShape;
+    static const string regionCount;
+    static const string regionGamma;
+    static const string regionGammaLegal;
+    static const string regionGammaShape;
+    static const string regionName;
+    static const string regionNumbers;
+    static const string relativeMuRate;
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    static const string relativeSampleRate;
+#endif
+    static const string removeParamFromGroup;
+    static const string removeRangeForTraitModel;
+    static const string replicates;
+    static const string resultsFileName;
+    static const string selectType;
+    static const string setOldSeedFromClock;
+    static const string simulateData;
+    static const string sizeArranger;
+    static const string startValue;
+    static const string startValueMethod;
+    static const string stairArranger;
+    static const string systemClock;
+    static const string tempAdapt;
+    static const string tempInterval;
+    static const string theta;
+    static const string traceFilePrefix;
+    static const string traitAnalysisData;
+    static const string traitAnalysisFloat;
+    static const string traitAnalysisJump;
+    static const string traitAnalysisPartition;
+    static const string traitModelName;
+    static const string traitModelRangeToPoint;
+    static const string treeSumInFileEnabled;
+    static const string treeSumInFileName;
+    static const string treeSumOutFileEnabled;
+    static const string treeSumOutFileName;
+    static const string trueValue;
+    static const string ungroupedParamsForForce;
+    static const string useArgFiles;
+    static const string useCurveFiles;
+    static const string useDefaultPriorsForForce;
+    static const string useGlobalDataModelForAll;
+    static const string useGlobalDataModelForOne;
+    static const string useNewickTreeFiles;
+    static const string useOldSeedFromClock;
+    static const string useReclocFiles;
+    static const string useTraceFiles;
+    static const string userSetTheta;
+    static const string validForces;
+    static const string validMovingLoci;
+    static const string validParamsForForce;
+    static const string verbosity;
+    static const string wattersonSetTheta;
+    static const string xmlOutFileName;
+    static const string xmlReportFileName;
+    static const string zilchArranger;
+};
+
+#endif // UI_STRINGS_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/ui_warnings.cpp b/src/ui_interface/ui_warnings.cpp
new file mode 100644
index 0000000..2ebd15f
--- /dev/null
+++ b/src/ui_interface/ui_warnings.cpp
@@ -0,0 +1,20 @@
+// $Id: ui_warnings.cpp,v 1.4 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+#include "ui_warnings.h"
+
+using std::string;
+
+const string uiwarn::calcFST_0      = "Warning: calculating FST estimates for ";
+const string uiwarn::calcFST_1      = "and their reciprocal rates is impossible due to the data for the populations involved.  "
+    "If the FST method is invoked to obtain starting values for those parameters, defaults will be used instead.";
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/ui_warnings.h b/src/ui_interface/ui_warnings.h
new file mode 100644
index 0000000..648689c
--- /dev/null
+++ b/src/ui_interface/ui_warnings.h
@@ -0,0 +1,28 @@
+// $Id: ui_warnings.h,v 1.3 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_WARNINGS_H
+#define UI_WARNINGS_H
+
+#include <string>
+
+using std::string;
+
+class uiwarn
+{
+  public:
+    static const string calcFST_0;
+    static const string calcFST_1;
+};
+
+#endif // UI_WARNINGS_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/userparam_interface.cpp b/src/ui_interface/userparam_interface.cpp
new file mode 100644
index 0000000..d2ab24e
--- /dev/null
+++ b/src/ui_interface/userparam_interface.cpp
@@ -0,0 +1,664 @@
+// $Id: userparam_interface.cpp,v 1.35 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <iostream>
+
+#include "local_build.h"
+
+#include "ui_interface.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+#include "userparam_interface.h"
+
+//------------------------------------------------------------------------------------
+// Data File
+//------------------------------------------------------------------------------------
+
+uiDataFileName::uiDataFileName()
+    : GetString(uistr::dataFileName)
+{
+}
+
+uiDataFileName::~uiDataFileName()
+{
+}
+
+string uiDataFileName::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetDataFileName();
+}
+
+//------------------------------------------------------------------------------------
+// Posterior Plot
+//------------------------------------------------------------------------------------
+
+uiPlotPost::uiPlotPost()
+    : SetGetBool(uistr::plotPost)
+{
+}
+
+uiPlotPost::~uiPlotPost()
+{
+}
+
+bool uiPlotPost::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetPlotPost();
+}
+
+void uiPlotPost::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.userparams.SetPlotPost(val);
+}
+
+//------------------------------------------------------------------------------------
+// Progress
+//------------------------------------------------------------------------------------
+
+uiProgress::uiProgress()
+    : SetGetVerbosityType(uistr::progress)
+{
+}
+
+uiProgress::~uiProgress()
+{
+}
+
+verbosity_type uiProgress::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetProgress();
+}
+
+void uiProgress::Set(UIVars& vars, UIId id, verbosity_type val)
+{
+    vars.userparams.SetProgress(val);
+}
+
+//------------------------------------------------------------------------------------
+// Random Seed
+//------------------------------------------------------------------------------------
+
+uiRandomSeed::uiRandomSeed()
+    : SetGetLong(uistr::randomSeed)
+{
+}
+
+uiRandomSeed::~uiRandomSeed()
+{
+}
+
+long uiRandomSeed::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetRandomSeed();
+}
+
+void uiRandomSeed::Set(UIVars& vars, UIId id, long val)
+{
+    vars.userparams.SetRandomSeed(val);
+}
+
+//------------------------------------------------------------------------------------
+// Results File
+//------------------------------------------------------------------------------------
+
+uiResultsFileName::uiResultsFileName()
+    : SetGetString(uistr::resultsFileName)
+{
+}
+
+uiResultsFileName::~uiResultsFileName()
+{
+}
+
+string uiResultsFileName::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetResultsFileName();
+}
+
+void uiResultsFileName::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetResultsFileName(val);
+}
+
+//------------------------------------------------------------------------------------
+// System Clock Random Seed
+//------------------------------------------------------------------------------------
+
+uiSystemClock::uiSystemClock()
+    : SetGetBool(uistr::systemClock)
+{
+}
+
+uiSystemClock::~uiSystemClock()
+{
+}
+
+bool uiSystemClock::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetUseSystemClock();
+}
+
+void uiSystemClock::Set(UIVars& vars, UIId id, bool val)
+{
+    // Ignore "val" and always set true
+    vars.userparams.SetUseSystemClock(true);
+}
+
+//------------------------------------------------------------------------------------
+// Set Old Clock Random Seed
+//------------------------------------------------------------------------------------
+
+uiSetOldClockSeed::uiSetOldClockSeed()
+    : SetGetLong(uistr::setOldSeedFromClock)
+{
+}
+
+uiSetOldClockSeed::~uiSetOldClockSeed()
+{
+}
+
+long uiSetOldClockSeed::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetOldClockSeed();
+}
+
+void uiSetOldClockSeed::Set(UIVars& vars, UIId id, long val)
+{
+    vars.userparams.SetOldClockSeed(val);
+}
+
+//------------------------------------------------------------------------------------
+// Use Old Clock Random Seed
+//------------------------------------------------------------------------------------
+
+uiUseOldClockSeed::uiUseOldClockSeed()
+    : SetGetNoval(uistr::useOldSeedFromClock)
+{
+}
+
+uiUseOldClockSeed::~uiUseOldClockSeed()
+{
+}
+
+void uiUseOldClockSeed::Set(UIVars& vars, UIId id, noval val)
+{
+    vars.userparams.SetUseOldClockSeed(true);
+}
+
+//------------------------------------------------------------------------------------
+// TreeSumInFileEnabled
+//------------------------------------------------------------------------------------
+
+uiTreeSumInFileEnabled::uiTreeSumInFileEnabled()
+    : SetGetBoolEnabled( uistr::treeSumInFileEnabled)
+{
+}
+
+uiTreeSumInFileEnabled::~uiTreeSumInFileEnabled()
+{
+}
+
+bool uiTreeSumInFileEnabled::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetReadSumFile();
+}
+
+void uiTreeSumInFileEnabled::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.userparams.SetReadSumFile(val);
+}
+
+//------------------------------------------------------------------------------------
+// Tree Input Summary File
+//------------------------------------------------------------------------------------
+
+uiTreeSumInFileName::uiTreeSumInFileName()
+    : SetGetString( uistr::treeSumInFileName)
+{
+}
+
+uiTreeSumInFileName::~uiTreeSumInFileName()
+{
+}
+
+string uiTreeSumInFileName::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetTreeSumInFileName();
+}
+
+void uiTreeSumInFileName::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetTreeSumInFileName(val);
+}
+
+//------------------------------------------------------------------------------------
+// TreeSumOutFileEnabled
+//------------------------------------------------------------------------------------
+
+uiTreeSumOutFileEnabled::uiTreeSumOutFileEnabled()
+    : SetGetBoolEnabled( uistr::treeSumOutFileEnabled)
+{
+}
+
+uiTreeSumOutFileEnabled::~uiTreeSumOutFileEnabled()
+{
+}
+
+bool uiTreeSumOutFileEnabled::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetWriteSumFile();
+}
+
+void uiTreeSumOutFileEnabled::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.userparams.SetWriteSumFile(val);
+}
+
+//------------------------------------------------------------------------------------
+// Tree Output Summary File
+//------------------------------------------------------------------------------------
+
+uiTreeSumOutFileName::uiTreeSumOutFileName()
+    : SetGetString( uistr::treeSumOutFileName)
+{
+}
+
+uiTreeSumOutFileName::~uiTreeSumOutFileName()
+{
+}
+
+string uiTreeSumOutFileName::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetTreeSumOutFileName();
+}
+
+void uiTreeSumOutFileName::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetTreeSumOutFileName(val);
+}
+
+//------------------------------------------------------------------------------------
+// CurveFileEnabled
+//------------------------------------------------------------------------------------
+
+uiCurveFileEnabled::uiCurveFileEnabled()
+    : SetGetBoolEnabled( uistr::useCurveFiles)
+{
+}
+
+uiCurveFileEnabled::~uiCurveFileEnabled()
+{
+}
+
+bool uiCurveFileEnabled::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetWriteCurveFiles();
+}
+
+void uiCurveFileEnabled::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.userparams.SetWriteCurveFiles(val);
+}
+
+//------------------------------------------------------------------------------------
+// ReclocFileEnabled
+//------------------------------------------------------------------------------------
+
+uiReclocFileEnabled::uiReclocFileEnabled()
+    : SetGetBoolEnabled( uistr::useReclocFiles)
+{
+}
+
+uiReclocFileEnabled::~uiReclocFileEnabled()
+{
+}
+
+bool uiReclocFileEnabled::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetWriteReclocFiles();
+}
+
+void uiReclocFileEnabled::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.userparams.SetWriteReclocFiles(val);
+}
+
+//------------------------------------------------------------------------------------
+// TraceFileEnabled
+//------------------------------------------------------------------------------------
+
+uiTraceFileEnabled::uiTraceFileEnabled()
+    : SetGetBoolEnabled( uistr::useTraceFiles)
+{
+}
+
+uiTraceFileEnabled::~uiTraceFileEnabled()
+{
+}
+
+bool uiTraceFileEnabled::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetWriteTraceFiles();
+}
+
+void uiTraceFileEnabled::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.userparams.SetWriteTraceFiles(val);
+}
+
+//------------------------------------------------------------------------------------
+// NewickTreeFileEnabled
+//------------------------------------------------------------------------------------
+
+uiNewickTreeFileEnabled::uiNewickTreeFileEnabled()
+    : SetGetBoolEnabled( uistr::useNewickTreeFiles)
+{
+}
+
+uiNewickTreeFileEnabled::~uiNewickTreeFileEnabled()
+{
+}
+
+bool uiNewickTreeFileEnabled::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetWriteNewickTreeFiles();
+}
+
+void uiNewickTreeFileEnabled::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.userparams.SetWriteNewickTreeFiles(val);
+}
+
+//------------------------------------------------------------------------------------
+// ArgFileEnabled
+//------------------------------------------------------------------------------------
+
+#ifdef LAMARC_QA_TREE_DUMP
+
+uiArgFileEnabled::uiArgFileEnabled()
+    : SetGetBoolEnabled( uistr::useArgFiles)
+{
+}
+
+uiArgFileEnabled::~uiArgFileEnabled()
+{
+}
+
+bool uiArgFileEnabled::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetWriteArgFiles();
+}
+
+void uiArgFileEnabled::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.userparams.SetWriteArgFiles(val);
+}
+
+//------------------------------------------------------------------------------------
+// ManyArgFiles
+//------------------------------------------------------------------------------------
+
+uiManyArgFiles::uiManyArgFiles()
+    : SetGetBoolEnabled( uistr::manyArgFiles)
+{
+}
+
+uiManyArgFiles::~uiManyArgFiles()
+{
+}
+
+bool uiManyArgFiles::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetWriteManyArgs();
+}
+
+void uiManyArgFiles::Set(UIVars& vars, UIId id, bool val)
+{
+    vars.userparams.SetWriteManyArgs(val);
+}
+
+#endif // LAMARC_QA_TREE_DUMP
+
+//------------------------------------------------------------------------------------
+// Curve File Output Name's Prefix
+//------------------------------------------------------------------------------------
+
+uiCurveFilePrefix::uiCurveFilePrefix()
+    : SetGetString( uistr::curveFilePrefix)
+{
+}
+
+uiCurveFilePrefix::~uiCurveFilePrefix()
+{
+}
+
+string uiCurveFilePrefix::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetCurveFilePrefix();
+}
+
+void uiCurveFilePrefix::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetCurveFilePrefix(val);
+}
+
+//------------------------------------------------------------------------------------
+// Map File Output Name's Prefix
+//------------------------------------------------------------------------------------
+
+uiMapFilePrefix::uiMapFilePrefix()
+    : SetGetString( uistr::mapFilePrefix)
+{
+}
+
+uiMapFilePrefix::~uiMapFilePrefix()
+{
+}
+
+string uiMapFilePrefix::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetMapFilePrefix();
+}
+
+void uiMapFilePrefix::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetMapFilePrefix(val);
+}
+
+//------------------------------------------------------------------------------------
+// Profile Output Name's Prefix
+//------------------------------------------------------------------------------------
+
+uiProfilePrefix::uiProfilePrefix()
+    : SetGetString( uistr::profileprefix)
+{
+}
+
+uiProfilePrefix::~uiProfilePrefix()
+{
+}
+
+string uiProfilePrefix::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetProfilePrefix();
+}
+
+void uiProfilePrefix::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetProfilePrefix(val);
+}
+
+//------------------------------------------------------------------------------------
+// Recloc File Output Name's Prefix
+//------------------------------------------------------------------------------------
+
+uiReclocFilePrefix::uiReclocFilePrefix()
+    : SetGetString( uistr::reclocFilePrefix)
+{
+}
+
+uiReclocFilePrefix::~uiReclocFilePrefix()
+{
+}
+
+string uiReclocFilePrefix::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetReclocFilePrefix();
+}
+
+void uiReclocFilePrefix::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetReclocFilePrefix(val);
+}
+
+//------------------------------------------------------------------------------------
+// Trace File Output Name's Prefix
+//------------------------------------------------------------------------------------
+
+uiTraceFilePrefix::uiTraceFilePrefix()
+    : SetGetString( uistr::traceFilePrefix)
+{
+}
+
+uiTraceFilePrefix::~uiTraceFilePrefix()
+{
+}
+
+string uiTraceFilePrefix::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetTraceFilePrefix();
+}
+
+void uiTraceFilePrefix::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetTraceFilePrefix(val);
+}
+
+//------------------------------------------------------------------------------------
+// NewickTree File Output Name's Prefix
+//------------------------------------------------------------------------------------
+
+uiNewickTreeFilePrefix::uiNewickTreeFilePrefix()
+    : SetGetString( uistr::newickTreeFilePrefix)
+{
+}
+
+uiNewickTreeFilePrefix::~uiNewickTreeFilePrefix()
+{
+}
+
+string uiNewickTreeFilePrefix::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetNewickTreeFilePrefix();
+}
+
+void uiNewickTreeFilePrefix::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetNewickTreeFilePrefix(val);
+}
+
+//------------------------------------------------------------------------------------
+// ARG File Output Name's Prefix
+//------------------------------------------------------------------------------------
+
+#ifdef LAMARC_QA_TREE_DUMP
+
+uiArgFilePrefix::uiArgFilePrefix()
+    : SetGetString( uistr::argFilePrefix)
+{
+}
+
+uiArgFilePrefix::~uiArgFilePrefix()
+{
+}
+
+string uiArgFilePrefix::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetArgFilePrefix();
+}
+
+void uiArgFilePrefix::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetArgFilePrefix(val);
+}
+
+#endif // LAMARC_QA_TREE_DUMP
+
+//------------------------------------------------------------------------------------
+// Verbosity
+//------------------------------------------------------------------------------------
+
+uiVerbosity::uiVerbosity()
+    : SetGetVerbosityTypeNoNone(uistr::verbosity)
+{
+}
+
+uiVerbosity::~uiVerbosity()
+{
+}
+
+verbosity_type uiVerbosity::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetVerbosity();
+}
+
+void uiVerbosity::Set(UIVars& vars, UIId id, verbosity_type val)
+{
+    vars.userparams.SetVerbosity(val);
+}
+
+//------------------------------------------------------------------------------------
+// XML Output File
+//------------------------------------------------------------------------------------
+
+uiXMLOutFileName::uiXMLOutFileName()
+    : SetGetString(uistr::xmlOutFileName)
+{
+}
+
+uiXMLOutFileName::~uiXMLOutFileName()
+{
+}
+
+string uiXMLOutFileName::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetXMLOutFileName();
+}
+
+void uiXMLOutFileName::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetXMLOutFileName(val);
+}
+
+//------------------------------------------------------------------------------------
+// XML Report File
+//------------------------------------------------------------------------------------
+
+uiXMLReportFileName::uiXMLReportFileName()
+    : SetGetString(uistr::xmlReportFileName)
+{
+}
+
+uiXMLReportFileName::~uiXMLReportFileName()
+{
+}
+
+string uiXMLReportFileName::Get(UIVars& vars, UIId id)
+{
+    return vars.userparams.GetXMLReportFileName();
+}
+
+void uiXMLReportFileName::Set(UIVars& vars, UIId id, string val)
+{
+    vars.userparams.SetXMLReportFileName(val);
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_interface/userparam_interface.h b/src/ui_interface/userparam_interface.h
new file mode 100644
index 0000000..36e6a20
--- /dev/null
+++ b/src/ui_interface/userparam_interface.h
@@ -0,0 +1,292 @@
+// $Id: userparam_interface.h,v 1.38 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef USERPARAM_INTERFACE_H
+#define USERPARAM_INTERFACE_H
+
+#include <string>
+
+#include "local_build.h"
+
+#include "setget.h"
+
+class UIVars;
+
+class uiDataFileName : public GetString
+{
+  public:
+    uiDataFileName();
+    virtual ~uiDataFileName();
+    string Get(UIVars& vars, UIId id);
+};
+
+class uiPlotPost : public SetGetBool
+{
+  public:
+    uiPlotPost();
+    virtual ~uiPlotPost();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiProgress : public SetGetVerbosityType
+{
+  public:
+    uiProgress();
+    virtual ~uiProgress();
+    verbosity_type Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,verbosity_type val);
+};
+
+class uiRandomSeed : public SetGetLong
+{
+  public:
+    uiRandomSeed();
+    virtual ~uiRandomSeed();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val);
+};
+
+class uiSetOldClockSeed : public SetGetLong
+{
+  public:
+    uiSetOldClockSeed();
+    virtual ~uiSetOldClockSeed();
+    long Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,long val);
+};
+
+class uiResultsFileName : public SetGetString
+{
+  public:
+    uiResultsFileName();
+    virtual ~uiResultsFileName();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+// actually, it doesn't behave like a SetGetBool
+// because you set it with this, but unset it with
+// uiRandomSeed or uiUseOldClockSeed
+class uiSystemClock : public SetGetBool
+{
+  public:
+    uiSystemClock();
+    virtual ~uiSystemClock();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id, bool val);
+};
+
+// actually, it doesn't behave like a SetGetBool
+// because you set it with this, but unset it with
+// uiRandomSeed or uiSystemClock
+class uiUseOldClockSeed : public SetGetNoval
+{
+  public:
+    uiUseOldClockSeed();
+    virtual ~uiUseOldClockSeed();
+    void Set(UIVars& vars, UIId id, noval val);
+};
+
+class uiTreeSumInFileEnabled : public SetGetBoolEnabled
+{
+  public:
+    uiTreeSumInFileEnabled();
+    virtual ~uiTreeSumInFileEnabled();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiTreeSumInFileName : public SetGetString
+{
+  public:
+    uiTreeSumInFileName();
+    virtual ~uiTreeSumInFileName();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+//Used to need this when you were either reading or writing,
+// but now you can do either or both.
+
+class uiTreeSumOutFileEnabled : public SetGetBoolEnabled
+{
+  public:
+    uiTreeSumOutFileEnabled();
+    virtual ~uiTreeSumOutFileEnabled();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiTreeSumOutFileName : public SetGetString
+{
+  public:
+    uiTreeSumOutFileName();
+    virtual ~uiTreeSumOutFileName();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+class uiCurveFileEnabled : public SetGetBoolEnabled
+{
+  public:
+    uiCurveFileEnabled();
+    virtual ~uiCurveFileEnabled();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiReclocFileEnabled : public SetGetBoolEnabled
+{
+  public:
+    uiReclocFileEnabled();
+    virtual ~uiReclocFileEnabled();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiTraceFileEnabled : public SetGetBoolEnabled
+{
+  public:
+    uiTraceFileEnabled();
+    virtual ~uiTraceFileEnabled();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiNewickTreeFileEnabled : public SetGetBoolEnabled
+{
+  public:
+    uiNewickTreeFileEnabled();
+    virtual ~uiNewickTreeFileEnabled();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,bool val);
+};
+
+#ifdef LAMARC_QA_TREE_DUMP
+
+class uiArgFileEnabled : public SetGetBoolEnabled
+{
+  public:
+    uiArgFileEnabled();
+    virtual ~uiArgFileEnabled();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,bool val);
+};
+
+class uiManyArgFiles : public SetGetBoolEnabled
+{
+  public:
+    uiManyArgFiles();
+    virtual ~uiManyArgFiles();
+    bool Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,bool val);
+};
+
+#endif // LAMARC_QA_TREE_DUMP
+
+class uiCurveFilePrefix : public SetGetString
+{
+  public:
+    uiCurveFilePrefix();
+    virtual ~uiCurveFilePrefix();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+class uiMapFilePrefix : public SetGetString
+{
+  public:
+    uiMapFilePrefix();
+    virtual ~uiMapFilePrefix();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+class uiProfilePrefix : public SetGetString
+{
+  public:
+    uiProfilePrefix();
+    virtual ~uiProfilePrefix();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+class uiReclocFilePrefix : public SetGetString
+{
+  public:
+    uiReclocFilePrefix();
+    virtual ~uiReclocFilePrefix();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+class uiTraceFilePrefix : public SetGetString
+{
+  public:
+    uiTraceFilePrefix();
+    virtual ~uiTraceFilePrefix();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+class uiNewickTreeFilePrefix : public SetGetString
+{
+  public:
+    uiNewickTreeFilePrefix();
+    virtual ~uiNewickTreeFilePrefix();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+#ifdef LAMARC_QA_TREE_DUMP
+
+class uiArgFilePrefix : public SetGetString
+{
+  public:
+    uiArgFilePrefix();
+    virtual ~uiArgFilePrefix();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+#endif // LAMARC_QA_TREE_DUMP
+
+class uiVerbosity : public SetGetVerbosityTypeNoNone
+{
+  public:
+    uiVerbosity();
+    virtual ~uiVerbosity();
+    verbosity_type Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,verbosity_type val);
+};
+
+class uiXMLOutFileName : public SetGetString
+{
+  public:
+    uiXMLOutFileName();
+    virtual ~uiXMLOutFileName();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+class uiXMLReportFileName : public SetGetString
+{
+  public:
+    uiXMLReportFileName();
+    virtual ~uiXMLReportFileName();
+    string Get(UIVars& vars, UIId id);
+    void Set(UIVars& vars, UIId id,string val);
+};
+
+#endif  // USERPARAM_INTERFACE_H
+
+//____________________________________________________________________________________
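The classes above all follow one wrapper idiom: each user-visible setting is a small object that remembers its menu string (from uistr) and forwards Get/Set calls to the shared UIVars. The setget.h base classes are not part of this file, so the sketch below uses invented stand-in names (VarsSketch, SetGetStringSketch) purely to show the shape of the idiom, not the real API.

    #include <string>

    struct VarsSketch { std::string resultsFileName; };   // stand-in for UIVars

    // Abstract wrapper: carries its display string, forwards access to the vars.
    class SetGetStringSketch
    {
        std::string m_menuKey;
      public:
        explicit SetGetStringSketch(const std::string& key) : m_menuKey(key) {}
        virtual ~SetGetStringSketch() {}
        const std::string& MenuKey() const { return m_menuKey; }
        virtual std::string Get(VarsSketch& vars) = 0;
        virtual void Set(VarsSketch& vars, const std::string& val) = 0;
    };

    // One concrete setting, analogous to uiResultsFileName above.
    class ResultsFileNameSketch : public SetGetStringSketch
    {
      public:
        ResultsFileNameSketch()
            : SetGetStringSketch("Name of output results file") {}
        std::string Get(VarsSketch& vars) { return vars.resultsFileName; }
        void Set(VarsSketch& vars, const std::string& val) { vars.resultsFileName = val; }
    };
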
diff --git a/src/ui_util/undoredochain.cpp b/src/ui_util/undoredochain.cpp
new file mode 100644
index 0000000..b5f6331
--- /dev/null
+++ b/src/ui_util/undoredochain.cpp
@@ -0,0 +1,151 @@
+// $Id: undoredochain.cpp,v 1.25 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <stack>
+#include <iostream>
+#include "errhandling.h"
+#include "undoredochain.h"
+#include "ui_vars.h"
+
+UndoRedoChain::UndoRedoChain(DataPack& datapack,string fileName,UIInterface* ui)
+    : defaultVars(datapack,fileName,ui)
+{
+}
+
+UndoRedoChain::~UndoRedoChain()
+{
+    DeleteDoneItems();
+    DeleteUndoneItems();
+}
+
+void UndoRedoChain::StartNewFrame()
+{
+    UIVars * nextVars = new UIVars(GetCurrentVars());
+    done.push(nextVars);
+}
+
+void UndoRedoChain::RejectNewFrame()
+{
+    UIVars * clobberMe = done.top();
+    done.pop();
+    delete clobberMe;
+}
+
+void UndoRedoChain::AcceptNewFrame()
+{
+    DeleteUndoneItems();
+}
+
+void UndoRedoChain::Undo()
+{
+    if(CanUndo())
+    {
+        UIVars * vars = done.top();
+        done.pop();
+        undone.push(vars);
+    }
+    else
+    {
+        // well, we could throw an error here if
+        // ever we try to undo when we can't,
+        // but it seems reasonable to just do
+        // nothing.
+    }
+}
+
+void UndoRedoChain::Redo()
+{
+    if(CanRedo())
+    {
+        UIVars * vars = undone.top();
+        undone.pop();
+        done.push(vars);
+    }
+    else
+    {
+        // well, we could throw an error here if
+        // ever we try to redo when we can't,
+        // but it seems reasonable to just do
+        // nothing.
+    }
+}
+
+bool UndoRedoChain::CanUndo()
+{
+    return (!(done.empty()));
+}
+
+bool UndoRedoChain::CanRedo()
+{
+    return (!(undone.empty()));
+}
+
+std::string UndoRedoChain::GetUndoDescription()
+{
+    if(CanUndo())
+    {
+#if 0
+        UndoRedo * action = done.top();
+        return action->GetUndoDescription();
+#endif
+        return "UNDO LAST CHANGE";
+    }
+    else
+    {
+        throw UndoRedoCannotUndo();
+    }
+}
+
+std::string UndoRedoChain::GetRedoDescription()
+{
+    if(CanRedo())
+    {
+#if 0
+        UndoRedo * action = undone.top();
+        return action->GetRedoDescription();
+#endif
+        return "REDO LAST CHANGE";
+    }
+    else
+    {
+        throw UndoRedoCannotRedo();
+    }
+}
+
+void UndoRedoChain::DeleteDoneItems()
+{
+    while(!(done.empty()))
+    {
+        UIVars * clobberMe = done.top();
+        done.pop();
+        delete clobberMe;
+    }
+}
+
+void UndoRedoChain::DeleteUndoneItems()
+{
+    while(!(undone.empty()))
+    {
+        UIVars * clobberMe = undone.top();
+        undone.pop();
+        delete clobberMe;
+    }
+}
+
+UIVars & UndoRedoChain::GetCurrentVars()
+{
+    if(!(done.empty()))
+        return *(done.top());
+    else
+        return defaultVars;
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_util/undoredochain.h b/src/ui_util/undoredochain.h
new file mode 100644
index 0000000..49ca4a8
--- /dev/null
+++ b/src/ui_util/undoredochain.h
@@ -0,0 +1,78 @@
+// $Id: undoredochain.h,v 1.19 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UNDOREDOCHAIN_H
+#define UNDOREDOCHAIN_H
+
+#include <stack>
+#include <stdexcept>
+#include <string>
+
+#include "errhandling.h"
+#include "ui_vars.h"
+
+class DataPack;
+class UIInterface;
+
+/// stores stacks of done and undone changes
+class UndoRedoChain
+{
+  private:
+    UIVars defaultVars;           /// factory settings
+    std::stack<UIVars*> done;     /// actions we've done
+    std::stack<UIVars*> undone;   /// actions we've done & undone
+
+    UndoRedoChain();
+
+  protected:
+    void DeleteUndoneItems();           /// remove items from undone stack and delete them
+    void DeleteDoneItems();             /// remove items from done stack and delete them
+
+  public:
+    UndoRedoChain(DataPack& datapack,std::string fileName,UIInterface* ui);
+    ~UndoRedoChain();
+    void StartNewFrame();               /// start next undo-able action
+    void AcceptNewFrame();              /// commit to action in last StartNewFrame()
+    void RejectNewFrame();              /// back out of action in last StartNewFrame()
+    void Undo();                        /// Undo last action done
+    void Redo();                        /// Redo last action done
+    bool CanUndo();
+    bool CanRedo();
+    std::string GetUndoDescription();
+    std::string GetRedoDescription();
+    UIVars & GetCurrentVars();
+};
+
+class UndoRedoCannotUndo : public std::exception
+{
+  private:
+    std::string _what;
+  public:
+    UndoRedoCannotUndo(): _what ("Nothing to undo") {};
+    virtual ~UndoRedoCannotUndo() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+class UndoRedoCannotRedo : public std::exception
+{
+  private:
+    std::string _what;
+  public:
+    UndoRedoCannotRedo(): _what ("Nothing to redo") {};
+    virtual ~UndoRedoCannotRedo() throw() {};
+    virtual const char* what () const throw() { return _what.c_str (); };
+};
+
+#endif  // UNDOREDOCHAIN_H
+
+//____________________________________________________________________________________
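
The two files above implement undo/redo by snapshotting the entire UIVars state: StartNewFrame() pushes a copy of the current settings onto the "done" stack, RejectNewFrame() discards that copy, AcceptNewFrame() keeps it and clears the redo ("undone") stack, and Undo()/Redo() move snapshots between the two stacks. A minimal usage sketch, assuming a DataPack, an input file name, and a UIInterface pointer are already available; the SetNumberOfReplicates() call (on the chains settings block defined further down in this patch) is just a placeholder for "some user edit":

    UndoRedoChain chain(datapack, "infile.xml", ui);

    chain.StartNewFrame();                                    // push a copy of the current UIVars
    chain.GetCurrentVars().chains.SetNumberOfReplicates(3);   // edit the copy
    chain.AcceptNewFrame();                                   // commit it; clears the redo stack

    if (chain.CanUndo()) chain.Undo();                        // back to the previous snapshot
    if (chain.CanRedo()) chain.Redo();                        // and forward again

Note that GetCurrentVars() falls back to the factory-default UIVars whenever the "done" stack is empty, so undoing the last remaining frame simply exposes the defaults rather than failing.
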
diff --git a/src/ui_vars/report_strings.cpp b/src/ui_vars/report_strings.cpp
new file mode 100644
index 0000000..5337b4d
--- /dev/null
+++ b/src/ui_vars/report_strings.cpp
@@ -0,0 +1,42 @@
+// $Id: report_strings.cpp,v 1.7 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "report_strings.h"
+
+using std::string;
+
+const string reportstr::header              = "Parameters for model of type: ";
+
+const string reportstr::categoryHeader      = " rate categories with correlated length ";
+const string reportstr::categoryRate        = "Relative rate";
+const string reportstr::categoryRelativeProb= "Relative probability";
+
+const string reportstr::baseFreqs           = "Base frequencies: ";
+const string reportstr::baseFreqSeparator   = ", ";
+
+const string reportstr::TTratio             = "Transition/transversion ratio: ";
+
+const string reportstr::GTRRates            = "Mutation parameters: ";
+const string reportstr::GTRRateSeparator    = ", ";
+const string reportstr::GTRRatesFromA       = "Between A and (C, G, T): ";
+const string reportstr::GTRRatesFromC       = "Between C and (G, T): ";
+const string reportstr::GTRRatesFromG       = "Between G and (T): ";
+
+const string reportstr::numberOfBins        = "number of bins: ";
+
+const string reportstr::brownian            = "<Brownian model has no additional parameters>";
+
+const string reportstr::alpha               = "alpha: ";
+const string reportstr::optimizeAlpha       = "Optimized value of Alpha will be reported for each chain";
+const string reportstr::relativeMutationRate= "Relative marker mutation rate: ";
+
+const string reportstr::perBaseErrorRate    = "Per-base error rate: ";
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/report_strings.h b/src/ui_vars/report_strings.h
new file mode 100644
index 0000000..4056e75
--- /dev/null
+++ b/src/ui_vars/report_strings.h
@@ -0,0 +1,50 @@
+// $Id: report_strings.h,v 1.7 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef REPORT_STRINGS_H
+#define REPORT_STRINGS_H
+
+#include <string>
+
+class reportstr
+{
+  public:
+    static const std::string    header;
+
+    static const std::string    categoryHeader;
+    static const std::string    categoryRate;
+    static const std::string    categoryRelativeProb;
+
+    static const std::string    baseFreqs;
+    static const std::string    baseFreqSeparator;
+
+    static const std::string    TTratio;
+
+    static const std::string    GTRRates;
+    static const std::string    GTRRateSeparator;
+    static const std::string    GTRRatesFromA;
+    static const std::string    GTRRatesFromC;
+    static const std::string    GTRRatesFromG;
+
+    static const std::string    numberOfBins;
+
+    static const std::string    brownian;
+
+    static const std::string    alpha;
+    static const std::string    optimizeAlpha;
+
+    static const std::string    relativeMutationRate;
+
+    static const std::string    perBaseErrorRate;
+};
+
+#endif // REPORT_STRINGS_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/ui_vars.cpp b/src/ui_vars/ui_vars.cpp
new file mode 100644
index 0000000..f9c7604
--- /dev/null
+++ b/src/ui_vars/ui_vars.cpp
@@ -0,0 +1,117 @@
+// $Id: ui_vars.cpp,v 1.26 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+
+#include "datapack.h"
+#include "defaults.h"
+#include "ui_vars.h"
+
+//------------------------------------------------------------------------------------
+
+UIVars::UIVars(DataPack& datapack, string fileName, UIInterface* myui)
+    :   ui(myui),
+        chains(this), //no prerequisites
+        userparams( this,
+                    fileName), //no prerequisites
+        datapackplus(this,
+                     datapack), //no prerequisites
+        datamodel(this,
+                  datapack.GetNRegions(),datapack.GetNMaxLoci()),
+        //datamodel requires datapackplus
+        traitmodels(this),
+        //traitmodels also requires datapackplus
+        forces(     this,
+                    datapackplus.GetNCrossPartitions(),
+                    datapackplus.GetNPartitionsByForceType(force_MIG),
+                    datapackplus.GetNPartitionsByForceType(force_DIVMIG),
+                    datapackplus.GetNPartitionsByForceType(force_DISEASE),
+                    datapack.CanMeasureRecombination())
+        //forces requires both datapackplus and datamodel.  It might
+        // eventually require traitmodels, too, so it must definitely come last.
+
+        // WARNING!!
+        // Here we're loading up the UIVars with information coming from
+        // the datapack. However, here in the constructor is the only
+        // time we should be doing anything directly from the DataPack.
+        // After construction time, access needed to DataPack structures
+        // by the UI should go through the UIVarsDataPackPlus class.
+        // Ideally, we'd like to refactor this later so that there is
+        // an "early" and a "late" version of the DataPack. This restriction
+        // on access is the first step in supporting that refactor.
+{
+}
+
+UIVars::UIVars(const UIVars& vars)
+    :   ui(vars.ui),
+        chains(this,vars.chains),
+        userparams(this,vars.userparams),
+        datapackplus(this,vars.datapackplus),
+        datamodel(this,vars.datamodel),
+        traitmodels(this, vars.traitmodels),
+        forces(this,vars.forces)
+{
+}
+
+UIVars::~UIVars()
+{
+}
+
+string UIVars::GetParamNameWithConstraint(force_type ftype, long pindex,
+                                          bool doLongName) const
+{
+    //This function has to be here because it uses both forces and datapackplus.
+
+    string shortname = datapackplus.GetParamName(ftype, pindex, false);
+    string longname = datapackplus.GetParamName(ftype, pindex, true);
+    long gindex = forces.ParamInGroup(ftype, pindex);
+    if (gindex != FLAGLONG)
+    {
+        LongVec1d gpindexes = forces.GetGroupParamList(ftype, gindex);
+        shortname = "";
+        longname = "";
+        string::size_type commapos = string::npos;
+        //This is guaranteed to be overwritten.
+        for (unsigned long gpnum=0; gpnum<gpindexes.size(); gpnum++)
+        {
+            commapos = longname.rfind(",");
+            shortname += datapackplus.GetParamName(ftype, gpindexes[gpnum], false)
+                + "/";
+            longname +=  datapackplus.GetParamName(ftype, gpindexes[gpnum], true)
+                + ", ";
+        }
+        shortname.erase(shortname.size()-1,1);
+        longname.erase(longname.size()-2,2);
+        if (gpindexes.size()==2)
+        {
+            longname.replace(commapos,1," and");
+            longname.append(" (both constrained to ");
+        }
+        else if (gpindexes.size() > 2)
+        {
+            longname.insert(commapos+1," and");
+            longname.append(" (all constrained to ");
+        }
+        else
+        {
+            assert (gpindexes.size() == 1);
+            longname.append(" (the only current member of a potential group, which would be constrained to ");
+        }
+    }
+
+    ParamStatus pstat(forces.GetParamstatus(ftype,pindex));
+    longname.append(pstat.ConstraintDescription(gindex));
+
+    if (doLongName) return longname;
+    return shortname;
+}
+
+//____________________________________________________________________________________
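
GetParamNameWithConstraint() above assembles the display name for a possibly grouped parameter: short names are joined with "/", long names with ", ", the last comma is replaced by " and" for two-member groups (or gains an " and" after it for larger groups), and the constraint description is appended. A small standalone sketch of just the joining rules, using hypothetical parameter names ("Theta1", "Theta2", "Theta3"); the real code pulls the names from datapackplus and the constraint text from ParamStatus:

    #include <iostream>
    #include <string>
    #include <vector>

    int main()
    {
        std::vector<std::string> names;
        names.push_back("Theta1");
        names.push_back("Theta2");
        names.push_back("Theta3");

        std::string shortname, longname;
        std::string::size_type commapos = std::string::npos;
        for (unsigned long i = 0; i < names.size(); ++i)
        {
            commapos = longname.rfind(",");            // last comma before this append
            shortname += names[i] + "/";
            longname  += names[i] + ", ";
        }
        shortname.erase(shortname.size() - 1, 1);      // drop trailing "/"
        longname.erase(longname.size() - 2, 2);        // drop trailing ", "
        if (names.size() == 2)      longname.replace(commapos, 1, " and");
        else if (names.size() > 2)  longname.insert(commapos + 1, " and");

        std::cout << shortname << std::endl;           // Theta1/Theta2/Theta3
        std::cout << longname  << std::endl;           // Theta1, Theta2, and Theta3
    }
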
diff --git a/src/ui_vars/ui_vars.h b/src/ui_vars/ui_vars.h
new file mode 100644
index 0000000..3d0874e
--- /dev/null
+++ b/src/ui_vars/ui_vars.h
@@ -0,0 +1,64 @@
+// $Id: ui_vars.h,v 1.15 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_VARS_H
+#define UI_VARS_H
+
+#include "ui_interface.h"
+#include "ui_vars_chainparams.h"
+#include "ui_vars_datamodel.h"
+#include "ui_vars_forces.h"
+#include "ui_vars_userparams.h"
+#include "ui_vars_datapackplus.h"
+#include "ui_vars_traitmodels.h"
+
+class DataPack;
+
+using std::string;
+
+// EWFIX.P5 ENHANCEMENT -- all of the aggregate members of this class have
+// a variety of Set methods for setting values. However, we're not
+// actually checking that those are good values in most cases.
+// we need to create a specialized exception that the menu can
+// catch and query the user to try again
+
+// variables that can be changed by the user
+class UIVars
+{
+  private:
+    UIVars();   // undefined
+    UIVars& operator=(const UIVars& vars); // undefined
+    UIInterface* ui;
+
+  public:
+    // one might argue that the constructors should have
+    // restricted access since only the UndoRedoChain should
+    // be creating these puppies.
+    UIVars(DataPack& datapack,string fileName,UIInterface* myui);
+    UIVars(const UIVars& vars);
+    virtual ~UIVars();
+    virtual string GetParamNameWithConstraint(force_type ftype, long pindex,
+                                              bool doLongName=true) const;
+    virtual UIInterface* GetUI() const {return ui;}
+
+    // public members because we want direct access to their
+    // public methods
+    UIVarsChainParameters       chains;
+    UIVarsUserParameters        userparams;
+    UIVarsDataPackPlus          datapackplus;
+    UIVarsDataModels            datamodel;
+    UIVarsTraitModels           traitmodels;
+    UIVarsForces                forces;
+};
+
+#endif  // UI_VARS_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/ui_vars_chainparams.cpp b/src/ui_vars/ui_vars_chainparams.cpp
new file mode 100644
index 0000000..6add31c
--- /dev/null
+++ b/src/ui_vars/ui_vars_chainparams.cpp
@@ -0,0 +1,545 @@
+// $Id: ui_vars_chainparams.cpp,v 1.39 2012/05/15 06:21:48 ewalkup Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+
+#include "defaults.h"
+#include "timex.h"      // for GetTime()
+#include "ui_vars.h"
+
+//------------------------------------------------------------------------------------
+
+UIVarsChainParameters::UIVarsChainParameters(UIVars * myUIVars)
+    :
+    UIVarsComponent(myUIVars),
+    adaptiveTemperatures            (defaults::useAdaptiveTemperatures),
+    chainTemperatures               (defaults::chainTemperatures()),
+    dropArrangerRelativeTiming      (defaults::dropArrangerTiming),
+    sizeArrangerRelativeTiming      (defaults::sizeArrangerTiming),
+    haplotypeArrangerRelativeTiming (defaults::haplotypeArrangerTiming),
+    probhapArrangerRelativeTiming   (defaults::probhapArrangerTiming),
+    bayesianArrangerRelativeTiming  (FLAGDOUBLE),
+    locusArrangerRelativeTiming     (FLAGDOUBLE),
+    zilchArrangerRelativeTiming     (0.0),
+    stairArrangerRelativeTiming     (0.0),
+    epochSizeArrangerRelativeTiming (0.0),
+    doBayesianAnalysis              (defaults::useBayesianAnalysis),
+    finalChainSamplingInterval      (defaults::finalInterval),
+    finalNumberOfChains             (defaults::finalNChains),
+    finalNumberOfChainsToDiscard    (defaults::finalDiscard),
+    finalNumberOfSamples            (defaults::finalNSamples),
+    initialChainSamplingInterval    (defaults::initInterval),
+    initialNumberOfChains           (defaults::initNChains),
+    initialNumberOfChainsToDiscard  (defaults::initDiscard),
+    initialNumberOfSamples          (defaults::initNSamples),
+    numberOfReplicates              (defaults::replicates),
+    temperatureInterval             (defaults::temperatureInterval)
+{
+}
+
+UIVarsChainParameters::UIVarsChainParameters(UIVars * myUIVars, const UIVarsChainParameters& chainparams)
+    : UIVarsComponent(myUIVars),
+      adaptiveTemperatures            (chainparams.adaptiveTemperatures),
+      chainTemperatures               (chainparams.chainTemperatures),
+      dropArrangerRelativeTiming      (chainparams.dropArrangerRelativeTiming),
+      sizeArrangerRelativeTiming      (chainparams.sizeArrangerRelativeTiming),
+      haplotypeArrangerRelativeTiming (chainparams.haplotypeArrangerRelativeTiming),
+      probhapArrangerRelativeTiming   (chainparams.probhapArrangerRelativeTiming),
+      bayesianArrangerRelativeTiming  (chainparams.bayesianArrangerRelativeTiming),
+      locusArrangerRelativeTiming     (chainparams.locusArrangerRelativeTiming),
+      zilchArrangerRelativeTiming     (chainparams.zilchArrangerRelativeTiming),
+      stairArrangerRelativeTiming     (chainparams.stairArrangerRelativeTiming),
+      epochSizeArrangerRelativeTiming (chainparams.epochSizeArrangerRelativeTiming),
+      doBayesianAnalysis              (chainparams.doBayesianAnalysis),
+      finalChainSamplingInterval      (chainparams.finalChainSamplingInterval),
+      finalNumberOfChains             (chainparams.finalNumberOfChains),
+      finalNumberOfChainsToDiscard    (chainparams.finalNumberOfChainsToDiscard),
+      finalNumberOfSamples            (chainparams.finalNumberOfSamples),
+      initialChainSamplingInterval    (chainparams.initialChainSamplingInterval),
+      initialNumberOfChains           (chainparams.initialNumberOfChains),
+      initialNumberOfChainsToDiscard  (chainparams.initialNumberOfChainsToDiscard),
+      initialNumberOfSamples          (chainparams.initialNumberOfSamples),
+      numberOfReplicates              (chainparams.numberOfReplicates),
+      temperatureInterval             (chainparams.temperatureInterval)
+{
+}
+
+UIVarsChainParameters::~UIVarsChainParameters()
+{
+}
+
+long int UIVarsChainParameters::GetChainCount() const
+{
+    return chainTemperatures.size();
+}
+
+double UIVarsChainParameters::GetChainTemperature(long int chainId) const
+{
+    assert(chainId < (long int)chainTemperatures.size());
+    return chainTemperatures[chainId];
+}
+
+DoubleVec1d UIVarsChainParameters::GetChainTemperatures() const
+{
+    return chainTemperatures;
+}
+
+double UIVarsChainParameters::GetBayesianArrangerRelativeTiming() const
+{
+    if (GetDoBayesianAnalysis())
+    {
+        if(bayesianArrangerRelativeTiming <= 0.0)
+        {
+            assert(bayesianArrangerRelativeTiming == FLAGDOUBLE);
+            return GetDropArrangerRelativeTiming();
+        }
+        else
+        {
+            return bayesianArrangerRelativeTiming;
+        }
+    }
+    else
+    {
+        return 0.0;
+    }
+}
+
+double UIVarsChainParameters::GetLocusArrangerRelativeTiming() const
+{
+    if (GetConstUIVars().traitmodels.AnyJumpingAnalyses() > 0)
+    {
+        if(locusArrangerRelativeTiming <= 0.0)
+        {
+            assert(locusArrangerRelativeTiming == FLAGDOUBLE);
+            return dropArrangerRelativeTiming * .2;
+        }
+        else
+        {
+            return locusArrangerRelativeTiming;
+        }
+    }
+    else
+    {
+        return 0.0;
+    }
+}
+
+double UIVarsChainParameters::GetDropArrangerRelativeTiming() const
+{
+    if (zilchArrangerRelativeTiming > 0)
+    {
+        return 0.0;
+    }
+    return dropArrangerRelativeTiming;
+}
+
+double UIVarsChainParameters::GetStairArrangerRelativeTiming() const
+{
+    return stairArrangerRelativeTiming;
+}
+
+double UIVarsChainParameters::GetEpochSizeArrangerRelativeTiming() const
+{
+    return epochSizeArrangerRelativeTiming;
+}
+
+double UIVarsChainParameters::GetSizeArrangerRelativeTiming() const
+{
+    return sizeArrangerRelativeTiming;
+}
+
+double UIVarsChainParameters::GetZilchArrangerRelativeTiming() const
+{
+    if (GetConstUIVars().datapackplus.AnySimulation())
+    {
+        return zilchArrangerRelativeTiming;
+    }
+    else
+    {
+        return 0.0;
+    }
+}
+
+bool UIVarsChainParameters::GetHaplotypeArrangerPossible() const
+{
+    return GetConstUIVars().datapackplus.CanHapArrange();
+}
+
+double UIVarsChainParameters::GetHaplotypeArrangerRelativeTiming() const
+{
+    return haplotypeArrangerRelativeTiming;
+}
+
+bool UIVarsChainParameters::GetProbHapArrangerPossible() const
+{
+    return (GetConstUIVars().traitmodels.AnyJumpingAnalyses() &&
+            GetConstUIVars().datapackplus.AnyRelativeHaplotypes());
+}
+
+double UIVarsChainParameters::GetProbHapArrangerRelativeTiming() const
+{
+    return probhapArrangerRelativeTiming;
+}
+
+long int UIVarsChainParameters::GetTemperatureInterval() const
+{
+    return temperatureInterval;
+}
+
+bool UIVarsChainParameters::GetAdaptiveTemperatures() const
+{
+    return adaptiveTemperatures;
+}
+
+long int UIVarsChainParameters::GetNumberOfReplicates() const
+{
+    return numberOfReplicates;
+}
+
+long int UIVarsChainParameters::GetInitialNumberOfChains() const
+{
+    return initialNumberOfChains;
+}
+
+long int UIVarsChainParameters::GetInitialNumberOfSamples() const
+{
+    return initialNumberOfSamples;
+}
+
+long int UIVarsChainParameters::GetInitialChainSamplingInterval() const
+{
+    return initialChainSamplingInterval;
+}
+
+long int UIVarsChainParameters::GetInitialNumberOfChainsToDiscard() const
+{
+    return initialNumberOfChainsToDiscard;
+}
+
+long int UIVarsChainParameters::GetFinalNumberOfChains() const
+{
+    return finalNumberOfChains;
+}
+
+long int UIVarsChainParameters::GetFinalNumberOfSamples() const
+{
+    return finalNumberOfSamples;
+}
+
+long int UIVarsChainParameters::GetFinalChainSamplingInterval() const
+{
+    return finalChainSamplingInterval;
+}
+
+long int UIVarsChainParameters::GetFinalNumberOfChainsToDiscard() const
+{
+    return finalNumberOfChainsToDiscard;
+}
+
+bool UIVarsChainParameters::GetDoBayesianAnalysis() const
+{
+    return doBayesianAnalysis;
+}
+
+void UIVarsChainParameters::SetChainCount(long int chainCount)
+{
+    if(chainCount < 1)
+    {
+        throw data_error("There must be a positive number of simultaneous searches");
+    }
+    if (chainCount > defaults::maxNumHeatedChains)
+    {
+        string err = "There must be less than "
+            + ToString(defaults::maxNumHeatedChains)
+            + " multiple searches for a given run. "
+            + " (A reasonably high number is 5.)";
+        throw data_error(err);
+    }
+    if (chainCount > 5)
+    {
+        GetConstUIVars().GetUI()->AddWarning("Warning:  a high number of simultaneous heated chains can cause "
+                                             "LAMARC to run out of memory.  Five chains is probably a reasonably high upper limit.");
+    }
+    while((long int)chainTemperatures.size() < chainCount)
+    {
+        chainTemperatures.push_back(MakeNextChainTemperature(chainTemperatures));
+    }
+    chainTemperatures.resize(chainCount);
+}
+
+void UIVarsChainParameters::SetChainTemperature(double val, long int chainId)
+{
+    assert(chainId < (long int)chainTemperatures.size());
+    if (val <= 0)
+    {
+        throw data_error("All chain temperatures must be positive.");
+    }
+    chainTemperatures[chainId] = val;
+}
+
+void UIVarsChainParameters::RescaleDefaultSizeArranger()
+{
+    if (dropArrangerRelativeTiming > 0)
+    {
+        sizeArrangerRelativeTiming = sizeArrangerRelativeTiming * dropArrangerRelativeTiming;
+    }
+}
+
+void UIVarsChainParameters::SetBayesianArrangerRelativeTiming(double val)
+{
+    if (val < 0.0)
+    {
+        throw data_error("The bayesian rearranger frequency must be positive.");
+    }
+    if (val == 0.0)
+    {
+        if (GetDoBayesianAnalysis() == false) return;
+        throw data_error("The Bayesian rearranger frequency must be positive when a Bayesian analysis is on.\n"
+                         "If you don't want a Bayesian analysis, turn it off in the 'Search Strategy' menu.");
+    }
+    if (GetDoBayesianAnalysis() == false)
+    {
+        throw data_error("You are not performing a Bayesian analysis, and therefore may not turn on the\n"
+                         "Bayesian arranger (the 'bayesian' tag within the 'strategy' tag)");
+    }
+    bayesianArrangerRelativeTiming = val;
+}
+
+void UIVarsChainParameters::SetLocusArrangerRelativeTiming(double val)
+{
+    if (val <= 0.0)
+    {
+        throw data_error("The trait location rearranger frequency must be greater than zero.");
+    }
+    locusArrangerRelativeTiming = val;
+}
+
+void UIVarsChainParameters::SetDropArrangerRelativeTiming(double val)
+{
+    // shut this test off for newick testing 8/10 JRM
+    if (val <= 0)
+    {
+        throw data_error("The topology rearranger frequency must be greater than zero.");
+    }
+    zilchArrangerRelativeTiming = 0.0;
+    dropArrangerRelativeTiming = val;
+}
+
+void UIVarsChainParameters::SetStairArrangerRelativeTiming(double val)
+{
+    if (val < 0)
+    {
+        throw data_error("The stair rearranger frequency must be positive.");
+    }
+    stairArrangerRelativeTiming = val;
+}
+
+void UIVarsChainParameters::SetEpochSizeArrangerRelativeTiming(double val)
+{
+    if (val < 0)
+    {
+        throw data_error("The epoch-size rearranger frequency must be positive.");
+    }
+    epochSizeArrangerRelativeTiming = val;
+}
+
+void UIVarsChainParameters::SetSizeArrangerRelativeTiming(double val)
+{
+    if (val < 0)
+    {
+        throw data_error("The size rearranger frequency must be positive.");
+    }
+    sizeArrangerRelativeTiming = val;
+}
+
+void UIVarsChainParameters::SetHaplotypeArrangerRelativeTiming(double val)
+{
+    if (val < 0)
+    {
+        throw data_error("The haplotype rearranger frequency must be positive.");
+    }
+    haplotypeArrangerRelativeTiming = val;
+}
+
+void UIVarsChainParameters::SetProbHapArrangerRelativeTiming(double val)
+{
+    if (val < 0)
+    {
+        throw data_error("The trait haplotype rearranger frequency must be positive.");
+    }
+    probhapArrangerRelativeTiming = val;
+}
+
+void UIVarsChainParameters::SetZilchArrangerRelativeTiming(double val)
+{
+    if (val < 0.0)
+    {
+        throw data_error("The do-nothing rearranger frequency must be positive.");
+    }
+    if (GetConstUIVars().datapackplus.AnySimulation())
+    {
+        zilchArrangerRelativeTiming = val;
+    }
+    else
+    {
+        throw data_error("You may not use the do-nothing arranger if you are not simulating data.");
+    }
+}
+
+void UIVarsChainParameters::SetTemperatureInterval(long int val)
+{
+    if (val < 1)
+    {
+        throw data_error("The swapping interval must be one or greater.");
+    }
+    temperatureInterval = val;
+}
+
+void UIVarsChainParameters::SetAdaptiveTemperatures(bool val)
+{
+    adaptiveTemperatures = val;
+    // NOTE: We allow lamarc to use the current (possibly
+    // user-set) temperatures as the starting point for
+    // adaptive temperatures. Jon says this is fine.
+}
+
+void UIVarsChainParameters::SetNumberOfReplicates(long int val)
+{
+    if (val < 0)
+    {
+        throw data_error("The number of replicates must be one or greater.");
+    }
+    if (val == 0)
+    {
+        val = 1;
+        GetConstUIVars().GetUI()->AddWarning("The minimum number of replicates is one, meaning, 'Do one analysis'.  "
+                                             "Setting the number of replicates to one, instead of zero.");
+    }
+    numberOfReplicates = val;
+}
+
+void UIVarsChainParameters::SetInitialNumberOfChains(long int val)
+{
+    if (val < 0)
+    {
+        throw data_error("You may not have a negative number of initial chains.");
+    }
+    initialNumberOfChains = val;
+    WarnIfNoSamples();
+}
+
+void UIVarsChainParameters::SetInitialNumberOfSamples(long int val)
+{
+    if (val < 0)
+    {
+        throw data_error("You may not have a negative number of initial samples.");
+    }
+    initialNumberOfSamples = val;
+    WarnIfNoSamples();
+}
+
+void UIVarsChainParameters::SetInitialChainSamplingInterval(long int val)
+{
+    if (val <= 0)
+    {
+        throw data_error("You must have a positive sampling interval.");
+    }
+    initialChainSamplingInterval = val;
+}
+
+void UIVarsChainParameters::SetInitialNumberOfChainsToDiscard(long int val)
+{
+    if (val < 0)
+    {
+        throw data_error("You may not have a negative number of discarded genealogies.");
+    }
+    initialNumberOfChainsToDiscard = val;
+}
+
+void UIVarsChainParameters::SetFinalNumberOfChains(long int val)
+{
+    if (val < 0)
+    {
+        throw data_error("You may not have a negative number of final chains.");
+    }
+    finalNumberOfChains = val;
+    WarnIfNoSamples();
+}
+
+void UIVarsChainParameters::SetFinalNumberOfSamples(long int val)
+{
+    if (val < 0)
+    {
+        throw data_error("You may not have a negative number of final samples.");
+    }
+    finalNumberOfSamples = val;
+    WarnIfNoSamples();
+}
+
+void UIVarsChainParameters::SetFinalChainSamplingInterval(long int val)
+{
+    if (val <= 0)
+    {
+        throw data_error("You must have a positive sampling interval.");
+    }
+    finalChainSamplingInterval = val;
+}
+
+void UIVarsChainParameters::SetFinalNumberOfChainsToDiscard(long int val)
+{
+    if (val < 0)
+    {
+        throw data_error("You may not have a negative number of discarded genealogies.");
+    }
+    finalNumberOfChainsToDiscard = val;
+}
+
+void UIVarsChainParameters::WarnIfNoSamples()
+{
+    if (((initialNumberOfChains == 0) || (initialNumberOfSamples == 0))
+        && ((finalNumberOfChains == 0) || (finalNumberOfSamples == 0)))
+    {
+        GetConstUIVars().GetUI()->AddWarning("Warning:  LAMARC will never sample any trees.");
+    }
+}
+
+void UIVarsChainParameters::SetDoBayesianAnalysis(bool val)
+{
+    if (val && GetConstUIVars().forces.GetForceOnOff(force_REGION_GAMMA))
+    {
+        throw data_error("Cannot do a Bayesian analysis while attempting to estimate Gamma.");
+    }
+    if (!val && GetConstUIVars().forces.GetForceOnOff(force_DIVERGENCE))
+    {
+        throw data_error("Must do a Bayesian analysis while modeling Divergence.");
+    }
+    doBayesianAnalysis = val;
+}
+
+double MakeNextChainTemperature(const vector<double>& temperatures)
+// this assigns temperatures with constant difference between
+// successive temperatures.
+{
+    if(temperatures.empty())
+    {
+        assert(false); //How did we get an empty temperature vec?
+        return defaults::minTemperature;
+    }
+    unsigned long int size = temperatures.size();
+    if(size == 1)
+    {
+        return defaults::secondTemperature;
+    }
+    double diff = temperatures[size-1] - temperatures[size-2];
+    return temperatures[size-1] + diff;
+}
+
+//____________________________________________________________________________________
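
MakeNextChainTemperature() above grows the heating-temperature ladder by keeping the difference between the last two temperatures constant, and SetChainCount() calls it repeatedly until enough temperatures exist before truncating with resize(). A small self-contained illustration of that extension rule, using a hypothetical starting ladder of 1.0 and 1.5 (the real function falls back to defaults::minTemperature and defaults::secondTemperature when fewer than two temperatures are present):

    #include <iostream>
    #include <vector>

    // Same rule as MakeNextChainTemperature() once the ladder already
    // holds at least two temperatures: extend by the last difference.
    double NextTemperature(const std::vector<double>& temps)
    {
        std::vector<double>::size_type n = temps.size();
        double diff = temps[n - 1] - temps[n - 2];
        return temps[n - 1] + diff;
    }

    int main()
    {
        std::vector<double> ladder;
        ladder.push_back(1.0);
        ladder.push_back(1.5);
        while (ladder.size() < 5)                 // mimic SetChainCount(5)
            ladder.push_back(NextTemperature(ladder));
        for (unsigned long i = 0; i < ladder.size(); ++i)
            std::cout << ladder[i] << ' ';        // prints: 1 1.5 2 2.5 3
        std::cout << std::endl;
    }
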
diff --git a/src/ui_vars/ui_vars_chainparams.h b/src/ui_vars/ui_vars_chainparams.h
new file mode 100644
index 0000000..ea9d7e2
--- /dev/null
+++ b/src/ui_vars/ui_vars_chainparams.h
@@ -0,0 +1,124 @@
+// $Id: ui_vars_chainparams.h,v 1.17 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_VARS_CHAINPARAMS_H
+#define UI_VARS_CHAINPARAMS_H
+
+#include <string>
+#include "vectorx.h"
+#include "ui_vars_component.h"
+
+using std::string;
+
+// variables that can be changed by the user
+class UIVarsChainParameters : public UIVarsComponent
+{
+  private:
+    //
+    UIVarsChainParameters();                                // undefined
+    UIVarsChainParameters(const UIVarsChainParameters&);    // undefined
+
+    // variables for the ChainParameters
+    bool                    adaptiveTemperatures;
+    DoubleVec1d             chainTemperatures;
+    double                  dropArrangerRelativeTiming;
+    double                  sizeArrangerRelativeTiming;
+    double                  haplotypeArrangerRelativeTiming;
+    double                  probhapArrangerRelativeTiming;
+    double                  bayesianArrangerRelativeTiming;
+    double                  locusArrangerRelativeTiming;
+    double                  zilchArrangerRelativeTiming;
+    double                  stairArrangerRelativeTiming;
+    double                  epochSizeArrangerRelativeTiming;
+    bool                    doBayesianAnalysis;
+    long                    finalChainSamplingInterval;
+    long                    finalNumberOfChains;
+    long                    finalNumberOfChainsToDiscard;
+    long                    finalNumberOfSamples;
+    long                    initialChainSamplingInterval;
+    long                    initialNumberOfChains;
+    long                    initialNumberOfChainsToDiscard;
+    long                    initialNumberOfSamples;
+    long                    numberOfReplicates;
+    long                    temperatureInterval;
+
+  public:
+    // one might argue that the constructors should have
+    // restricted access since only UIVars should
+    // be creating these puppies.
+    UIVarsChainParameters(UIVars*);
+    UIVarsChainParameters(UIVars*,const UIVarsChainParameters&);
+
+    virtual ~UIVarsChainParameters();
+
+    /////////////////////////////////////////////////////////////
+    // Get Methods for chain parameter variables
+    virtual bool           GetAdaptiveTemperatures() const;
+    virtual bool           GetDoBayesianAnalysis() const;
+    virtual long           GetChainCount() const;
+    virtual double         GetChainTemperature(long chainId) const;
+    virtual DoubleVec1d    GetChainTemperatures() const;
+    virtual double         GetDropArrangerRelativeTiming() const;
+    virtual double         GetSizeArrangerRelativeTiming() const;
+    virtual double         GetBayesianArrangerRelativeTiming() const;
+    virtual double         GetLocusArrangerRelativeTiming() const;
+    virtual double         GetZilchArrangerRelativeTiming() const;
+    virtual double         GetStairArrangerRelativeTiming() const;
+    virtual double         GetEpochSizeArrangerRelativeTiming() const;
+    virtual long           GetFinalChainSamplingInterval() const;
+    virtual long           GetFinalNumberOfChains() const;
+    virtual long           GetFinalNumberOfChainsToDiscard() const;
+    virtual long           GetFinalNumberOfSamples() const;
+    virtual bool           GetHaplotypeArrangerPossible() const;
+    virtual double         GetHaplotypeArrangerRelativeTiming() const;
+    virtual bool           GetProbHapArrangerPossible() const;
+    virtual double         GetProbHapArrangerRelativeTiming() const;
+    virtual long           GetInitialChainSamplingInterval() const;
+    virtual long           GetInitialNumberOfChains() const;
+    virtual long           GetInitialNumberOfChainsToDiscard() const;
+    virtual long           GetInitialNumberOfSamples() const;
+    virtual long           GetNumberOfReplicates() const;
+    virtual long           GetTemperatureInterval() const;
+
+    /////////////////////////////////////////////////////////////
+    virtual void            SetAdaptiveTemperatures(bool);
+    virtual void            SetDoBayesianAnalysis(bool);
+    virtual void            SetChainCount(long count);
+    virtual void            SetChainTemperature(double temp, long chainId);
+    virtual void            RescaleDefaultSizeArranger();
+    virtual void            SetBayesianArrangerRelativeTiming(double);
+    virtual void            SetDropArrangerRelativeTiming(double);
+    virtual void            SetSizeArrangerRelativeTiming(double);
+    virtual void            SetLocusArrangerRelativeTiming(double);
+    virtual void            SetHaplotypeArrangerRelativeTiming(double);
+    virtual void            SetProbHapArrangerRelativeTiming(double);
+    virtual void            SetZilchArrangerRelativeTiming(double);
+    virtual void            SetStairArrangerRelativeTiming(double);
+    virtual void            SetEpochSizeArrangerRelativeTiming(double);
+    virtual void            SetFinalChainSamplingInterval(long);
+    virtual void            SetFinalNumberOfChains(long);
+    virtual void            SetFinalNumberOfChainsToDiscard(long);
+    virtual void            SetFinalNumberOfSamples(long);
+    virtual void            SetInitialChainSamplingInterval(long);
+    virtual void            SetInitialNumberOfChains(long);
+    virtual void            SetInitialNumberOfChainsToDiscard(long);
+    virtual void            SetInitialNumberOfSamples(long);
+    virtual void            SetNumberOfReplicates(long);
+    virtual void            SetTemperatureInterval(long);
+  private:
+    virtual void            WarnIfNoSamples();
+};
+
+double MakeNextChainTemperature(const vector<double>& temperatures);
+
+#endif  // UI_VARS_CHAINPARAMS_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/ui_vars_component.cpp b/src/ui_vars/ui_vars_component.cpp
new file mode 100644
index 0000000..000c5d7
--- /dev/null
+++ b/src/ui_vars/ui_vars_component.cpp
@@ -0,0 +1,42 @@
+// $Id: ui_vars_component.cpp,v 1.5 2010/03/02 23:12:32 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+#include <cstddef>              // for NULL
+#include "ui_vars_component.h"
+
+//------------------------------------------------------------------------------------
+
+UIVarsComponent::UIVarsComponent(UIVars * myUIVars)
+    : m_UIVars(myUIVars)
+{
+}
+
+UIVarsComponent::~UIVarsComponent()
+{
+    m_UIVars = NULL;
+}
+
+const UIVars &
+UIVarsComponent::GetConstUIVars() const
+{
+    assert(m_UIVars != NULL);
+    return (*m_UIVars);
+}
+
+UIVars &
+UIVarsComponent::GetUIVars()
+{
+    assert(m_UIVars != NULL);
+    return (*m_UIVars);
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/ui_vars_component.h b/src/ui_vars/ui_vars_component.h
new file mode 100644
index 0000000..cfd15f8
--- /dev/null
+++ b/src/ui_vars/ui_vars_component.h
@@ -0,0 +1,39 @@
+// $Id: ui_vars_component.h,v 1.8 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_VARS_COMPONENT_H
+#define UI_VARS_COMPONENT_H
+
+class UIVars;
+
+class UIVarsComponent
+{
+    friend class UIVars;    // which is intended to call SetParent method
+
+  private:
+    UIVarsComponent();                          // undefined
+  protected:
+    UIVars * m_UIVars;
+  public:
+    // one might argue that the constructors should have
+    // restricted access since only UIVars should
+    // be creating these puppies.
+    UIVarsComponent(UIVars*);
+    virtual ~UIVarsComponent();
+
+    const UIVars& GetConstUIVars() const;
+    UIVars& GetUIVars() ;
+
+};
+
+#endif  // UI_VARS_COMPONENT_H
+
+//____________________________________________________________________________________
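
UIVarsComponent above exists so that each settings block (chains, forces, datamodel, and so on) carries a back-pointer to its owning UIVars and can consult sibling blocks through GetConstUIVars(), exactly as the chain-parameter code does with GetConstUIVars().datapackplus and GetConstUIVars().traitmodels. A generic sketch of that pattern with hypothetical Parent/Counter/Reporter names (none of these are LAMARC classes); the point is that a component never stores a pointer to a sibling, only to the parent:

    #include <iostream>

    class Parent;                     // forward declaration, as with UIVars

    class Component
    {
      protected:
        Parent* m_parent;
      public:
        Component(Parent* p) : m_parent(p) {}
        const Parent& GetConstParent() const { return *m_parent; }
    };

    class Counter : public Component
    {
      public:
        Counter(Parent* p) : Component(p) {}
        long Count() const { return 42; }
    };

    class Reporter : public Component
    {
      public:
        Reporter(Parent* p) : Component(p) {}
        long Doubled() const;         // needs Counter, reached via the parent
    };

    class Parent
    {
      public:
        Counter  counter;
        Reporter reporter;
        Parent() : counter(this), reporter(this) {}
    };

    long Reporter::Doubled() const { return 2 * GetConstParent().counter.Count(); }

    int main()
    {
        Parent p;
        std::cout << p.reporter.Doubled() << std::endl;   // prints 84
    }
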
diff --git a/src/ui_vars/ui_vars_datamodel.cpp b/src/ui_vars/ui_vars_datamodel.cpp
new file mode 100644
index 0000000..68c3bd6
--- /dev/null
+++ b/src/ui_vars/ui_vars_datamodel.cpp
@@ -0,0 +1,1102 @@
+// $Id: ui_vars_datamodel.cpp,v 1.41 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+
+#include "calculators.h"        // for FrequenciesFromData(const Locus&);
+#include "constants.h"          // for model_type
+#include "datatype.h"           // for predicate ModelIsIllegalForDataType
+#include "defaults.h"
+#include "dlmodel.h"            // for ModelTypeAcceptsDataType
+#include "errhandling.h"
+#include "report_strings.h"
+#include "stringx.h"
+#include "vectorx.h"            // for DoubleVec1d
+#include "ui_constants.h"
+#include "ui_regid.h"
+#include "ui_vars.h"
+#include "ui_vars_datamodel.h"
+
+//------------------------------------------------------------------------------------
+
+UIVarsSingleDataModel::UIVarsSingleDataModel(UIRegId regionId)
+    :
+    m_regionIndex(regionId.GetRegion()),
+    m_locusIndex(regionId.GetLocus()),
+    m_datatype(regionId.GetDataType()),
+    doNormalize(defaults::doNormalize),
+    autoCorrelation(defaults::autoCorrelation),
+    categoryRates(defaults::categoryRates()),
+    categoryProbabilities(defaults::categoryProbabilities()),
+    ttRatio(defaults::ttratio),
+    doCalcFreqsFromData(defaults::calcFreqsFromData),
+    baseFrequency_A(defaults::baseFrequencyA),
+    baseFrequency_C(defaults::baseFrequencyC),
+    baseFrequency_G(defaults::baseFrequencyG),
+    baseFrequency_T(defaults::baseFrequencyT),
+    gtr_AC(defaults::gtrRateAC),
+    gtr_AG(defaults::gtrRateAG),
+    gtr_AT(defaults::gtrRateAT),
+    gtr_CG(defaults::gtrRateCG),
+    gtr_CT(defaults::gtrRateCT),
+    gtr_GT(defaults::gtrRateGT),
+    alpha(defaults::KS_alpha),
+    m_relmurate(defaults::relativeMuRate),
+    optimizeAlpha(defaults::optimize_KS_alpha),
+    m_perBaseErrorRate(defaults::per_base_error_rate)
+{
+    m_modelType = DefaultModelForDataType(m_datatype);
+}
+
+UIVarsSingleDataModel::UIVarsSingleDataModel(const UIVarsSingleDataModel & clone, UIRegId regionId)
+    :
+    m_regionIndex           (regionId.GetRegion()),
+    m_locusIndex            (regionId.GetLocus()),
+    m_datatype              (regionId.GetDataType()),
+    m_modelType             (clone.m_modelType),
+    doNormalize             (clone.doNormalize),
+    autoCorrelation         (clone.autoCorrelation),
+    categoryRates           (clone.categoryRates),
+    categoryProbabilities   (clone.categoryProbabilities),
+    ttRatio                 (clone.ttRatio),
+    doCalcFreqsFromData     (clone.doCalcFreqsFromData),
+    baseFrequency_A         (clone.baseFrequency_A),
+    baseFrequency_C         (clone.baseFrequency_C),
+    baseFrequency_G         (clone.baseFrequency_G),
+    baseFrequency_T         (clone.baseFrequency_T),
+    gtr_AC                  (clone.gtr_AC),
+    gtr_AG                  (clone.gtr_AG),
+    gtr_AT                  (clone.gtr_AT),
+    gtr_CG                  (clone.gtr_CG),
+    gtr_CT                  (clone.gtr_CT),
+    gtr_GT                  (clone.gtr_GT),
+    alpha                   (clone.alpha),
+    m_relmurate             (clone.m_relmurate),
+    optimizeAlpha           (clone.optimizeAlpha),
+    m_perBaseErrorRate      (clone.m_perBaseErrorRate)
+{
+    if (!ModelTypeAcceptsDataType(m_modelType, m_datatype))
+    {
+        assert(false); //Let's try to not get in this situation.
+        m_modelType = DefaultModelForDataType(m_datatype);
+    }
+}
+
+UIVarsSingleDataModel::~UIVarsSingleDataModel()
+{
+}
+
+double
+UIVarsSingleDataModel::MakeNextCategoryRate(DoubleVec1d rates)
+{
+    if(rates.empty())
+    {
+        assert(false); //How did we have an empty vector to start with?
+        return defaults::categoryRate;
+    }
+    return rates[rates.size()-1] * defaults::categoryRateMultiple;
+}
+
+double
+UIVarsSingleDataModel::MakeNextCategoryProbability(DoubleVec1d probabilities)
+{
+    if(probabilities.empty())
+    {
+        assert(false); //How did we have an empty vector to start with?
+        return defaults::categoryProbability;
+    }
+    return probabilities[probabilities.size()-1];
+}
+
+long int UIVarsSingleDataModel::GetNumCategories() const
+{
+    // really this should be categoryProbabilities.size() as well;
+    // the assert below checks that the two stay in sync.
+    assert(categoryRates.size() == categoryProbabilities.size());
+    return categoryRates.size();
+}
+
+double UIVarsSingleDataModel::GetCategoryProbability(long int categoryIndex) const
+{
+    assert(categoryIndex < GetNumCategories());
+    return categoryProbabilities[categoryIndex];
+}
+
+double UIVarsSingleDataModel::GetCategoryRate(long int categoryIndex) const
+{
+    assert(categoryIndex < GetNumCategories());
+    return categoryRates[categoryIndex];
+}
+
+bool UIVarsSingleDataModel::GetCalcFreqsFromData()  const
+{
+    if (GetDataModelType() == GTR)
+    {
+        return false;
+    }
+    return doCalcFreqsFromData;
+}
+
+double UIVarsSingleDataModel::GetFrequencyA() const
+{
+    if(GetCalcFreqsFromData())
+    {
+        // not terribly efficient, but maintaining logic to cache
+        // the values was very difficult
+        return FrequenciesFromData(m_regionIndex,m_locusIndex,GetDataModelType())[0];
+    }
+    return baseFrequency_A;
+}
+
+double UIVarsSingleDataModel::GetFrequencyC() const
+{
+    if(GetCalcFreqsFromData())
+    {
+        // not terribly efficient, but maintaining logic to cache
+        // the values was very difficult
+        return FrequenciesFromData(m_regionIndex,m_locusIndex,GetDataModelType())[1];
+    }
+    return baseFrequency_C;
+}
+
+double UIVarsSingleDataModel::GetFrequencyG() const
+{
+    if(GetCalcFreqsFromData())
+    {
+        // not terribly efficient, but maintaining logic to cache
+        // the values was very difficult
+        return FrequenciesFromData(m_regionIndex,m_locusIndex,GetDataModelType())[2];
+    }
+    return baseFrequency_G;
+}
+
+double UIVarsSingleDataModel::GetFrequencyT() const
+{
+    if(GetCalcFreqsFromData())
+    {
+        // not terribly efficient, but maintaining logic to cache
+        // the values was very difficult
+        return FrequenciesFromData(m_regionIndex,m_locusIndex,GetDataModelType())[3];
+    }
+    return baseFrequency_T;
+}
+
+void UIVarsSingleDataModel::SetDataModelType(model_type mtype)
+{
+    if (ModelTypeAcceptsDataType(mtype,m_datatype))
+    {
+        m_modelType = mtype;
+    }
+    else
+    {
+        string error = "The model " + ToString(mtype) +
+            " may not be used for data type " + ToString(m_datatype) +
+            " in region " + ToString(m_regionIndex+1) + ".";
+        //EWFIX.P5 LOCUS + ", segment " + ToString(m_locusIndex+1) + ".";
+        throw data_error(error);
+    }
+}
+
+void UIVarsSingleDataModel::SetAutoCorrelation(double x)
+{
+    if (x < 1)
+    {
+        throw data_error("The autocorrelation must be one or greater.");
+    }
+    autoCorrelation = x;
+}
+
+void UIVarsSingleDataModel::SetNumCategories(long int numCats)
+{
+    if(numCats < 1)
+    {
+        throw data_error("There must be a positive number of categories for all mutational models.");
+    }
+    if (numCats > defaults::maxNumCategories)
+    {
+        string err = "The maximum number of categories of mutation rates you may";
+        err += " set is " + ToString(defaults::maxNumCategories)
+            + ", though the effectiveness of adding "
+            + "more categories drops considerably after about 5.";
+        throw data_error(err);
+    }
+    while(static_cast<long int>(categoryRates.size()) < numCats)
+    {
+        categoryRates.push_back(MakeNextCategoryRate(categoryRates));
+    }
+    categoryRates.resize(numCats);
+    while(static_cast<long int>(categoryProbabilities.size()) < numCats)
+    {
+        categoryProbabilities.push_back(MakeNextCategoryProbability(categoryProbabilities));
+    }
+    categoryProbabilities.resize(numCats);
+}
+
+void UIVarsSingleDataModel::SetCategoryProbability(double prob, long int categoryIndex)
+{
+    assert(categoryIndex < GetNumCategories());
+    if(prob <= 0)
+    {
+        throw data_error("You must set a positive probability to go with each rate.");
+    }
+    categoryProbabilities[categoryIndex] = prob;
+}
+
+void UIVarsSingleDataModel::SetCategoryRate(double rate, long int categoryIndex)
+{
+    assert(categoryIndex < GetNumCategories());
+    if(rate < 0 )
+    {
+        throw data_error("You must set a positive or zero relative rate for each category.");
+    }
+    categoryRates[categoryIndex] = rate;
+}
+
+void UIVarsSingleDataModel::SetTTRatio(double x)
+{
+    if (x < 0.5)
+    {
+        throw data_error("The TT Ratio must be greater than 0.5.");
+    }
+    ttRatio=x;
+}
+
+void UIVarsSingleDataModel::SetCalcFreqsFromData(bool set)
+{
+    if(set == false && GetCalcFreqsFromData() == true)
+    {
+        // setting calc freqs from true to false, so
+        // we want to start with the calculated values
+        // in the local variables
+        if(m_regionIndex >= 0)     // doesn't make sense to do
+            // this for the global model
+        {
+            DoubleVec1d freqs =
+                FrequenciesFromData(m_regionIndex,m_locusIndex,GetDataModelType());
+            baseFrequency_A = freqs[0];
+            baseFrequency_C = freqs[1];
+            baseFrequency_G = freqs[2];
+            baseFrequency_T = freqs[3];
+        }
+
+    }
+    assert(set==false || GetDataModelType() != GTR);    // should be checked in caller
+    doCalcFreqsFromData = set;
+}
+
+void UIVarsSingleDataModel::SetFrequencyA(double freq)
+{
+    if (freq<0)
+    {
+        throw data_error("Base frequencies must be greater than zero");
+    }
+    SetCalcFreqsFromData(false);
+    baseFrequency_A = freq;
+}
+
+void UIVarsSingleDataModel::SetFrequencyC(double freq)
+{
+    if (freq<0)
+    {
+        throw data_error("Base frequencies must be greater than zero");
+    }
+    SetCalcFreqsFromData(false);
+    baseFrequency_C = freq;
+}
+
+void UIVarsSingleDataModel::SetFrequencyG(double freq)
+{
+    if (freq<0)
+    {
+        throw data_error("Base frequencies must be greater than zero");
+    }
+    SetCalcFreqsFromData(false);
+    baseFrequency_G = freq;
+}
+
+void UIVarsSingleDataModel::SetFrequencyT(double freq)
+{
+    if (freq<0)
+    {
+        throw data_error("Base frequencies must be greater than zero");
+    }
+    SetCalcFreqsFromData(false);
+    baseFrequency_T = freq;
+}
+
+void UIVarsSingleDataModel::SetGTR_AT(double x)
+{
+    if (x <= 0)
+    {
+        throw data_error("GTR rates must be greater than zero");
+    }
+    gtr_AT = x;
+}
+
+void UIVarsSingleDataModel::SetGTR_AC(double x)
+{
+    if (x <= 0)
+    {
+        throw data_error("GTR rates must be greater than zero");
+    }
+    gtr_AC = x;
+}
+void UIVarsSingleDataModel::SetGTR_AG(double x)
+{
+    if (x <= 0)
+    {
+        throw data_error("GTR rates must be greater than zero");
+    }
+    gtr_AG = x;
+}
+void UIVarsSingleDataModel::SetGTR_CG(double x)
+{
+    if (x <= 0)
+    {
+        throw data_error("GTR rates must be greater than zero");
+    }
+    gtr_CG = x;
+}
+void UIVarsSingleDataModel::SetGTR_CT(double x)
+{
+    if (x <= 0)
+    {
+        throw data_error("GTR rates must be greater than zero");
+    }
+    gtr_CT = x;
+}
+void UIVarsSingleDataModel::SetGTR_GT(double x)
+{
+    if (x <= 0)
+    {
+        throw data_error("GTR rates must be greater than zero");
+    }
+    gtr_GT = x;
+}
+
+void UIVarsSingleDataModel::SetRelativeMuRate(double x)
+{
+    if (x <= 0)
+    {
+        throw data_error("The relative mutation rate must be greater than zero");
+    }
+    m_relmurate = x;
+}
+
+void UIVarsSingleDataModel::SetAlpha(double x)
+{
+    if (x < 0 || x > 1)
+    {
+        throw data_error("The multi-step:single-step ratio for a MixedKS model must be between zero and one.");
+    }
+    alpha = x;
+}
+
+bool UIVarsSingleDataModel::IdenticalCategoryRates() const
+{
+    DoubleVec1d testRates = categoryRates;
+    std::sort(testRates.begin(), testRates.end());
+    for (unsigned long int i = 1; i < testRates.size(); i++)
+    {
+        if (fabs(testRates[i] - testRates[i-1]) < EPSILON)
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+//------------------------------------------------------------------------------------
+
+void UIVarsDataModels::checkRegionIdGlobalNotAllowed(UIRegId regionId) const
+{
+    if (regionId.GetRegion() < 0)
+    {
+        assert(false);
+        throw implementation_error("Region index < 0");
+    }
+    if (regionId.GetRegion() >= GetConstUIVars().datapackplus.GetNumRegions())
+    {
+        assert(false);
+        throw implementation_error("Region index >= number of regions");
+    }
+    if (regionId.GetLocus() >= GetConstUIVars().datapackplus.GetNumLoci(regionId.GetRegion()))
+    {
+        assert(false);
+        throw implementation_error("Locus index >= number of regions");
+    }
+}
+
+void UIVarsDataModels::checkRegionIdGlobalAllowed(UIRegId regionId) const
+{
+    if (regionId.GetRegion() != uiconst::GLOBAL_ID)
+    {
+        checkRegionIdGlobalNotAllowed(regionId);
+    }
+}
+
+UIVarsSingleDataModel & UIVarsDataModels::getRegionModelToAlter(UIRegId regionId)
+{
+    checkRegionIdGlobalAllowed(regionId);
+    if(regionId.GetRegion() == uiconst::GLOBAL_ID)
+    {
+        switch(regionId.GetDataType())
+        {
+            case dtype_DNA:
+            case dtype_SNP:
+                return m_globalModelNucleotide;
+            case dtype_msat:
+                return m_globalModelMsat;
+            case dtype_kallele:
+                return m_globalModelKAllele;
+        }
+    }
+    // if we're here, then regionId is not specifying the global
+    // data model. However, that region may currently be using
+    // the global data model. If that is the case, we have to
+    // initialize the data model for regionId to a copy of the
+    // global data model and edit from there. That's exactly
+    // what SetUseGlobalModel does when setting from true to
+    // false. (If setting from false to false, we already have
+    // the correct datamodel in individualModels[regionId])
+    // ewalkup 8-19-2004
+    SetUseGlobalModel(false,regionId);
+    return m_individualModels[regionId.GetRegion()][regionId.GetLocus()];
+}
+
+bool UIVarsDataModels::GetUseGlobalModel(UIRegId regionId) const
+{
+    checkRegionIdGlobalNotAllowed(regionId);
+    return m_useGlobalModel[regionId.GetRegion()][regionId.GetLocus()];
+}
+
+bool UIVarsDataModels::IdenticalCategoryRates(UIRegId regionId) const
+{
+    checkRegionIdGlobalAllowed(regionId);
+    return GetRegionModel(regionId).IdenticalCategoryRates();
+}
+
+void UIVarsDataModels::SetUseGlobalModel(bool useGlobal, UIRegId regionId)
+{
+    checkRegionIdGlobalNotAllowed(regionId);
+    if(useGlobal == GetUseGlobalModel(regionId))
+    {
+        return;
+        // we don't want to do anything if we're already
+        // set to this value
+    }
+    m_useGlobalModel[regionId.GetRegion()][regionId.GetLocus()] = useGlobal;
+    if(useGlobal == false)
+        // we'll be using a local model for this regionId,
+        // but we just had the global model.
+        // We need a model to use!!
+        // So, start with a copy of the global model
+        // (One might argue that we should re-use whatever
+        // we've got in the existing local data model, but
+        // that precludes the user from setting to the
+        // global and then tweaking just a little for the
+        // current regionId.)   ewalkup 8-19-2004
+    {
+        //We need to save the relative mu rate from the initial setup.
+        UIVarsSingleDataModel newmodel(GetGlobalModel(regionId.GetDataType()), regionId);
+        m_individualModels[regionId.GetRegion()][regionId.GetLocus()] = newmodel;
+    }
+}
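
The pair GetUseGlobalModel()/SetUseGlobalModel() above gives each region and locus a copy-on-write view of the per-datatype global model: every region starts out flagged as using the global model, and the first local edit, routed through getRegionModelToAlter(), copies the global settings into m_individualModels and flips the flag. A stripped-down sketch of that idea with hypothetical Model/Models names (not LAMARC classes):

    #include <iostream>
    #include <vector>

    struct Model { double ttRatio; Model() : ttRatio(2.0) {} };

    class Models
    {
        Model              m_global;
        std::vector<Model> m_individual;
        std::vector<bool>  m_useGlobal;
      public:
        Models(unsigned long n) : m_individual(n), m_useGlobal(n, true) {}

        const Model& Get(unsigned long i) const
        { return m_useGlobal[i] ? m_global : m_individual[i]; }

        // The first local edit copies the global model, as SetUseGlobalModel(false, ...) does.
        Model& GetToAlter(unsigned long i)
        {
            if (m_useGlobal[i]) { m_individual[i] = m_global; m_useGlobal[i] = false; }
            return m_individual[i];
        }
    };

    int main()
    {
        Models models(3);
        models.GetToAlter(1).ttRatio = 4.0;               // region 1 now owns its own copy
        std::cout << models.Get(0).ttRatio << ' '         // 2 (still the shared global default)
                  << models.Get(1).ttRatio << std::endl;  // 4
    }
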
+
+UIVarsDataModels::UIVarsDataModels(UIVars * myUIVars, long int nregions, long int maxLoci)
+    : UIVarsComponent(myUIVars),
+      m_globalModelNucleotide(UIRegId(dtype_DNA)),
+      m_globalModelMsat(UIRegId(dtype_msat)),
+      m_globalModelKAllele(UIRegId(dtype_kallele)),
+      m_useGlobalModel(nregions,deque<bool>(maxLoci,true)),
+      m_individualModels(nregions)
+{
+    //Requires datapackplus to be set up already in the uivars.
+    for(size_t index = 0; index < m_individualModels.size(); index++)
+    {
+        m_individualModels[index] = vector<UIVarsSingleDataModel>();
+    }
+
+    for(long int regionIndex = 0; regionIndex < nregions; regionIndex++)
+    {
+        long int numLoci = GetConstUIVars().datapackplus.GetNumLoci(regionIndex);
+        for(long int locusIndex = 0; locusIndex < numLoci; locusIndex++)
+        {
+            UIRegId regId(regionIndex, locusIndex, GetConstUIVars());
+            UIVarsSingleDataModel thisModel(regId);
+            m_individualModels[regionIndex].push_back(thisModel);
+        }
+    }
+}
+
+UIVarsDataModels::UIVarsDataModels(UIVars * myUIVars, const UIVarsDataModels& dataModels)
+    : UIVarsComponent(myUIVars),
+      m_globalModelNucleotide(dataModels.m_globalModelNucleotide),
+      m_globalModelMsat(dataModels.m_globalModelMsat),
+      m_globalModelKAllele(dataModels.m_globalModelKAllele),
+      m_useGlobalModel(dataModels.m_useGlobalModel)
+{
+    m_individualModels.clear();
+    for(size_t index = 0; index < dataModels.m_individualModels.size(); index++)
+    {
+        m_individualModels.push_back(vector<UIVarsSingleDataModel>());
+        for(size_t index2 = 0; index2 < dataModels.m_individualModels[index].size(); index2++)
+        {
+            m_individualModels[index].push_back(UIVarsSingleDataModel(dataModels.m_individualModels[index][index2]));
+        }
+    }
+}
+
+//We need to call Initialize because some of the information we need is in
+// uivars.datapackplus, which isn't set up until after the construction of
+// UIVars.  Since we're also constructed there, we have to wait.
+void UIVarsDataModels::Initialize()
+{
+}
+
+UIVarsDataModels::~UIVarsDataModels()
+{
+}
+
+const UIVarsSingleDataModel & UIVarsDataModels::GetRegionModel(UIRegId regionId) const
+{
+    checkRegionIdGlobalAllowed(regionId);
+    if (regionId.GetRegion() == uiconst::GLOBAL_ID)
+    {
+        return GetGlobalModel(regionId.GetDataType());
+    }
+    assert(regionId.GetLocus() != uiconst::GLOBAL_ID);
+    //LS NOTE:  If we want to store region-wide default data models instead of
+    // only global data models, we would get at them here and GetLocus() would
+    // indeed return GLOBAL_ID.  But this system is not yet in place.
+    if(GetUseGlobalModel(regionId))
+    {
+        return GetGlobalModel(regionId.GetDataType());
+    }
+    return m_individualModels[regionId.GetRegion()][regionId.GetLocus()];
+}
+
+const UIVarsSingleDataModel & UIVarsDataModels::GetGlobalModel(data_type datatype) const
+{
+    switch (datatype)
+    {
+        case dtype_DNA:
+        case dtype_SNP:
+            return m_globalModelNucleotide;
+        case dtype_msat:
+            return m_globalModelMsat;
+        case dtype_kallele:
+            return m_globalModelKAllele;
+    }
+    assert(false); // unhandled data type
+    return m_globalModelNucleotide;
+}
+
+data_type UIVarsDataModels::GetDataType(UIRegId regionId) const
+{
+    return regionId.GetDataType();
+}
+
+model_type  UIVarsDataModels::GetDataModelType(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetDataModelType();
+}
+
+bool        UIVarsDataModels::GetNormalization(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetNormalization();
+}
+
+double      UIVarsDataModels::GetAutoCorrelation(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetAutoCorrelation();
+}
+
+long int    UIVarsDataModels::GetNumCategories(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetNumCategories();
+}
+
+double UIVarsDataModels::GetCategoryRate(UIRegId regionId, long int categoryIndex) const
+{
+    return GetRegionModel(regionId).GetCategoryRate(categoryIndex);
+}
+
+DoubleVec1d UIVarsDataModels::GetCategoryRates(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetCategoryRates();
+}
+
+DoubleVec1d UIVarsDataModels::GetCategoryProbabilities(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetCategoryProbabilities();
+}
+
+double UIVarsDataModels::GetCategoryProbability(UIRegId regionId, long int categoryIndex) const
+{
+    return GetRegionModel(regionId).GetCategoryProbability(categoryIndex);
+}
+
+double      UIVarsDataModels::GetTTRatio(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetTTRatio();
+}
+
+bool UIVarsDataModels::GetCalcFreqsFromData(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetCalcFreqsFromData();
+}
+
+bool UIVarsDataModels::returnLocalCalcedFreqWithGlobalModel(UIRegId regionId) const
+{
+    // When a locus is assigned a global data model and that global
+    // data model says to use calculated frequencies for nucleotides,
+    // we need to get those calculated values on a per-locus basis.
+    // EWFIX.P5 LOCUS -- currently we're doing this per region
+    if(regionId.GetRegion() != uiconst::GLOBAL_ID)
+    {
+        if ( GetUseGlobalModel(regionId)         &&
+             GetGlobalModel(regionId.GetDataType()).GetCalcFreqsFromData() )
+        {
+            return true;
+        }
+    }
+    return false;
+}
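+
+// The four GetFrequencyA/C/G/T getters below share one pattern: when the
+// global model's calculated frequencies apply to this locus, they recompute
+// the frequencies from the data on every call (indices 0-3 correspond to
+// A, C, G and T) rather than caching them.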
+
+double      UIVarsDataModels::GetFrequencyA(UIRegId regionId) const
+{
+    if(returnLocalCalcedFreqWithGlobalModel(regionId))
+    {
+        // not terribly efficient, but maintaining logic to cache
+        // the values was very difficult
+        return FrequenciesFromData(regionId.GetRegion(),
+                                   regionId.GetLocus(),
+                                   GetDataModelType(regionId))[0];
+    }
+    else
+    {
+        return GetRegionModel(regionId).GetFrequencyA();
+    }
+}
+
+double      UIVarsDataModels::GetFrequencyC(UIRegId regionId) const
+{
+    if(returnLocalCalcedFreqWithGlobalModel(regionId))
+    {
+        // not terribly efficient, but maintaining logic to cache
+        // the values was very difficult
+        return FrequenciesFromData(regionId.GetRegion(),
+                                   regionId.GetLocus(),
+                                   GetDataModelType(regionId))[1];
+    }
+    else
+    {
+        return GetRegionModel(regionId).GetFrequencyC();
+    }
+}
+
+double      UIVarsDataModels::GetFrequencyG(UIRegId regionId) const
+{
+    if(returnLocalCalcedFreqWithGlobalModel(regionId))
+    {
+        // not terribly efficient, but maintaining logic to cache
+        // the values was very difficult
+        return FrequenciesFromData(regionId.GetRegion(),
+                                   regionId.GetLocus(),
+                                   GetDataModelType(regionId))[2];
+    }
+    else
+    {
+        return GetRegionModel(regionId).GetFrequencyG();
+    }
+}
+
+double      UIVarsDataModels::GetFrequencyT(UIRegId regionId) const
+{
+    if(returnLocalCalcedFreqWithGlobalModel(regionId))
+    {
+        // not terribly efficient, but maintaining logic to cache
+        // the values was very difficult
+        return FrequenciesFromData(regionId.GetRegion(),
+                                   regionId.GetLocus(),
+                                   GetDataModelType(regionId))[3];
+    }
+    else
+    {
+        return GetRegionModel(regionId).GetFrequencyT();
+    }
+}
+
+double      UIVarsDataModels::GetGTR_AC(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetGTR_AC();
+}
+
+double      UIVarsDataModels::GetGTR_AG(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetGTR_AG();
+}
+
+double      UIVarsDataModels::GetGTR_AT(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetGTR_AT();
+}
+
+double      UIVarsDataModels::GetGTR_CG(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetGTR_CG();
+}
+
+double      UIVarsDataModels::GetGTR_CT(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetGTR_CT();
+}
+
+double      UIVarsDataModels::GetGTR_GT(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetGTR_GT();
+}
+
+double UIVarsDataModels::GetAlpha(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetAlpha();
+}
+
+double UIVarsDataModels::GetRelativeMuRate(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetRelativeMuRate();
+}
+
+DoubleVec2d UIVarsDataModels::GetRelativeMuRates() const
+{
+    const UIVars& vars(GetConstUIVars());
+    long int reg, nregs(vars.datapackplus.GetNumRegions());
+    DoubleVec2d relmurates(nregs);
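+    // relmurates is indexed [region][locus]; a locus that still tracks the
+    // global model picks up that model's relative rate via GetRegionModel().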
+    for(reg = 0; reg < nregs; ++reg)
+    {
+        long int loc, nloci(vars.datapackplus.GetNumLoci(reg));
+        DoubleVec1d regionalmurates(nloci);
+        for(loc = 0; loc < nloci; ++loc)
+        {
+            UIRegId regionId(reg,loc,vars);
+            regionalmurates[loc] = GetRegionModel(regionId).GetRelativeMuRate();
+        }
+        relmurates[reg] = regionalmurates;
+    }
+    return relmurates;
+}
+
+bool UIVarsDataModels::GetOptimizeAlpha(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetOptimizeAlpha();
+}
+
+double UIVarsDataModels::GetPerBaseErrorRate(UIRegId regionId) const
+{
+    return GetRegionModel(regionId).GetPerBaseErrorRate();
+}
+
+StringVec1d
+UIVarsDataModels::GetDataModelReport(UIRegId regionId) const
+{
+    StringVec1d report;
+    string line;
+    // model type identification
+    //////////////////////////////////////////////////////
+    line =  reportstr::header + ToString(GetDataModelType(regionId));
+    report.push_back(line);
+    // add items common to all models
+    //////////////////////////////////////////////////////
+    if(GetNumCategories(regionId) > 1)
+    {
+        line =  ToString(GetNumCategories(regionId)) + reportstr::categoryHeader;
+        line += ToString(GetAutoCorrelation(regionId));
+        report.push_back(line);
+        DoubleVec1d catRates = GetCategoryRates(regionId);
+        DoubleVec1d catProbs = GetCategoryProbabilities(regionId);
+        for(long int cat = 0; cat < GetNumCategories(regionId); cat++)
+        {
+            line =    reportstr::categoryRate
+                + " "
+                + ToString(catRates[cat])
+                + " "
+                + reportstr::categoryRelativeProb
+                + " "
+                + ToString(catProbs[cat]);
+            report.push_back(line);
+        }
+    }
+    line = reportstr::relativeMutationRate + " " +
+        ToString(GetRelativeMuRate(regionId));
+    report.push_back(line);
+    // add items common to nuc models
+    //////////////////////////////////////////////////////
+    if(GetDataModelType(regionId) == F84 || GetDataModelType(regionId) == GTR)
+    {
+        line =  reportstr::baseFreqs;
+        line += ToString(GetFrequencyA(regionId)) + reportstr::baseFreqSeparator;
+        line += ToString(GetFrequencyC(regionId)) + reportstr::baseFreqSeparator;
+        line += ToString(GetFrequencyG(regionId)) + reportstr::baseFreqSeparator;
+        line += ToString(GetFrequencyT(regionId));
+        report.push_back(line);
+    }
+    // add items for F84 models
+    //////////////////////////////////////////////////////
+    if(GetDataModelType(regionId) == F84)
+    {
+        line = reportstr::TTratio + ToString(GetTTRatio(regionId));
+        report.push_back(line);
+        line = reportstr::perBaseErrorRate + ToString(GetPerBaseErrorRate(regionId));
+        report.push_back(line);
+    }
+    // add items for GTR models
+    //////////////////////////////////////////////////////
+    if(GetDataModelType(regionId) == GTR)
+    {
+        line = reportstr::GTRRates;
+        report.push_back(line);
+        line =  reportstr::GTRRatesFromA;
+        line += ToString(GetGTR_AC(regionId)) + reportstr::GTRRateSeparator;
+        line += ToString(GetGTR_AG(regionId)) + reportstr::GTRRateSeparator;
+        line += ToString(GetGTR_AT(regionId));
+        report.push_back(line);
+        line =  reportstr::GTRRatesFromC;
+        line += ToString(GetGTR_CG(regionId)) + reportstr::GTRRateSeparator;
+        line += ToString(GetGTR_CT(regionId));
+        report.push_back(line);
+        line =  reportstr::GTRRatesFromG;
+        line += ToString(GetGTR_GT(regionId));
+        report.push_back(line);
+        line = reportstr::perBaseErrorRate + ToString(GetPerBaseErrorRate(regionId));
+        report.push_back(line);
+    }
+    // add items for Stepwise models
+    //////////////////////////////////////////////////////
+    if(GetDataModelType(regionId) == Stepwise
+       || GetDataModelType(regionId) == KAllele
+       || GetDataModelType(regionId) == MixedKS)
+    {
+        line =  reportstr::numberOfBins + ToString(defaults::bins);
+        // erynes BUGBUG -- hard coded for some models, set for
+        // others? Need to make this available from
+        // UIVarsDataModels
+        report.push_back(line);
+    }
+    // add items for Brownian models
+    //////////////////////////////////////////////////////
+    if(GetDataModelType(regionId) == Brownian)
+    {
+        line =  reportstr::brownian;
+        report.push_back(line);
+    }
+    // add items for MixedKS models
+    //////////////////////////////////////////////////////
+    if(GetDataModelType(regionId) == MixedKS)
+    {
+        line =  reportstr::alpha + ToString(GetAlpha(regionId));
+        report.push_back(line);
+        if(GetOptimizeAlpha(regionId))
+        {
+            line = reportstr::optimizeAlpha;
+            report.push_back(line);
+        }
+    }
+    return report;
+}
+
+ModelTypeVec1d
+UIVarsDataModels::GetLegalDataModels(UIRegId regionId) const
+{
+    ModelTypeVec1d legalModels = defaults::allDataModels();
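+    // Standard erase-remove idiom: remove_if shifts the models that are legal
+    // for this data type to the front, and erase() drops the leftover tail.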
+    legalModels.erase(
+        std::remove_if(legalModels.begin(),legalModels.end(),
+                       ModelIsIllegalForDataType(regionId.GetDataType())),
+        legalModels.end());
+    return legalModels;
+}
+
+void UIVarsDataModels::SetDataModelType(model_type x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetDataModelType(x);
+}
+
+void UIVarsDataModels::SetNormalization(bool x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetNormalization(x);
+}
+
+void UIVarsDataModels::SetAutoCorrelation(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetAutoCorrelation(x);
+}
+
+void UIVarsDataModels::SetNumCategories(long int numCats, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetNumCategories(numCats);
+}
+
+void UIVarsDataModels::SetCategoryRate(double x, UIRegId regionId, long int categoryIndex)
+{
+    getRegionModelToAlter(regionId).SetCategoryRate(x,categoryIndex);
+}
+
+void UIVarsDataModels::SetCategoryProbability(double x, UIRegId regionId, long int categoryIndex)
+{
+    getRegionModelToAlter(regionId).SetCategoryProbability(x,categoryIndex);
+}
+
+void UIVarsDataModels::SetTTRatio(double x, UIRegId regionId)
+{
+    if (x == 0.5)
+    {
+        x = 0.500001;
+        GetConstUIVars().GetUI()->AddWarning("Using 0.500001 for the TT Ratio instead of 0.5, since "
+                                             "LAMARC's algorithm divides by zero if the TT Ratio is exactly 0.5.");
+    }
+    getRegionModelToAlter(regionId).SetTTRatio(x);
+}
+
+void UIVarsDataModels::SetCalcFreqsFromData(bool x, UIRegId regionId)
+{
+    if (x==true && GetDataModelType(regionId) == GTR)
+    {
+        GetConstUIVars().GetUI()->AddWarning("Warning:  You may not calculate base frequencies from the data in the GTR model -- "
+                                             "the base frequencies should arise from the same program that gave you the GTR rates.");
+        return;
+    };
+    getRegionModelToAlter(regionId).SetCalcFreqsFromData(x);
+}
+
+double UIVarsDataModels::adjustZeroFrequency(double freq)
+{
+    if (freq == 0)
+    {
+        GetConstUIVars().GetUI()->AddWarning("Base frequencies must be greater than zero--setting this frequency to 0.00001");
+        return defaults::minLegalFrequency;
+    }
+    return freq;
+}
+
+void UIVarsDataModels::SetFrequencyA(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetFrequencyA(adjustZeroFrequency(x));
+}
+
+void UIVarsDataModels::SetFrequencyC(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetFrequencyC(adjustZeroFrequency(x));
+}
+
+void UIVarsDataModels::SetFrequencyG(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetFrequencyG(adjustZeroFrequency(x));
+}
+
+void UIVarsDataModels::SetFrequencyT(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetFrequencyT(adjustZeroFrequency(x));
+}
+
+void UIVarsDataModels::SetGTR_AC(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetGTR_AC(x);
+}
+
+void UIVarsDataModels::SetGTR_AG(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetGTR_AG(x);
+}
+
+void UIVarsDataModels::SetGTR_AT(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetGTR_AT(x);
+}
+
+void UIVarsDataModels::SetGTR_CG(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetGTR_CG(x);
+}
+
+void UIVarsDataModels::SetGTR_CT(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetGTR_CT(x);
+}
+
+void UIVarsDataModels::SetGTR_GT(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetGTR_GT(x);
+}
+
+void UIVarsDataModels::SetAlpha(double x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetAlpha(x);
+}
+
+void UIVarsDataModels::SetRelativeMuRate(double x, UIRegId regionId)
+{
+    if (x <= 0)
+    {
+        throw data_error("The relative mutation rate must be greater than zero.");
+    }
+    getRegionModelToAlter(regionId).SetRelativeMuRate(x);
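+    // a changed relative mutation rate can affect calculated start values,
+    // so refresh them right away (SetEffectivePopSize does the same)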
+    GetUIVars().forces.FillCalculatedStartValues();
+}
+
+void UIVarsDataModels::SetOptimizeAlpha(bool x, UIRegId regionId)
+{
+    getRegionModelToAlter(regionId).SetOptimizeAlpha(x);
+}
+
+void UIVarsDataModels::SetPerBaseErrorRate(double x, UIRegId regionId)
+{
+    if (x < 0)
+    {
+        throw data_error("The per base error rate must be non-negative");
+    }
+    if (x > 0.1)                        // EWFIX -- what is the limit
+    {
+        throw data_error("The per base error rate must less than or equal to 0.1");
+    }
+    getRegionModelToAlter(regionId).SetPerBaseErrorRate(x);
+}
+
+void UIVarsDataModels::SetAllRegionsToGlobalModel()
+{
+    vector < deque<bool> >::iterator i;
+    for(i = m_useGlobalModel.begin(); i != m_useGlobalModel.end(); i++)
+    {
+        deque<bool>::iterator j;
+        for(j = i->begin(); j != i->end(); j++)
+        {
+            (*j) = true;
+        }
+    }
+}
+
+UIVarsDataModels::ModelIsIllegalForDataType::ModelIsIllegalForDataType(
+    data_type dtype)
+    :
+    m_dtype(dtype)
+{
+}
+
+UIVarsDataModels::ModelIsIllegalForDataType::~ModelIsIllegalForDataType()
+{
+}
+
+bool
+UIVarsDataModels::ModelIsIllegalForDataType::operator()(model_type mtype) const
+{
+    return !(ModelTypeAcceptsDataType(mtype,m_dtype));
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/ui_vars_datamodel.h b/src/ui_vars/ui_vars_datamodel.h
new file mode 100644
index 0000000..8a74085
--- /dev/null
+++ b/src/ui_vars/ui_vars_datamodel.h
@@ -0,0 +1,230 @@
+// $Id: ui_vars_datamodel.h,v 1.24 2011/03/07 06:08:53 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_VARS_DATAMODEL_H
+#define UI_VARS_DATAMODEL_H
+
+#include <deque>
+#include <vector>
+
+#include "constants.h"          // for model_type
+#include "types.h"
+#include "vectorx.h"            // for DoubleVec1d
+#include "ui_vars_component.h"
+#include "datatype.h"           // for data_type
+
+class UIRegId;
+
+using std::deque;
+using std::vector;
+
+class UIVarsDataModels;
+
+class UIVarsSingleDataModel
+{
+    friend class UIVarsDataModels;
+
+  private:
+    UIVarsSingleDataModel();    // undefined
+    long            m_regionIndex;
+    long            m_locusIndex;
+    data_type       m_datatype;
+    model_type      m_modelType;
+    bool            doNormalize;
+    double          autoCorrelation;
+    DoubleVec1d     categoryRates;
+    DoubleVec1d     categoryProbabilities;
+    double          ttRatio;
+    bool            doCalcFreqsFromData;
+    double          baseFrequency_A;
+    double          baseFrequency_C;
+    double          baseFrequency_G;
+    double          baseFrequency_T;
+    double          gtr_AC;
+    double          gtr_AG;
+    double          gtr_AT;
+    double          gtr_CG;
+    double          gtr_CT;
+    double          gtr_GT;
+    double          alpha;
+    double          m_relmurate;
+    bool            optimizeAlpha;
+    double          m_perBaseErrorRate;
+    double MakeNextCategoryProbability(DoubleVec1d probabilities);
+    double MakeNextCategoryRate(DoubleVec1d rates);
+
+    //protected:
+    void CalculateFrequenciesFromData();
+
+    //public:
+    UIVarsSingleDataModel(UIRegId regionId);
+    UIVarsSingleDataModel(const UIVarsSingleDataModel& clone, UIRegId regionId);
+
+    /////////////////////////////////////////////////////////////
+    virtual data_type   GetDataType()           const {return m_datatype;};
+    virtual model_type  GetDataModelType()      const {return m_modelType;};
+    virtual bool        GetNormalization()      const {return doNormalize;};
+    virtual double      GetAutoCorrelation()    const {return autoCorrelation;};
+    virtual long        GetNumCategories()      const;
+    virtual double      GetCategoryRate(long categoryIndex) const;
+    virtual DoubleVec1d GetCategoryRates()      const {return categoryRates;};
+    virtual DoubleVec1d GetCategoryProbabilities()  const {return categoryProbabilities;};
+    virtual double      GetCategoryProbability(long categoryIndex) const;
+    virtual double      GetTTRatio()            const {return ttRatio;};
+    virtual bool        GetCalcFreqsFromData()  const;
+    virtual double      GetFrequencyA()         const;
+    virtual double      GetFrequencyC()         const;
+    virtual double      GetFrequencyG()         const;
+    virtual double      GetFrequencyT()         const;
+    virtual double      GetGTR_AC()             const {return gtr_AC;};
+    virtual double      GetGTR_AG()             const {return gtr_AG;};
+    virtual double      GetGTR_AT()             const {return gtr_AT;};
+    virtual double      GetGTR_CG()             const {return gtr_CG;};
+    virtual double      GetGTR_CT()             const {return gtr_CT;};
+    virtual double      GetGTR_GT()             const {return gtr_GT;};
+    virtual double      GetAlpha()              const {return alpha;};
+    virtual double      GetRelativeMuRate()     const {return m_relmurate;};
+    virtual bool        GetOptimizeAlpha()      const {return optimizeAlpha;};
+    virtual double      GetPerBaseErrorRate()   const {return m_perBaseErrorRate;};
+
+    /////////////////////////////////////////////////////////////
+    virtual void SetDataModelType(model_type x);
+    virtual void SetNormalization(bool x)       {doNormalize = x;};
+    virtual void SetAutoCorrelation(double x);
+    virtual void SetNumCategories(long x);
+    virtual void SetCategoryRate(double x, long index);
+    virtual void SetCategoryProbability(double x, long index);
+    virtual void SetTTRatio(double x);
+    virtual void SetCalcFreqsFromData(bool x);
+    virtual void SetFrequencyA(double x);
+    virtual void SetFrequencyC(double x);
+    virtual void SetFrequencyG(double x);
+    virtual void SetFrequencyT(double x);
+    virtual void SetGTR_AT(double x);
+    virtual void SetGTR_AC(double x);
+    virtual void SetGTR_AG(double x);
+    virtual void SetGTR_CG(double x);
+    virtual void SetGTR_CT(double x);
+    virtual void SetGTR_GT(double x);
+    virtual void SetRelativeMuRate(double x);
+    virtual void SetAlpha(double x);
+    virtual void SetOptimizeAlpha(bool x)       {optimizeAlpha = x;};
+    virtual void SetPerBaseErrorRate(double x)  {m_perBaseErrorRate = x;};
+
+    virtual bool IdenticalCategoryRates() const;
+
+  public:
+    virtual ~UIVarsSingleDataModel();
+
+};
+
+class UIVarsDataModels : public UIVarsComponent
+{
+  private:
+    UIVarsSingleDataModel           m_globalModelNucleotide;
+    UIVarsSingleDataModel           m_globalModelMsat;
+    UIVarsSingleDataModel           m_globalModelKAllele;
+    vector< deque<bool> >                     m_useGlobalModel;
+    vector< vector<UIVarsSingleDataModel> >   m_individualModels;
+    UIVarsDataModels();              // undefined
+    UIVarsDataModels(const UIVarsDataModels&);              // undefined
+
+    class ModelIsIllegalForDataType : public std::unary_function<model_type,bool>
+    {
+      private:
+        data_type m_dtype;
+      public:
+        ModelIsIllegalForDataType(data_type dtype);
+        ~ModelIsIllegalForDataType();
+        bool operator()(model_type mtype) const;
+    };
+
+  protected:
+    virtual void checkRegionIdGlobalAllowed(UIRegId regionId) const;
+    virtual void checkRegionIdGlobalNotAllowed(UIRegId regionId) const;
+    virtual UIVarsSingleDataModel & getRegionModelToAlter(UIRegId regionId);
+    virtual bool returnLocalCalcedFreqWithGlobalModel(UIRegId regionId) const;
+    virtual double adjustZeroFrequency(double mightBeZero);
+
+  public:
+    // one might argue that the constructors should have
+    // restricted access since only UIVars should
+    // be creating these puppies.
+    UIVarsDataModels(UIVars*,long nregions,long maxLoci);
+    UIVarsDataModels(UIVars*,const UIVarsDataModels&);
+    virtual void Initialize();
+    virtual ~UIVarsDataModels();
+
+    virtual bool GetUseGlobalModel(UIRegId regionId) const;
+    virtual const UIVarsSingleDataModel & GetRegionModel(UIRegId regionId) const;
+    virtual const UIVarsSingleDataModel & GetGlobalModel(data_type) const;
+
+    virtual data_type   GetDataType(UIRegId regionId) const;
+    virtual model_type  GetDataModelType(UIRegId regionId) const;
+    virtual bool        GetNormalization(UIRegId regionId) const;
+    virtual double      GetAutoCorrelation(UIRegId regionId) const;
+    virtual long        GetNumCategories(UIRegId regionId) const;
+    virtual double      GetCategoryRate(UIRegId regionId, long categoryIndex) const;
+    virtual DoubleVec1d GetCategoryRates(UIRegId regionId) const;
+    virtual DoubleVec1d GetCategoryProbabilities(UIRegId regionId) const;
+    virtual double      GetCategoryProbability(UIRegId regionId, long categoryIndex) const;
+    virtual double      GetTTRatio(UIRegId regionId) const;
+    virtual bool        GetCalcFreqsFromData(UIRegId regionId) const;
+    virtual double      GetFrequencyA(UIRegId regionId) const;
+    virtual double      GetFrequencyC(UIRegId regionId) const;
+    virtual double      GetFrequencyG(UIRegId regionId) const;
+    virtual double      GetFrequencyT(UIRegId regionId) const;
+    virtual double      GetGTR_AC(UIRegId regionId) const;
+    virtual double      GetGTR_AG(UIRegId regionId) const;
+    virtual double      GetGTR_AT(UIRegId regionId) const;
+    virtual double      GetGTR_CG(UIRegId regionId) const;
+    virtual double      GetGTR_CT(UIRegId regionId) const;
+    virtual double      GetGTR_GT(UIRegId regionId) const;
+    virtual double      GetAlpha(UIRegId regionId) const;
+    virtual double      GetRelativeMuRate(UIRegId regionId) const;
+    virtual DoubleVec2d GetRelativeMuRates() const;
+    virtual bool        GetOptimizeAlpha(UIRegId regionId) const;
+    virtual double      GetPerBaseErrorRate(UIRegId regionId) const;
+    virtual StringVec1d GetDataModelReport(UIRegId regionId) const;
+    virtual ModelTypeVec1d  GetLegalDataModels(UIRegId regionId) const;
+
+    virtual bool IdenticalCategoryRates(UIRegId regionId) const;
+
+    virtual void SetUseGlobalModel(bool x, UIRegId regionId);
+    virtual void SetDataModelType(model_type x, UIRegId regionId);
+    virtual void SetNormalization(bool x, UIRegId regionId);
+    virtual void SetAutoCorrelation(double x, UIRegId regionId);
+    virtual void SetNumCategories(long x, UIRegId regionId);
+    virtual void SetCategoryRate(double x, UIRegId regionId, long categoryIndex);
+    virtual void SetCategoryProbability(double x, UIRegId regionId, long categoryIndex);
+    virtual void SetTTRatio(double x, UIRegId regionId);
+    virtual void SetCalcFreqsFromData(bool x, UIRegId regionId);
+    virtual void SetFrequencyA(double x, UIRegId regionId);
+    virtual void SetFrequencyC(double x, UIRegId regionId);
+    virtual void SetFrequencyG(double x, UIRegId regionId);
+    virtual void SetFrequencyT(double x, UIRegId regionId);
+    virtual void SetGTR_AT(double x, UIRegId regionId);
+    virtual void SetGTR_AC(double x, UIRegId regionId);
+    virtual void SetGTR_AG(double x, UIRegId regionId);
+    virtual void SetGTR_CG(double x, UIRegId regionId);
+    virtual void SetGTR_CT(double x, UIRegId regionId);
+    virtual void SetGTR_GT(double x, UIRegId regionId);
+    virtual void SetRelativeMuRate(double x, UIRegId regionId);
+    virtual void SetAlpha(double x, UIRegId regionId);
+    virtual void SetOptimizeAlpha(bool x, UIRegId regionId);
+    virtual void SetPerBaseErrorRate(double x, UIRegId regionId);
+
+    virtual void SetAllRegionsToGlobalModel();
+};
+
+#endif  // UI_VARS_DATAMODEL_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/ui_vars_datapackplus.cpp b/src/ui_vars/ui_vars_datapackplus.cpp
new file mode 100644
index 0000000..578177e
--- /dev/null
+++ b/src/ui_vars/ui_vars_datapackplus.cpp
@@ -0,0 +1,629 @@
+// $Id: ui_vars_datapackplus.cpp,v 1.36 2013/11/08 21:46:22 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <set>
+
+#include "datapack.h"
+#include "dlmodel.h"
+#include "errhandling.h"
+#include "region.h"
+#include "stringx.h"
+#include "ui_vars.h"
+#include "ui_vars_datapackplus.h"
+#include "ui_strings.h"
+
+//------------------------------------------------------------------------------------
+
+UIVarsDataPackPlus::UIVarsDataPackPlus(UIVars * myUIVars, DataPack& dp)
+    : UIVarsComponent(myUIVars),
+      datapack(dp),
+      m_effectivePopSizes(),
+      m_simulateData()
+{
+    for (long regnum=0; regnum<GetNumRegions(); regnum++)
+    {
+        m_effectivePopSizes.push_back(datapack.GetRegion(regnum).GetEffectivePopSize());
+        deque<bool> nosims(GetNumLoci(regnum), false);
+        m_simulateData.push_back(nosims);
+    }
+}
+
+UIVarsDataPackPlus::UIVarsDataPackPlus(UIVars * myUIVars, const UIVarsDataPackPlus& dpp)
+    : UIVarsComponent(myUIVars),
+      datapack(dpp.datapack),
+      m_effectivePopSizes(dpp.m_effectivePopSizes),
+      m_simulateData(dpp.m_simulateData)
+{
+}
+
+UIVarsDataPackPlus::~UIVarsDataPackPlus()
+{
+}
+
+long
+UIVarsDataPackPlus::GetMaxLoci() const
+{
+    long maxL = 0;
+    long numRegions = GetNumRegions();
+    for(long i=0; i < numRegions; i++)
+    {
+        long lociCount = GetNumLoci(i);
+        if(lociCount > maxL)
+        {
+            maxL = lociCount;
+        }
+    }
+
+    return maxL;
+}
+
+long
+UIVarsDataPackPlus::GetNumRegions() const
+{
+    return datapack.GetNRegions();
+}
+
+long
+UIVarsDataPackPlus::GetNumLoci(long regIndex) const
+{
+    return datapack.GetRegion(regIndex).GetNloci();
+}
+
+long
+UIVarsDataPackPlus::GetNCrossPartitions() const
+{
+    return datapack.GetNCrossPartitions();
+}
+
+long
+UIVarsDataPackPlus::GetNPartitionsByForceType(force_type ft) const
+{
+    return datapack.GetNPartitionsByForceType(ft);
+}
+
+double UIVarsDataPackPlus::GetEffectivePopSize(long region) const
+{
+    return m_effectivePopSizes[region];
+}
+
+long UIVarsDataPackPlus::GetNumIndividualsWithMultipleHaplotypes(long region, string lname) const
+{
+    long nMultiInd=0;
+    const IndVec individuals = datapack.GetRegion(region).GetIndividuals();
+    for (unsigned long ind=0; ind<individuals.size(); ind++)
+    {
+        if (individuals[ind].GetHaplotypesFor(lname, 0).MultipleHaplotypes())
+        {
+            nMultiInd++;
+        }
+    }
+    return nMultiInd;
+}
+
+bool UIVarsDataPackPlus::GetSimulateData(long region, long locus) const
+{
+    if (region < 0 || locus < 0)
+    {
+        assert(false); //Don't ask about this! (probably FLAGLONG or the like)
+        return false;
+    }
+    return m_simulateData[region][locus];
+}
+
+// LS NOTE:  This should move into the converter eventually.  Probably.
+void UIVarsDataPackPlus::SetEffectivePopSize(long region, double size)
+{
+    if (size <= 0)
+    {
+        throw data_error("The effective population size must be positive.");
+    }
+    assert(region < GetNumRegions());
+    if (region >= static_cast<long>(m_effectivePopSizes.size()))
+    {
+        throw implementation_error("Unable to set this region's size--vector not long enough.");
+    }
+    m_effectivePopSizes[region] = size;
+
+    // may need to update start values at this point
+    GetUIVars().forces.FillCalculatedStartValues();
+}
+
+void UIVarsDataPackPlus::SetSimulateData(long region, long locus, bool sim)
+{
+    m_simulateData[region][locus] = sim;
+}
+
+void UIVarsDataPackPlus::RevertAllMovingLoci()
+{
+    for (long reg=0; reg<GetNumRegions(); reg++)
+    {
+        datapack.GetRegion(reg).RevertMovingLoci();
+    }
+}
+
+bool
+UIVarsDataPackPlus::CanHapArrange() const
+{
+    return datapack.CanHapArrange();
+}
+
+bool
+UIVarsDataPackPlus::HasSomeNucleotideData() const
+{
+    for (long reg=0; reg<GetNumRegions(); reg++)
+    {
+        for (long loc=0; loc<GetNumLoci(reg); loc++)
+        {
+            if ((GetDataType(reg, loc) == dtype_DNA) || (GetDataType(reg, loc) == dtype_SNP))
+            {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+bool
+UIVarsDataPackPlus::HasSomeMsatData() const
+{
+    for (long reg=0; reg<GetNumRegions(); reg++)
+    {
+        for (long loc=0; loc<GetNumLoci(reg); loc++)
+        {
+            if (GetDataType(reg, loc) == dtype_msat)
+            {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+bool
+UIVarsDataPackPlus::HasSomeKAlleleData() const
+{
+    for (long reg=0; reg<GetNumRegions(); reg++)
+    {
+        for (long loc=0; loc<GetNumLoci(reg); loc++)
+        {
+            if (GetDataType(reg, loc) == dtype_kallele)
+            {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+string
+UIVarsDataPackPlus::GetCrossPartitionName(long index) const
+{
+    return GetCrossPartitionNames()[index];
+}
+
+StringVec1d
+UIVarsDataPackPlus::GetCrossPartitionNames() const
+{
+    return datapack.GetAllCrossPartitionNames();
+}
+
+string
+UIVarsDataPackPlus::GetForcePartitionName(force_type ft, long index) const
+{
+    StringVec1d names = GetForcePartitionNames(ft);
+    return names[index];
+}
+
+StringVec1d
+UIVarsDataPackPlus::GetForcePartitionNames(force_type ft) const
+{
+    return datapack.GetAllPartitionNames(ft);
+}
+
+string
+UIVarsDataPackPlus::GetLocusName(long regionIndex, long locusIndex) const
+{
+    string name = datapack.GetRegion(regionIndex).GetLocus(locusIndex).GetName();
+    string locID;
+    //LS DEBUG MAPPING:  When IsMovable is no longer sufficient to tell if
+    // something is a trait or not, (i.e. if it's being used to partition data)
+    // this will need to be changed.
+    if (datapack.GetRegion(regionIndex).GetLocus(locusIndex).IsMovable())
+    {
+        locID = ", trait ";
+    }
+    else
+    {
+        if (datapack.GetRegion(regionIndex).GetNloci() == 1)
+        {
+            //We assume that if there is only one locus in a region, the thing
+            // that has better name recognition is the region, not the locus.
+            locID = ": ";
+            name = datapack.GetRegion(regionIndex).GetRegionName();
+        }
+        else
+        {
+            locID = ", segment " + indexToKey(locusIndex) + " : ";
+        }
+    }
+
+    string fullname = "Region "+indexToKey(regionIndex) + locID + name;
+    return fullname;
+}
+
+long
+UIVarsDataPackPlus::GetLocusIndex(long regionIndex, string locusName) const
+{
+    return datapack.GetRegion(regionIndex).GetLocusIndex(locusName);
+}
+
+bool
+UIVarsDataPackPlus::HasLocus(long regionIndex, string locusName) const
+{
+    return datapack.GetRegion(regionIndex).HasLocus(locusName);
+}
+
+string
+UIVarsDataPackPlus::GetRegionName(long index) const
+{
+    string name = datapack.GetRegion(index).GetRegionName();
+    string fullname = "Region "+indexToKey(index)+" : "+name;
+    return fullname;
+}
+
+string
+UIVarsDataPackPlus::GetSimpleRegionName(long index) const
+{
+    return datapack.GetRegion(index).GetRegionName();
+}
+
+string
+UIVarsDataPackPlus::GetParamName(force_type ftype, long index,bool doLongName) const
+{
+    string pname = "";
+    long fromIndex;
+    long toIndex;
+    long nstates;
+    switch(ftype)
+    {
+        case force_COAL:
+            if(doLongName)
+            {
+                pname += uistr::theta + " for ";
+                pname += GetCrossPartitionName(index);
+            }
+            else
+            {
+                pname += "Theta";
+                pname += indexToKey(index);
+            }
+            break;
+        case force_DIVERGENCE:
+            if(doLongName)
+            {
+                pname += uistr::divergenceEpochBoundaryTime + " for ";
+                pname += ToString(index);
+            }
+            else
+            {
+                pname += "Epoch " + ToString(index);
+            }
+            break;
+        case force_DISEASE:
+            nstates = GetNPartitionsByForceType(ftype);
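+            // the flat parameter index packs a (from, to) pair over the
+            // nstates x nstates matrix: to = index / nstates,
+            // from = index % nstates (same decomposition as migration below)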
+            toIndex = index / nstates;
+            fromIndex = index % nstates;
+            if(doLongName)
+            {
+                pname += uistr::muRate + " from ";
+                pname += GetForcePartitionName(ftype,fromIndex);
+                pname += " to ";
+                pname += GetForcePartitionName(ftype,toIndex);
+            }
+            else
+            {
+                pname += "D";
+                pname += indexToKey(fromIndex);
+                pname += indexToKey(toIndex);
+            }
+            break;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            if(doLongName)
+            {
+                pname += uistr::growthRate + " for ";
+                pname += GetCrossPartitionName(index);
+            }
+            else
+            {
+                pname += "Growth";
+                pname += indexToKey(index);
+            }
+            break;
+        case force_MIG:
+        case force_DIVMIG:
+            nstates = GetNPartitionsByForceType(ftype);
+            toIndex = index / nstates;
+            fromIndex = index % nstates;
+            if(doLongName)
+            {
+                pname += uistr::migrationByID;
+                pname += GetForcePartitionName(ftype,toIndex);
+                pname += uistr::migrationByID2;
+                pname += GetForcePartitionName(ftype,fromIndex);
+            }
+            else
+            {
+                pname += "M";
+                pname += indexToKey(fromIndex);
+                pname += indexToKey(toIndex);
+            }
+            break;
+        case force_REC:
+            if(doLongName)
+            {
+                pname += uistr::recRate;
+            }
+            else
+            {
+                pname += "Rec";
+            }
+            break;
+        case force_REGION_GAMMA:
+            if(doLongName)
+            {
+                pname += uistr::regGammaShape;
+            }
+            else
+            {
+                pname += "ShapeParam";
+            }
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            if(doLongName)
+            {
+                pname += uistr::logisticSelectionCoefficient;
+            }
+            else
+            {
+                pname += "Sel. Coeff.";
+            }
+            break;
+        case force_NONE:
+            assert(false);
+            pname += "No force";
+            break;
+    }
+    return pname;
+}
+
+string
+UIVarsDataPackPlus::GetParamNameOfForce(force_type ftype) const
+{
+    switch(ftype)
+    {
+        case force_COAL:
+            return "Theta";
+            break;
+        case force_DISEASE:
+            return "Disease Mutation Rate";
+            break;
+        case force_GROW:
+            return "Growth Rate";
+            break;
+        case force_DIVERGENCE:
+            return "Epoch Boundary Time";
+            break;
+        case force_DIVMIG:
+        case force_MIG:
+            return "Migration Rate";
+            break;
+        case force_REC:
+            return "Recombination Rate";
+            break;
+        case force_REGION_GAMMA:
+            return "Gamma Over Regions";
+            break;
+        case force_EXPGROWSTICK:
+            return "Exponential Growth via stick";
+            break;
+        case force_LOGISTICSELECTION:
+            return "Logistic Selection Coefficient";
+            break;
+        case force_LOGSELECTSTICK:
+            return "Logistic Selection via stick";
+            break;
+        case force_NONE:
+            assert(false);
+            throw implementation_error("UIVarsDataPackPlus::GetParamNameOfForce for force_NONE!");
+            break;
+    };
+    assert(false);
+    throw implementation_error("UIVarsDataPackPlus::GetParamNameOfForce missing switch case");
+}
+
+data_type
+UIVarsDataPackPlus::GetDataType(long regionId, long locusId) const
+{
+    const Region & thisRegion = datapack.GetRegion(regionId);
+    const Locus & thisLocus = thisRegion.GetLocus(locusId);
+    return thisLocus.GetDataType();
+}
+
+bool UIVarsDataPackPlus::IsMovable(long regionId, long locusId) const
+{
+    const Region & thisRegion = datapack.GetRegion(regionId);
+    const Locus & thisLocus = thisRegion.GetLocus(locusId);
+    return thisLocus.IsMovable();
+}
+
+string UIVarsDataPackPlus::GetName(long regionId, long locusId) const
+{
+    const Region & thisRegion = datapack.GetRegion(regionId);
+    const Locus & thisLocus = thisRegion.GetLocus(locusId);
+    return thisLocus.GetName();
+}
+
+rangeset UIVarsDataPackPlus::GetRange(long regionId, long locusId) const
+{
+    const Region & thisRegion = datapack.GetRegion(regionId);
+    const Locus & thisLocus = thisRegion.GetLocus(locusId);
+    return thisLocus.GetAllowedRange();
+}
+
+rangepair UIVarsDataPackPlus::GetRegionSiteSpan(long regionId) const
+{
+    return datapack.GetRegion(regionId).GetSiteSpan();
+}
+
+long UIVarsDataPackPlus::GetNumSites(long regionId) const
+{
+    const Region & thisRegion = datapack.GetRegion(regionId);
+    return thisRegion.GetNumSites();
+}
+
+long UIVarsDataPackPlus::GetNumSites(long regionId, long locusId) const
+{
+    const Region & thisRegion = datapack.GetRegion(regionId);
+    const Locus & thisLocus = thisRegion.GetLocus(locusId);
+    return thisLocus.GetNsites();
+}
+
+StringVec2d UIVarsDataPackPlus::GetUniqueAlleles(long regionId,
+                                                 long locusId) const
+{
+    StringVec2d retVec;
+    const Region& thisRegion = datapack.GetRegion(regionId);
+    const Locus& thisLocus = thisRegion.GetLocus(locusId);
+    vector<TipData> tips = thisLocus.GetTipData();
+    const IndVec individuals = thisRegion.GetIndividuals();
+    for (long marker = 0; marker < thisLocus.GetNmarkers(); marker++)
+    {
+        std::set<string> uniqueAlleles;
+        //First, get any alleles from the unknown haplotypes
+        for (unsigned long ind=0; ind<individuals.size(); ind++)
+        {
+            StringVec1d alleles = individuals[ind].GetAllelesFor(thisLocus.GetName(), marker);
+            for (unsigned long nallele=0; nallele<alleles.size(); nallele++)
+            {
+                if (alleles[nallele] != "?")
+                {
+                    uniqueAlleles.insert(alleles[nallele]);
+                }
+            }
+        }
+        //Now get any alleles from the tips
+        for (unsigned long tip = 0; tip < tips.size(); ++tip)
+        {
+            if (!tips[tip].m_nodata)
+            {
+                string allele = tips[tip].data[marker];
+                if (allele != "?")
+                {
+                    uniqueAlleles.insert(allele);
+                }
+            }
+        }
+        //Convert the set to a vector (for simplicity for our clients)
+        StringVec1d alleleStrings;
+        for (std::set<string>::iterator unique=uniqueAlleles.begin(); unique != uniqueAlleles.end(); unique++)
+        {
+            alleleStrings.push_back(*unique);
+        }
+        retVec.push_back(alleleStrings);
+    }
+    return retVec;
+
+}
+
+std::set<long> UIVarsDataPackPlus::GetPloidies(long region) const
+{
+    return datapack.GetRegion(region).GetPloidies();
+}
+
+const Locus* UIVarsDataPackPlus::GetConstLocusPointer(long region, long locus) const
+{
+    return &datapack.GetRegion(region).GetLocus(locus);
+}
+
+bool UIVarsDataPackPlus::AnySimulation() const
+{
+    for (unsigned long region=0; region<m_simulateData.size(); region++)
+    {
+        for (unsigned long locus=0; locus<m_simulateData[region].size(); locus++)
+        {
+            if (m_simulateData[region][locus]) return true;
+        }
+    }
+    return false;
+}
+
+bool UIVarsDataPackPlus::AnyRelativeHaplotypes() const
+{
+    for (long region=0; region<GetNumRegions(); region++)
+    {
+        const Region& reg = datapack.GetRegion(region);
+        for (long ind=0; ind<reg.GetNIndividuals(); ind++)
+        {
+            const Individual& indiv = reg.GetIndividual(ind);
+            if (indiv.MultipleTraitHaplotypes()) return true;
+        }
+    }
+    return false;
+}
+
+bool UIVarsDataPackPlus::AnySNPDataWithDefaultLocations() const
+{
+    for (long region=0; region<GetNumRegions(); region++)
+    {
+        if (datapack.GetRegion(region).AnySNPDataWithDefaultLocations())
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+string UIVarsDataPackPlus::GetAncestorName(long index) const
+{
+    //const Region & thisRegion = datapack.GetRegion(regionId);
+    //const Locus & thisLocus = thisRegion.GetLocus(locusId);
+    //uiInterface.GetCurrentVars().forces.AddAncestor(ancname);
+
+    //return thisLocus.GetName();
+    //string pname = datapack.forces.UIVarsDivergenceForceGetParent(index).GetName();
+    //datapack.GetRegion(index).GetRegionName();
+    string pname = uistr::divergenceEpochAncestor;
+    pname += ToString(index + 1);
+
+    return pname;
+}
+
+string UIVarsDataPackPlus::GetDescendentNames(long index) const
+{
+    //const Region & thisRegion = datapack.GetRegion(regionId);
+    //const Locus & thisLocus = thisRegion.GetLocus(locusId);
+    //uiInterface.GetCurrentVars().forces.AddAncestor(ancname);
+
+    //return thisLocus.GetName();
+    //string pname = datapack.forces.GetAncestor(index);
+    string pname = uistr::divergenceEpochDescendent;
+    pname += ToString(index + 1);
+    pname += "-1 ";
+    pname += uistr::divergenceEpochDescendent;
+    pname += ToString(index + 1);
+    pname += "-2";
+
+    return pname;
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/ui_vars_datapackplus.h b/src/ui_vars/ui_vars_datapackplus.h
new file mode 100644
index 0000000..f16782b
--- /dev/null
+++ b/src/ui_vars/ui_vars_datapackplus.h
@@ -0,0 +1,114 @@
+// $Id: ui_vars_datapackplus.h,v 1.23 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_VARS_DATAPACK_PLUS_H
+#define UI_VARS_DATAPACK_PLUS_H
+
+#include "datatype.h"
+#include "ui_vars_component.h"
+#include "vectorx.h"
+#include "rangex.h"
+
+class DataPack;
+
+class UIVarsDataPackPlus : public UIVarsComponent
+{
+  private:
+    // EWFIX.P5 REFACTOR: To make undo/redo work properly, the effective pop sizes
+    // had to be moved into member variable m_effectivePopSizes, then synched
+    // later (in Registry::InstallEffectivePopSizes(), by using call to
+    // GetEffectivePopSizes()).  This is not ideal, but there's no great
+    // solution.
+    //
+    // LS Response:  This (and many other things) would be fixed if we had a
+    // phase-1 datapack and a phase-2 datapack.
+
+    //LS NOTE:  non-const because of RevertAllMovingLoci() and nothing else:
+    DataPack&            datapack;
+    DoubleVec1d          m_effectivePopSizes;
+    vector<deque<bool> > m_simulateData;
+
+    UIVarsDataPackPlus();                           // undefined
+    UIVarsDataPackPlus(const UIVarsDataPackPlus&);  // undefined
+  public:
+    // one might argue that the constructors should have
+    // restricted access since only UIVars should
+    // be creating these puppies.
+    UIVarsDataPackPlus(UIVars * myUIVars,DataPack& dp);
+    UIVarsDataPackPlus(UIVars * myUIVars,const UIVarsDataPackPlus&);
+    virtual ~UIVarsDataPackPlus();
+
+    // data size info
+    long            GetMaxLoci() const;
+    long            GetNumRegions() const;
+    long            GetNumLoci(long region) const;
+    long            GetNCrossPartitions() const;
+    long            GetNPartitionsByForceType(force_type ft) const;
+    double          GetEffectivePopSize(long region) const;
+    bool            GetSimulateData(long region, long locus) const;
+    DoubleVec1d     GetEffectivePopSizes() const { return m_effectivePopSizes;};
+    long            GetNumIndividualsWithMultipleHaplotypes(long region, string lname) const;
+
+
+    // LS DEBUG:  This should move into the converter eventually.  Probably.
+    void            SetEffectivePopSize(long region, double size);
+    void            SetSimulateData(long region, long locus, bool sim);
+
+    // Revert the datapack to phase 1 again
+    void            RevertAllMovingLoci();
+
+    // data type info
+    bool            CanHapArrange() const;
+    bool            HasSomeNucleotideData() const;
+    bool            HasSomeMsatData() const;
+    bool            HasSomeKAlleleData() const;
+
+    // names
+    string          GetCrossPartitionName(long index) const;
+    StringVec1d     GetCrossPartitionNames() const;
+    string          GetForcePartitionName(force_type ftype, long index) const;
+    StringVec1d     GetForcePartitionNames(force_type ftype) const;
+    string          GetLocusName(long regionIndex, long locusIndex) const;
+    long            GetLocusIndex(long regionIndex, string locusName) const;
+    bool            HasLocus(long regionIndex, string locusName) const;
+    string          GetRegionName(long index) const;
+    string          GetSimpleRegionName(long index) const;
+    string          GetParamName(force_type ftype, long index, bool doLongName=true) const;
+
+    // not actually dependent on data
+    string          GetParamNameOfForce(force_type ftype) const;
+
+    //
+    data_type       GetDataType(long regionId, long locusId) const;
+    bool            IsMovable(long regionId, long locusId) const;
+    string          GetName(long regionId, long locusId) const;
+    rangeset        GetRange(long regionId, long locusId) const;
+    rangepair       GetRegionSiteSpan(long regionId) const;
+    long            GetNumSites(long region) const;
+    long            GetNumSites(long region, long locus) const;
+    StringVec2d     GetUniqueAlleles(long region, long locus) const;
+    std::set<long>  GetPloidies(long region) const;
+
+    //For Phenotypes, which need them for Haplotypes.
+    const Locus* GetConstLocusPointer(long region, long locus) const;
+
+    //Divergence
+    string GetAncestorName(long index1) const;
+    string GetDescendentNames(long index1) const;
+
+    bool            AnySimulation() const;
+    bool            AnyRelativeHaplotypes() const;
+    bool            AnySNPDataWithDefaultLocations() const;
+};
+
+#endif  // UI_VARS_DATAPACK_PLUS_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/ui_vars_forces.cpp b/src/ui_vars/ui_vars_forces.cpp
new file mode 100644
index 0000000..23923ee
--- /dev/null
+++ b/src/ui_vars/ui_vars_forces.cpp
@@ -0,0 +1,2975 @@
+// $Id: ui_vars_forces.cpp,v 1.94 2013/11/08 21:46:22 mkkuhner Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+#include <algorithm>
+#include <deque>
+#include <map>
+#include <vector>
+
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+
+#include "calculators.h"
+#include "constants.h"                  // for method_type
+#include "defaults.h"
+#include "errhandling.h"
+#include "registry.h"
+#include "stringx.h"
+#include "ui_vars.h"
+#include "ui_constants.h"
+#include "ui_vars_forces.h"
+#include "ui_vars_prior.h"
+#include "ui_warnings.h"
+#include "vectorx.h"
+
+using std::deque;
+using std::make_pair;
+using std::map;
+using std::vector;
+using std::list;
+
+//------------------------------------------------------------------------------------
+
+UIVarsSingleForce::UIVarsSingleForce
+(
+    UIVars * myUIVars,
+    force_type ftype,
+    long int nparams,
+    double defaultVal,
+    method_type methodType,
+    bool canBeSetUnset,
+    bool isOn,
+    long int eventMax,
+    UIVarsPrior defaultPrior
+    )
+    :
+    UIVarsComponent(myUIVars),
+    m_numParameters(nparams),
+    m_defaultValue(defaultVal),
+    m_defaultMethodType(methodType),
+    m_canSetOnOff(canBeSetUnset),
+    m_onOff(isOn),
+    m_maxEvents(eventMax),
+    m_profileType(defaults::profileType),
+    m_doProfile(nparams,true),
+    m_userSuppliedStartValues(nparams,FLAGDOUBLE),
+    // we CAN'T have user supplied start values at
+    // the time this structure is created. So we
+    // set them to FLAGDOUBLE. We should never access
+    // these FLAGDOUBLE values because that would
+    // require m_startValueMethodTypes to contain
+    // method_USER, and we assert against that below
+    m_startValueMethodTypes(nparams,methodType),
+    m_pstatusValues(nparams,ParamStatus(pstat_unconstrained)),
+    m_defaultPrior(defaultPrior),
+    m_priors(nparams, defaultPrior),
+    m_useDefaultPrior(nparams, true),
+    m_ftype(ftype)
+{
+    // m_pstatusValues is already sized to nparams pstat_unconstrained entries
+    // via the member-initializer list above, so no further setup is needed here.
+    assert(methodType        != method_USER);
+    assert(m_defaultMethodType != method_USER);
+    // must be a type that comes with a default value
+    // (e.g. method_PROGRAMDEFAULT) or requires calculation
+    // (method_FST or method_WATTERSON)
+}
+
+UIVarsSingleForce::UIVarsSingleForce
+(
+    UIVars * myUIVars,
+    const UIVarsSingleForce& singleForce)
+    :
+    UIVarsComponent(myUIVars),
+    m_numParameters                     (singleForce.m_numParameters),
+    m_defaultValue                      (singleForce.m_defaultValue),
+    m_defaultMethodType                 (singleForce.m_defaultMethodType),
+    m_canSetOnOff                       (singleForce.m_canSetOnOff),
+    m_onOff                             (singleForce.m_onOff),
+    m_maxEvents                         (singleForce.m_maxEvents),
+    m_profileType                       (singleForce.m_profileType),
+    m_doProfile                         (singleForce.m_doProfile),
+    m_userSuppliedStartValues           (singleForce.m_userSuppliedStartValues),
+    m_startValueMethodTypes             (singleForce.m_startValueMethodTypes),
+    m_groups                            (singleForce.m_groups),
+    m_pstatusValues                     (singleForce.m_pstatusValues),
+    m_defaultPrior                      (singleForce.m_defaultPrior),
+    m_priors                            (singleForce.m_priors),
+    m_useDefaultPrior                   (singleForce.m_useDefaultPrior),
+    m_ftype                             (singleForce.m_ftype),
+    m_calculatedStartValues             (singleForce.m_calculatedStartValues)
+{
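+    // Note that m_truevalues does not appear in the initializer list above,
+    // so a copied force starts with an empty set of "true" values
+    // (GetTrueValue() returns FLAGDOUBLE until SetTrueValue() is called).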
+}
+
+void UIVarsSingleForce::checkIndexValue(long int index) const
+{
+    // If you get an assert here, the menu or xml logic
+    // isn't checking something it should be.
+    assert(index >= 0);
+    assert(index < GetNumParameters());
+    // However, since the user can supply parameter indexes directly in the
+    // Group tag, we also validate here and throw if the check fails:
+    if (index < 0)
+    {
+        throw data_error("Invalid (negative) parameter index in checkIndexValue.");
+    }
+    if (index >= GetNumParameters())
+    {
+        throw data_error("Invalid (too large) parameter index in checkIndexValue.");
+    }
+}
+
+void UIVarsSingleForce::AssertOnIllegalStartMethod(method_type method)
+{
+    assert(method == method_PROGRAMDEFAULT);
+    // We assume that an arbitrary force has no calculated start method
+    // types (such as method_FST or method_WATTERSON).  Forces that do
+    // have them override this method.
+}
+
+// Used to set a start method without providing a value (and thus not
+// suitable for use when setting user-supplied start values).
+void UIVarsSingleForce::SetStartMethod(method_type method, long int index)
+{
+    checkIndexValue(index);
+    AssertOnIllegalStartMethod(method);
+    m_startValueMethodTypes[index] = method;
+}
+
+void UIVarsSingleForce::SetStartMethods(method_type method)
+{
+    for (int index = 0; index < GetNumParameters(); index++)
+    {
+        // use this method since we want to pick up
+        // sanity checking done in virtual method
+        // SetStartMethod
+        SetStartMethod(method,index);
+    }
+}
+
+string
+UIVarsSingleForce::GetParamName(long int pindex)
+{
+    return GetConstUIVars().GetParamNameWithConstraint(m_ftype, pindex);
+}
+
+bool
+UIVarsSingleForce::GetLegal() const
+{
+    return (GetOnOff() || m_canSetOnOff);
+}
+
+bool
+UIVarsSingleForce::GetDoProfile(long int index) const
+{
+    checkIndexValue(index);
+    ParamStatus mystatus = GetParamstatus(index);
+    if (mystatus.Inferred())
+    {
+        if (GetConstUIVars().chains.GetDoBayesianAnalysis())
+        {
+            return true;
+        }
+        return m_doProfile[index];
+    }
+    else
+        return false;
+}
+
+proftype UIVarsSingleForce::GetProfileType() const
+{
+    return m_profileType;
+}
+
+proftype UIVarsSingleForce::GetProfileType(long int index) const
+{
+    checkIndexValue(index);
+    if( GetDoProfile(index))
+    {
+        return m_profileType;
+    }
+    else
+    {
+        return profile_NONE;
+    }
+}
+
+string
+UIVarsSingleForce::GetProfileTypeSummaryDescription() const
+{
+    proftype ptype = GetProfileType();
+    paramlistcondition condition = GetParamListCondition();
+    if(condition == paramlist_NO)
+    {
+        return ToString(condition) + "       ";
+    }
+    else
+    {
+        return ToString(ptype) +" (" +ToString(condition) +")";
+    }
+}
+
+string
+UIVarsSingleForce::GetPriorTypeSummaryDescription() const
+{
+    bool somelinear = false;
+    bool somelog = false;
+    for (unsigned long pnum=0; pnum<m_priors.size(); pnum++)
+    {
+        ParamStatus mystatus = GetParamstatus(pnum);
+        if (mystatus.Inferred())
+        {
+            switch(GetPrior(pnum).GetPriorType())
+            {
+                case LINEAR:
+                    somelinear = true;
+                    break;
+                case LOGARITHMIC:
+                    somelog = true;
+                    break;
+            }
+        }
+    }
+    if (somelinear && somelog)
+    {
+        return "(mixed linear/log)";
+    }
+    else if (somelinear && (!somelog))
+    {
+        return "(all linear)";
+    }
+    else if (somelog && (!somelinear))
+    {
+        return "(all logarithmic)";
+    }
+    else
+    {
+        return "(none eligible)";
+    }
+}
+
+paramlistcondition
+UIVarsSingleForce::GetParamListCondition() const
+{
+    bool seenOff = false;
+    bool seenOn  = false;
+    for (int index = 0; index < GetNumParameters(); index++)
+    {
+        // premature optimization being the root of all
+        // evil, I've not chosen to break out of the loop when
+        // seenOn and seenOff are both true, but you can change
+        // it if you want -- ewalkup 8-19-2004
+        if(GetParamValid(index))
+        {
+            if (GetDoProfile(index))
+            {
+                seenOn = true;
+            }
+            else
+            {
+                seenOff = true;
+            }
+        }
+    }
+    if(seenOn)
+    {
+        if(seenOff)
+        {
+            return paramlist_MIX;
+        }
+        else
+        {
+            return paramlist_YES;
+        }
+    }
+    else
+    {
+        return paramlist_NO;
+    }
+}
+
+void  UIVarsSingleForce::SetDoProfile(bool doIt, long int index)
+{
+    checkIndexValue(index);
+    long gindex = ParamInGroup(index);
+    ParamStatus mystatus = GetParamstatus(index);
+    if (doIt && !mystatus.Varies())
+    {
+        // Do nothing--we can't turn on profiling for this parameter.
+        if (GetParamValid(index))
+        {
+            string warning = "Warning:  " + GetParamName(index) +
+                " may not be profiled because it is currently set to be "
+                + ToString(mystatus.Status()) + ".";
+            GetConstUIVars().GetUI()->AddWarning(warning);
+        }
+        return;
+    }
+    if (GetConstUIVars().chains.GetDoBayesianAnalysis())
+    {
+        if (doIt==false && mystatus.Inferred())
+        {
+            string warning = "Warning:  Profiling is always on for all valid parameters in a Bayesian analysis, since profiling takes no extra time.";
+            GetConstUIVars().GetUI()->AddWarning(warning);
+            return;
+        }
+    }
+    m_doProfile[index] = doIt;
+    if (gindex != FLAGLONG)
+    {
+        SetDoProfilesForGroup(doIt, gindex);
+    }
+}
+
+void  UIVarsSingleForce::SetDoProfile(bool doIt)
+{
+    if (GetConstUIVars().chains.GetDoBayesianAnalysis())
+    {
+        if (doIt==false)
+        {
+            string warning = "Warning:  Profiling is always on for all valid parameters in a Bayesian analysis, since profiling takes no extra time.";
+            GetConstUIVars().GetUI()->AddWarning(warning);
+            return;
+        }
+    }
+    for (int index = 0; index < GetNumParameters(); index++)
+    {
+        SetDoProfile(doIt,index);
+    }
+}
+
+void UIVarsSingleForce::SetProfileType(proftype p)
+{
+    m_profileType = p;
+}
+
+ParamStatus UIVarsSingleForce::GetParamstatus(long int pindex) const
+{
+    long int gindex = ParamInGroup(pindex);
+    if (gindex == FLAGLONG)
+    {
+        return m_pstatusValues[pindex];
+    }
+    ParamStatus mystatus = m_groups[gindex].first;
+    bool isGroupHead = (pindex == m_groups[gindex].second[0]);
+    if ((mystatus.Status() == pstat_identical) && isGroupHead)
+    {
+        //MFIX probably needs changing, need to interface here
+        //with finished UI design
+        return ParamStatus(pstat_identical_head);
+    }
+    if ((mystatus.Status() == pstat_multiplicative) && isGroupHead)
+    {
+        return ParamStatus(pstat_multiplicative_head);
+    }
+    return mystatus;
+}
+
+void UIVarsSingleForce::SetParamstatus(const ParamStatus& mystatus, long int index)
+{
+    long int gindex = ParamInGroup(index);
+    if (gindex == FLAGLONG)
+    {
+        m_pstatusValues[index] = mystatus;
+    }
+    else
+    {
+        SetGroupParamstatus(mystatus.Status(), gindex);
+    }
+};
+
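+// For an ungrouped parameter this simply forwards to GetUngroupedStartValue();
+// for a grouped parameter it returns the arithmetic mean of the ungrouped
+// start values of all members of the group.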
+double UIVarsSingleForce::GetStartValue(long int pindex) const
+{
+    long int gindex = ParamInGroup(pindex);
+    if (gindex == FLAGLONG)
+    {
+        return GetUngroupedStartValue(pindex);
+    }
+    double sum = 0;
+    for (unsigned long int gpindex = 0; gpindex < m_groups[gindex].second.size(); gpindex++)
+    {
+        sum += GetUngroupedStartValue(m_groups[gindex].second[gpindex]);
+    }
+    return (sum/m_groups[gindex].second.size());
+}
+
+double UIVarsSingleForce::GetUngroupedStartValue(long int index) const
+{
+    // start value might have been set by the user, or we might
+    // have to grab it from the calculated start values
+
+    double retval = 0.0;
+    if (!GetParamstatus(index).Valid()) return 0.0;
+
+    checkIndexValue(index);
+    method_type thisMethod = m_startValueMethodTypes[index];
+
+    switch(thisMethod)
+    {
+        case method_PROGRAMDEFAULT:
+            retval = m_defaultValue;
+            break;
+        case method_USER:
+            retval = m_userSuppliedStartValues[index];
+            break;
+        default:
+        {
+            // see if we have calculated values for the
+            // start value method type for this parameter
+            map<method_type,DoubleVec1d>::const_iterator it;
+            it = m_calculatedStartValues.find(thisMethod);
+            if(it == m_calculatedStartValues.end())
+            {
+                assert(false);
+                throw implementation_error("Unable to calculate start values for " +
+                                           ToString(thisMethod) + ".");
+            }
+            // we've populated the m_calculatedStartValues for
+            // this method, so grab them and return the correct
+            // one for this parameter
+            retval = (it->second)[index];
+            break;
+        }
+    }
+    double minval = GetMinStartValue(index);
+    double maxval = GetMaxStartValue(index);
+    assert (maxval > minval);
+    if (retval < minval)
+    {
+        return minval;
+    }
+    if (retval > maxval)
+    {
+        return maxval;
+    }
+    return retval;
+}
+
+DoubleVec1d
+UIVarsSingleForce::GetStartValues() const
+{
+    DoubleVec1d retValue(GetNumParameters(),FLAGDOUBLE);
+    // the user may have calculated start values and
+    // then hand-edited some of them, so we need to
+    // populate a vector with the individual values
+    for(size_t i = 0; i < retValue.size(); i++)
+    {
+        retValue[i] = GetStartValue(i);
+    }
+    return retValue;
+}
+
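+// In a Bayesian analysis, a non-constant parameter's start value is bounded by
+// its prior; otherwise the force-specific limits from defaults are used.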
+double UIVarsSingleForce::GetMinStartValue(long int pindex) const
+{
+    ParamStatus mystatus = GetParamstatus(pindex);
+    if (!mystatus.Valid()) return 0;
+    if (GetConstUIVars().chains.GetDoBayesianAnalysis())
+    {
+        if (mystatus.Status() != pstat_constant)
+        {
+            return GetPrior(pindex).GetLowerBound();
+        }
+    }
+    switch(m_ftype)
+    {
+        case force_COAL:
+            return defaults::minTheta;
+        case force_MIG:
+        case force_DIVMIG:
+            return defaults::minMigRate;
+        case force_DISEASE:
+            return defaults::minDiseaseRate;
+        case force_REC:
+            return defaults::minRecRate;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            return defaults::minGrowRate;
+        case force_REGION_GAMMA:
+            return defaults::minGammaOverRegions;
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            return defaults::minLSelectCoeff;
+            break;
+        case force_DIVERGENCE:
+            return defaults::minEpoch;
+            break;
+        default:
+            assert(false);              //Uncaught force type.
+    }
+    return 0.0;  //To prevent compiler warning.
+}
+
+double UIVarsSingleForce::GetMaxStartValue(long int pindex) const
+{
+    ParamStatus mystatus = GetParamstatus(pindex);
+    if (!mystatus.Valid()) return 0;
+    if (GetConstUIVars().chains.GetDoBayesianAnalysis())
+    {
+        if (mystatus.Status() != pstat_constant)
+        {
+            return GetPrior(pindex).GetUpperBound();
+        }
+    }
+    switch(m_ftype)
+    {
+        case force_COAL:
+            return defaults::maxTheta;
+        case force_MIG:
+        case force_DIVMIG:
+            return defaults::maxMigRate;
+        case force_DISEASE:
+            return defaults::maxDiseaseRate;
+        case force_REC:
+            return defaults::maxRecRate;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            return defaults::maxGrowRate;
+        case force_REGION_GAMMA:
+            return defaults::maxGammaOverRegions;
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            return defaults::maxLSelectCoeff;
+            break;
+        case force_DIVERGENCE:
+            return defaults::maxEpoch;
+            break;
+        default:
+            assert(false);              //Uncaught force type.
+    }
+    return 1000.0; // To prevent compiler warning.
+}
+
+void UIVarsSingleForce::SetUserStartValue(double startVal, long int index)
+{
+    checkIndexValue(index);
+    bool isinvalid = !GetParamstatus(index).Valid();
+    if (isinvalid)
+    {
+        if (startVal != 0)
+        {
+            string warning = "Warning:  " + GetParamName(index) +
+                " ignores any attempt to set the starting value, since it has been set invalid.";
+            GetConstUIVars().GetUI()->AddWarning(warning);
+        }
+        return;
+    }
+    double minval = GetMinStartValue(index);
+    double maxval = GetMaxStartValue(index);
+    assert ((maxval > minval) || isinvalid);
+    if (startVal < minval)
+    {
+        startVal = minval;
+        string warning = "Warning:  the minimum value for " + GetParamName(index) +
+            " is " + ToString(minval) + ":  setting the starting value there.";
+        GetConstUIVars().GetUI()->AddWarning(warning);
+    }
+    if (startVal > maxval)
+    {
+        startVal = maxval;
+        string warning = "Warning:  the maximum value for " + GetParamName(index) +
+            " is " + ToString(maxval) + ":  setting the starting value there.";
+        GetConstUIVars().GetUI()->AddWarning(warning);
+    }
+    long int gindex = ParamInGroup(index);
+    if (gindex != FLAGLONG)
+    {
+        for (unsigned long int gpindex = 0; gpindex < m_groups[gindex].second.size(); gpindex++)
+        {
+            long int pindex = m_groups[gindex].second[gpindex];
+            m_userSuppliedStartValues[pindex]=startVal;
+            m_startValueMethodTypes[pindex] = method_USER;
+        }
+    }
+    else
+    {
+        m_userSuppliedStartValues[index]=startVal;
+        // Don't use SetStartMethod() here -- it can block use of method_USER,
+        // since the UI shouldn't set method_USER without going through this
+        // method.
+        m_startValueMethodTypes[index] = method_USER;
+    }
+}
+
+void UIVarsSingleForce::SetUserStartValues(double startVal)
+{
+    for (int pindex = 0; pindex < GetNumParameters(); pindex++)
+    {
+        // Only valid parameters can accept a user-supplied start value;
+        // SetUserStartValue() ignores (and may warn about) invalid ones.
+        if (GetParamstatus(pindex).Valid())
+        {
+            SetUserStartValue(startVal,pindex);
+        }
+    }
+}
+
+method_type UIVarsSingleForce::GetStartMethod(long int index) const
+{
+    checkIndexValue(index);
+    return m_startValueMethodTypes[index];
+}
+
+void UIVarsSingleForce::SetTrueValue(double trueVal, long int index)
+{
+    checkIndexValue(index);
+    if (!GetParamstatus(index).Valid())
+    {
+        if (trueVal != 0)
+        {
+            string warning = "Warning:  " + GetParamName(index) +
+                " ignores any attempt to set the true value, since it has been set invalid.";
+            GetConstUIVars().GetUI()->AddWarning(warning);
+        }
+        return;
+    }
+    if (m_truevalues.empty())
+    {
+        m_truevalues.resize(GetNumParameters());
+    }
+    m_truevalues[index] = trueVal;
+}
+
+double UIVarsSingleForce::GetTrueValue(long int index) const
+{
+    checkIndexValue(index);
+    if (m_truevalues.empty()) return FLAGDOUBLE; // JDEBUG
+    else return m_truevalues[index];
+}
+
+const UIVarsPrior& UIVarsSingleForce::GetPrior(long int pindex) const
+{
+    if (pindex == uiconst::GLOBAL_ID)
+    {
+        return m_defaultPrior;
+    }
+    checkIndexValue(pindex);
+    if (GetUseDefaultPrior(pindex))
+    {
+        return m_defaultPrior;
+    }
+    return m_priors[pindex];
+};
+
+priortype UIVarsSingleForce::GetPriorType(long int pindex) const
+{
+    checkIndexValue(pindex);
+    return GetPrior(pindex).GetPriorType();
+};
+
+double UIVarsSingleForce::GetLowerBound(long int pindex) const
+{
+    checkIndexValue(pindex);
+    return GetPrior(pindex).GetLowerBound();
+};
+
+double UIVarsSingleForce::GetUpperBound(long int pindex) const
+{
+    checkIndexValue(pindex);
+    return GetPrior(pindex).GetUpperBound();
+};
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+long UIVarsSingleForce::GetRelativeSampling(long int pindex) const
+{
+    checkIndexValue(pindex);
+    return GetPrior(pindex).GetRelativeSampling();
+};
+#endif
+
+void UIVarsSingleForce::SetPriorType(priortype ptype, long int pindex)
+{
+    checkIndexValue(pindex);
+    long int gindex = ParamInGroup(pindex);
+    if (gindex == FLAGLONG)
+    {
+        SetUngroupedPriorType(ptype, pindex);
+    }
+    else for (unsigned long int gpindex = 0; gpindex < m_groups[gindex].second.size(); gpindex++)
+         {
+             SetUngroupedPriorType(ptype, m_groups[gindex].second[gpindex]);
+         }
+}
+
+void UIVarsSingleForce::SetUngroupedPriorType(priortype ptype, long int pindex)
+{
+    if (GetUseDefaultPrior(pindex))
+    {
+        m_useDefaultPrior[pindex] = false;
+        m_priors[pindex] = m_defaultPrior;
+    }
+    m_priors[pindex].SetPriorType(ptype);
+}
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+void UIVarsSingleForce::SetRelativeSampling(long rate, long int pindex)
+{
+    checkIndexValue(pindex);
+    long int gindex = ParamInGroup(pindex);
+    if (gindex == FLAGLONG)
+    {
+        SetUngroupedRelativeSampling(rate, pindex);
+    }
+    else
+        for (unsigned long int gpindex = 0; gpindex < m_groups[gindex].second.size(); gpindex++)
+        {
+            SetUngroupedRelativeSampling(rate, m_groups[gindex].second[gpindex]);
+        }
+}
+
+void UIVarsSingleForce::SetUngroupedRelativeSampling(long rate, long int pindex)
+{
+    checkIndexValue(pindex);
+    if (GetUseDefaultPrior(pindex))
+    {
+        m_useDefaultPrior[pindex] = false;
+        m_priors[pindex] = m_defaultPrior;
+    }
+    m_priors[pindex].SetRelativeSampling(rate);
+}
+
+#endif
+
+void UIVarsSingleForce::SetLowerBound(double bound, long int pindex)
+{
+    checkIndexValue(pindex);
+    long int gindex = ParamInGroup(pindex);
+    if (gindex == FLAGLONG)
+    {
+        SetUngroupedLowerBound(bound, pindex);
+    }
+    else
+        for (unsigned long int gpindex = 0; gpindex < m_groups[gindex].second.size(); gpindex++)
+        {
+            SetUngroupedLowerBound(bound, m_groups[gindex].second[gpindex]);
+        }
+}
+
+void UIVarsSingleForce::SetUngroupedLowerBound(double bound, long int pindex)
+{
+    checkIndexValue(pindex);
+    if (GetUseDefaultPrior(pindex))
+    {
+        m_useDefaultPrior[pindex] = false;
+        m_priors[pindex] = m_defaultPrior;
+    }
+    m_priors[pindex].SetLowerBound(bound);
+}
+
+void UIVarsSingleForce::SetUpperBound(double bound, long int pindex)
+{
+    checkIndexValue(pindex);
+    long int gindex = ParamInGroup(pindex);
+    if (gindex == FLAGLONG)
+    {
+        SetUngroupedUpperBound(bound, pindex);
+    }
+    else for (unsigned long int gpindex = 0; gpindex < m_groups[gindex].second.size(); gpindex++)
+         {
+             SetUngroupedUpperBound(bound, m_groups[gindex].second[gpindex]);
+         }
+}
+
+void UIVarsSingleForce::SetUngroupedUpperBound(double bound, long int pindex)
+{
+    checkIndexValue(pindex);
+    if (GetUseDefaultPrior(pindex))
+    {
+        m_useDefaultPrior[pindex] = false;
+        m_priors[pindex] = m_defaultPrior;
+    }
+    m_priors[pindex].SetUpperBound(bound);
+}
+
+//------------------------------------------------------------------------------------
+
+const UIVarsPrior& UIVarsSingleForce::GetDefaultPrior() const
+{
+    return m_defaultPrior;
+};
+
+bool UIVarsSingleForce::GetUseDefaultPrior(long int pindex) const
+{
+    checkIndexValue(pindex);
+    return m_useDefaultPrior[pindex];
+};
+
+priortype UIVarsSingleForce::GetDefaultPriorType() const
+{
+    return m_defaultPrior.GetPriorType();
+};
+
+double UIVarsSingleForce::GetDefaultLowerBound() const
+{
+    return m_defaultPrior.GetLowerBound();
+};
+
+double UIVarsSingleForce::GetDefaultUpperBound() const
+{
+    return m_defaultPrior.GetUpperBound();
+};
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+long UIVarsSingleForce::GetDefaultRelativeSampling() const
+{
+    return m_defaultPrior.GetRelativeSampling();
+};
+#endif
+
+void UIVarsSingleForce::SetUseDefaultPrior(bool val, long int pindex)
+{
+    checkIndexValue(pindex);
+    m_useDefaultPrior[pindex] = val;
+};
+
+void UIVarsSingleForce::SetUseAllDefaultPriors()
+{
+    for (unsigned long int pindex = 0; pindex < m_useDefaultPrior.size(); pindex++)
+    {
+        m_useDefaultPrior[pindex] = true;
+    }
+};
+
+void UIVarsSingleForce::SetDefaultPriorType(priortype ptype)
+{
+    m_defaultPrior.SetPriorType(ptype);
+}
+
+void UIVarsSingleForce::SetDefaultLowerBound(double bound)
+{
+    m_defaultPrior.SetLowerBound(bound);
+}
+
+void UIVarsSingleForce::SetDefaultUpperBound(double bound)
+{
+    m_defaultPrior.SetUpperBound(bound);
+}
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+void UIVarsSingleForce::SetDefaultRelativeSampling(long rate)
+{
+    m_defaultPrior.SetRelativeSampling(rate);
+}
+#endif
+
+//------------------------------------------------------------------------------------
+
+void
+UIVarsSingleForce::SetMaxEvents(long int events)
+{
+    if (m_ftype != force_GROW && m_ftype != force_COAL && events <= 0)
+    {
+        throw data_error("The maximum number of events for force " + ToString(m_ftype) + " must be greater than zero.");
+    }
+    m_maxEvents = events;
+}
+
+void
+UIVarsSingleForce::SetOnOff(bool onOffVal)
+{
+    assert( m_canSetOnOff || ( onOffVal == m_onOff ) );
+    m_onOff = onOffVal;
+}
+
+bool
+UIVarsSingleForce::GetParamValid(long int id) const
+{
+    return (GetOnOff() && (id >= 0) && (id < m_numParameters));
+}
+
+bool
+UIVarsSingleForce::GetParamUnique(long int id) const
+{
+    long int gindex = ParamInGroup(id);
+    if (gindex == FLAGLONG) return true;
+    if (m_groups[gindex].second[0] == id) return true;
+    return false;
+}
+
+bool UIVarsSingleForce::SomeVariableParams() const
+{
+    for (long int pnum = 0; pnum < m_numParameters; pnum++)
+    {
+        if (GetParamstatus(pnum).Varies()) return true;
+    }
+    return false;  // nothing ever varied
+}
+
+void UIVarsSingleForce::AddGroup(ParamStatus mystatus, LongVec1d indices)
+{
+    assert(indices.size() > 1);
+
+    //Make a new ParamGroup and put it into m_groups.  Don't just put 'indices'
+    // into it because we need to check each entry in it.
+    LongVec1d blanklist;
+    ParamGroup newgroup = make_pair(mystatus,blanklist);
+    m_groups.push_back(newgroup);
+    long int gindex = m_groups.size()-1; //<-OK because we just added one.
+
+    //AddGroup is at the interface between the user and the program, whether
+    // through the menu or through the XML. As such, the indexes used start
+    // at one instead of zero--this is where that is changed for the program.
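+    // For example, a group the user specifies as "2 3" arrives here as {2,3}
+    // and is stored internally as parameter indexes {1,2}.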
+    for (unsigned long int vecindex = 0; vecindex < indices.size(); vecindex++)
+    {
+        AddParamToGroup(indices[vecindex] - 1, gindex);
+    }
+}
+
+void UIVarsSingleForce::AddParamToGroup(long int pindex, long int gindex)
+{
+    checkGIndexValue(gindex);
+    checkIndexValue(pindex);
+    if (ParamInGroup(pindex) != FLAGLONG)
+    {
+        string e = "Invalid parameter indexes in group constraints for force "
+            + ToString(m_ftype)
+            + ":  The same parameter index is included in more than one group.";
+        throw data_error(e);
+    }
+    if (m_groups[gindex].second.size() > 0)
+    {
+        //copy the prior to the new parameter.
+        long int onegroupparam = m_groups[gindex].second[0];
+        m_useDefaultPrior[pindex] = m_useDefaultPrior[onegroupparam];
+        m_priors[pindex] = m_priors[onegroupparam];
+    }
+    m_groups[gindex].second.push_back(pindex);
+    std::sort(m_groups[gindex].second.begin(), m_groups[gindex].second.end());
+}
+
+void UIVarsSingleForce::AddParamToNewGroup(long int pindex)
+{
+    checkIndexValue(pindex);
+    if (ParamInGroup(pindex) != FLAGLONG)
+    {
+        throw data_error("Cannot add a parameter to a new group because it is already in a group.");
+    }
+    LongVec1d pindices;
+    pindices.push_back(pindex);
+    ParamGroup newgroup = make_pair(defaults::groupPstat, pindices);
+    m_groups.push_back(newgroup);
+}
+
+void UIVarsSingleForce::RemoveParamIfInAGroup(long int pindex)
+{
+    long int gnum = ParamInGroup(pindex);
+    if (gnum != FLAGLONG)
+    {
+        for (vector<long int>::iterator gpindex = m_groups[gnum].second.begin();
+             gpindex != m_groups[gnum].second.end(); )
+        {
+            if ((*gpindex) == pindex)
+            {
+                ParamStatus old_pstat = GetGroupParamstatus(gnum);
+                gpindex = m_groups[gnum].second.erase(gpindex);
+                if (m_groups[gnum].second.size() == 0)
+                {
+                    //Delete the group--it's empty.
+                    m_groups.erase(m_groups.begin() + gnum);
+                }
+                if (old_pstat.Grouped())
+                {
+                    SetParamstatus(ParamStatus(pstat_unconstrained), pindex);
+                }
+                else
+                {
+                    SetParamstatus(old_pstat, pindex);
+                }
+                return;
+            }
+            else
+            {
+                gpindex++;
+            }
+        }
+    }
+}
+
+void UIVarsSingleForce::SetGroupParamstatus(ParamStatus pstat, long int gindex)
+{
+    // MDEBUG not correct for multiplicative!
+    if (pstat.Status() == pstat_unconstrained)
+    {
+        pstat = ParamStatus(pstat_identical);
+        GetConstUIVars().GetUI()->AddWarning("Warning:  Groups may not be set 'unconstrained'; setting to 'identical' instead.");
+    }
+    checkGIndexValue(gindex);
+    m_groups[gindex].first = pstat;
+}
+
+ParamStatus UIVarsSingleForce::GetGroupParamstatus(long int gindex) const
+{
+    checkGIndexValue(gindex);
+    return m_groups[gindex].first;
+}
+
+LongVec1d UIVarsSingleForce::GetGroupParamList(long int gindex) const
+{
+    checkGIndexValue(gindex);
+    return m_groups[gindex].second;
+}
+
+long int UIVarsSingleForce::ParamInGroup(long int pindex) const
+{
+    checkIndexValue(pindex);
+    for (long int gindex = 0; gindex < static_cast<long int>(m_groups.size()); gindex++)
+    {
+        for (unsigned long int gpindex = 0; gpindex < m_groups[gindex].second.size(); gpindex++)
+        {
+            if (pindex == m_groups[gindex].second[gpindex])
+            {
+                return static_cast<long int>(gindex);
+            }
+        }
+    }
+    return FLAGLONG;
+}
+
+void UIVarsSingleForce::SetDoProfilesForGroup(bool doIt, long int gindex)
+{
+    //Since this is a private function that runs from SetDoProfile(), all the
+    // error checking has already been done, and we need only set the
+    // appropriate values.
+    checkGIndexValue(gindex);
+    for (unsigned long int gpindex = 0; gpindex < m_groups[gindex].second.size(); gpindex++)
+    {
+        m_doProfile[m_groups[gindex].second[gpindex]] = doIt;
+    }
+}
+
+void UIVarsSingleForce::checkGIndexValue(long int gindex) const
+{
+    if (gindex==FLAGLONG) return;
+    assert (gindex >= 0);
+    if (gindex<0)
+        throw data_error("Error:  group index value less than zero.");
+    assert(gindex < static_cast<long int>(m_groups.size()));
+    if (gindex >= static_cast<long int>(m_groups.size()))
+        throw data_error("Error:  group index value greater than the number of groups.");
+}
+
+bool UIVarsSingleForce::AreGroupsValid() const
+{
+    LongVec1d allpindices;
+    for (unsigned long int gindex = 0; gindex < m_groups.size(); gindex++)
+    {
+        if (m_groups[gindex].second.size() < 2) return false;
+        allpindices.insert(allpindices.end(), m_groups[gindex].second.begin(), m_groups[gindex].second.end());
+    }
+    std::sort(allpindices.begin(), allpindices.end());
+    for (unsigned long int gpindex=1; gpindex<allpindices.size(); gpindex++)
+    {
+        if (allpindices[gpindex] == allpindices[gpindex-1])
+            return false;
+    }
+    return true;
+}
+
+void UIVarsSingleForce::FixGroups()
+{
+    //First, we remove any parameter that existed in more than one group. This
+    // shouldn't ever be the case due to checks in AddParamToGroup, but hey.
+    // First we have to find them again.
+    LongVec1d allpindices;
+    for (unsigned long int gindex = 0; gindex < m_groups.size(); gindex++)
+    {
+        allpindices.insert(allpindices.end(), m_groups[gindex].second.begin(), m_groups[gindex].second.end());
+    }
+    std::sort(allpindices.begin(), allpindices.end());
+    for (unsigned long int gpindex=1; gpindex<allpindices.size(); gpindex++)
+    {
+        if (allpindices[gpindex] == allpindices[gpindex-1])
+        {
+            RemoveParamIfInAGroup(allpindices[gpindex]);
+            //This removes the param from all groups.  It's not the ideal solution,
+            // and the real solution is to never get in this situation in the first
+            // place.  Which should be covered in AddParamToGroup.  So we're set.
+        }
+    }
+
+    //Now, any parameter status flags that are 'joint' or 'identical' but do
+    // not correspond with any group are set to 'unconstrained'.
+    for (long int pindex = 0; pindex < m_numParameters; pindex++)
+    {
+        ParamStatus mystatus = GetParamstatus(pindex);
+        if (mystatus.Grouped())
+        {
+            if (ParamInGroup(pindex) == FLAGLONG)
+            {
+                SetParamstatus(ParamStatus(pstat_unconstrained), pindex);
+            }
+        }
+        //Also, any truly invalid parameters (i.e. the migration diagonals) may
+        // not be in a group, nor may they be set to some status other than
+        // 'invalid'.
+        if (!GetParamValid(pindex))
+        {
+            RemoveParamIfInAGroup(pindex);
+            SetParamstatus(ParamStatus(pstat_invalid), pindex);
+        }
+    }
+
+    //Now, any group with only one parameter in it is deleted, setting the
+    // corresponding ParamStatus appropriately.
+    for (vector<ParamGroup>::iterator giter = m_groups.begin();
+         giter != m_groups.end();)      // <- iterator incremented below.
+    {
+        if ((*giter).second.size() == 0)
+        {
+            giter = m_groups.erase(giter);
+        }
+        else if ((*giter).second.size() == 1)
+        {
+            long int pindex = (*giter).second[0];
+            ParamStatus mystatus((*giter).first);
+            if (mystatus.Grouped())
+            {
+                SetParamstatus(ParamStatus(pstat_unconstrained), pindex);
+            }
+            else
+            {
+                SetParamstatus(ParamStatus((*giter).first), pindex);
+            }
+            giter = m_groups.erase(giter);
+        }
+        else
+        {
+            ++giter;
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+UIVars2DForce::UIVars2DForce
+(   UIVars * myUIVars,
+    force_type ftype,
+    long int numPartitions,
+    double defaultVal,
+    method_type methodType,
+    bool canBeSetUnset,
+    bool isOn,
+    long int eventMax,
+    UIVarsPrior defaultPrior
+    )
+    :
+    UIVarsSingleForce(
+        myUIVars,
+        ftype,
+        numPartitions*numPartitions,
+        defaultVal,
+        methodType,
+        canBeSetUnset,         // can't change original on/off setting
+        isOn,
+        eventMax,
+        defaultPrior),
+    m_npartitions(numPartitions)
+{
+    // EWFIX.P5 DIMENSIONS -- we need the 1D <=> 2D translation objects
+    //Make the diagonals of the parameter statuses (stati?) invalid.
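+    // The matrix is stored row-major in a 1D vector, so the diagonal entries
+    // sit at 0-based indexes 0, numPartitions+1, 2*(numPartitions+1), and so
+    // on; e.g. with numPartitions == 3 the loop below marks indexes 0, 4, 8.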
+    for (long int diagonal = 1; diagonal <= numPartitions; diagonal++)
+    {
+        SetParamstatus(ParamStatus(pstat_invalid), ((diagonal-1)*numPartitions + diagonal)-1);
+    }
+}
+
+UIVars2DForce::UIVars2DForce
+(   UIVars * myUIVars, const UIVars2DForce & twoD )
+    :
+    UIVarsSingleForce(myUIVars,twoD),
+    m_npartitions(twoD.m_npartitions)
+{
+}
+
+void UIVars2DForce::AssertOnIllegalStartMethod(method_type method)
+{
+    assert( (method == method_FST) || (method == method_PROGRAMDEFAULT) );
+}
+
+bool
+UIVars2DForce::GetParamValid(long int id) const
+{
+    bool possible = UIVarsSingleForce::GetParamValid(id);
+    if(possible)
+    {
+        // NB I believe this means "is it off-diagonal"?
+        // MDEBUG This may need to be overridden in DivMig, but I don't
+        // know how as it requires the Epoch structure to determine which
+        // off-diagonal entries are invalid
+        const UIVars& vars = GetConstUIVars();
+        long int numPops = vars.datapackplus.GetNPartitionsByForceType(m_ftype);
+        long int intoIndex = id / numPops;
+        long int fromIndex = id % numPops;
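+        // e.g. with numPops == 3, id 5 decodes to into-population 1 and
+        // from-population 2, which is off-diagonal and therefore valid.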
+        return (intoIndex != fromIndex);
+    }
+    return false;
+}
+
+//AreZeroesValid checks to make sure that we haven't set so many migration
+// events to zero (and constant) that we can no longer coalesce.  Note that
+// setting a parameter to 'pstat_invalid' means that we are setting it to zero
+// and constant, too.
+//
+// This function could make suggestions about what to fix, but its suggestions
+// are unlikely to be biologically relevant for the populations involved, so it
+// seems better to force the user either to make their own changes or to remove
+// all 0/constant constraints.
+//
+// If it were to make suggestions, it would need to be non-const.
+
+bool
+UIVars2DForce::AreZeroesValid() const
+{
+    //The goal of this algorithm is to find 'Rome', a mythical land from which
+    // all of your current populations descended.  In a migration system with
+    // no constraints, all partitions could be Rome, but we only need to find
+    // one to satisfy our test.  If we find at least one Rome, we return true,
+    // and if not, we return false.
+    //
+    // First, we make two lists that will hold partition indexes in them,
+    // 'reached' and 'unreached'.  At the beginning, everything is in
+    // 'unreached'.
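+    //
+    // A small hypothetical example with three populations: if the only
+    // nonzero, unconstrained rates are 2->1 and 1->0, population 2 is a valid
+    // Rome (its descendants reach 1 directly and 0 via 1), so we return true;
+    // zeroing the 1->0 rate as well would leave population 0 unreachable from
+    // anywhere, and we would return false.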
+
+    list<long int> reached, unreached;
+    for (long int partind = 0; partind < m_npartitions; partind++)
+        unreached.push_back(partind);
+
+    // Now we pick an arbitrary partition (index 0) to test as Rome.
+    long int testRome = 0;
+
+    // Now we loop until we return either true or false.
+    while (true)
+    {
+        //First, we don't have to worry about getting to testRome.  We also
+        // don't need to add testRome to 'reached', since that list is only
+        // used to back away from testRome if need be.
+        unreached.remove(testRome);
+
+        //Check to see who we can get to in 'unreached' from Rome.  We only
+        // test routes that go through the unreached list, since everything in
+        // the 'reached' list has already been exhaustively searched.
+        UpdateReachList(testRome, unreached, reached);
+
+        //Check to see if we're done.
+        if (unreached.size() == 0) return true;
+
+        //Otherwise, we need a new Rome candidate.  If our system is valid, Rome
+        // *must* be in the 'unreached' group.  (If it wasn't, our old testRome
+        // would have to have also been a valid Rome:  If everyone can come from
+        // Rome by definition, and people in Rome can come from testRome, testRome
+        // is also a valid 'Rome'.)
+        testRome = PickANewRome(testRome, unreached, reached);
+
+        //If PickANewRome failed, our system is invalid--return false;
+        if (testRome == FLAGLONG) return false;
+
+        //Otherwise, we can go ahead and loop with our new Rome.  Before we do,
+        // though, we can clear out our 'reached' list.  We can do this because
+        // not only do we know that migrants from our new testRome can get to
+        // everywhere in 'reached', we *also* know that nothing in 'reached' can
+        // get to 'unreached'--testRome and the other 'unreached' entries are the
+        // only possibilities left.
+        //
+        // So, we clear 'reached'.  This has the effect of making our next loop
+        // still more efficient.
+        reached.clear();
+    }
+}
+
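+// UpdateReachList does a breadth-first search outward from testRome: every
+// partition in 'unreached' that testRome's migrants can reach, directly or via
+// a chain of other 'unreached' partitions, is moved into 'reached' (testRome
+// itself ends up in 'reached' as well).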
+void UIVars2DForce::UpdateReachList(long int testRome, list<long int>& unreached,
+                                    list<long int>& reached) const
+{
+    list<long int> oneStepAway;
+    oneStepAway.push_back(testRome);
+
+    while (oneStepAway.size() > 0)
+    {
+        list<long int> twoStepsAway;
+        for (list<long int>::iterator partfrom = oneStepAway.begin();
+             partfrom != oneStepAway.end(); partfrom++)
+        {
+            for (list<long int>::iterator partto = unreached.begin();
+                 partto != unreached.end(); partto++)
+            {
+                if (CanReach((*partfrom), (*partto)))
+                {
+                    twoStepsAway.push_back((*partto));
+                }
+            }
+        }
+        for (list<long int>::iterator stepit = oneStepAway.begin();
+             stepit != oneStepAway.end(); stepit++ )
+        {
+            reached.push_back((*stepit));
+            unreached.remove((*stepit));
+        }
+        oneStepAway = twoStepsAway;
+    }
+}
+
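+// PickANewRome searches backwards from oldRome: it returns a partition in
+// 'unreached' whose migrants can reach oldRome, either directly or via a chain
+// of partitions in 'reached', or FLAGLONG if no such partition exists.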
+long int UIVars2DForce::PickANewRome(long int oldRome, list<long int> unreached,
+                                     list<long int> reached) const
+{
+    list<long int> intermediates;
+    intermediates.push_back(oldRome);
+    reached.remove(oldRome);
+    while (intermediates.size() > 0)
+    {
+        list<long int> newintermediates;
+        for (list<long int>::iterator partto = intermediates.begin();
+             partto != intermediates.end(); partto++)
+        {
+            //Check to see if we can get to anything in the 'unreached' list.
+            // If so, that's our new Rome candidate, and we're done.
+            for (list<long int>::iterator partfrom = unreached.begin();
+                 partfrom != unreached.end(); partfrom++)
+            {
+                if (CanReach((*partfrom), (*partto)))
+                {
+                    return (*partfrom);
+                }
+            }
+            //We didn't get to 'unreached' directly; try going through the 'reached'
+            // list.  If we find anything, we can remove it from 'reached'.
+            for (list<long int>::iterator partfrom = reached.begin();
+                 partfrom != reached.end();)
+            {
+                if (CanReach((*partfrom), (*partto)))
+                {
+                    newintermediates.push_back(*partfrom);
+                    reached.erase(partfrom++);
+                }
+                else
+                {
+                    partfrom++;
+                }
+            }
+        }
+        intermediates = newintermediates;
+    }
+
+    //We never found any way to get to oldRome from anything in unreached.
+    // Hence, our network is invalid.
+    return FLAGLONG;
+}
+
+bool UIVars2DForce::CanReach(long int partfrom, long int partto) const
+{
+    long int pindex = partto*m_npartitions + partfrom;
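+    // In the row-major matrix this is the rate into 'partto' from 'partfrom';
+    // a zero or invalid rate means migrants from 'partfrom' never enter 'partto'.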
+    if (GetParamstatus(pindex).Valid())
+    {
+        double pval = GetStartValue(pindex);
+        // If the start value is zero, the populations are unconnected at least
+        // for the first tree, and that is a problem even in a Bayesian run.
+        if (pval == 0.0) return false;
+        else return true;
+    }
+    else
+        return false;
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+UIVarsCoalForce::UIVarsCoalForce(UIVars * myUIVars, long int numCrossPartitions)
+    :   UIVarsSingleForce(
+        myUIVars,
+        force_COAL,
+        numCrossPartitions,
+        defaults::theta,        //
+        defaults::thetaMethod,
+        false,                  // Coal force can't be turned off ever
+        true,                   // Coal force always ON
+        defaults::coalEvents,
+        UIVarsPrior(myUIVars->GetUI(),
+                    force_COAL,
+                    defaults::priortypeTheta,
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+                    defaults::samplingRate,
+#endif
+                    defaults::lowerboundTheta,
+                    defaults::upperboundTheta)
+        )
+{
+}
+
+UIVarsCoalForce::UIVarsCoalForce(UIVars * myUIVars, const UIVarsCoalForce& coalForce)
+    :
+    UIVarsSingleForce(myUIVars,coalForce)
+{
+}
+
+void UIVarsCoalForce::AssertOnIllegalStartMethod(method_type method)
+{
+    assert( (method == method_FST)          ||
+            (method == method_WATTERSON)    ||
+            (method == method_PROGRAMDEFAULT));
+    if ((method==method_FST) && !(GetConstUIVars().forces.GetForceLegal(force_MIG)))
+    {
+        throw data_error("FST method is only available for data sets with more than one population and no divergence.");
+    }
+}
+
+void
+UIVarsCoalForce::FillCalculatedStartValues()
+{
+    // do FST values
+    if(GetConstUIVars().datapackplus.GetNCrossPartitions() > 1)
+    {
+        DoubleVec1d estimates;
+        std::deque<bool> isCalculated;
+        const UIVars& vars = GetConstUIVars();
+        DoubleVec1d regionalthetascalars(vars.datapackplus.GetEffectivePopSizes());
+        DoubleVec2d regionalmurates(vars.datamodel.GetRelativeMuRates());
+        ThetaFSTMultiregion(registry.GetDataPack(), regionalthetascalars, regionalmurates, estimates, isCalculated);
+        m_calculatedStartValues.erase(method_FST);
+        m_calculatedStartValues.insert(make_pair(method_FST,estimates));
+
+        std::set<string> defaultedEstimates;
+        for (long int pindex = 0; pindex < static_cast<long int>(isCalculated.size()); pindex++)
+        {
+            if (!isCalculated[pindex])
+            {
+                string name = GetParamName(pindex);
+                defaultedEstimates.insert(name);
+            }
+        }
+        if (defaultedEstimates.size())
+        {
+            std::set<string>::iterator onename;
+            string warning = "Warning:  calculating FST estimates for ";
+            for (onename = defaultedEstimates.begin(); onename != defaultedEstimates.end(); onename++)
+            {
+                warning += "\"" + *onename + "\", ";
+            }
+            warning += "is impossible due to the data for the populations involved.  If the FST method is invoked to obtain starting values for those parameters, defaults will be used instead.";
+            GetConstUIVars().GetUI()->AddWarning(warning);
+        }
+    }
+    // do Watterson values
+    {
+        DoubleVec1d estimates;
+        std::deque<bool> isCalculated;
+        const UIVars& vars = GetConstUIVars();
+        DoubleVec1d regionalthetascalars(vars.datapackplus.GetEffectivePopSizes());
+        DoubleVec2d regionalmurates(vars.datamodel.GetRelativeMuRates());
+        ThetaWattersonMultiregion(registry.GetDataPack(), regionalthetascalars,
+                                  regionalmurates, estimates, isCalculated);
+        m_calculatedStartValues.erase(method_WATTERSON);
+        m_calculatedStartValues.insert(make_pair(method_WATTERSON,estimates));
+
+        std::set<string> defaultedEstimates;
+        for (long int pindex = 0; pindex < static_cast<long int>(isCalculated.size()); pindex++)
+        {
+            if (!isCalculated[pindex])
+            {
+                string name = GetParamName(pindex);
+                defaultedEstimates.insert(name);
+            }
+        }
+        if (defaultedEstimates.size())
+        {
+            std::set<string>::iterator onename;
+            string warning = "Warning:  calculating Watterson estimates for ";
+            for (onename = defaultedEstimates.begin();
+                 onename != defaultedEstimates.end(); onename++)
+            {
+                warning += "\"" + *onename + "\", ";
+            }
+            warning += "is impossible because one or more genetic regions have no data or only one data point in the corresponding population(s).  If the Watterson method is invoked to obtain starting values for theta, defaults will be used in that calculation.";
+            GetConstUIVars().GetUI()->AddWarning(warning);
+        }
+    }
+}
+
+bool
+UIVarsCoalForce::AreZeroesValid() const
+{
+    //No force can be invalid or zero for coalescence.  It can't even start at
+    // zero, regardless of whether it's held there.
+    for (long int pindex = 0; pindex<GetNumParameters(); pindex++)
+    {
+        if (!GetParamstatus(pindex).Valid())
+        {
+            return false;
+        }
+        if (GetStartValue(pindex) == 0.0)
+        {
+            return false;
+        }
+    }
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+UIVarsMigForce::UIVarsMigForce (UIVars * myUIVars, long int numPopulations, bool onOrOff)
+    :
+    UIVars2DForce(
+        myUIVars,
+        force_MIG,
+        numPopulations,
+        defaults::migration,
+        defaults::migMethod,
+        false,                  // can't change original on/off setting
+        onOrOff,
+        defaults::migEvents,
+        UIVarsPrior(myUIVars->GetUI(),
+                    force_MIG,
+                    defaults::priortypeMig,
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+                    defaults::samplingRate,
+#endif
+                    defaults::lowerboundMig,
+                    defaults::upperboundMig)
+        )
+{
+}
+
+UIVarsMigForce::UIVarsMigForce
+(   UIVars * myUIVars, const UIVarsMigForce & migForce)
+    :
+    UIVars2DForce(myUIVars,migForce)
+{
+}
+
+void
+UIVarsMigForce::FillCalculatedStartValues()
+{
+    DoubleVec1d estimates;
+    std::deque<bool> isCalculated;
+    const UIVars& vars = GetConstUIVars();
+    DoubleVec2d regionalmurates(vars.datamodel.GetRelativeMuRates());
+    MigFSTMultiregion(registry.GetDataPack(), regionalmurates,
+                      estimates, isCalculated);
+    m_calculatedStartValues.erase(method_FST);
+    m_calculatedStartValues.insert(make_pair(method_FST,estimates));
+
+    std::set<string> defaultedEstimates;
+    for (long int pindex = 0; pindex < static_cast<long int>(isCalculated.size()); pindex++)
+    {
+        if (!isCalculated[pindex])
+        {
+            string name = GetParamName(pindex);
+            defaultedEstimates.insert(name);
+        }
+    }
+    if (defaultedEstimates.size())
+    {
+        std::set<string>::iterator onename;
+        string warning = uiwarn::calcFST_0;
+        for (onename = defaultedEstimates.begin(); onename != defaultedEstimates.end(); onename++)
+        {
+            warning += "\"" + *onename + "\", ";
+        }
+        warning += uiwarn::calcFST_1;
+        GetConstUIVars().GetUI()->AddWarning(warning);
+        //LS DEBUG:  This warning is always printed out when the user
+        // first fires up LAMARC, and never afterwards, due to the fact
+        // that FST is the default starting method for Migration.  If
+        // this changes, the text of this method should probably change,
+        // too.
+    }
+}
+
+//-------------------------------------------------------------------------------------
+
+UIVarsDivMigForce::UIVarsDivMigForce (UIVars * myUIVars, long int numPopulations, bool onOrOff)
+    :
+    UIVars2DForce(
+        myUIVars,
+        force_DIVMIG,
+        numPopulations,
+        defaults::divMigration,
+        defaults::divMigMethod,
+        false,                  // can't change original on/off setting
+        onOrOff,
+        defaults::migEvents,
+        UIVarsPrior(myUIVars->GetUI(),
+                    force_DIVMIG,
+                    defaults::priortypeDivMig,
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+                    defaults::samplingRate,
+#endif
+                    defaults::lowerboundDivMig,
+                    defaults::upperboundDivMig)
+        )
+{
+}
+
+UIVarsDivMigForce::UIVarsDivMigForce
+(   UIVars * myUIVars, const UIVarsDivMigForce & divmigForce)
+    :
+    UIVars2DForce(myUIVars,divmigForce)
+{
+}
+
+void UIVarsDivMigForce::AssertOnIllegalStartMethod(method_type method)
+{
+    if ((method==method_FST) && !(GetConstUIVars().forces.GetForceLegal(force_MIG)))
+    {
+        throw data_error("FST method is only available for data sets with more than one population and no divergence.");
+    }
+    assert(method == method_PROGRAMDEFAULT); // can't use FST here
+}
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+UIVarsDivergenceForce::UIVarsDivergenceForce(UIVars * myUIVars, long int nDivPopulations)
+    :   UIVarsSingleForce(
+        myUIVars,
+        force_DIVERGENCE,
+        (nDivPopulations-1)/2, // truncation deliberate
+        defaults::epochtime,
+        defaults::divMethod,
+        // MDEBUG not sure about the values of these two bools
+        true,                   // user may turn the Divergence force on or off
+        false,                  // Divergence force starts out OFF
+        defaults::epochEvents,
+        UIVarsPrior(myUIVars->GetUI(),
+                    force_DIVERGENCE,
+                    defaults::priortypeEpoch,
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+                    defaults::samplingRate,
+#endif
+                    defaults::lowerboundEpoch,
+                    defaults::upperboundEpoch)
+        )
+{
+ // deliberately blank
+}
+
+UIVarsDivergenceForce::UIVarsDivergenceForce(UIVars * myUIVars, const UIVarsDivergenceForce& divForce)
+    :
+    UIVarsSingleForce(myUIVars,divForce)
+{
+    // copy epoch info
+    newpops = divForce.newpops;
+    ancestors = divForce.ancestors;
+}
+
+void UIVarsDivergenceForce::AssertOnIllegalStartMethod(method_type method)
+{
+    assert (method == method_PROGRAMDEFAULT);
+}
+
+bool
+UIVarsDivergenceForce::AreZeroesValid() const
+{
+    //No parameter can be invalid or zero for divergence.  It can't even start at
+    // zero, regardless of whether it's held there.
+    for (long int pindex = 0; pindex<GetNumParameters(); pindex++)
+    {
+        if (!GetParamstatus(pindex).Valid())
+        {
+            return false;
+        }
+        if (GetStartValue(pindex) == 0.0)
+        {
+            return false;
+        }
+    }
+    return true;
+}
+
+void
+UIVarsDivergenceForce::AddNewPops(const vector<string>& newp)
+{
+    newpops.push_back(newp);
+}
+
+void
+UIVarsDivergenceForce::AddAncestor(const string& anc)
+{
+    ancestors.push_back(anc);
+}
+
+vector<vector<string> >
+UIVarsDivergenceForce::GetNewPops() const
+{
+    return newpops;
+}
+
+vector<string>
+UIVarsDivergenceForce::GetAncestors () const
+{
+    return ancestors;
+}
+
+string
+UIVarsDivergenceForce::GetAncestor (long index) const
+{
+    return ancestors[index];
+}
+
+//------------------------------------------------------------------------------------
+
+UIVarsDiseaseForce::UIVarsDiseaseForce(
+    UIVars * myUIVars,
+    long int numDiseaseStates,
+    bool isLegal)
+    :
+    UIVars2DForce(
+        myUIVars,
+        force_DISEASE,
+        numDiseaseStates,
+        defaults::disease,
+        defaults::diseaseMethod,
+        false,              // EWFIX.P5 DISEASE change false => isLegal later
+        // when we figure out how to accommodate
+        // change in number of theta and growth
+        // params when disease forces are turned
+        // on and off in the menu
+        isLegal,
+        defaults::diseaseEvents,
+        UIVarsPrior(myUIVars->GetUI(),
+                    force_DISEASE,
+                    defaults::priortypeDisease,
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+                    defaults::samplingRate,
+#endif
+                    defaults::lowerboundDisease,
+                    defaults::upperboundDisease)
+        ),
+    location(defaults::diseaseLocation)
+{
+    //LS NOTE:  When we get multiple disease states in, this is the place to
+    // set up the invalid and identical parameters (re: the tic-tac-toe setup
+    // we talked about on Joe's blackboard)
+    //
+    /* For example:  say we have two traits, Aa, and XYZ.  You can set things up:
+
+          AX  AY  AZ    aX  aY  aZ
+       AX --  ++  ++    ++  --  --
+       AY ++  --  ++    --  ++  --
+       AZ ++  ++  --    --  --  ++
+
+       aX ++  --  --    --  ++  ++
+       aY --  ++  --    ++  --  ++
+       aZ --  --  ++    ++  ++  --
+
+       So all the ++'s are possible, and all the --'s are set 'invalid', like the
+       diagonal.  Additionally, if A->a is constant, and not influenced by
+       the current XYZ, you can set AX->aX, AY->aY, and AZ->aZ to be equal to
+       each other, and likewise for the other sets of rates.
+    */
+}
+
+UIVarsDiseaseForce::UIVarsDiseaseForce
+(   UIVars * myUIVars, const UIVarsDiseaseForce & diseaseForce)
+    :
+    UIVars2DForce(myUIVars,diseaseForce),
+    location(diseaseForce.location)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+UIVarsRecForce::UIVarsRecForce(UIVars * myUIVars, bool canTurnOn)
+    :
+    UIVarsSingleForce(
+        myUIVars,
+        force_REC,
+        1L,                     // only one param value for recombine
+        defaults::recombinationRate,
+        defaults::recMethod,
+        canTurnOn,              // recombine illegal for unlinked data
+        false,                  // recombine force not required when possible
+        defaults::recEvents,
+        UIVarsPrior(myUIVars->GetUI(),
+                    force_REC,
+                    defaults::priortypeRec,
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+                    defaults::samplingRate,
+#endif
+                    defaults::lowerboundRec,
+                    defaults::upperboundRec)
+        )
+{
+}
+
+UIVarsRecForce::UIVarsRecForce(UIVars * myUIVars, const UIVarsRecForce& recForce)
+    :
+    UIVarsSingleForce(myUIVars,recForce)
+{
+}
+
+bool
+UIVarsRecForce::AreZeroesValid() const
+{
+    //Recombination can be zero.  You might as well not have it on if so,
+    // but who are we to quibble?  Also, if we allow different populations to
+    // have different rec values at some point, any of them could be zero.
+    return true;
+}
+
+bool UIVarsRecForce::GetOnOff() const
+{
+    if (GetConstUIVars().traitmodels.AnyMappingAnalyses()) return true;
+    return UIVarsSingleForce::GetOnOff();
+}
+
+void UIVarsRecForce::SetOnOff(bool onOffVal)
+{
+    if (GetConstUIVars().traitmodels.AnyMappingAnalyses() && !onOffVal)
+    {
+        throw data_error("Cannot turn off recombination when there are traits to be mapped.");
+    }
+    if (GetConstUIVars().datapackplus.AnySNPDataWithDefaultLocations() && onOffVal)
+    {
+        throw data_error("We cannot accurately estimate recombination as there is SNP data whose locations are unknown.  "
+                         "If you want to estimate recombination, go back to the converter and use a map file to tell it where the sequenced SNPs are.  "
+                         "See the documentation: the \"Length and Spacing Information with Segment Coordinates\" section in "
+                         "genetic_map.html for how to do this within the GUI converter, or the \"Segments\" section "
+                         "of converter_cmd.html for how to do this using the converter command file.");
+    }
+    UIVarsSingleForce::SetOnOff(onOffVal);
+}
+
+//------------------------------------------------------------------------------------
+
+UIVarsRegionGammaForce::UIVarsRegionGammaForce(UIVars * myUIVars)
+    :
+    UIVarsSingleForce(
+        myUIVars,
+        force_REGION_GAMMA,
+        1L,                     // only one param value for region gamma
+        defaults::gammaOverRegions,
+        defaults::recMethod,
+        true,              // legal to turn on--ONLY for multi-region data
+        false,             // this "force" is not required
+        0,                 // no such thing as region gamma "events"
+        UIVarsPrior(myUIVars->GetUI(),
+                    force_REC,               //BUGBUG!!! no prior allowed!
+                    defaults::priortypeRec,  //what to do???
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+                    defaults::samplingRate,
+#endif
+                    defaults::lowerboundRec,
+                    defaults::upperboundRec)
+        )
+{
+    if (GetConstUIVars().datapackplus.GetNumRegions() == 1)
+    {
+        m_canSetOnOff = false;
+    }
+}
+
+UIVarsRegionGammaForce::UIVarsRegionGammaForce(UIVars * myUIVars, const UIVarsRegionGammaForce& regionGammaForce)
+    :
+    UIVarsSingleForce(myUIVars,regionGammaForce)
+{
+}
+
+bool UIVarsRegionGammaForce::GetOnOff() const
+{
+    if (GetConstUIVars().datapackplus.GetNumRegions() == 1) return false;
+    if (GetConstUIVars().chains.GetDoBayesianAnalysis()) return false;
+    if (GetConstUIVars().forces.GetForceOnOff(force_GROW)) return false;
+    return UIVarsSingleForce::GetOnOff();
+}
+
+void
+UIVarsRegionGammaForce::SetOnOff(bool onOffVal)
+{
+    if (GetConstUIVars().datapackplus.GetNumRegions() == 1 && onOffVal)
+    {
+        throw data_error("Cannot analyze the region gamma when there is only one region to analyze.");
+    }
+    if (GetConstUIVars().chains.GetDoBayesianAnalysis() && onOffVal)
+    {
+        throw data_error("Cannot analyze the region gamma during a Bayesian analysis.");
+    }
+    if (GetConstUIVars().forces.GetForceOnOff(force_GROW) && onOffVal)
+    {
+        throw data_error("Cannot analyze the region gamma if you are simultaneously estimating the growth rate.");
+    }
+    UIVarsSingleForce::SetOnOff(onOffVal);
+}
+
+bool
+UIVarsRegionGammaForce::AreZeroesValid() const
+{
+    if (!GetParamstatus(0).Valid())
+        return false;
+    if (GetStartValue(0) == 0.0)
+        return false;
+
+    return true;
+}
+
+//------------------------------------------------------------------------------------
+
+UIVarsGrowForce::UIVarsGrowForce(UIVars * myUIVars, long int numCrossPartitions)
+    :
+    UIVarsSingleForce(
+        myUIVars,
+        force_GROW,
+        numCrossPartitions,
+        defaults::growth,
+        defaults::growMethod,
+        true,                   // growth force always legal to turn on
+        false,                  // growth force not required when possible
+        defaults::growEvents,
+        UIVarsPrior(myUIVars->GetUI(),
+                    force_GROW,
+                    defaults::priortypeGrowth,
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+                    defaults::samplingRate,
+#endif
+                    defaults::lowerboundGrowth,
+                    defaults::upperboundGrowth)
+        ),
+    growthType(defaults::growType)
+{
+}
+
+UIVarsGrowForce::UIVarsGrowForce(UIVars * myUIVars, const UIVarsGrowForce& growForce)
+    :
+    UIVarsSingleForce(myUIVars,growForce),
+    growthType(growForce.growthType)
+{
+}
+
+bool
+UIVarsGrowForce::AreZeroesValid() const
+{
+    //Any growth can be zero; even up to all of them.
+    return true;
+}
+
+// We don't currently have any STAIRSTEP models, so the case statement
+// is probably wrong for when we have them... Jon 2006/09/15
+growth_scheme
+UIVarsGrowForce::GetGrowthScheme() const
+{
+    switch(growthType)
+    {
+        case growth_CURVE:
+            return growth_EXP;
+            break;
+        case growth_STICK:      // not sure what this case is; assuming we
+                                // can treat it the same as STICKEXP for now
+        case growth_STICKEXP:
+            return growth_EXP;
+            break;
+    }
+    return growth_EXP;
+}
+
+void
+UIVarsGrowForce::SetGrowthScheme(growth_scheme g)
+{
+    // this is currently a no-op, as soon as we get STAIRSTEP
+    // we'll need something else, probably ickly complicated here
+    // in order to account for interlocking nature with growth_type
+    // management...refactor growth_type?  Jon 2006/09/15
+}
+
+force_type
+UIVarsGrowForce::GetPhase2Type(force_type f) const
+{
+    if (growthType == growth_STICKEXP) return force_EXPGROWSTICK;
+
+    return f;
+}
+
+//LS NOTE:  Cannot have a parallel 'GetOnOff' for growth here that checks
+// whether gamma is on or off, because gamma checks if *growth* is on or off.
+// If you try it, you'll get an infinite loop.  Yes, this has been... verified.
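+// (Sketch of the cycle such a parallel check would create; the growth
+//  GetOnOff on the first line is hypothetical, the rest is what the real
+//  code already does:
+//      UIVarsGrowForce::GetOnOff()        -> forces.GetForceOnOff(force_REGION_GAMMA)
+//   -> UIVarsRegionGammaForce::GetOnOff() -> forces.GetForceOnOff(force_GROW)
+//   -> UIVarsGrowForce::GetOnOff()        -> ... and so on, forever.)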
+
+void UIVarsGrowForce::SetOnOff(bool onOffVal)
+{
+    if (GetConstUIVars().forces.GetForceOnOff(force_REGION_GAMMA))
+    {
+        throw data_error("Cannot estimate growth if you are simulataneously estimating the region gamma.");
+    }
+    UIVarsSingleForce::SetOnOff(onOffVal);
+}
+
+//-------------------------------------------------------------
+
+UIVarsLogisticSelectionForce::UIVarsLogisticSelectionForce
+(   UIVars * myUIVars, long numCrossPartitions)
+    :
+    UIVarsSingleForce(
+        myUIVars,
+        force_LOGISTICSELECTION,
+        1, // there's only one selection coefficient
+        defaults::logisticSelection,
+        defaults::lselectMethod,
+        true,                   // always legal to turn on
+        false,                  // but it's not required
+        defaults::lselectEvents,
+        UIVarsPrior(myUIVars->GetUI(),
+                    force_LOGISTICSELECTION,
+                    defaults::priortypeLSelect,
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+                    defaults::samplingRate,
+#endif
+                    defaults::lowerboundLSelect,
+                    defaults::upperboundLSelect)
+        )
+    ,selectionType(defaults::selectionType) // MCHECK major allele freq here?
+{
+    if (2 != numCrossPartitions)
+        m_canSetOnOff = false;
+}
+
+UIVarsLogisticSelectionForce::UIVarsLogisticSelectionForce(UIVars * myUIVars,
+                                                           const UIVarsLogisticSelectionForce& logisticSelectionForce) :
+    UIVarsSingleForce(myUIVars, logisticSelectionForce),
+    selectionType(logisticSelectionForce.selectionType)
+{
+}
+
+bool
+UIVarsLogisticSelectionForce::AreZeroesValid() const
+{
+    // Sure; zero means selection isn't acting.
+    return true;
+}
+
+force_type
+UIVarsLogisticSelectionForce::GetPhase2Type(force_type f) const
+{
+    if (selectionType == selection_STOCHASTIC) return force_LOGSELECTSTICK;
+
+    return f;
+}
+
+//-------------------------------------------------------------
+
+bool
+UIVarsForces::IsInactive::operator()(force_type ft) const
+{
+    const UIVarsSingleForce & theForce
+        = m_vars_forces.getForceRegardlessOfLegality(ft);
+    bool isInactive = !(theForce.GetOnOff());
+    return isInactive;
+}
+
+UIVarsForces::IsInactive::IsInactive(const UIVarsForces& vars_forces)
+    : m_vars_forces(vars_forces)
+{
+}
+
+UIVarsForces::IsInactive::~IsInactive()
+{
+}
+
+bool
+UIVarsForces::IsIllegal::operator()(force_type ft) const
+{
+    const UIVarsSingleForce & theForce
+        = m_vars_forces.getForceRegardlessOfLegality(ft);
+    bool isIllegal = (!theForce.GetLegal());
+    return isIllegal;
+}
+
+UIVarsForces::IsIllegal::IsIllegal(const UIVarsForces& vars_forces)
+    : m_vars_forces(vars_forces)
+{
+}
+
+UIVarsForces::IsIllegal::~IsIllegal()
+{
+}
+
+UIVarsForces::UIVarsForces(UIVars * myUIVars,long int nCrossPartitions, long int nMigPopulations,
+                           long int nDivPopulations, long int nDiseaseStates,bool canMeasureRecombination)
+    :
+    UIVarsComponent(myUIVars),
+    coalForce(myUIVars,nCrossPartitions),
+    diseaseForce(myUIVars,nDiseaseStates,(nDiseaseStates >= 2)),
+    growForce(myUIVars,nCrossPartitions),
+    migForce(myUIVars,nMigPopulations,(nMigPopulations >= 2)),
+    divMigForce(myUIVars,nDivPopulations,(nDivPopulations > 2)),
+    divForce(myUIVars,nDivPopulations),   // yes, integer division with truncation is desired
+    recForce(myUIVars,canMeasureRecombination),
+    regionGammaForce(myUIVars),
+    logisticSelectionForce(myUIVars,nCrossPartitions)
+{
+    //requires that both datapackplus and datamodel be set up in uivars.
+    FillCalculatedStartValues();
+}
+
+UIVarsForces::UIVarsForces(UIVars * myUIVars, const UIVarsForces& forcesRef)
+    :
+    UIVarsComponent(myUIVars),
+    coalForce(myUIVars,forcesRef.coalForce),
+    diseaseForce(myUIVars,forcesRef.diseaseForce),
+    growForce(myUIVars,forcesRef.growForce),
+    migForce(myUIVars,forcesRef.migForce),
+    divMigForce(myUIVars,forcesRef.divMigForce),
+    divForce(myUIVars,forcesRef.divForce),
+    recForce(myUIVars,forcesRef.recForce),
+    regionGammaForce(myUIVars,forcesRef.regionGammaForce),
+    logisticSelectionForce(myUIVars,forcesRef.logisticSelectionForce)
+{
+}
+
+UIVarsForces::~UIVarsForces()
+{
+}
+
+void
+UIVarsForces::FillCalculatedStartValues()
+{
+    coalForce.FillCalculatedStartValues();
+    migForce.FillCalculatedStartValues();
+}
+
+LongVec1d UIVarsForces::GetForceSizes() const
+{
+    LongVec1d retVec;
+    vector<force_type> forces = GetActiveForces();
+    for (unsigned long int fnum = 0; fnum < forces.size(); fnum++)
+    {
+        retVec.push_back(getLegalForce(forces[fnum]).GetNumParameters());
+    }
+    return retVec;
+}
+
+ForceTypeVec1d
+UIVarsForces::GetActiveForces() const
+{
+    ForceTypeVec1d returnVec = GetLegalForces();
+    returnVec.erase(
+        std::remove_if(returnVec.begin(),returnVec.end(),IsInactive(*this)),
+        returnVec.end());
+    return returnVec;
+}
+
+ForceTypeVec1d
+UIVarsForces::GetPhase2ActiveForces() const
+{
+    ForceTypeVec1d tempVec = GetActiveForces();
+    ForceTypeVec1d returnVec;
+    for (unsigned long fnum=0; fnum<tempVec.size(); fnum++)
+    {
+        returnVec.push_back(getLegalForce(tempVec[fnum]).GetPhase2Type(tempVec[fnum]));
+    }
+    return returnVec;
+}
+
+ForceTypeVec1d
+UIVarsForces::GetLegalForces() const
+{
+    ForceTypeVec1d returnVec = GetPossibleForces();
+    returnVec.erase(
+        std::remove_if(returnVec.begin(),returnVec.end(),IsIllegal(*this)),
+        returnVec.end());
+    return returnVec;
+}
+
+ForceTypeVec1d
+UIVarsForces::GetPossibleForces() const
+{
+    //This is where the so-called 'canonical order' of forces in LAMARC
+    // gets set up.  If we add a new force, add it here appropriately.
+    ForceTypeVec1d returnVec;
+    returnVec.push_back(force_COAL);
+    returnVec.push_back(force_MIG);
+    returnVec.push_back(force_DIVMIG);
+    returnVec.push_back(force_DISEASE);
+    returnVec.push_back(force_REC);
+    returnVec.push_back(force_GROW);
+    returnVec.push_back(force_DIVERGENCE);
+    returnVec.push_back(force_LOGISTICSELECTION);
+    returnVec.push_back(force_REGION_GAMMA);
+    return returnVec;
+}
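+
+/* Taken together, GetPossibleForces / GetLegalForces / GetActiveForces form
+   a filter chain.  A usage sketch (the variable names are illustrative
+   only):
+
+       ForceTypeVec1d possible = forces.GetPossibleForces(); // canonical order
+       ForceTypeVec1d legal    = forces.GetLegalForces();    // possible minus IsIllegal
+       ForceTypeVec1d active   = forces.GetActiveForces();   // legal minus IsInactive
+*/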
+
+long int UIVarsForces::GetNumGroups(force_type ftype) const
+{
+    return getLegalForce(ftype).GetNumGroups();
+}
+
+long int UIVarsForces::ParamInGroup(force_type ftype, long int pindex) const
+{
+    return getLegalForce(ftype).ParamInGroup(pindex);
+}
+
+bool
+UIVarsForces::GetForceCanTurnOnOff(force_type ftype) const
+{
+    const UIVarsSingleForce & thisForce = getForceRegardlessOfLegality(ftype);
+    return thisForce.GetCanSetOnOff();
+}
+
+bool
+UIVarsForces::GetForceLegal(force_type ftype) const
+{
+    const UIVarsSingleForce & thisForce = getForceRegardlessOfLegality(ftype);
+    return thisForce.GetLegal();
+}
+
+bool
+UIVarsForces::GetForceZeroesValidity(force_type ftype) const
+{
+    const UIVarsSingleForce & thisForce = getForceRegardlessOfLegality(ftype);
+    return thisForce.AreZeroesValid();
+}
+
+void
+UIVarsForces::FixGroups(force_type ftype)
+{
+    UIVarsSingleForce & thisForce = getLegalForce(ftype);
+    thisForce.FixGroups();
+}
+
+const UIVarsSingleForce &
+UIVarsForces::getLegalForce(force_type ftype) const
+{
+    // if you change this method, make sure you change the
+    // non-const version and getForceRegardlessOfLegality too!!
+    switch(ftype)
+    {
+        case force_COAL:
+            assert(coalForce.GetLegal());
+            return coalForce;
+            break;
+        case force_MIG:
+            assert(migForce.GetLegal());
+            return migForce;
+            break;
+        case force_DIVMIG:
+            assert(divMigForce.GetLegal());
+            return divMigForce;
+            break;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            assert(growForce.GetLegal());
+            return growForce;
+            break;
+        case force_REC:
+            assert(recForce.GetLegal());
+            return recForce;
+            break;
+        case force_DISEASE:
+            assert(diseaseForce.GetLegal());
+            return diseaseForce;
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            assert(logisticSelectionForce.GetLegal());
+            return logisticSelectionForce;
+            break;
+        case force_REGION_GAMMA:
+            assert(regionGammaForce.GetLegal());
+            return regionGammaForce;
+            break;
+        case force_DIVERGENCE:
+            assert(divForce.GetLegal());
+            return divForce;
+            break;
+        default:
+            assert(false);              //Uncaught force type.
+    }
+    throw implementation_error("UIVarsForces::getLegalForce(force_type) given an unknown force type.");
+}
+
+UIVarsSingleForce &
+UIVarsForces::getLegalForce(force_type ftype)
+{
+    // if you change this method, make sure you change the
+    // const version and getForceRegardlessOfLegality too!!
+    switch(ftype)
+    {
+        case force_COAL:
+            assert(coalForce.GetLegal());
+            return coalForce;
+            break;
+        case force_MIG:
+            assert(migForce.GetLegal());
+            return migForce;
+            break;
+        case force_DIVMIG:
+            assert(divMigForce.GetLegal());
+            return divMigForce;
+            break;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            assert(growForce.GetLegal());
+            return growForce;
+            break;
+        case force_REC:
+            assert(recForce.GetLegal());
+            return recForce;
+            break;
+        case force_DISEASE:
+            assert(diseaseForce.GetLegal());
+            return diseaseForce;
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            assert(logisticSelectionForce.GetLegal());
+            return logisticSelectionForce;
+            break;
+        case force_REGION_GAMMA:
+            assert(regionGammaForce.GetLegal());
+            return regionGammaForce;
+            break;
+        case force_DIVERGENCE:
+            assert(divForce.GetLegal());
+            return divForce;
+            break;
+        default:
+            assert(false);              //Uncaught force type.
+    }
+    throw implementation_error("UIVarsForces::getLegalForce(force_type) given an unknown force type.");
+}
+
+const UIVarsSingleForce &
+UIVarsForces::getForceRegardlessOfLegality(force_type ftype) const
+{
+    // if you change this method, make sure you change the
+    // const and non-const versions of getLegalForce
+    switch(ftype)
+    {
+        case force_COAL:
+            return coalForce;
+            break;
+        case force_MIG:
+            return migForce;
+            break;
+        case force_DIVMIG:
+            return divMigForce;
+            break;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            return growForce;
+            break;
+        case force_REC:
+            return recForce;
+            break;
+        case force_DISEASE:
+            return diseaseForce;
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            return logisticSelectionForce;
+            break;
+        case force_REGION_GAMMA:
+            return regionGammaForce;
+            break;
+        case force_DIVERGENCE:
+            return divForce;
+            break;
+        default:
+            assert(false);              //Uncaught force type.
+    }
+    throw implementation_error("UIVarsForces::getForceRegardlessOfLegality(force_type) given an unknown force type.");
+}
+
+long int
+UIVarsForces::GetNumParameters(force_type force) const
+{
+    return getLegalForce(force).GetNumParameters();
+}
+
+bool
+UIVarsForces::GetForceOnOff   (force_type force) const
+{
+    const UIVarsSingleForce & thisForce = getForceRegardlessOfLegality(force);
+    return thisForce.GetOnOff();
+}
+
+long int
+UIVarsForces::GetMaxEvents    (force_type force) const
+{
+    return getLegalForce(force).GetMaxEvents();
+}
+
+proftype
+UIVarsForces::GetProfileType  (force_type force) const
+{
+    return getLegalForce(force).GetProfileType();
+}
+
+proftype
+UIVarsForces::GetProfileType    (force_type force, long int id) const
+{
+    return getLegalForce(force).GetProfileType(id);
+}
+
+string
+UIVarsForces::GetProfileTypeSummaryDescription(force_type force)  const
+{
+    return getLegalForce(force).GetProfileTypeSummaryDescription();
+}
+
+ParamStatus
+UIVarsForces::GetParamstatus  (force_type force, long int id) const
+{
+    if (id == uiconst::GLOBAL_ID)
+    {
+        //LS NOTE:  This is called this way from uiParameter::Min and ::Max in force_interface.cpp
+        return ParamStatus(pstat_unconstrained);
+    }
+    return getLegalForce(force).GetParamstatus(id);
+}
+
+ParamStatus
+UIVarsForces::GetGroupParamstatus (force_type force, long int id) const
+{
+    return getLegalForce(force).GetGroupParamstatus(id);
+}
+
+LongVec1d
+UIVarsForces::GetGroupParamList (force_type force, long int id) const
+{
+    return getLegalForce(force).GetGroupParamList(id);
+}
+
+// MFIX This looked bad in merge; functions GetIdentGroups and GetMultGroups may
+// not be functional or correct.
+// JDEBUG -- this is a short term kludge to get something/anything in,
+// it is wrong WRONG wrong!
+vector<ParamGroup>
+UIVarsForces::GetIdentGroups (force_type force) const
+{
+    return getLegalForce(force).GetGroups();
+}
+
+// JDEBUG -- this is a short term kludge to get something/anything in,
+// it is wrong WRONG wrong!
+vector<ParamGroup>
+UIVarsForces::GetMultGroups (force_type force) const
+{
+    return getLegalForce(force).GetGroups();
+}
+
+double
+UIVarsForces::GetStartValue   (force_type force, long int id) const
+{
+    return getLegalForce(force).GetStartValue(id);
+}
+
+double
+UIVarsForces::GetTrueValue   (force_type force, long int id) const
+{
+    return getLegalForce(force).GetTrueValue(id);
+}
+
+force_type
+UIVarsForces::GetPhase2Type   (force_type force) const
+{
+    return getLegalForce(force).GetPhase2Type(force);
+}
+
+method_type
+UIVarsForces::GetStartMethod  (force_type force, long int id) const
+{
+    return getLegalForce(force).GetStartMethod(id);
+}
+
+DoubleVec1d
+UIVarsForces::GetStartValues (force_type force) const
+{
+    return getLegalForce(force).GetStartValues();
+}
+
+long int
+UIVarsForces::GetDiseaseLocation() const
+{
+    assert(diseaseForce.GetLegal());
+    return diseaseForce.GetLocation();
+}
+
+growth_type
+UIVarsForces::GetGrowthType() const
+{
+    return growForce.GetGrowthType();
+}
+
+growth_scheme
+UIVarsForces::GetGrowthScheme() const
+{
+    return growForce.GetGrowthScheme();
+}
+
+bool
+UIVarsForces::GetDoProfile(force_type force, long int id) const
+{
+    return getLegalForce(force).GetDoProfile(id);
+}
+
+bool
+UIVarsForces::GetParamValid(force_type force, long int id) const
+{
+    return getLegalForce(force).GetParamValid(id);
+}
+
+bool
+UIVarsForces::GetParamUnique(force_type force, long int id) const
+{
+    return getLegalForce(force).GetParamUnique(id);
+}
+
+//Getters/setters for Bayesian information
+const UIVarsPrior& UIVarsForces::GetPrior(force_type force, long int pindex) const
+{
+    if (pindex == uiconst::GLOBAL_ID)
+    {
+        return getLegalForce(force).GetDefaultPrior();
+    }
+    return getLegalForce(force).GetPrior(pindex);
+}
+
+const UIVarsPrior& UIVarsForces::GetDefaultPrior(force_type force) const
+{
+    return getLegalForce(force).GetDefaultPrior();
+}
+
+bool UIVarsForces::GetUseDefaultPrior(force_type force, long int pindex) const
+{
+    return getLegalForce(force).GetUseDefaultPrior(pindex);
+}
+
+priortype UIVarsForces::GetPriorType(force_type force, long int pindex) const
+{
+    if (pindex == uiconst::GLOBAL_ID)
+    {
+        return getLegalForce(force).GetDefaultPriorType();
+    }
+    return getLegalForce(force).GetPriorType(pindex);
+}
+
+double    UIVarsForces::GetLowerBound(force_type force, long int pindex) const
+{
+    if (pindex == uiconst::GLOBAL_ID)
+    {
+        return getLegalForce(force).GetDefaultLowerBound();
+    }
+    return getLegalForce(force).GetLowerBound(pindex);
+}
+
+selection_type
+UIVarsForces::GetSelectionType() const
+{
+    return logisticSelectionForce.GetSelectionType();
+}
+
+double    UIVarsForces::GetUpperBound(force_type force, long int pindex) const
+{
+    if (pindex == uiconst::GLOBAL_ID)
+    {
+        return getLegalForce(force).GetDefaultUpperBound();
+    }
+    return getLegalForce(force).GetUpperBound(pindex);
+}
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+long    UIVarsForces::GetRelativeSampling(force_type force, long int pindex) const
+{
+    if (pindex == uiconst::GLOBAL_ID)
+    {
+        return getLegalForce(force).GetDefaultRelativeSampling();
+    }
+    return getLegalForce(force).GetRelativeSampling(pindex);
+}
+#endif
+
+void UIVarsForces::SetUseDefaultPrior(bool use, force_type force, long int pindex)
+{
+    getLegalForce(force).SetUseDefaultPrior(use, pindex);
+}
+
+void UIVarsForces::SetUseDefaultPriorsForForce(force_type force)
+{
+    getLegalForce(force).SetUseAllDefaultPriors();
+}
+
+void UIVarsForces::SetPriorType(priortype ptype, force_type force, long int pindex)
+{
+    if (pindex == uiconst::GLOBAL_ID)
+    {
+        getLegalForce(force).SetDefaultPriorType(ptype);
+    }
+    else getLegalForce(force).SetPriorType(ptype, pindex);
+}
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+void UIVarsForces::SetRelativeSampling(long rate, force_type force, long int pindex)
+{
+    if (pindex == uiconst::GLOBAL_ID)
+    {
+        getLegalForce(force).SetDefaultRelativeSampling(rate);
+    }
+    else getLegalForce(force).SetRelativeSampling(rate, pindex);
+}
+#endif
+
+
+void UIVarsForces::SetLowerBound(double bound, force_type force, long int pindex)
+{
+    if (pindex == uiconst::GLOBAL_ID)
+    {
+        getLegalForce(force).SetDefaultLowerBound(bound);
+    }
+    else getLegalForce(force).SetLowerBound(bound, pindex);
+}
+
+void UIVarsForces::SetUpperBound(double bound, force_type force, long int pindex)
+{
+    if (pindex == uiconst::GLOBAL_ID)
+    {
+        getLegalForce(force).SetDefaultUpperBound(bound);
+    }
+    else getLegalForce(force).SetUpperBound(bound, pindex);
+}
+
+string UIVarsForces::GetPriorTypeSummaryDescription(force_type force) const
+{
+    return getLegalForce(force).GetPriorTypeSummaryDescription();
+}
+
+string UIVarsForces::GetPriorTypeSummaryDescription(force_type force, long int pindex, bool sayDefault) const
+{
+    if (pindex != uiconst::GLOBAL_ID)
+    {
+        ParamStatus mystatus = getLegalForce(force).GetParamstatus(pindex);
+        if (!mystatus.Inferred()) return "<" + ToString(mystatus.Status()) + ">";
+        // otherwise do the stuff below
+    }
+    string desc = getLegalForce(force).GetPrior(pindex).GetSummaryDescription();
+    if (pindex != uiconst::GLOBAL_ID)
+    {
+        if (sayDefault)
+        {
+            if (getLegalForce(force).GetUseDefaultPrior(pindex))
+            {
+                desc = "<default>";
+            }
+        }
+    }
+    return desc;
+}
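+
+// Summary of the cases handled above:
+//   pindex != GLOBAL_ID and the parameter is not inferred       -> "<status>"
+//   pindex != GLOBAL_ID, inferred, sayDefault is true, and the
+//     parameter still uses the force-wide default prior         -> "<default>"
+//   otherwise                                                   -> the prior's own summary description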
+
+void
+UIVarsForces::SetForceOnOff  (bool onOffVal, force_type force)
+{
+    if(GetForceCanTurnOnOff(force))
+    {
+        return getLegalForce(force).SetOnOff(onOffVal);
+    }
+    else
+    {
+        if (onOffVal != GetForceOnOff(force))
+        {
+            assert(false);
+            string msg;
+            msg = "Unable to turn ";
+            if (onOffVal)
+            {
+                msg += "on";
+            }
+            else
+            {
+                msg += "off";
+            }
+            msg += " the force " + ToString(force)
+                + " for this dataset.  Try removing the force entirely from the "
+                "XML input file.";
+            throw data_error(msg);
+        }
+    }
+}
+
+void
+UIVarsForces::SetMaxEvents   (long int maxEvents,    force_type force)
+{
+    return getLegalForce(force).SetMaxEvents(maxEvents);
+}
+
+void
+UIVarsForces::SetProfileType (proftype ptype,    force_type force)
+{
+    return getLegalForce(force).SetProfileType(ptype);
+}
+
+void
+UIVarsForces::SetProfileType (proftype ptype)
+{
+    // OK, so it's pretty weird to do this for all of the
+    // forces, since some of them will not be legal, but
+    // hey, if we're never accessing them, we can certainly
+    // write to them, huh?
+    coalForce.SetProfileType(ptype);
+    diseaseForce.SetProfileType(ptype);
+    growForce.SetProfileType(ptype);
+    migForce.SetProfileType(ptype);
+    recForce.SetProfileType(ptype);
+    logisticSelectionForce.SetProfileType(ptype);
+    regionGammaForce.SetProfileType(ptype);
+}
+
+void
+UIVarsForces::SetDoProfile   (bool doProfile,    force_type force, long int id)
+{
+    getLegalForce(force).SetDoProfile(doProfile,id);
+}
+
+void
+UIVarsForces::SetDoProfile   (bool doProfile,    force_type force)
+{
+    getLegalForce(force).SetDoProfile(doProfile);
+}
+
+void
+UIVarsForces::SetDoProfile   (bool doProfile)
+{
+    // OK, so it's pretty weird to do this for all of the
+    // forces, since some of them will not be legal, but
+    // hey, if we're never accessing them, we can certainly
+    // write to them, huh?
+    coalForce.SetDoProfile(doProfile);
+    diseaseForce.SetDoProfile(doProfile);
+    growForce.SetDoProfile(doProfile);
+    migForce.SetDoProfile(doProfile);
+    recForce.SetDoProfile(doProfile);
+    logisticSelectionForce.SetDoProfile(doProfile);
+    regionGammaForce.SetDoProfile(doProfile);
+}
+
+void
+UIVarsForces::SetStartMethod(method_type meth, force_type ftype, long int id)
+{
+    getLegalForce(ftype).SetStartMethod(meth,id);
+}
+
+void
+UIVarsForces::SetParamstatus (const ParamStatus& mystatus, force_type ftype, long int id)
+{
+    getLegalForce(ftype).SetParamstatus(mystatus,id);
+}
+
+void
+UIVarsForces::SetGroupParamstatus (ParamStatus pstat, force_type ftype, long int id)
+{
+    getLegalForce(ftype).SetGroupParamstatus(pstat,id);
+}
+
+void
+UIVarsForces::AddGroup (LongVec1d params, force_type ftype, long int id)
+{
+    getLegalForce(ftype).AddGroup(defaults::groupPstat, params);
+}
+
+void
+UIVarsForces::RemoveParamFromGroup(force_type ftype, long int id)
+{
+    getLegalForce(ftype).RemoveParamIfInAGroup(id);
+}
+
+void
+UIVarsForces::AddParamToGroup(force_type ftype, long int pindex, long int gindex)
+{
+    getLegalForce(ftype).AddParamToGroup(pindex, gindex);
+}
+
+void
+UIVarsForces::AddParamToNewGroup(force_type ftype, long int pindex)
+{
+    getLegalForce(ftype).AddParamToNewGroup(pindex);
+}
+
+void
+UIVarsForces::SetUserStartValue(double startValue, force_type ftype, long int id)
+{
+    getLegalForce(ftype).SetUserStartValue(startValue,id);
+}
+
+void
+UIVarsForces::SetTrueValue(double startValue, force_type ftype, long int id)
+{
+    getLegalForce(ftype).SetTrueValue(startValue,id);
+}
+
+void
+UIVarsForces::SetAllThetaStartValues(double startValue)
+{
+    getLegalForce(force_COAL).SetUserStartValues(startValue);
+}
+
+void
+UIVarsForces::SetAllThetaStartValuesFST()
+{
+    getLegalForce(force_COAL).SetStartMethods(method_FST);
+}
+
+void
+UIVarsForces::SetAllThetaStartValuesWatterson()
+{
+    getLegalForce(force_COAL).SetStartMethods(method_WATTERSON);
+}
+
+void
+UIVarsForces::SetThetaStartValue  (double startValue, long int id)
+{
+    getLegalForce(force_COAL).SetUserStartValue(startValue,id);
+}
+
+void
+UIVarsForces::SetAllMigrationStartValues(double startValue)
+{
+    getLegalForce(force_MIG).SetUserStartValues(startValue);
+}
+
+void
+UIVarsForces::SetAllMigrationStartValuesFST()
+{
+    getLegalForce(force_MIG).SetStartMethods(method_FST);
+}
+
+void
+UIVarsForces::SetMigrationStartValue  (double startValue, long int id)
+{
+    getLegalForce(force_MIG).SetUserStartValue(startValue,id);
+}
+
+void
+UIVarsForces::SetDivergenceEpochStartTime  (double startValue, long int id)
+{
+    getLegalForce(force_DIVERGENCE).SetUserStartValue(startValue,id);
+}
+
+void
+UIVarsForces::SetAllDivMigrationStartValues(double startValue)
+{
+    getLegalForce(force_DIVMIG).SetUserStartValues(startValue);
+}
+
+void
+UIVarsForces::SetDivMigrationStartValue  (double startValue, long int id)
+{
+    getLegalForce(force_DIVMIG).SetUserStartValue(startValue,id);
+}
+
+void
+UIVarsForces::SetAllDiseaseStartValues(double startValue)
+{
+    getLegalForce(force_DISEASE).SetUserStartValues(startValue);
+}
+
+void
+UIVarsForces::SetDiseaseStartValue(double startValue, long int id)
+{
+    getLegalForce(force_DISEASE).SetUserStartValue(startValue,id);
+}
+
+void
+UIVarsForces::SetAllGrowthStartValues(double startValue)
+{
+    getLegalForce(force_GROW).SetUserStartValues(startValue);
+}
+
+void
+UIVarsForces::SetGrowthStartValue(double startValue, long int id)
+{
+    getLegalForce(force_GROW).SetUserStartValue(startValue,id);
+}
+
+void
+UIVarsForces::SetGrowthType(growth_type gType)
+{
+    growForce.SetGrowthType(gType);
+}
+
+void
+UIVarsForces::SetGrowthScheme(growth_scheme gScheme)
+{
+    growForce.SetGrowthScheme(gScheme);
+}
+
+void
+UIVarsForces::SetRecombinationStartValue(double startValue)
+{
+    getLegalForce(force_REC).SetUserStartValues(startValue);
+}
+
+void
+UIVarsForces::SetLogisticSelectionCoefficientStartValue(double startValue)
+{
+    getLegalForce(force_LOGISTICSELECTION).SetUserStartValues(startValue);
+}
+
+void
+UIVarsForces::SetRegionGammaStartValue(double startValue)
+{
+    getLegalForce(force_REGION_GAMMA).SetUserStartValues(startValue);
+}
+
+void
+UIVarsForces::SetSelectionType(selection_type sType)
+{
+    logisticSelectionForce.SetSelectionType(sType);
+}
+
+void
+UIVarsForces::SetDiseaseLocation(long int location)
+{
+    assert(diseaseForce.GetLegal());
+    diseaseForce.SetLocation(location);
+}
+
+bool UIVarsForces::AreZeroesValid(force_type forceId)
+{
+    return getLegalForce(forceId).AreZeroesValid();
+}
+
+bool UIVarsForces::SomeVariableParams() const
+{
+    vector<force_type> forces = GetActiveForces();
+    for (unsigned long int fnum = 0; fnum < forces.size(); fnum++)
+    {
+        if (getLegalForce(forces[fnum]).SomeVariableParams())
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+std::string UIVarsForces::GetEpochAncestorName(long id) const
+{
+    assert(divForce.GetLegal());
+    std::vector<string>  names = divForce.GetAncestors();
+    //std::vector<string> mine = names[id];
+    std::string retval = names[id];
+    return retval;
+}
+
+std::string UIVarsForces::GetEpochDescendentNames(long id) const
+{
+    assert(divForce.GetLegal());
+    std::vector< std::vector <string> > names = divForce.GetNewPops();
+    std::vector<string> mine = names[id];
+    std::string retval = mine[0] + " " + mine[1];
+    return retval;
+}
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/ui_vars_forces.h b/src/ui_vars/ui_vars_forces.h
new file mode 100644
index 0000000..6e509a8
--- /dev/null
+++ b/src/ui_vars/ui_vars_forces.h
@@ -0,0 +1,605 @@
+// $Id: ui_vars_forces.h,v 1.56 2013/10/25 17:00:53 mkkuhner Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+// This file contains two classes (one with subclasses) which are derived from UIVarsComponent.
+// UIVarsSingleForce represents one force; it has a subtype UIVars2DForce for forces whose
+// parameters form a 2D table (migration, disease, divmigration) and then concrete subclasses
+// for each type of force.  UIVarsForces is analogous to ForceSummary; it contains stuff about all forces.
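+//
+// Rough shape of the hierarchy declared below:
+//
+//      UIVarsComponent
+//        +-- UIVarsSingleForce
+//        |     +-- UIVars2DForce            (base for migration, disease, divmigration)
+//        |     |     +-- UIVarsMigForce, UIVarsDivMigForce, UIVarsDiseaseForce
+//        |     +-- UIVarsCoalForce, UIVarsDivergenceForce, UIVarsRecForce,
+//        |         UIVarsRegionGammaForce, UIVarsGrowForce, UIVarsLogisticSelectionForce
+//        +-- UIVarsForces                   (holds one member of each concrete force class)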
+
+#ifndef UI_VARS_FORCES_H
+#define UI_VARS_FORCES_H
+
+#include <deque>
+#include <map>
+#include <vector>
+
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+
+#include "constants.h"                  // for method_type
+#include "defaults.h"                   // for force_type
+#include "ui_strings.h"
+#include "ui_vars_component.h"
+#include "ui_vars_prior.h"
+#include "vectorx.h"
+#include "stringx.h"
+#include "paramstat.h"
+
+using std::deque;
+using std::map;
+using std::vector;
+
+class UIVarsForces;
+
+// virtual class with common methods across all force types
+class UIVarsSingleForce : public UIVarsComponent
+{
+  private:
+    // The number of parameters for this force is also the
+    // number of start values, method types, and profiles
+    //
+    // The correct number of parameters for each force is:
+    //      theta/coal       number of cross partitions
+    //      disease         (number of disease states)  ^2
+    //      migration       (number of populations)     ^2
+    //      growth           number of cross partitions
+    //      rec. rate        1
+    //      gamma over regions "force"  1
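+    //
+    // For example, with 3 populations the migration force carries
+    // 3*3 = 9 parameters, and with 2 disease states the disease force
+    // carries 2*2 = 4.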
+    const long  m_numParameters;
+
+    const double      m_defaultValue;   // parameter value if none set
+    const method_type m_defaultMethodType;
+
+  protected: //The gamma force needs to be able to change this.
+    bool              m_canSetOnOff;    // can this force be turned on/off
+
+  private:
+    bool              m_onOff;          // is this force active?
+    long              m_maxEvents; //maximum number of Events for this force
+    proftype          m_profileType;  // profiles of parameters for a single
+                                      // force must have the same proftype
+
+    // per-parameter data
+    deque<bool>                     m_doProfile;
+    DoubleVec1d                     m_userSuppliedStartValues;
+    vector<method_type>             m_startValueMethodTypes;
+    vector<ParamGroup>              m_groups;
+    DoubleVec1d                     m_truevalues;
+    vector<ParamStatus>             m_pstatusValues;
+
+    // Bayesian information
+    UIVarsPrior                     m_defaultPrior;
+    vector<UIVarsPrior>             m_priors;
+    deque<bool>                     m_useDefaultPrior;
+
+    // Never want to create a UIVarsSingleForce without arguments
+    UIVarsSingleForce();                            // undefined
+    UIVarsSingleForce(const UIVarsSingleForce&);    // undefined
+    UIVarsSingleForce& operator=(const UIVarsSingleForce&); // undefined
+
+  protected:
+    // member protected (rather than private) because sub-classes
+    // need to invalidate various parameters
+    const force_type         m_ftype;
+
+    // wanted this in private area but haven't figured out
+    // the best way to get it there
+    map<method_type,DoubleVec1d>    m_calculatedStartValues;
+
+    void checkIndexValue(long index) const;
+    virtual void AssertOnIllegalStartMethod(method_type);
+    virtual string GetParamName(long pindex);
+
+  public:
+    UIVarsSingleForce(
+        UIVars *,
+        force_type ftype,
+        long nparams,
+        double defaultVal,
+        method_type defaultMethod,
+        bool canBeSetUnset,
+        bool isOn,
+        long eventMax,
+        UIVarsPrior defaultPrior);
+    UIVarsSingleForce(UIVars *, const UIVarsSingleForce&);
+    virtual ~UIVarsSingleForce()        {};
+
+    // virtual so that subclasses can override them where needed
+    virtual long        GetNumParameters()  const { return m_numParameters;};
+    virtual bool        GetCanSetOnOff()    const { return m_canSetOnOff;};
+    virtual bool        GetOnOff()          const { return m_onOff;};
+    virtual bool        GetLegal()          const;
+    virtual long        GetMaxEvents()      const { return m_maxEvents;};
+    virtual force_type  GetPhase2Type(force_type orig) const { return orig; };
+
+    // each parameter can have profiling turned ON or OFF (boolean)
+    // but the type of profiling (fixed, percentile) is set at
+    // the per-force level
+    virtual bool            GetDoProfile(long index) const;
+    virtual proftype        GetProfileType()  const;
+    virtual proftype        GetProfileType(long index)  const;
+    virtual string          GetProfileTypeSummaryDescription() const;
+    virtual string          GetPriorTypeSummaryDescription() const;
+    virtual paramlistcondition  GetParamListCondition() const;
+    virtual void            SetDoProfile(bool doIt, long index);
+    virtual void            SetDoProfile(bool doIt);
+    virtual void            SetProfileType(proftype x);
+
+    virtual ParamStatus     GetParamstatus(long index) const;
+    virtual void   SetParamstatus(const ParamStatus& mystatus, long index);
+
+    virtual double          GetStartValue(long index)   const;
+    virtual DoubleVec1d     GetStartValues()  const;
+    virtual double          GetMinStartValue(long pindex) const;
+    virtual double          GetMaxStartValue(long pindex) const;
+    virtual void            SetUserStartValue(double startVal, long index);
+    virtual void            SetUserStartValues(double startVal);
+
+    virtual method_type     GetStartMethod(long index)  const;
+
+    virtual double          GetTrueValue(long index) const;
+    virtual void            SetTrueValue(double trueval, long index);
+
+    //Getters and setters for Bayesian information
+    virtual const UIVarsPrior& GetPrior(long pindex) const;
+    virtual priortype GetPriorType(long pindex) const;
+    virtual double GetLowerBound(long pindex)   const;
+    virtual double GetUpperBound(long pindex)   const;
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    virtual long   GetRelativeSampling(long pindex) const;
+#endif
+
+    virtual void SetPriorType(priortype ptype, long pindex);
+    virtual void SetUngroupedPriorType(priortype ptype, long pindex);
+    virtual void SetLowerBound(double bound, long pindex);
+    virtual void SetUngroupedLowerBound(double bound, long pindex);
+    virtual void SetUpperBound(double bound, long pindex);
+    virtual void SetUngroupedUpperBound(double bound, long pindex);
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    virtual void SetRelativeSampling(long rate, long pindex);
+    virtual void SetUngroupedRelativeSampling(long rate, long pindex);
+#endif
+
+    virtual bool GetUseDefaultPrior(long pindex) const;
+    virtual const UIVarsPrior& GetDefaultPrior() const;
+    virtual priortype GetDefaultPriorType() const;
+    virtual double GetDefaultLowerBound()   const;
+    virtual double GetDefaultUpperBound()   const;
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    virtual long GetDefaultRelativeSampling() const;
+#endif
+
+    virtual void SetUseDefaultPrior(bool use, long pindex);
+    virtual void SetUseAllDefaultPriors();
+    virtual void SetDefaultPriorType(priortype ptype);
+    virtual void SetDefaultLowerBound(double bound);
+    virtual void SetDefaultUpperBound(double bound);
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    virtual void SetDefaultRelativeSampling(long rate);
+#endif
+
+    virtual void SetStartMethods(method_type method);
+    // override this method to allow only legal values of method_type
+    virtual void SetStartMethod(method_type method, long index);
+
+    virtual void        SetMaxEvents(long maxEvents);
+
+    virtual void        SetOnOff(bool x);
+
+    virtual bool        GetParamValid(long id) const;
+    virtual bool        GetParamUnique(long id) const;
+
+    virtual bool        AreZeroesValid() const = 0;
+    virtual bool        SomeVariableParams() const;
+
+    // Public interface with the groups:
+    virtual void AddGroup(ParamStatus mystatus, LongVec1d indices);
+    virtual void AddParamToGroup(long pindex, long gindex);
+    virtual void AddParamToNewGroup(long pindex);
+    virtual ParamStatus GetGroupParamstatus(long gindex) const;
+    virtual LongVec1d   GetGroupParamList  (long gindex) const;
+    virtual std::vector<ParamGroup> GetGroups() const {return m_groups;};
+    virtual bool AreGroupsValid() const;
+    virtual void FixGroups();
+    virtual void SetGroupParamstatus(ParamStatus pstat, long gindex);
+    virtual long ParamInGroup(long pindex) const;
+    virtual long GetNumGroups() const {return m_groups.size();};
+    virtual void RemoveParamIfInAGroup(long pindex);
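+    //
+    // Illustrative use of the grouping interface (a sketch only; the
+    // indices and the assumption that groups are numbered from 0 are
+    // hypothetical):
+    //     LongVec1d indices;                        // parameter indices to constrain together
+    //     indices.push_back(0); indices.push_back(1);
+    //     force.AddGroup(defaults::groupPstat, indices);
+    //     force.AddParamToGroup(2, 0);              // add parameter 2 to the group created above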
+
+  private:
+    //Private interface with the groups:
+    virtual void SetDoProfilesForGroup(bool doIt, long gindex);
+    virtual void checkGIndexValue(long gindex) const;
+
+    virtual double GetUngroupedStartValue(long index) const;
+};
+
+class UIVars2DForce : public UIVarsSingleForce
+{
+  private:
+    UIVars2DForce();                                  // undefined
+    UIVars2DForce(const UIVars2DForce&);              // undefined
+    void UpdateReachList(long testRome, std::list<long>& unreached,
+                         std::list<long>& reached) const;
+    long PickANewRome(long oldRome, std::list<long> unreached,
+                      std::list<long> reached) const;
+    bool CanReach(long partfrom, long partto) const;
+    UIVars2DForce& operator=(const UIVars2DForce&); // undefined
+  protected:
+    virtual void AssertOnIllegalStartMethod(method_type);
+    long m_npartitions;
+  public:
+    UIVars2DForce(UIVars *,
+                  force_type ftype,
+                  long npartitions,
+                  double defaultVal,
+                  method_type defaultMethod,
+                  bool canBeSetUnset,
+                  bool isOn,
+                  long eventMax,
+                  UIVarsPrior defaultPrior);
+    UIVars2DForce(UIVars *,const UIVars2DForce&);
+    virtual ~UIVars2DForce()          {};
+    virtual bool        GetParamValid(long id) const;
+    virtual bool  AreZeroesValid() const;
+};
+
+class UIVarsCoalForce : public UIVarsSingleForce
+{
+  private:
+    UIVarsCoalForce();                                  // undefined
+    UIVarsCoalForce& operator=(const UIVarsCoalForce&); // undefined
+    UIVarsCoalForce(const UIVarsCoalForce&);            // undefined
+  protected:
+    virtual void AssertOnIllegalStartMethod(method_type);
+  public:
+    UIVarsCoalForce(UIVars *,long numCrossPartitions);
+    UIVarsCoalForce(UIVars *,const UIVarsCoalForce&);
+    virtual ~UIVarsCoalForce()          {};
+    virtual bool        AreZeroesValid() const;
+    void FillCalculatedStartValues();
+
+};
+
+class UIVarsMigForce : public UIVars2DForce
+{
+  private:
+    UIVarsMigForce();                                   // undefined
+    UIVarsMigForce& operator=(const UIVarsMigForce&);   // undefined
+    UIVarsMigForce(const UIVarsMigForce&);              // undefined
+  public:
+    UIVarsMigForce(UIVars*, long numPopulations,bool onOrOff);
+    UIVarsMigForce(UIVars*, const UIVarsMigForce&);
+    virtual ~UIVarsMigForce()          {};
+    void FillCalculatedStartValues();
+};
+
+class UIVarsDivMigForce : public UIVars2DForce
+{
+  private:
+    UIVarsDivMigForce();                                   // undefined
+    UIVarsDivMigForce& operator=(const UIVarsMigForce&);   // undefined
+    UIVarsDivMigForce(const UIVarsMigForce&);              // undefined
+  protected:
+    virtual void AssertOnIllegalStartMethod(method_type);
+  public:
+    UIVarsDivMigForce(UIVars*, long numPopulations,bool onOrOff);
+    UIVarsDivMigForce(UIVars*, const UIVarsDivMigForce&);
+    virtual ~UIVarsDivMigForce()          {};
+    virtual bool  AreZeroesValid() const { return true; }; // zero always valid here
+};
+
+class UIVarsDivergenceForce : public UIVarsSingleForce
+{
+  private:
+    UIVarsDivergenceForce();                                  // undefined
+    UIVarsDivergenceForce& operator=(const UIVarsDivergenceForce&); // undefined
+    UIVarsDivergenceForce(const UIVarsDivergenceForce&);            // undefined
+    // in the following two vectors, each entry is an epoch
+    std::vector<std::vector<std::string> > newpops;
+    std::vector<std::string> ancestors;
+  protected:
+    virtual void AssertOnIllegalStartMethod(method_type);
+  public:
+    UIVarsDivergenceForce(UIVars *,long numDivPopulations);
+    UIVarsDivergenceForce(UIVars *,const UIVarsDivergenceForce&);
+    virtual ~UIVarsDivergenceForce()          {};
+    virtual bool        AreZeroesValid() const;
+    void AddNewPops(const std::vector<std::string>& newp);
+    void AddAncestor(const std::string& anc);
+    std::vector<std::vector<std::string> > GetNewPops() const;
+    std::vector<std::string> GetAncestors() const;
+    std::string GetAncestor(long index) const;
+
+};
+
+class UIVarsDiseaseForce : public UIVars2DForce
+{
+  private:
+    long                location;
+    //      string              diseaseName;
+    // LS Note:  If we allow more than one disease/trait at once, and we want
+    //  to model this using one disease force, we'll presumably need a
+    //  vector of strings for the names instead of just the name.  Or we can
+    //  keep doing what we're doing now, which is to use the disease state
+    //  to refer to it ('rate from healthy to diseased', etc.).
+
+    UIVarsDiseaseForce();                                       // undefined
+    UIVarsDiseaseForce(const UIVarsDiseaseForce&);              // undefined
+    UIVarsDiseaseForce& operator=(const UIVarsDiseaseForce&);   // undefined
+  public:
+    UIVarsDiseaseForce(UIVars*, long numDiseaseStates, bool canTurnOnOff);
+    UIVarsDiseaseForce(UIVars*, const UIVarsDiseaseForce&);
+    virtual ~UIVarsDiseaseForce()   {};
+
+    virtual long        GetLocation()       const {return location;};
+
+    virtual void    SetLocation(long x)             { location = x; };
+};
+
+class UIVarsRecForce : public UIVarsSingleForce
+{
+  private:
+    UIVarsRecForce();                                   // undefined
+    UIVarsRecForce(const UIVarsRecForce&);              // undefined
+    UIVarsRecForce& operator=(const UIVarsRecForce&);   // undefined
+  public:
+    UIVarsRecForce(UIVars*, bool canTurnOn);
+    UIVarsRecForce(UIVars*,const UIVarsRecForce&);
+    virtual ~UIVarsRecForce()          {};
+    virtual void        SetOnOff(bool onOffVal);
+    virtual bool        GetOnOff()       const;
+    virtual bool        AreZeroesValid() const;
+};
+
+class UIVarsRegionGammaForce : public UIVarsSingleForce
+{
+  private:
+    UIVarsRegionGammaForce();
+    UIVarsRegionGammaForce(const UIVarsRegionGammaForce&);
+    UIVarsRegionGammaForce& operator=(const UIVarsRegionGammaForce&);
+  public:
+    UIVarsRegionGammaForce(UIVars*);
+    UIVarsRegionGammaForce(UIVars*,const UIVarsRegionGammaForce&);
+    virtual ~UIVarsRegionGammaForce()          {};
+    virtual bool GetOnOff() const;
+    virtual void SetOnOff(bool);
+    virtual bool AreZeroesValid() const;
+};
+
+class UIVarsGrowForce : public UIVarsSingleForce
+{
+  private:
+    UIVarsGrowForce();                                  // undefined
+    UIVarsGrowForce(const UIVarsGrowForce&);            // undefined
+    UIVarsGrowForce& operator=(const UIVarsGrowForce&); // undefined
+
+    growth_type    growthType;
+
+  public:
+    UIVarsGrowForce(UIVars*, long numCrossPartitions);
+    UIVarsGrowForce(UIVars*, const UIVarsGrowForce&);
+    virtual ~UIVarsGrowForce()          {};
+    virtual bool        AreZeroesValid() const;
+    virtual growth_type GetGrowthType() const {return growthType;};
+    virtual void        SetGrowthType(growth_type g) {growthType=g;};
+    virtual growth_scheme GetGrowthScheme() const;
+    virtual void        SetGrowthScheme(growth_scheme g);
+    virtual force_type  GetPhase2Type(force_type f) const;
+    virtual void        SetOnOff(bool);
+};
+
+class UIVarsLogisticSelectionForce : public UIVarsSingleForce
+{
+  private:
+    UIVarsLogisticSelectionForce();
+    UIVarsLogisticSelectionForce(const UIVarsLogisticSelectionForce&);
+    UIVarsLogisticSelectionForce& operator=(const UIVarsLogisticSelectionForce&);
+
+    // double m_observedMajorAlleleFrequency; // ??
+    selection_type selectionType;
+
+  public:
+    UIVarsLogisticSelectionForce(UIVars*, long numCrossPartitions);
+    UIVarsLogisticSelectionForce(UIVars*, const UIVarsLogisticSelectionForce&);
+    virtual ~UIVarsLogisticSelectionForce()          {};
+    virtual bool        AreZeroesValid() const;
+    virtual selection_type GetSelectionType() const {return selectionType;};
+    virtual void        SetSelectionType(selection_type s) {selectionType=s;};
+    virtual force_type  GetPhase2Type(force_type f) const;
+};
+
+//------------------------------------------------------------------------------------
+
+class UIVarsForces : public UIVarsComponent
+{
+  private:
+    UIVarsForces();                             // undefined
+    UIVarsForces(const UIVarsForces&);         // undefined
+
+    class IsInactive : public std::unary_function<force_type,bool>
+    {
+      private:
+        const UIVarsForces& m_vars_forces;
+      public:
+        IsInactive(const UIVarsForces&);
+        ~IsInactive();
+        bool operator()(force_type f) const;
+    };
+    class IsIllegal : public std::unary_function<force_type,bool>
+    {
+      private:
+        const UIVarsForces& m_vars_forces;
+      public:
+        IsIllegal(const UIVarsForces&);
+        ~IsIllegal();
+        bool operator()(force_type f) const;
+    };
+
+    UIVarsCoalForce         coalForce;
+    UIVarsDiseaseForce      diseaseForce;
+    UIVarsGrowForce         growForce;
+    UIVarsMigForce          migForce;
+    UIVarsDivMigForce       divMigForce;
+    UIVarsDivergenceForce   divForce;
+    UIVarsRecForce          recForce;
+    UIVarsRegionGammaForce  regionGammaForce;
+    UIVarsLogisticSelectionForce logisticSelectionForce;
+  protected:
+    const UIVarsSingleForce &     getLegalForce(force_type) const;
+    UIVarsSingleForce &     getLegalForce(force_type);
+    const UIVarsSingleForce &     getForceRegardlessOfLegality(force_type) const;
+  public:
+    // one might argue that the constructors should have
+    // restricted access since only UIVars should
+    // be creating these puppies.
+    UIVarsForces(UIVars *,long nCrossPartitions, long nMigPopulations, long nDivPopulations, long nDiseaseStates, bool canMeasureRecombination);
+    UIVarsForces(UIVars *,const UIVarsForces&);
+    virtual ~UIVarsForces();
+
+    void FillCalculatedStartValues();
+
+    // All public non-constructors should be legal to perform
+    // from the menu. If not, put 'em in the protected section
+    LongVec1d             GetForceSizes() const;
+    vector<force_type>    GetActiveForces() const;
+    // the following guarantees a unique tag for any legal Phase 2
+    // force, eg Stick variants of existing forces.  Mary
+    vector<force_type>    GetPhase2ActiveForces() const;
+    vector<force_type>    GetLegalForces() const;
+    vector<force_type>    GetPossibleForces() const;
+    virtual long          GetNumGroups(force_type force) const;
+    virtual long          ParamInGroup(force_type ftype, long pindex) const;
+
+    bool            GetForceCanTurnOnOff(force_type force) const;
+    bool            GetForceLegal(force_type force) const;
+    bool            GetForceZeroesValidity(force_type force) const;
+    void            FixGroups(force_type force);
+
+    long            GetDiseaseLocation  () const;
+    bool            GetDoProfile        (force_type force, long id) const;
+    growth_type     GetGrowthType       () const;
+    growth_scheme   GetGrowthScheme     () const;
+    long            GetNumParameters    (force_type force) const;
+    bool            GetParamValid       (force_type force, long id) const;
+    bool            GetParamUnique      (force_type force, long id) const;
+    bool            GetForceOnOff       (force_type force) const;
+    long            GetMaxEvents        (force_type force) const;
+    proftype        GetProfileType      (force_type force) const;
+    proftype        GetProfileType      (force_type force, long id) const;
+    method_type     GetStartMethod      (force_type force, long id) const;
+    ParamStatus     GetParamstatus      (force_type force, long id) const;
+    ParamStatus     GetGroupParamstatus (force_type force, long id) const;
+    LongVec1d       GetGroupParamList   (force_type force, long id) const;
+    // MFIX -- these functions need to be written; only stubs exist!
+    std::vector<ParamGroup> GetIdentGroups   (force_type force) const;
+    std::vector<ParamGroup> GetMultGroups   (force_type force) const;
+    double          GetStartValue       (force_type force, long id) const;
+    DoubleVec1d     GetStartValues      (force_type)  const;
+    double          GetTrueValue        (force_type force, long id) const;
+    force_type      GetPhase2Type       (force_type force) const;
+    selection_type  GetSelectionType    () const;
+
+    //Getters/setters for Bayesian information
+    virtual bool GetUseDefaultPrior(force_type force, long pindex) const;
+    virtual const UIVarsPrior& GetDefaultPrior (force_type force) const;
+    virtual const UIVarsPrior& GetPrior     (force_type force, long pindex) const;
+    virtual priortype GetPriorType (force_type force, long pindex) const;
+    virtual double    GetLowerBound(force_type force, long pindex) const;
+    virtual double    GetUpperBound(force_type force, long pindex) const;
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    virtual long      GetRelativeSampling(force_type force, long pindex) const;
+#endif
+
+    virtual void SetUseDefaultPrior(bool use,force_type force, long pindex);
+    virtual void SetUseDefaultPriorsForForce(force_type force);
+    virtual void SetPriorType (priortype ptype, force_type force, long pindex);
+    virtual void SetLowerBound(double bound, force_type force, long pindex);
+    virtual void SetUpperBound(double bound, force_type force, long pindex);
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    virtual void SetRelativeSampling(long rate, force_type force, long pindex);
+#endif
+    virtual string GetPriorTypeSummaryDescription(force_type force) const;
+    virtual string GetPriorTypeSummaryDescription(force_type force, long pindex, bool sayDefault=true) const;
+
+    // allows display such as "(some) fixed" for menu summary
+    string  GetProfileTypeSummaryDescription(force_type force) const;
+
+    void SetForceOnOff  (bool onOff,        force_type forceId);
+    void SetMaxEvents   (long maxEvents,    force_type forceId);
+    //
+    void SetProfileType (proftype ptype);
+    void SetProfileType (proftype ptype,    force_type forceId);
+    //
+    void SetDoProfile   (bool doProfile);
+    void SetDoProfile   (bool doProfile,    force_type forceId);
+    void SetDoProfile   (bool doProfile,    force_type forceId, long id);
+
+    void SetUserStartValue  (double startValue, force_type forceId, long id);
+    void SetStartMethod (method_type startMethod, force_type forceId, long id);
+    void SetTrueValue  (double startValue, force_type forceId, long id);
+    void SetParamstatus (const ParamStatus& mystatus, force_type force, long id);
+    void SetGroupParamstatus (ParamStatus pstat, force_type force, long id);
+    void AddGroup (LongVec1d params, force_type force, long id);
+    void RemoveParamFromGroup (force_type force, long id);
+    void AddParamToGroup (force_type force, long pindex, long gindex);
+    void AddParamToNewGroup (force_type force, long pindex);
+
+    void SetAllThetaStartValues             (double startValue);
+    void SetAllThetaStartValuesFST          ();
+    void SetAllThetaStartValuesWatterson    ();
+    void SetThetaStartValue                 (double startValue, long id);
+
+    void SetAllMigrationStartValues     (double startValue);
+    void SetAllMigrationStartValuesFST  ();
+    void SetMigrationStartValue         (double startValue, long id);
+
+    void SetDivergenceEpochStartTime  (double startValue, long int id);
+
+    void SetAllDivMigrationStartValues     (double startValue);
+    void SetDivMigrationStartValue         (double startValue, long id);
+
+    void SetAllDiseaseStartValues   (double startValue);
+    void SetDiseaseStartValue       (double startValue, long id);
+    void SetDiseaseLocation         (long loc);
+
+    void SetAllGrowthStartValues            (double startValue);
+    void SetGrowthStartValue                (double startValue, long id);
+    void SetGrowthType                      (growth_type gType);
+    void SetGrowthScheme                    (growth_scheme gScheme);
+
+    void SetRecombinationStartValue         (double startValue);
+
+    void SetRegionGammaStartValue         (double startValue);
+
+    void SetLogisticSelectionCoefficientStartValue (double startValue);
+    void SetSelectionType                    (selection_type sType);
+
+    bool AreZeroesValid(force_type forceId);
+    bool SomeVariableParams() const;
+
+    // pass-through functions for Divergence force; JNOTE will be removed when this is
+    // handled properly by SetGet machinery
+    void AddNewPops(const std::vector<std::string>& newpops) {divForce.AddNewPops(newpops); };
+    void AddAncestor(const std::string& ancestor) {divForce.AddAncestor(ancestor); };
+    std::vector<std::vector<std::string> > GetNewPops() const { return divForce.GetNewPops(); };
+    std::vector<std::string> GetAncestors() const { return divForce.GetAncestors(); };
+
+    const UIVarsRegionGammaForce& GetUIVarsRegionGammaForce() const
+    { return regionGammaForce; };
+
+    std::string GetEpochAncestorName(long id) const;
+    std::string GetEpochDescendentNames(long id) const;
+
+};
+
+#endif  // UI_VARS_FORCES_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/ui_vars_prior.cpp b/src/ui_vars/ui_vars_prior.cpp
new file mode 100644
index 0000000..8882208
--- /dev/null
+++ b/src/ui_vars/ui_vars_prior.cpp
@@ -0,0 +1,217 @@
+// $Id: ui_vars_prior.cpp,v 1.16 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+
+#include "ui_vars_prior.h"
+#include "ui_vars_forces.h"
+#include "ui_vars.h"
+#include "defaults.h"
+#include "errhandling.h"
+#include "stringx.h"
+
+//------------------------------------------------------------------------------------
+
+UIVarsPrior::UIVarsPrior(
+    UIInterface * ui,
+    force_type force,
+    priortype type,
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    long rate,
+#endif
+    double lowerbound,
+    double upperbound)
+    : m_ui(ui),
+      m_forcetype(force),
+      m_priortype(type),
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+      m_relativeSampling(rate),
+#endif
+      m_lowerbound(lowerbound),
+      m_upperbound(upperbound)
+{
+}
+
+UIVarsPrior::UIVarsPrior(force_type shouldBeGamma)
+    : m_ui(NULL),
+      m_forcetype(shouldBeGamma),
+      m_priortype(LINEAR),
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+      m_relativeSampling(1),
+#endif
+      m_lowerbound(0.0),
+      m_upperbound(0.0)
+{
+    assert (shouldBeGamma == force_REGION_GAMMA);
+}
+
+UIVarsPrior::~UIVarsPrior()
+{
+}
+
+void UIVarsPrior::SetPriorType(priortype type)
+{
+    m_priortype = type;
+}
+
+void UIVarsPrior::SetLowerBound(double bound)
+{
+    double defbound = 0;
+    switch(m_forcetype)
+    {
+        case force_COAL:
+            defbound = defaults::minboundTheta;
+            break;
+        case force_MIG:
+            defbound = defaults::minboundMig;
+            break;
+        case force_DISEASE:
+            defbound = defaults::minboundDisease;
+            break;
+        case force_REC:
+            defbound = defaults::minboundRec;
+            break;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            defbound = defaults::minboundGrowth;
+            break;
+        case force_DIVMIG:
+            defbound = defaults::minboundDivMig;
+            break;
+        case force_DIVERGENCE:
+            defbound = defaults::minboundEpoch;
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            defbound = defaults::minboundLSelect;
+            break;
+        case force_REGION_GAMMA:
+            throw implementation_error("Method UIVarsPrior::SetLowerBound() was called for force_REGION_GAMMA; this is illegal.");
+            break;
+        default:
+            assert(false);              //Uncaught force type.
+    }
+    if (bound < defbound)
+    {
+        m_lowerbound = defbound;
+        string msg = "Warning:  the minimum lower bound for "
+            + ToString(m_forcetype) + " priors is " + ToString(defbound)
+            + ":  setting the lower bound there.";
+        m_ui->AddWarning(msg);
+        return;
+    }
+    if (bound > m_upperbound)
+    {
+        throw data_error("The lower bound of the prior must be less than the upper bound of the prior.");
+    }
+    if (bound == m_upperbound)
+    {
+        throw data_error("The lower bound of the prior must be less than the upper bound of the prior.\n"
+                         "If you wish to hold this parameter constant, go to the constraints menu and\n"
+                         "set it constant there.");
+    }
+    m_lowerbound = bound;
+}
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+void UIVarsPrior::SetRelativeSampling(long rate)
+{
+    if (rate < 1)
+    {
+        throw data_error("Relative sampling rates must be integers greater than or equal to 1.");
+    }
+    m_relativeSampling = rate;
+}
+#endif
+
+void UIVarsPrior::SetUpperBound(double bound)
+{
+    double defbound = 0;
+    switch(m_forcetype)
+    {
+        case force_COAL:
+            defbound  = defaults::maxboundTheta;
+            break;
+        case force_MIG:
+            defbound = defaults::maxboundMig;
+            break;
+        case force_DISEASE:
+            defbound = defaults::maxboundDisease;
+            break;
+        case force_REC:
+            defbound = defaults::maxboundRec;
+            break;
+        case force_DIVMIG:
+            defbound = defaults::maxboundDivMig;
+            break;
+        case force_DIVERGENCE:
+            defbound = defaults::maxboundEpoch;
+            break;
+        case force_EXPGROWSTICK:
+        case force_GROW:
+            defbound  = defaults::maxboundGrowth;
+            break;
+        case force_LOGSELECTSTICK:
+        case force_LOGISTICSELECTION:
+            defbound = defaults::maxboundLSelect;
+            break;
+        case force_REGION_GAMMA:
+            throw implementation_error("Method UIVarsPrior::SetLowerBound() was called for force_REGION_GAMMA; this is illegal.");
+            break;
+        default:
+            assert(false);              //Uncaught force type.
+    }
+    if (bound > defbound)
+    {
+        m_upperbound = defbound;
+        string msg = "Warning:  the maximum upper bound for "
+            + ToString(m_forcetype) + " priors is " + ToString(defbound)
+            + ":  setting the upper bound there.";
+        m_ui->AddWarning(msg);
+        return;
+    }
+    if (bound < m_lowerbound)
+    {
+        throw data_error("The upper bound of the prior must be greater than the upper bound of the prior.");
+    }
+    if (bound == m_lowerbound)
+    {
+        throw data_error("The upper bound of the prior must be greater than the upper bound of the prior.\n"
+                         "If you wish to hold this parameter constant, go to the constraints menu and\n"
+                         "set it constant there.");
+    }
+    m_upperbound = bound;
+}
+
+double UIVarsPrior::GetBinwidth() const
+{
+    //The basic intent here is to have the binwidth in the fourth significant
+    // digit.  For logarithmic priors, this is easy.
+    if (m_priortype == LOGARITHMIC)
+    {
+        return 0.001;
+    }
+
+    //For linear priors, take the difference of the bounds and take the fourth
+    // significant digit of that number.
+    int exponent = static_cast<int>(log10(m_upperbound - m_lowerbound));
+    return pow(10.0,exponent-4);
+};
+
+string UIVarsPrior::GetSummaryDescription() const
+{
+    string desc = "(" + ToString(m_priortype) + ") " + Pretty(m_lowerbound,6) + " - " + Pretty(m_upperbound,6);
+    return desc;
+}
+
+//____________________________________________________________________________________
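[Editor's note: a minimal standalone sketch, not part of the LAMARC sources above, of the binwidth arithmetic in UIVarsPrior::GetBinwidth(): logarithmic priors always get 0.001, while linear priors derive the width from the order of magnitude of the prior interval. The bounds used below are made-up illustration values.]

    // Sketch only: mirrors the linear-prior branch of GetBinwidth() above.
    #include <cmath>
    #include <cstdio>

    double LinearBinwidth(double lowerbound, double upperbound)
    {
        // exponent of the prior width, truncated toward zero as in the original
        int exponent = static_cast<int>(std::log10(upperbound - lowerbound));
        return std::pow(10.0, exponent - 4);
    }

    int main()
    {
        // a hypothetical prior from 1 to 1000 has width 999 (exponent 2),
        // so the computed binwidth is 10^(2-4) = 0.01
        std::printf("%g\n", LinearBinwidth(1.0, 1000.0));
        return 0;
    }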
diff --git a/src/ui_vars/ui_vars_prior.h b/src/ui_vars/ui_vars_prior.h
new file mode 100644
index 0000000..bce5362
--- /dev/null
+++ b/src/ui_vars/ui_vars_prior.h
@@ -0,0 +1,76 @@
+// $Id: ui_vars_prior.h,v 1.9 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_VARS_PRIOR_H
+#define UI_VARS_PRIOR_H
+
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+#include "constants.h"                  // for priortype
+
+class UIInterface;
+class UIVarsForces;
+
+//This class is nothing more than a struct right now, but I made it a class
+// anyway in case we get more complicated priors in the future.  Also, the
+// binwidth member variable may at some point be calculated based on the
+// upper and lower bounds instead of being set directly.
+
+class UIVarsPrior
+{
+  private:
+    UIVarsPrior();    // undefined
+    UIInterface *   m_ui;
+    force_type m_forcetype; //Used for boundary checking.
+    priortype  m_priortype;
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    long       m_relativeSampling;
+#endif
+    double     m_lowerbound;
+    double     m_upperbound;
+
+  public:
+    UIVarsPrior(UIInterface * ui,
+                force_type force,
+                priortype type,
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+                long rate,
+#endif
+                double lowerbound,
+                double upperbound);
+    UIVarsPrior(force_type shouldBeGamma);
+
+    //Use the default copy constructor.
+    virtual ~UIVarsPrior();
+
+    virtual priortype GetPriorType()  const {return m_priortype;};
+    virtual double    GetLowerBound() const {return m_lowerbound;};
+    virtual double    GetUpperBound() const {return m_upperbound;};
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    virtual long      GetRelativeSampling() const {return m_relativeSampling;};
+#endif
+    virtual double    GetBinwidth() const;
+
+    virtual void SetPriorType(priortype type);
+    virtual void SetLowerBound(double bound);
+    virtual void SetUpperBound(double bound);
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    virtual void SetRelativeSampling(long rate);
+#endif
+    //  virtual void SetBinwidth(double bin);
+    //  The binwidth is a function of all the other settings, and cannot (now)
+    //   be set on its own.  Perhaps this should be revisited in the future.
+
+    virtual string GetSummaryDescription() const;
+};
+
+#endif  // UI_VARS_PRIOR_H
+
+//____________________________________________________________________________________
diff --git a/src/ui_vars/ui_vars_traitmodels.cpp b/src/ui_vars/ui_vars_traitmodels.cpp
new file mode 100644
index 0000000..4f6ed5e
--- /dev/null
+++ b/src/ui_vars/ui_vars_traitmodels.cpp
@@ -0,0 +1,373 @@
+// $Id: ui_vars_traitmodels.cpp,v 1.18 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+
+#include "locus.h"
+#include "ui_regid.h"
+#include "ui_vars.h"
+#include "ui_vars_traitmodels.h"
+
+#define DEFAULTMLOC mloc_mapfloat
+#define MAXMULTIHAP 20
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+
+string ToString(mloc_type type)
+{
+    switch(type)
+    {
+        case mloc_data:
+            return "use as data";
+        case mloc_mapjump:
+            return "mapping (jump)";
+        case mloc_mapfloat:
+            return "mapping (float)";
+        case mloc_partition:
+            return "partition the data";
+    }
+    assert(false);
+    throw implementation_error("Uncaught moving locus analysis type.");
+}
+
+string ToXMLString(mloc_type type)
+{
+    switch(type)
+    {
+        case mloc_data:
+            return "data";
+        case mloc_mapjump:
+            return "jump";
+        case mloc_mapfloat:
+            return "float";
+        case mloc_partition:
+            return "partition";
+    }
+    assert(false);
+    throw implementation_error("Uncaught moving locus analysis type.");
+}
+
+mloc_type ProduceMlocTypeOrBarf(const string& in)
+{
+    string st = in;
+    LowerCase(st);
+    if (st == "data") { return mloc_data; };
+    if (st == "jump") { return mloc_mapjump; };
+    if (st == "float") { return mloc_mapfloat; };
+    if (st == "partition") { return mloc_partition; };
+    throw data_error("Illegal trait analysis setting \""+in+"\"");
+}
+
+UIVarsSingleTraitModel::UIVarsSingleTraitModel(UIRegId regionId, string name,
+                                               rangeset mrange,rangepair fullr,
+                                               const Locus* locus,
+                                               long multihapnum)
+    : m_region(regionId.GetRegion()),
+      m_locus(regionId.GetLocus()),
+      m_datatype(regionId.GetDataType()),
+      m_type(DEFAULTMLOC),
+      m_range(mrange),
+      m_fullrange(fullr),
+      m_name(name),
+      m_phenotypes(regionId.GetRegion(), locus->GetName()),
+      m_multihapnum(multihapnum)
+{
+    assert(m_datatype == dtype_kallele); //We don't have models for other types.
+    assert(m_range.size() > 0);
+    if (m_range.size() == 1)
+    {
+        if (m_range.begin()->first +1 == m_range.begin()->second)
+        {
+            m_type = mloc_data;
+        }
+    }
+    if ((m_type != mloc_data) && (m_multihapnum > MAXMULTIHAP))
+    {
+        m_type = mloc_mapjump;
+    }
+}
+
+UIVarsSingleTraitModel::~UIVarsSingleTraitModel()
+{
+}
+
+void UIVarsSingleTraitModel::SetAnalysisType(mloc_type type)
+{
+    m_type = type;
+}
+
+void UIVarsSingleTraitModel::SetRange(rangeset range)
+{
+    m_range = range;
+}
+
+void UIVarsSingleTraitModel::AddPhenotype(StringVec1d& alleles, string name, double penetrance)
+{
+    m_phenotypes.AddPhenotype(alleles, name, penetrance);
+}
+
+//------------------------------------------------------------------------------------
+
+UIVarsTraitModels::UIVarsTraitModels(UIVars* myUIVars)
+    : UIVarsComponent(myUIVars),
+      m_individualModels()
+{
+    //Requires that datapackplus be set up in the uivars.
+    long nregions = GetConstUIVars().datapackplus.GetNumRegions();
+    for(long regionIndex=0; regionIndex < nregions; regionIndex++)
+    {
+        long numLoci = GetConstUIVars().datapackplus.GetNumLoci(regionIndex);
+        for(long locusIndex=0; locusIndex < numLoci; locusIndex++)
+        {
+            if (GetConstUIVars().datapackplus.IsMovable(regionIndex, locusIndex))
+            {
+                UIRegId regID(regionIndex, locusIndex, GetConstUIVars());
+                AddTraitModel(regID);
+            }
+        }
+    }
+}
+
+UIVarsTraitModels::UIVarsTraitModels(UIVars* myUIVars, const UIVarsTraitModels& clone)
+    : UIVarsComponent(myUIVars),
+      m_individualModels(clone.m_individualModels)
+{
+}
+
+UIVarsTraitModels::~UIVarsTraitModels()
+{
+}
+
+void UIVarsTraitModels::AddTraitModel(UIRegId regID)
+{
+    assert(m_individualModels.find(regID) == m_individualModels.end());
+    string name = GetConstUIVars().datapackplus.GetName(regID.GetRegion(), regID.GetLocus());
+    rangeset mrange = GetConstUIVars().datapackplus.GetRange(regID.GetRegion(), regID.GetLocus());
+    rangepair fullrange = GetConstUIVars().datapackplus.GetRegionSiteSpan(regID.GetRegion());
+    long multihapnum = GetConstUIVars().datapackplus.GetNumIndividualsWithMultipleHaplotypes(regID.GetRegion(), name);
+    if (mrange.size() == 0)
+    {
+        //Nothing originally set, so we'll allow the whole thing.
+        mrange.insert(fullrange);
+    }
+    const Locus* locus = GetConstUIVars().datapackplus.GetConstLocusPointer(regID.GetRegion(), regID.GetLocus());
+    fullrange.second++; // EWFIX. WHY WHY WHY
+    UIVarsSingleTraitModel tmodel(regID, name, mrange, fullrange, locus, multihapnum);
+    m_individualModels.insert(make_pair(regID, tmodel));
+}
+
+//Setters
+
+void UIVarsTraitModels::SetAnalysisType(UIRegId regID, mloc_type type)
+{
+    assert(m_individualModels.find(regID) != m_individualModels.end());
+    UIVarsSingleTraitModel& model = m_individualModels.find(regID)->second;
+    switch(type)
+    {
+        case mloc_data:
+            //We need to make sure there's only one site where it can be placed.
+            if (!OneSite(model.GetRange()))
+            {
+                throw data_error("You may not set the trait analysis type to 'data' without specifying the exact position of your trait.");
+            }
+            break;
+        case mloc_partition:
+            throw data_error("We are not set up to use trait data to set partitions yet.  Soon!");
+        case mloc_mapfloat:
+            if (model.GetMultiHapNum() > MAXMULTIHAP)
+            {
+                throw data_error("You have too many individuals (" + ToString(model.GetMultiHapNum()) + ", which is more than maximum " + ToString(MAXMULTIHAP) + " allowed) with multiple haplotype resolutions (such as heterozygotes or individuals with a dominant phenotype) to perform a 'floating' analysis--the number of complete likelihood calculations required per tree is 2^N, with N the number of individuals with multiple resolutions.  We recommend either a 'jumping' analysis, or that  [...]
+            }
+            //Otherwise, fall through to:
+        case mloc_mapjump:
+            if (OneSite(model.GetRange()))
+            {
+                GetConstUIVars().GetUI()->AddWarning("You currently have only a single site where your trait is allowed.  The mapping analysis will not have anything to compare it to.  Use the 'A' option to add more sites to the analysis.");
+            }
+            break;
+    }
+    model.SetAnalysisType(type);
+}
+
+void UIVarsTraitModels::AddRange(UIRegId regID, rangepair addpart)
+{
+    assert(m_individualModels.find(regID) != m_individualModels.end());
+    rangeset rset = m_individualModels.find(regID)->second.GetRange();
+    rset = AddPairToRange(addpart, rset);
+    SetRange(regID, rset);
+}
+
+void UIVarsTraitModels::RemoveRange(UIRegId regID, rangepair removepart)
+{
+    assert(m_individualModels.find(regID) != m_individualModels.end());
+    rangeset rset = m_individualModels.find(regID)->second.GetRange();
+    rset = RemovePairFromRange(removepart, rset);
+    SetRange(regID, rset);
+}
+
+void UIVarsTraitModels::SetRangeToPoint(UIRegId regID, long site)
+{
+    assert(m_individualModels.find(regID) != m_individualModels.end());
+    rangepair rp(make_pair(site, site+1));
+    rangeset rset;
+    rset.insert(rp);
+    SetRange(regID, rset);
+}
+
+void UIVarsTraitModels::AddPhenotype(UIRegId regID, StringVec1d& alleles, string name, double penetrance)
+{
+    assert(m_individualModels.find(regID) != m_individualModels.end());
+    m_individualModels.find(regID)->second.AddPhenotype(alleles, name, penetrance);
+}
+
+//Getters:
+long UIVarsTraitModels::GetNumMovableLoci() const
+{
+    return m_individualModels.size();
+}
+
+mloc_type UIVarsTraitModels::GetAnalysisType(UIRegId regID) const
+{
+    assert(m_individualModels.find(regID) != m_individualModels.end());
+    return m_individualModels.find(regID)->second.GetAnalysisType();
+}
+
+rangeset UIVarsTraitModels::GetRange(UIRegId regID) const
+{
+    assert(m_individualModels.find(regID) != m_individualModels.end());
+    return m_individualModels.find(regID)->second.GetRange();
+}
+
+long UIVarsTraitModels::GetInitialMapPosition(UIRegId regID) const
+{
+    //Just use the leftmost valid point.
+    return GetRange(regID).begin()->first;
+}
+
+string UIVarsTraitModels::GetName(UIRegId regID) const
+{
+    assert(m_individualModels.find(regID) != m_individualModels.end());
+    return m_individualModels.find(regID)->second.GetName();
+}
+
+Phenotypes UIVarsTraitModels::GetPhenotypes(UIRegId regID) const
+{
+    assert(m_individualModels.find(regID) != m_individualModels.end());
+    return m_individualModels.find(regID)->second.GetPhenotypes();
+}
+
+vector<UIRegId> UIVarsTraitModels::GetRegIDs() const
+{
+    vector<UIRegId> retvec;
+    for (map<UIRegId, UIVarsSingleTraitModel>::const_iterator model = m_individualModels.begin(); model != m_individualModels.end(); model++)
+    {
+        retvec.push_back(model->first);
+    }
+    return retvec;
+}
+
+bool UIVarsTraitModels::AnyJumpingAnalyses() const
+{
+    for (map<UIRegId, UIVarsSingleTraitModel>::const_iterator model = m_individualModels.begin(); model != m_individualModels.end(); model++)
+    {
+        if (model->second.GetAnalysisType() == mloc_mapjump) return true;
+    }
+    return false;
+}
+
+bool UIVarsTraitModels::AnyMappingAnalyses() const
+{
+    for (map<UIRegId, UIVarsSingleTraitModel>::const_iterator model = m_individualModels.begin(); model != m_individualModels.end(); model++)
+    {
+        if (model->second.GetAnalysisType() == mloc_mapjump) return true;
+        if (model->second.GetAnalysisType() == mloc_mapfloat) return true;
+    }
+    return false;
+}
+
+void UIVarsTraitModels::SetRange(UIRegId regID, rangeset rset)
+{
+    UIVarsSingleTraitModel& model = m_individualModels.find(regID)->second;
+
+    rangepair fullrange = model.GetFullRange();
+    if (rset.size() == 0)
+    {
+        throw data_error("You must leave at least one allowable site for this trait.  The sites must be chosen from the range " + ToStringUserUnits(fullrange) + ".");
+    }
+
+    rangepair below = make_pair(rset.begin()->first-1, fullrange.first);
+    rangepair above = make_pair(fullrange.second, rset.rbegin()->second);
+    rangeset truerange = rset;
+
+    if (below.first < below.second)
+    {
+        truerange = RemovePairFromRange(below, truerange);
+    }
+    if (above.first < above.second)
+    {
+        truerange = RemovePairFromRange(above, truerange);
+    }
+
+    if (truerange.size() == 0)
+    {
+        throw data_error("You must leave at least one allowable site for this trait.  The sites must be chosen from the range "
+                         + ToStringUserUnits(fullrange)
+                         + ".");
+    }
+    if (truerange != rset)
+    {
+        GetConstUIVars().GetUI()->AddWarning("One or more of the added sites for this trait are outside of the range of the segments in this region.  Truncating the added range.");
+    }
+
+    switch (model.GetAnalysisType())
+    {
+        case mloc_data:
+        case mloc_partition:
+            if (!OneSite(truerange))
+            {
+                model.SetAnalysisType(mloc_mapfloat);
+                GetConstUIVars().GetUI()->AddWarning("If you don't know where your trait is located, you may perform a mapping analysis,"
+                                                     " but you may not use the trait as data or use it to partition your data.  "
+                                                     "Changing the analysis type to map the trait after collecting trees ('float').");
+            }
+            break;
+        case mloc_mapjump:
+        case mloc_mapfloat:
+            if (OneSite(truerange))
+            {
+                //LS DEBUG MAPPING: change after implementation
+#ifdef NDEBUG
+                GetConstUIVars().GetUI()->AddWarning("You currently have only a single site where your trait is allowed.  "
+                                                     "The mapping analysis will not have anything to compare it to.  "
+                                                     "Use the 'A' option to add more sites.");
+#else
+                GetConstUIVars().GetUI()->AddWarning("You currently have only a single site where your trait is allowed.  "
+                                                     "The mapping analysis will not have anything to compare it to.  "
+                                                     "Use the 'A' option to add more sites, or the 'U' or 'P' options to change the type of analysis.");
+#endif
+            }
+    }
+    model.SetRange(truerange);
+}
+
+//Private functions
+
+bool UIVarsTraitModels::OneSite(rangeset rset)
+{
+    assert(rset.size() > 0);
+    if (rset.size() > 1) return false;
+    if (rset.begin()->first+1 == rset.begin()->second) return true;
+    return false;
+}
+
+//____________________________________________________________________________________
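[Editor's note: a hedged illustration of the UIVarsSingleTraitModel constructor logic above. The default trait analysis starts as "float", is demoted to "use as data" when the allowed range is exactly one site, and falls back to "jump" when too many individuals have multiple haplotype resolutions. A standalone sketch, assuming MAXMULTIHAP is 20 as defined at the top of this file; the names below are not LAMARC identifiers.]

    // Sketch only: mirrors the default-type decision in the constructor above.
    enum sketch_mloc { sketch_data, sketch_mapjump, sketch_mapfloat };

    sketch_mloc DefaultAnalysisType(bool exactlyOneSite, long multihapnum)
    {
        if (exactlyOneSite) return sketch_data;       // trait position already known
        if (multihapnum > 20) return sketch_mapjump;  // too many haplotype resolutions to "float"
        return sketch_mapfloat;                       // map the trait after collecting trees
    }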
diff --git a/src/ui_vars/ui_vars_traitmodels.h b/src/ui_vars/ui_vars_traitmodels.h
new file mode 100644
index 0000000..39ffa0c
--- /dev/null
+++ b/src/ui_vars/ui_vars_traitmodels.h
@@ -0,0 +1,101 @@
+// $Id: ui_vars_traitmodels.h,v 1.11 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_VARS_TRAITMODELS_H
+#define UI_VARS_TRAITMODELS_H
+
+#include "datatype.h"           // for data_type
+#include "phenotypes.h"
+#include "rangex.h"
+#include "ui_vars_component.h"
+
+class UIRegId;
+
+enum mloc_type {mloc_data, mloc_mapjump, mloc_mapfloat, mloc_partition};
+
+string ToString(mloc_type);
+string ToXMLString(mloc_type);
+mloc_type ProduceMlocTypeOrBarf(const string& in);
+
+class UIVarsSingleTraitModel
+{
+  private:
+    UIVarsSingleTraitModel();    // undefined
+    long m_region;
+    long m_locus;
+    data_type m_datatype;
+    mloc_type m_type;
+    rangeset m_range;
+    rangepair m_fullrange;
+    string m_name;
+    Phenotypes m_phenotypes;
+    long m_multihapnum;
+
+  public:
+    UIVarsSingleTraitModel(UIRegId regionId, string name, rangeset mrange,
+                           rangepair fullrange, const Locus* locus,
+                           long multihapnum);
+    virtual ~UIVarsSingleTraitModel();
+    //Setters
+    void SetAnalysisType(mloc_type type);
+    void SetRange(rangeset range);
+    void AddPhenotype(StringVec1d& alleles, string name, double penetrance);
+
+    //Getters
+    mloc_type GetAnalysisType() const {return m_type;};
+    rangeset  GetRange() const {return m_range;};
+    rangepair GetFullRange() const {return m_fullrange;};
+    string    GetName()  const {return m_name;};
+    Phenotypes GetPhenotypes() const {return m_phenotypes;};
+    long      GetMultiHapNum() const {return m_multihapnum;};
+};
+
+class UIVarsTraitModels : public UIVarsComponent
+{
+  private:
+    UIVarsTraitModels();  // undefined
+    UIVarsTraitModels(const UIVarsTraitModels&); // undefined
+    std::map<UIRegId, UIVarsSingleTraitModel>   m_individualModels;
+
+  public:
+    UIVarsTraitModels(UIVars*);
+    UIVarsTraitModels(UIVars*, const UIVarsTraitModels&);
+    virtual ~UIVarsTraitModels();
+
+    void AddTraitModel(UIRegId regID);
+
+    //Setters
+    void SetAnalysisType(UIRegId regID, mloc_type type);
+    void AddRange(UIRegId regID, rangepair addpart);
+    void RemoveRange(UIRegId regID, rangepair removepart);
+    void SetRangeToPoint(UIRegId regID, long site);
+    void AddPhenotype(UIRegId regID, StringVec1d& alleles, string name, double penetrance);
+    //Note:  AddPhenotype is currently only called from the XML.
+
+    //Getters
+    long GetNumMovableLoci() const;
+    mloc_type GetAnalysisType(UIRegId regID) const;
+    rangeset GetRange(UIRegId regID) const;
+    long GetInitialMapPosition(UIRegId regID) const;
+    string   GetName(UIRegId regID) const;
+    Phenotypes GetPhenotypes(UIRegId regID) const;
+    vector<UIRegId> GetRegIDs() const;
+    bool AnyJumpingAnalyses() const;
+    bool AnyMappingAnalyses() const;
+    void SetRange(UIRegId regID, rangeset rset);
+
+  private:
+    bool OneSite(rangeset rset);
+};
+
+#endif  // UI_VARS_TRAITMODELS_H
+
+//____________________________________________________________________________________
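[Editor's note: a short standalone sketch, not LAMARC code, of the half-open site-range convention used by UIVarsTraitModels above: a rangepair is read as [first, second), so a single site s is stored as (s, s+1), which is what OneSite() and SetRangeToPoint() rely on. The typedef names and example sites are illustrative only.]

    #include <cassert>
    #include <set>
    #include <utility>

    typedef std::pair<long, long> sketch_rangepair;     // half-open: [first, second)
    typedef std::set<sketch_rangepair> sketch_rangeset;

    // Mirrors OneSite(): exactly one pair covering exactly one site.
    bool SketchOneSite(const sketch_rangeset& rset)
    {
        return rset.size() == 1
            && rset.begin()->first + 1 == rset.begin()->second;
    }

    int main()
    {
        sketch_rangeset range;
        range.insert(std::make_pair(42L, 43L));    // the single site 42
        assert(SketchOneSite(range));
        range.insert(std::make_pair(100L, 120L));  // add sites 100..119
        assert(!SketchOneSite(range));
        return 0;
    }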
diff --git a/src/ui_vars/ui_vars_userparams.cpp b/src/ui_vars/ui_vars_userparams.cpp
new file mode 100644
index 0000000..d3c95ed
--- /dev/null
+++ b/src/ui_vars/ui_vars_userparams.cpp
@@ -0,0 +1,560 @@
+// $Id: ui_vars_userparams.cpp,v 1.36 2013/10/25 17:00:53 mkkuhner Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#include <cassert>
+#include <fstream> //For summary file testing.
+
+#include "local_build.h"
+
+#include "defaults.h"
+#include "errhandling.h"
+#include "timex.h"      // for GetTime()
+#include "ui_regid.h"
+#include "ui_vars.h"
+
+//------------------------------------------------------------------------------------
+
+UIVarsUserParameters::UIVarsUserParameters(UIVars * myUIVars,string fileName)
+    :
+    UIVarsComponent(myUIVars),
+    m_curveFilePrefix                 (defaults::curvefileprefix),
+    m_mapFilePrefix                   (defaults::mapfileprefix),
+    m_reclocFilePrefix                (defaults::reclocfileprefix),
+    m_traceFilePrefix                 (defaults::tracefileprefix),
+    m_newickTreeFilePrefix            (defaults::newicktreefileprefix),
+#ifdef LAMARC_QA_TREE_DUMP
+    m_argFilePrefix                   (defaults::argfileprefix),
+#endif // LAMARC_QA_TREE_DUMP
+    m_dataFileName                    (fileName),
+    m_plotPost                        (defaults::plotpost),
+    m_programStartTime                (defaults::programstarttime),
+    m_progress                        (defaults::progress),
+    m_hasOldClockSeed                 (defaults::hasoldrandomseed),
+    m_oldClockSeed                    (defaults::randomseed),
+    m_randomSeed                      (defaults::randomseed),
+    m_readSumFile                     (defaults::readsumfile),
+    m_resultsFileName                 (defaults::resultsfilename),
+    m_treeSumInFileName               (defaults::treesuminfilename),
+    m_treeSumOutFileName              (defaults::treesumoutfilename),
+    m_useSystemClock                  (defaults::usesystemclock),
+    m_verbosity                       (defaults::verbosity),
+    m_writeCurveFiles                 (defaults::writecurvefiles),
+    m_writeSumFile                    (defaults::writesumfile),
+    m_writeReclocFiles                (defaults::writereclocfiles),
+    m_writeTraceFiles                 (defaults::writetracefiles),
+    m_writeNewickTreeFiles            (defaults::writenewicktreefiles),
+#ifdef LAMARC_QA_TREE_DUMP
+    m_writeArgFiles                   (defaults::writeargfiles),
+    m_writeManyArgs                   (defaults::writemanyargs),
+#endif // LAMARC_QA_TREE_DUMP
+    m_xmlOutFileName                  (defaults::xmloutfilename),
+    m_xmlReportFileName               (defaults::xmlreportfilename),
+    m_profilePrefix                   (defaults::profileprefix)
+{
+    SetUseSystemClock(true);// get a good initial random seed
+    SetProgramStartTime();
+}
+
+UIVarsUserParameters::UIVarsUserParameters(UIVars * myUIVars,const UIVarsUserParameters& uparams)
+    :
+    UIVarsComponent(myUIVars),
+    m_curveFilePrefix                 (uparams.m_curveFilePrefix),
+    m_mapFilePrefix                   (uparams.m_mapFilePrefix),
+    m_reclocFilePrefix                (uparams.m_reclocFilePrefix),
+    m_traceFilePrefix                 (uparams.m_traceFilePrefix),
+    m_newickTreeFilePrefix            (uparams.m_newickTreeFilePrefix),
+#ifdef LAMARC_QA_TREE_DUMP
+    m_argFilePrefix                   (uparams.m_argFilePrefix),
+#endif // LAMARC_QA_TREE_DUMP
+    m_dataFileName                    (uparams.m_dataFileName),
+    m_plotPost                        (uparams.m_plotPost),
+    m_programStartTime                (uparams.m_programStartTime),
+    m_progress                        (uparams.m_progress),
+    m_hasOldClockSeed                 (uparams.m_hasOldClockSeed),
+    m_oldClockSeed                    (uparams.m_oldClockSeed),
+    m_randomSeed                      (uparams.m_randomSeed),
+    m_readSumFile                     (uparams.m_readSumFile),
+    m_resultsFileName                 (uparams.m_resultsFileName),
+    m_treeSumInFileName               (uparams.m_treeSumInFileName),
+    m_treeSumOutFileName              (uparams.m_treeSumOutFileName),
+    m_useSystemClock                  (uparams.m_useSystemClock),
+    m_verbosity                       (uparams.m_verbosity),
+    m_writeCurveFiles                 (uparams.m_writeCurveFiles),
+    m_writeSumFile                    (uparams.m_writeSumFile),
+    m_writeReclocFiles                (uparams.m_writeReclocFiles),
+    m_writeTraceFiles                 (uparams.m_writeTraceFiles),
+    m_writeNewickTreeFiles            (uparams.m_writeNewickTreeFiles),
+#ifdef LAMARC_QA_TREE_DUMP
+    m_writeArgFiles                   (uparams.m_writeArgFiles),
+    m_writeManyArgs                   (uparams.m_writeManyArgs),
+#endif // LAMARC_QA_TREE_DUMP
+    m_xmlOutFileName                  (uparams.m_xmlOutFileName),
+    m_xmlReportFileName               (uparams.m_xmlReportFileName),
+    m_profilePrefix                   (uparams.m_profilePrefix)
+{
+}
+
+UIVarsUserParameters::~UIVarsUserParameters()
+{
+}
+
+bool  UIVarsUserParameters::GetReadSumFile() const
+{
+    return m_readSumFile;
+}
+
+bool UIVarsUserParameters::GetWriteSumFile() const
+{
+    return m_writeSumFile;
+}
+
+bool UIVarsUserParameters::GetWriteNewickTreeFiles() const
+{
+    if (GetConstUIVars().forces.GetForceOnOff(force_MIG) || 
+        GetConstUIVars().forces.GetForceOnOff(force_DIVERGENCE) ||
+        GetConstUIVars().forces.GetForceOnOff(force_REC))
+    {
+        return false;
+    }
+    return m_writeNewickTreeFiles;
+}
+
+//LS DEBUG:  Do we want to test for leading and/or trailing whitespace for all
+// these various file names and prefixes?
+
+void UIVarsUserParameters::SetCurveFilePrefix(string x)
+{
+    CheckPrefix(x);
+    m_curveFilePrefix = x;
+    CheckCurveFiles();
+};
+
+void UIVarsUserParameters::SetMapFilePrefix(string x)
+{
+    CheckPrefix(x);
+    m_mapFilePrefix = x;
+    CheckMapFiles();
+};
+
+void UIVarsUserParameters::SetReclocFilePrefix(string x)
+{
+    CheckPrefix(x);
+    m_reclocFilePrefix = x;
+    CheckReclocFiles();
+};
+
+void UIVarsUserParameters::SetTraceFilePrefix(string x)
+{
+    CheckPrefix(x);
+    m_traceFilePrefix = x;
+    CheckTraceFiles();
+};
+
+void UIVarsUserParameters::SetNewickTreeFilePrefix(string x)
+{
+    CheckPrefix(x);
+    m_newickTreeFilePrefix = x;
+    CheckNewickTreeFile();
+};
+
+#ifdef LAMARC_QA_TREE_DUMP
+void UIVarsUserParameters::SetArgFilePrefix(string x)
+{
+    CheckPrefix(x);
+    m_argFilePrefix = x;
+    CheckArgFile();
+};
+#endif // LAMARC_QA_TREE_DUMP
+
+void UIVarsUserParameters::SetDataFileName(string x)
+{
+    //This routine is not currently used, since the dataFileName is set in
+    // the constructor and never changed.
+    assert(false);
+    if (x == "")
+    {
+        throw data_error("Expected the name of a file but got \"\".");
+    }
+    m_dataFileName = x;
+};
+
+void UIVarsUserParameters::SetResultsFileName(string x)
+{
+    if (x == "")
+    {
+        throw data_error("Expected the name of a file but got \"\".");
+    }
+    m_resultsFileName = x;
+    std::ifstream testsum(m_resultsFileName.c_str(), std::ios::in);
+    if(testsum)
+    {
+        string msg = "Warning:  your current settings will overwrite \""
+            + m_resultsFileName
+            + "\". Change the name of the output file or move the "
+            + "existing file if that's not OK.";
+        GetConstUIVars().GetUI()->AddWarning(msg);
+    }
+};
+
+void UIVarsUserParameters::SetXMLOutFileName(string x)
+{
+    if (x == "")
+    {
+        throw data_error("Expected the name of a file but got \"\".");
+    }
+    m_xmlOutFileName = x;
+}
+
+void UIVarsUserParameters::SetXMLReportFileName(string x)
+{
+    if (x == "")
+    {
+        throw data_error("Expected the name of a file but got \"\".");
+    }
+    m_xmlReportFileName = x;
+}
+
+void UIVarsUserParameters::SetProfilePrefix(string x)
+{
+    CheckPrefix(x);
+    m_profilePrefix = x;
+}
+
+// problems: 8/02 daniel
+// - will open directories as if they're sumfiles if told to do so
+// - no persistence in asking for a file to write to when overwriting is warned against... other case is fine
+
+//LS DEBUG:  Daniel's first above problem is still true, and I have no idea
+// what the second problem even means.
+
+void UIVarsUserParameters::SetTreeSumInFileName(string x)
+{
+    if (x == "")
+    {
+        throw data_error("Expected the name of a file but got \"\".");
+    }
+    m_treeSumInFileName = x;
+    CheckReadSumFile();
+};
+
+void UIVarsUserParameters::SetTreeSumOutFileName(string x)
+{
+    if (x == "")
+    {
+        throw data_error("Expected the name of a file but got \"\".");
+    }
+    m_treeSumOutFileName = x;
+    // warn the user if an older version may be clobbered
+    CheckWriteSumFile();
+};
+
+void UIVarsUserParameters::SetWriteCurveFiles(bool x)
+{
+    if (x && !m_writeCurveFiles)
+    {
+        //We're turning on curve files from being off, so check.
+        CheckCurveFiles();
+    }
+    m_writeCurveFiles = x;
+}
+
+void UIVarsUserParameters::SetWriteReclocFiles(bool x)
+{
+    if (x && !m_writeReclocFiles)
+    {
+        //We're turning on recloc files from being off, so check.
+        CheckReclocFiles();
+    }
+    m_writeReclocFiles = x;
+}
+
+void UIVarsUserParameters::SetWriteTraceFiles(bool x)
+{
+    if (x && !m_writeTraceFiles)
+    {
+        //We're turning on trace files from being off, so check.
+        CheckTraceFiles();
+    }
+    m_writeTraceFiles = x;
+}
+
+void UIVarsUserParameters::SetWriteNewickTreeFiles(bool x)
+{
+    if (x && GetConstUIVars().forces.GetForceOnOff(force_MIG))
+    {
+        GetConstUIVars().GetUI()->AddWarning("Cannot write a newick tree while performing an analysis with migration.");
+        return;
+    }
+    if (x && GetConstUIVars().forces.GetForceOnOff(force_DIVERGENCE))
+    {
+        GetConstUIVars().GetUI()->AddWarning("Cannot write a newick tree while performing an analysis with divergence.");
+        return;
+    }
+    if (x && GetConstUIVars().forces.GetForceOnOff(force_REC))
+    {
+        GetConstUIVars().GetUI()->AddWarning("Cannot write a newick tree while performing an analysis with recombination.");
+        return;
+    }
+    m_writeNewickTreeFiles = x;
+    CheckNewickTreeFile();
+}
+
+#ifdef LAMARC_QA_TREE_DUMP
+void UIVarsUserParameters::SetWriteArgFiles(bool x)
+{
+    m_writeArgFiles = x;
+    CheckArgFile();
+}
+
+void UIVarsUserParameters::SetWriteManyArgs(bool x)
+{
+    m_writeManyArgs = x;
+    CheckArgFile();
+}
+#endif // LAMARC_QA_TREE_DUMP
+
+void UIVarsUserParameters::SetReadSumFile(bool x)
+{
+    m_readSumFile = x;
+    CheckReadSumFile();
+}
+
+void UIVarsUserParameters::SetWriteSumFile(bool x)
+{
+    m_writeSumFile = x;
+    CheckWriteSumFile();
+}
+
+void UIVarsUserParameters::SetProgramStartTime()
+{
+    m_programStartTime = GetTime();
+}
+
+void UIVarsUserParameters::SetRandomSeed(long seed)
+{
+    // We need the closest 4n+1.  And it probably has to be positive; we're
+    // enforcing this latter blindly since we know it will work instead of
+    // scratching our heads at the algorithm to be sure.
+    m_randomSeed = 4 * static_cast<long>( abs(seed) / 4 ) + 1;
+    if (m_randomSeed != seed)
+    {
+        string warn = "Warning:  Using " + ToString(m_randomSeed)
+            + " as the random seed (the closest positive integer of the form 4n+1) instead of "
+            + ToString(seed) + ".";
+        GetConstUIVars().GetUI()->AddWarning(warn);
+    }
+    m_useSystemClock = false;
+}
+
+void UIVarsUserParameters::SetUseSystemClock(bool val)
+{
+    if(val)
+    {
+        // if we're setting this value to "true",
+        // we need to generate the seed we'll
+        // be using
+        m_randomSeed = 4 * static_cast<long>( time(NULL) / 4 ) + 1;
+        m_useSystemClock = true;
+    }
+    else
+    {
+        throw implementation_error("SetUseSystemClock not expected to get false argument");
+    }
+}
+
+long UIVarsUserParameters::GetOldClockSeed() const
+{
+    assert(m_hasOldClockSeed);
+    return m_oldClockSeed;
+}
+
+void UIVarsUserParameters::SetOldClockSeed(long seed)
+{
+    m_hasOldClockSeed = true;
+    m_oldClockSeed = seed;
+}
+
+void UIVarsUserParameters::SetUseOldClockSeed(bool val)
+{
+    if(val)
+    {
+        m_randomSeed = m_oldClockSeed;
+        m_useSystemClock = false;
+    }
+    else
+    {
+        throw implementation_error("SetUseOldClockSeed not expected to get false argument");
+    }
+}
+
+void UIVarsUserParameters::CheckReadSumFile()
+{
+    if (m_readSumFile)
+    {
+        std::ifstream testsum(m_treeSumInFileName.c_str(), std::ios::in);
+        if(!testsum)
+        {
+            string msg = "Warning:  cannot open or read file \""
+                + m_treeSumInFileName
+                + "\". Change the name of the input summary file or "
+                + "fix permissions on the file.";
+            GetConstUIVars().GetUI()->AddWarning(msg);
+        }
+        else
+        {
+            CheckBothSumFiles();
+        }
+    }
+}
+
+void UIVarsUserParameters::CheckWriteSumFile()
+{
+    if (m_writeSumFile)
+    {
+        if (!CheckBothSumFiles())
+        {
+            std::ifstream testsum(m_treeSumOutFileName.c_str(), std::ios::in);
+            if(testsum)
+            {
+                string msg = "Warning:  your current settings will overwrite \""
+                    + m_treeSumOutFileName
+                    + "\". Change the name of the output summary file or move the "
+                    + "existing file if that's not OK.";
+                GetConstUIVars().GetUI()->AddWarning(msg);
+            }
+        }
+    }
+}
+
+bool UIVarsUserParameters::CheckBothSumFiles()
+{
+    if (m_writeSumFile && m_readSumFile && (m_treeSumOutFileName == m_treeSumInFileName))
+    {
+        string msg = "Warning:  you are reading and writing to the same "
+            "summary file.  LAMARC is designed to handle this situation, but "
+            "oddnesses with your file system during a LAMARC run could cause "
+            "data loss.";
+        GetConstUIVars().GetUI()->AddWarning(msg);
+        return true;
+    }
+    return false;
+}
+
+void UIVarsUserParameters::CheckNewickTreeFile()
+{
+    string oneNewickTreeFileName = m_newickTreeFilePrefix + "_";
+    oneNewickTreeFileName += SpacesToUnderscores(GetConstUIVars().datapackplus.GetSimpleRegionName(0)) + ".txt";
+    std::ifstream testsum(oneNewickTreeFileName.c_str(), std::ios::in);
+    if(testsum && GetWriteNewickTreeFiles())
+    {
+        string msg = "Warning:  your current settings will overwrite \""
+            + oneNewickTreeFileName
+            + "\" and may also overwrite other files in the same directory with the "
+            + "same prefix. Change the name of the Newick Tree file prefix or "
+            + "move the existing file(s) if that's not OK.";
+        GetConstUIVars().GetUI()->AddWarning(msg);
+    }
+}
+
+#ifdef LAMARC_QA_TREE_DUMP
+void UIVarsUserParameters::CheckArgFile()
+{
+    string oneArgFileName = m_argFilePrefix + "_";
+    oneArgFileName += SpacesToUnderscores(GetConstUIVars().datapackplus.GetSimpleRegionName(0)) + ".txt";
+    std::ifstream testsum(oneArgFileName.c_str(), std::ios::in);
+    if(testsum && GetWriteArgFiles())
+    {
+        string msg = "Warning:  your current settings will overwrite \""
+            + oneArgFileName
+            + "\" and may also overwrite other files in the same directory with the "
+            + "same prefix. Change the name of the AREG file prefix or "
+            + "move the existing file(s) if that's not OK.";
+        GetConstUIVars().GetUI()->AddWarning(msg);
+    }
+}
+#endif // LAMARC_QA_TREE_DUMP
+
+void UIVarsUserParameters::CheckCurveFiles()
+{
+    string oneCurveFileName = m_curveFilePrefix + "_reg1_Theta1.txt"; // EWFIX -- really? will this name always exist
+    std::ifstream testsum(oneCurveFileName.c_str(), std::ios::in);
+    if(testsum && m_writeCurveFiles && GetConstUIVars().chains.GetDoBayesianAnalysis())
+    {
+        string msg = "Warning:  your current settings will overwrite \""
+            + oneCurveFileName
+            + "\" and will probably overwrite other files in the same directory with "
+            + "the same prefix. Change the name of the Bayesian curvefile prefix or "
+            + "move the existing file(s) if that's not OK.";
+        GetConstUIVars().GetUI()->AddWarning(msg);
+    }
+}
+
+void UIVarsUserParameters::CheckMapFiles()
+{
+    string oneMapFileName = m_mapFilePrefix + "_";
+    const UIVarsTraitModels & tms = GetConstUIVars().traitmodels;
+    vector<UIRegId> ids = tms.GetRegIDs();
+    assert(!ids.empty());
+    oneMapFileName += SpacesToUnderscores(tms.GetName(ids[0]));
+    oneMapFileName += ".txt";
+    std::ifstream testsum(oneMapFileName.c_str(), std::ios::in);
+    if(testsum)
+    {
+        string msg = "Warning:  your current settings will overwrite \""
+            + oneMapFileName
+            + "\" and will probably overwrite other files in the same directory with "
+            + "the same prefix. Change the name of the mapping prefix or "
+            + "move the existing file(s) if that's not OK.";
+        GetConstUIVars().GetUI()->AddWarning(msg);
+    }
+}
+
+void UIVarsUserParameters::CheckReclocFiles()
+{
+    string oneReclocFileName = m_reclocFilePrefix + "_";
+    oneReclocFileName += SpacesToUnderscores(GetConstUIVars().datapackplus.GetSimpleRegionName(0)) + "_1.txt";
+    std::ifstream testsum(oneReclocFileName.c_str(), std::ios::in);
+    if(testsum && GetWriteReclocFiles())
+    {
+        string msg = "Warning:  your current settings will overwrite \""
+            + oneReclocFileName
+            + "\" and may also overwrite other files in the same directory with the "
+            + "same prefix. Change the name of the recloc prefix or "
+            + "move the existing file(s) if that's not OK.";
+        GetConstUIVars().GetUI()->AddWarning(msg);
+    }
+}
+
+void UIVarsUserParameters::CheckTraceFiles()
+{
+    string oneTraceFileName = m_traceFilePrefix + "_";
+    oneTraceFileName += SpacesToUnderscores(GetConstUIVars().datapackplus.GetSimpleRegionName(0)) + "_1.txt";
+    std::ifstream testsum(oneTraceFileName.c_str(), std::ios::in);
+    if(testsum && GetWriteTraceFiles())
+    {
+        string msg = "Warning:  your current settings will overwrite \""
+            + oneTraceFileName
+            + "\" and may also overwrite other files in the same directory with the "
+            + "same prefix. Change the name of the tracefile prefix or "
+            + "move the existing file(s) if that's not OK.";
+        GetConstUIVars().GetUI()->AddWarning(msg);
+    }
+}
+
+void UIVarsUserParameters::CheckPrefix(string x)
+{
+    if (x == "")
+    {
+        throw data_error("Expected a prefix but got \"\".");
+    }
+}
+
+//____________________________________________________________________________________
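[Editor's note: a standalone sketch, not LAMARC code, of the seed adjustment performed by SetRandomSeed() above, which maps any user-supplied seed onto a positive value of the form 4n+1 before warning about the change. The example seeds are arbitrary.]

    #include <cstdio>
    #include <cstdlib>

    long AdjustSeed(long seed)
    {
        // same arithmetic as SetRandomSeed(): drop the sign, then snap to 4n+1
        return 4 * static_cast<long>(std::labs(seed) / 4) + 1;
    }

    int main()
    {
        std::printf("%ld\n", AdjustSeed(1005));  // already 4n+1 -> 1005
        std::printf("%ld\n", AdjustSeed(1006));  // adjusted     -> 1005
        std::printf("%ld\n", AdjustSeed(-7));    // adjusted     -> 5
        return 0;
    }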
diff --git a/src/ui_vars/ui_vars_userparams.h b/src/ui_vars/ui_vars_userparams.h
new file mode 100644
index 0000000..4e4fd98
--- /dev/null
+++ b/src/ui_vars/ui_vars_userparams.h
@@ -0,0 +1,164 @@
+// $Id: ui_vars_userparams.h,v 1.24 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+ *  Copyright 2004  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+ *
+ *  This software is distributed free of charge for non-commercial use
+ *  and is copyrighted.  Of course, we do not guarantee that the software
+ *  works, and are not responsible for any damage you may cause or have.
+ *
+ */
+
+#ifndef UI_VARS_USERPARAMS_H
+#define UI_VARS_USERPARAMS_H
+
+#include <string>
+
+#include "local_build.h"
+
+#include "constants.h"          // for verbosity_type
+#include "ui_vars_component.h"
+
+class UIVars;
+
+using std::string;
+
+// variables that can be changed by the user
+class UIVarsUserParameters : public UIVarsComponent
+{
+  protected:
+    string                  m_curveFilePrefix;
+    string                  m_mapFilePrefix;
+    string                  m_reclocFilePrefix;
+    string                  m_traceFilePrefix;
+    string                  m_newickTreeFilePrefix;
+#ifdef LAMARC_QA_TREE_DUMP
+    string                  m_argFilePrefix;
+#endif // LAMARC_QA_TREE_DUMP
+    string                  m_dataFileName;
+    bool                    m_plotPost;
+    time_t                  m_programStartTime;
+    verbosity_type          m_progress;
+    bool                    m_hasOldClockSeed;
+    long                    m_oldClockSeed;
+    long                    m_randomSeed;
+    bool                    m_readSumFile;
+    string                  m_resultsFileName;
+    string                  m_treeSumInFileName;
+    string                  m_treeSumOutFileName;
+    bool                    m_useSystemClock;
+    verbosity_type          m_verbosity;
+    bool                    m_writeCurveFiles;
+    bool                    m_writeSumFile;
+    bool                    m_writeReclocFiles;
+    bool                    m_writeTraceFiles;
+    bool                    m_writeNewickTreeFiles;
+#ifdef LAMARC_QA_TREE_DUMP
+    bool                    m_writeArgFiles;
+    bool                    m_writeManyArgs;
+#endif // LAMARC_QA_TREE_DUMP
+    string                  m_xmlOutFileName;
+    string                  m_xmlReportFileName;
+    string                  m_profilePrefix;
+
+    UIVarsUserParameters();                             // undefined
+    UIVarsUserParameters(const UIVarsUserParameters&);  // undefined
+
+  public:
+    // one might argue that the constructors should have
+    // restricted access since only UIVars should
+    // be creating these puppies.
+    UIVarsUserParameters(UIVars*,string fileName);
+    UIVarsUserParameters(UIVars*,const UIVarsUserParameters&);
+    virtual ~UIVarsUserParameters();
+
+    /////////////////////////////////////////////////////////////
+    virtual string         GetCurveFilePrefix()    const {return m_curveFilePrefix;};
+    virtual bool           GetHasOldClockSeed()    const {return m_hasOldClockSeed;};
+    virtual string         GetMapFilePrefix()      const {return m_mapFilePrefix;};
+    virtual string         GetReclocFilePrefix()   const {return m_reclocFilePrefix;};
+    virtual string         GetTraceFilePrefix()    const {return m_traceFilePrefix;};
+    virtual string       GetNewickTreeFilePrefix() const {return m_newickTreeFilePrefix;};
+#ifdef LAMARC_QA_TREE_DUMP
+    virtual string         GetArgFilePrefix()      const {return m_argFilePrefix;};
+#endif // LAMARC_QA_TREE_DUMP
+    virtual string         GetDataFileName()       const {return m_dataFileName;};
+    virtual long           GetOldClockSeed()       const ;
+    virtual bool           GetPlotPost()           const {return m_plotPost;};
+    virtual string         GetProfilePrefix()      const {return m_profilePrefix;};
+    virtual time_t         GetProgramStartTime()   const {return m_programStartTime;};
+    virtual verbosity_type GetProgress()           const {return m_progress;};
+    virtual long           GetRandomSeed()         const {return m_randomSeed;};
+    virtual string         GetResultsFileName()    const {return m_resultsFileName;};
+    virtual string         GetTreeSumInFileName()  const {return m_treeSumInFileName;};
+    virtual string         GetTreeSumOutFileName() const {return m_treeSumOutFileName;};
+    virtual bool           GetUseSystemClock()     const {return m_useSystemClock;};
+    virtual verbosity_type GetVerbosity()          const {return m_verbosity;};
+    virtual bool           GetReadSumFile()        const;
+    virtual bool           GetWriteSumFile()       const;
+    virtual bool           GetWriteCurveFiles()    const {return m_writeCurveFiles;};
+    virtual bool           GetWriteReclocFiles()   const {return m_writeReclocFiles;};
+    virtual bool           GetWriteTraceFiles()    const {return m_writeTraceFiles;};
+    virtual bool           GetWriteNewickTreeFiles() const;
+#ifdef LAMARC_QA_TREE_DUMP
+    virtual bool           GetWriteArgFiles()      const {return m_writeArgFiles;};
+    virtual bool           GetWriteManyArgs()      const {return m_writeManyArgs;};
+#endif // LAMARC_QA_TREE_DUMP
+    virtual string         GetXMLOutFileName()     const {return m_xmlOutFileName;};
+    virtual string         GetXMLReportFileName()  const {return m_xmlReportFileName;};
+
+    /////////////////////////////////////////////////////////////
+    virtual void SetCurveFilePrefix(string x);
+    virtual void SetMapFilePrefix(string x);
+    virtual void SetReclocFilePrefix(string x);
+    virtual void SetTraceFilePrefix(string x);
+    virtual void SetNewickTreeFilePrefix(string x);
+#ifdef LAMARC_QA_TREE_DUMP
+    virtual void SetArgFilePrefix(string x);
+#endif // LAMARC_QA_TREE_DUMP
+    virtual void SetDataFileName(string x);
+    virtual void SetPlotPost(bool x)              {m_plotPost = x;};
+    virtual void SetProfilePrefix(string x);
+    virtual void SetProgress(verbosity_type x)    {m_progress = x;};
+    virtual void SetResultsFileName(string x);
+    virtual void SetTreeSumInFileName(string x);
+    virtual void SetTreeSumOutFileName(string x);
+    virtual void SetVerbosity(verbosity_type x)   {m_verbosity = x;};
+    virtual void SetWriteCurveFiles(bool x);
+    virtual void SetWriteReclocFiles(bool x);
+    virtual void SetWriteTraceFiles(bool x);
+    virtual void SetWriteNewickTreeFiles(bool x);
+#ifdef LAMARC_QA_TREE_DUMP
+    virtual void SetWriteArgFiles(bool x);
+    virtual void SetWriteManyArgs(bool x);
+#endif // LAMARC_QA_TREE_DUMP
+    virtual void SetReadSumFile(bool x);
+    virtual void SetWriteSumFile(bool x);
+    virtual void SetXMLOutFileName(string x);
+    virtual void SetXMLReportFileName(string x);
+    // these Setters have more "interesting" logic. See individual
+    // method implementations for details
+    virtual void SetProgramStartTime();
+    virtual void SetOldClockSeed(long x);
+    virtual void SetRandomSeed(long x);
+    virtual void SetUseOldClockSeed(bool x);
+    virtual void SetUseSystemClock(bool x);
+
+  private:
+    void CheckReadSumFile();
+    void CheckWriteSumFile();
+    bool CheckBothSumFiles();
+    void CheckNewickTreeFile();
+#ifdef LAMARC_QA_TREE_DUMP
+    void CheckArgFile();
+#endif // LAMARC_QA_TREE_DUMP
+    void CheckCurveFiles();
+    void CheckMapFiles();
+    void CheckReclocFiles();
+    void CheckTraceFiles();
+    void CheckPrefix(string x);
+};
+
+#endif  // UI_VARS_USERPARAMS_H
+
+//____________________________________________________________________________________
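[Editor's note: the Check*Files() helpers declared in this header (and implemented in ui_vars_userparams.cpp above) share one pattern: build the first output filename the run would produce, try to open it for reading, and warn if something is already there. A minimal standalone sketch of that pattern, using a hypothetical filename and plain stderr in place of the UI warning queue.]

    #include <fstream>
    #include <iostream>
    #include <string>

    // Sketch only: warn if a file the run would write already exists.
    void WarnIfWouldOverwrite(const std::string& filename)
    {
        std::ifstream existing(filename.c_str(), std::ios::in);
        if (existing)
        {
            std::cerr << "Warning: your current settings will overwrite \""
                      << filename << "\".\n";
        }
    }

    int main()
    {
        WarnIfWouldOverwrite("curvefile_reg1_Theta1.txt");  // hypothetical name
        return 0;
    }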
diff --git a/src/xml/lamarc_input_schema.h b/src/xml/lamarc_input_schema.h
new file mode 100644
index 0000000..7f1ea59
--- /dev/null
+++ b/src/xml/lamarc_input_schema.h
@@ -0,0 +1,245 @@
+// $Id: lamarc_input_schema.h,v 1.19 2010/03/17 17:26:00 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// NOTE -- this file is no longer used by lamarc. It is kept in the
+// cvs archive since it is a useful reference. -- ewalkup
+
+#ifndef LAMARC_INPUT_SCHEMA_H
+#define LAMARC_INPUT_SCHEMA_H
+
+#include <string>
+
+const std::string XML_SCHEMA_CONTENTS =
+
+    "<?xml version='1.0' encoding='UTF-8'?>\n"
+    "<xsd:schema xmlns:xsd='http://www.w3.org/2001/XMLSchema'>\n"
+    "\n"
+    "<!-- This is lamarc_input_schema.xsd -->\n"
+    "<!-- Please do not remove or alter this file. Lamarc places this file in the directory    -->\n"
+    "<!-- you invoke lamarc from and then uses it to check that your xml input file is correct -->\n"
+    "\n"
+    "<xsd:element name='lamarc' type='lamarcFormat' />\n"
+    "\n"
+    "<xsd:complexType name='lamarcFormat'>\n"
+    "    <xsd:all>\n"
+    "        <xsd:element name='data'     type='Data'    minOccurs='1' maxOccurs='1'/>\n"
+    "        <xsd:element name='forces'   type='Forces'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='chains'   type='Chains'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='format'   type='Format'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='model'    type='Model'   minOccurs='0' maxOccurs='1'/>\n"
+    "    </xsd:all>\n"
+    "</xsd:complexType>\n"
+    "\n"
+    "<xsd:complexType name='Chains'>\n"
+    "    <xsd:all>\n"
+    "        <xsd:element name='initial'  type='ChainInfo' minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='final'    type='ChainInfo' minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='heating'  minOccurs='0' maxOccurs='1'>\n"
+    "            <xsd:complexType>\n"
+    "                <xsd:all>\n"
+    "                    <xsd:element name='temperatures' type='xsd:string' minOccurs='0' maxOccurs='1'/>\n"
+    "                    <xsd:element name='swap-interval' type='xsd:positiveInteger' minOccurs='0' maxOccurs='1'/>\n"
+    "                    <xsd:element name='adaptive' type='xsd:boolean' minOccurs='0' maxOccurs='1'/>\n"
+    "                </xsd:all>\n"
+    "            </xsd:complexType>\n"
+    "        </xsd:element>\n"
+    "        <xsd:element name='replicates' type='xsd:positiveInteger' minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='strategy'     minOccurs='0' maxOccurs='1'>\n"
+    "            <xsd:complexType>\n"
+    "                <xsd:all>\n"
+    "                    <xsd:element name='haplotyping' type='xsd:decimal' minOccurs='0' maxOccurs='1'/>\n"
+    "                    <xsd:element name='resimulating' type='xsd:decimal' minOccurs='0' maxOccurs='1'/>\n"
+    "                </xsd:all>\n"
+    "            </xsd:complexType>\n"
+    "        </xsd:element>\n"
+    "    </xsd:all>\n"
+    "</xsd:complexType>\n"
+    "\n"
+    "<xsd:complexType name='ChainInfo'>\n"
+    "    <xsd:all>\n"
+    "        <xsd:element name='number'      type='xsd:nonNegativeInteger' minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='samples'     type='xsd:positiveInteger' minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='discard'     type='xsd:nonNegativeInteger' minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='interval'    type='xsd:positiveInteger' minOccurs='0' maxOccurs='1'/>\n"
+    "    </xsd:all>\n"
+    "</xsd:complexType>\n"
+    "\n"
+    "\n"
+    "<xsd:complexType name='Data'>\n"
+    "    <xsd:sequence>\n"
+    "        <xsd:element name='region' minOccurs='1' maxOccurs='unbounded'>\n"
+    "            <xsd:complexType>\n"
+    "                <xsd:sequence>\n"
+    "                    <xsd:element name='model' type='Model' minOccurs='0' maxOccurs='1'/>\n"
+    "                    <xsd:element name='tree' minOccurs='0' maxOccurs='1'>\n"
+    "                        <xsd:complexType>\n"
+    "                            <xsd:simpleContent>\n"
+    "                                <xsd:extension base='xsd:string'>\n"
+    "                                    <xsd:attribute name='type' type='xsd:string'/>\n"
+    "                                </xsd:extension>\n"
+    "                            </xsd:simpleContent>\n"
+    "                        </xsd:complexType>\n"
+    "                    </xsd:element>\n"
+    "                    <xsd:element name='spacing' minOccurs='0' maxOccurs='1'>\n"
+    "                        <xsd:complexType>\n"
+    "                            <xsd:sequence>\n"
+    "                                <xsd:element name='block' type='Block' minOccurs='1' maxOccurs='unbounded'/>\n"
+    "                            </xsd:sequence>\n"
+    "                        </xsd:complexType>\n"
+    "                    </xsd:element>\n"
+    "                    <xsd:element name='population' type='Population' minOccurs='1' maxOccurs='unbounded'/>\n"
+    "                </xsd:sequence>\n"
+    "                <xsd:attribute name='name' type='xsd:string'/>\n"
+    "            </xsd:complexType>\n"
+    "        </xsd:element>\n"
+    "    </xsd:sequence>\n"
+    "</xsd:complexType>\n"
+    "\n"
+    "<xsd:complexType name='Block'>\n"
+    "    <xsd:all>\n"
+    "        <xsd:element name='map-position' type='xsd:nonNegativeInteger' minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='length' type='xsd:positiveInteger' minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='locations' type='xsd:string' minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='offset' type='xsd:nonNegativeInteger' minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='marker-weights' type='xsd:string' minOccurs='0' maxOccurs='1'/>\n"
+    "    </xsd:all>\n"
+    "</xsd:complexType>\n"
+    "\n"
+    "<xsd:complexType name='Population'>\n"
+    "    <xsd:sequence>\n"
+    "        <xsd:element name='individual' minOccurs='0' maxOccurs='unbounded'>\n"
+    "            <xsd:complexType>\n"
+    "                <xsd:sequence>\n"
+    "                    <xsd:element name='phase' minOccurs='0' maxOccurs='unbounded'>\n"
+    "                        <xsd:complexType>\n"
+    "                            <xsd:simpleContent>\n"
+    "                                <xsd:extension base='xsd:string'>\n"
+    "                                    <xsd:attribute name='type' type='xsd:string'/>\n"
+    "                                </xsd:extension>\n"
+    "                            </xsd:simpleContent>\n"
+    "                        </xsd:complexType>\n"
+    "                    </xsd:element>\n"
+    "                    <xsd:element name='sample' minOccurs='1' maxOccurs='unbounded'>\n"
+    "                        <xsd:complexType>\n"
+    "                            <xsd:sequence>\n"
+    "                                <xsd:element name='datablock' minOccurs='1' maxOccurs='unbounded'>\n"
+    "                                    <xsd:complexType>\n"
+    "                                        <xsd:simpleContent>\n"
+    "                                            <xsd:extension base='xsd:string'>\n"
+    "                                                <xsd:attribute name='type' type='xsd:string'/>\n"
+    "                                            </xsd:extension>\n"
+    "                                        </xsd:simpleContent>\n"
+    "                                    </xsd:complexType>\n"
+    "                                </xsd:element>\n"
+    "                                <xsd:element name='status' minOccurs='0' maxOccurs='1'>\n"
+    "                                    <xsd:complexType>\n"
+    "                                        <xsd:sequence>\n"
+    "                                            <xsd:element name='disease-status' minOccurs='0' maxOccurs='unbounded'>\n"
+    "                                                <xsd:complexType>\n"
+    "                                                    <xsd:simpleContent>\n"
+    "                                                        <xsd:extension base='xsd:string'/>\n"
+    "                                                    </xsd:simpleContent>\n"
+    "                                                </xsd:complexType>\n"
+    "                                            </xsd:element>\n"
+    "                                        </xsd:sequence>\n"
+    "                                    </xsd:complexType>\n"
+    "                                </xsd:element>\n"
+    "                            </xsd:sequence>\n"
+    "                            <xsd:attribute name='name' type='xsd:string'/>\n"
+    "                        </xsd:complexType>\n"
+    "                    </xsd:element>\n"
+    "                </xsd:sequence>\n"
+    "                <xsd:attribute name='name' type='xsd:string'/>\n"
+    "            </xsd:complexType>\n"
+    "        </xsd:element>\n"
+    "    </xsd:sequence>\n"
+    "    <xsd:attribute name='name' type='xsd:string'/>\n"
+    "</xsd:complexType>\n"
+    "\n"
+    "<xsd:complexType name='Forces'>\n"
+    "    <xsd:all>\n"
+    "        <xsd:element name='coalescence'   type='ForceType'  minOccurs='1' maxOccurs='1'/>\n"
+    "        <xsd:element name='migration'     type='ForceType'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='recombination' type='ForceType'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='growth'        type='ForceType'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='logistic-selection' type='ForceType'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='disease'       type='ForceType'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='gamma-over-regions'       type='ForceType'  minOccurs='0' maxOccurs='1'/>\n"
+    "    </xsd:all>\n"
+    "</xsd:complexType>\n"
+    "\n"
+    "<xsd:complexType name='ForceType'>\n"
+    "    <xsd:all>\n"
+    "        <xsd:element name='start-values'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='method'        type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='max-events'    type='xsd:nonNegativeInteger'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='profiles'      type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='location'      type='xsd:integer' minOccurs='0' maxOccurs='1'/>\n"
+    "    </xsd:all>\n"
+    "    <xsd:attribute name='value' type='xsd:string'/>\n"
+    "</xsd:complexType>\n"
+    "\n"
+    "\n"
+    "<xsd:complexType name='Format'>\n"
+    "    <xsd:all>\n"
+    "        <xsd:element name='seed'  type='xsd:nonNegativeInteger'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='verbosity'  type='verbosity_type'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='progress-reports'  type='verbosity_type'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='echo'  type='xsd:boolean'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='parameter-file'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='results-file'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='summary-file'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='use-in-summary'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='in-summary-file'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='use-out-summary'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='out-summary-file'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='out-xml-file'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='plotting' minOccurs='0' maxOccurs='1'>\n"
+    "            <xsd:complexType>\n"
+    "                <xsd:all>\n"
+    "                    <xsd:element name='profile' type='xsd:string' minOccurs='0' maxOccurs='1'/>\n"
+    "                    <xsd:element name='posterior' type='xsd:string' minOccurs='0' maxOccurs='1'/>\n"
+    "                </xsd:all>\n"
+    "            </xsd:complexType>\n"
+    "        </xsd:element>\n"
+    "    </xsd:all>\n"
+    "</xsd:complexType>\n"
+    "\n"
+    "<xsd:simpleType name='verbosity_type'>\n"
+    "    <xsd:restriction base='xsd:string'>\n"
+    "        <xsd:pattern value='\\s*(verbose|normal|concise|none)\\s*'/>\n"
+    "    </xsd:restriction>\n"
+    "</xsd:simpleType>\n"
+    "\n"
+    "<xsd:complexType name='Model'>\n"
+    "    <xsd:all>\n"
+    "        <xsd:element name='base-freqs'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='gtr-rates'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='ttratio'  type='xsd:decimal'  minOccurs='0' maxOccurs='1'/>\n"
+    "        <xsd:element name='categories' minOccurs='0' maxOccurs='1'>\n"
+    "            <xsd:complexType>\n"
+    "                <xsd:all>\n"
+    "                    <xsd:element name='num-categories' type='xsd:positiveInteger' minOccurs='0' maxOccurs='1'/>\n"
+    "                    <xsd:element name='rates' type='xsd:string' minOccurs='0' maxOccurs='1'/>\n"
+    "                    <xsd:element name='probabilities' type='xsd:string' minOccurs='0' maxOccurs='1'/>\n"
+    "                    <xsd:element name='autocorrelation' type='xsd:decimal' minOccurs='0' maxOccurs='1'/>\n"
+    "                </xsd:all>\n"
+    "            </xsd:complexType>\n"
+    "        </xsd:element>\n"
+    "        <xsd:element name='normalize'  type='xsd:string'  minOccurs='0' maxOccurs='1'/>\n"
+    "    </xsd:all>\n"
+    "    <xsd:attribute name='name' type='xsd:string'/>\n"
+    "</xsd:complexType>\n"
+    "\n"
+    "</xsd:schema>\n";
+
+#endif // LAMARC_INPUT_SCHEMA_H
+
+//____________________________________________________________________________________
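For reference, the XML_SCHEMA_CONTENTS constant above is an ordinary std::string, so the embedded XSD can be written back out to disk if an external validator is wanted. A minimal sketch follows; the helper and the output file name are illustrative only, not something the lamarc sources provide (the header itself notes the file is no longer used by lamarc):

    #include <fstream>
    #include "lamarc_input_schema.h"

    // Hypothetical helper: dump the embedded schema string to a file so an
    // external XSD validator can read it.  Neither this function nor the
    // default file name exists in the lamarc sources.
    static void WriteSchemaCopy(const char* path = "lamarc_input_schema.xsd")
    {
        std::ofstream out(path);
        out << XML_SCHEMA_CONTENTS;   // the string literal defined above
    }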
diff --git a/src/xml/lamarcschema.cpp b/src/xml/lamarcschema.cpp
new file mode 100644
index 0000000..7124eaf
--- /dev/null
+++ b/src/xml/lamarcschema.cpp
@@ -0,0 +1,328 @@
+// $Id: lamarcschema.cpp,v 1.37 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "local_build.h"
+#include "parsetreeschema.h"
+#include "xml_strings.h"
+
+LamarcSchema::LamarcSchema()
+{
+    const bool required = true;
+    const bool optional = false;
+    const bool onlyone = true;
+    const bool many = false;
+    AddTag(xmlstr::XML_TAG_LAMARC);
+    AddAttribute(optional,xmlstr::XML_TAG_LAMARC,xmlstr::XML_ATTRTYPE_VERSION);
+
+    AddSubtag(required, onlyone, xmlstr::XML_TAG_LAMARC,xmlstr::XML_TAG_DATA);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_LAMARC,xmlstr::XML_TAG_FORCES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_LAMARC,xmlstr::XML_TAG_CHAINS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_LAMARC,xmlstr::XML_TAG_FORMAT);
+    AddSubtag(optional, many, xmlstr::XML_TAG_LAMARC,xmlstr::XML_TAG_MODEL);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORCES,xmlstr::XML_TAG_COALESCENCE);
+
+    //LS DEBUG:  is this valid? :
+    //AddAttribute(optional ,xmlstr::XML_TAG_COALESCENCE,xmlstr::XML_ATTRTYPE_VALUE);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_COALESCENCE,xmlstr::XML_TAG_START_VALUES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_COALESCENCE,xmlstr::XML_TAG_METHOD);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_COALESCENCE,xmlstr::XML_TAG_TRUEVALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_COALESCENCE,xmlstr::XML_TAG_MAX_EVENTS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_COALESCENCE,xmlstr::XML_TAG_CONSTRAINTS);
+    AddSubtag(optional, many, xmlstr::XML_TAG_COALESCENCE,xmlstr::XML_TAG_GROUP);
+    AddSubtag(optional, many, xmlstr::XML_TAG_COALESCENCE,xmlstr::XML_TAG_PRIOR);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_COALESCENCE,xmlstr::XML_TAG_PROFILES);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORCES,xmlstr::XML_TAG_DISEASE);
+    AddAttribute(optional ,xmlstr::XML_TAG_DISEASE,xmlstr::XML_ATTRTYPE_VALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DISEASE,xmlstr::XML_TAG_START_VALUES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DISEASE,xmlstr::XML_TAG_METHOD);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DISEASE,xmlstr::XML_TAG_MAX_EVENTS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DISEASE,xmlstr::XML_TAG_TRUEVALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DISEASE,xmlstr::XML_TAG_CONSTRAINTS);
+    AddSubtag(optional, many, xmlstr::XML_TAG_DISEASE,xmlstr::XML_TAG_GROUP);
+    AddSubtag(optional, many, xmlstr::XML_TAG_DISEASE,xmlstr::XML_TAG_PRIOR);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DISEASE,xmlstr::XML_TAG_PROFILES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DISEASE,xmlstr::XML_TAG_DISEASELOCATION);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORCES,xmlstr::XML_TAG_GROWTH);
+    AddAttribute(optional ,xmlstr::XML_TAG_GROWTH,xmlstr::XML_ATTRTYPE_TYPE);
+    AddAttribute(optional ,xmlstr::XML_TAG_GROWTH,xmlstr::XML_ATTRTYPE_VALUE);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_GROWTH,xmlstr::XML_TAG_START_VALUES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_GROWTH,xmlstr::XML_TAG_METHOD);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_GROWTH,xmlstr::XML_TAG_MAX_EVENTS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_GROWTH,xmlstr::XML_TAG_TRUEVALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_GROWTH,xmlstr::XML_TAG_CONSTRAINTS);
+    AddSubtag(optional, many, xmlstr::XML_TAG_GROWTH,xmlstr::XML_TAG_GROUP);
+    AddSubtag(optional, many, xmlstr::XML_TAG_GROWTH,xmlstr::XML_TAG_PRIOR);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_GROWTH,xmlstr::XML_TAG_PROFILES);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORCES,xmlstr::XML_TAG_LOGISTICSELECTION);
+    AddAttribute(optional ,xmlstr::XML_TAG_LOGISTICSELECTION,xmlstr::XML_ATTRTYPE_VALUE);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORCES,xmlstr::XML_TAG_STOCHASTICSELECTION);
+    AddAttribute(optional ,xmlstr::XML_TAG_STOCHASTICSELECTION,xmlstr::XML_ATTRTYPE_VALUE);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STOCHASTICSELECTION,xmlstr::XML_TAG_START_VALUES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STOCHASTICSELECTION,xmlstr::XML_TAG_METHOD);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STOCHASTICSELECTION,xmlstr::XML_TAG_MAX_EVENTS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STOCHASTICSELECTION,xmlstr::XML_TAG_TRUEVALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STOCHASTICSELECTION,xmlstr::XML_TAG_CONSTRAINTS);
+    AddSubtag(optional, many, xmlstr::XML_TAG_STOCHASTICSELECTION,xmlstr::XML_TAG_GROUP);
+    AddSubtag(optional, many, xmlstr::XML_TAG_STOCHASTICSELECTION,xmlstr::XML_TAG_PRIOR);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STOCHASTICSELECTION,xmlstr::XML_TAG_PROFILES);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_LOGISTICSELECTION,xmlstr::XML_TAG_START_VALUES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_LOGISTICSELECTION,xmlstr::XML_TAG_METHOD);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_LOGISTICSELECTION,xmlstr::XML_TAG_MAX_EVENTS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_LOGISTICSELECTION,xmlstr::XML_TAG_TRUEVALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_LOGISTICSELECTION,xmlstr::XML_TAG_CONSTRAINTS);
+    AddSubtag(optional, many, xmlstr::XML_TAG_LOGISTICSELECTION,xmlstr::XML_TAG_GROUP);
+    AddSubtag(optional, many, xmlstr::XML_TAG_LOGISTICSELECTION,xmlstr::XML_TAG_PRIOR);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_LOGISTICSELECTION,xmlstr::XML_TAG_PROFILES);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORCES,xmlstr::XML_TAG_MIGRATION);
+    AddAttribute(optional ,xmlstr::XML_TAG_MIGRATION,xmlstr::XML_ATTRTYPE_VALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MIGRATION,xmlstr::XML_TAG_START_VALUES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MIGRATION,xmlstr::XML_TAG_METHOD);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MIGRATION,xmlstr::XML_TAG_TRUEVALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MIGRATION,xmlstr::XML_TAG_MAX_EVENTS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MIGRATION,xmlstr::XML_TAG_CONSTRAINTS);
+    AddSubtag(optional, many, xmlstr::XML_TAG_MIGRATION,xmlstr::XML_TAG_GROUP);
+    AddSubtag(optional, many, xmlstr::XML_TAG_MIGRATION,xmlstr::XML_TAG_PRIOR);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MIGRATION,xmlstr::XML_TAG_PROFILES);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORCES,xmlstr::XML_TAG_DIVMIG);
+    AddAttribute(optional ,xmlstr::XML_TAG_DIVMIG,xmlstr::XML_ATTRTYPE_VALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVMIG,xmlstr::XML_TAG_START_VALUES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVMIG,xmlstr::XML_TAG_METHOD);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVMIG,xmlstr::XML_TAG_TRUEVALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVMIG,xmlstr::XML_TAG_MAX_EVENTS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVMIG,xmlstr::XML_TAG_CONSTRAINTS);
+    AddSubtag(optional, many, xmlstr::XML_TAG_DIVMIG,xmlstr::XML_TAG_GROUP);
+    AddSubtag(optional, many, xmlstr::XML_TAG_DIVMIG,xmlstr::XML_TAG_PRIOR);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVMIG,xmlstr::XML_TAG_PROFILES);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORCES,xmlstr::XML_TAG_DIVERGENCE);
+    AddAttribute(optional ,xmlstr::XML_TAG_DIVERGENCE,xmlstr::XML_ATTRTYPE_VALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVERGENCE,xmlstr::XML_TAG_START_VALUES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVERGENCE,xmlstr::XML_TAG_METHOD);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVERGENCE,xmlstr::XML_TAG_TRUEVALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVERGENCE,xmlstr::XML_TAG_MAX_EVENTS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVERGENCE,xmlstr::XML_TAG_CONSTRAINTS);
+    AddSubtag(optional, many, xmlstr::XML_TAG_DIVERGENCE,xmlstr::XML_TAG_GROUP);
+    AddSubtag(optional, many, xmlstr::XML_TAG_DIVERGENCE,xmlstr::XML_TAG_PRIOR);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVERGENCE,xmlstr::XML_TAG_PROFILES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_DIVERGENCE,xmlstr::XML_TAG_POPTREE);
+
+    AddSubtag(optional, many, xmlstr::XML_TAG_POPTREE,xmlstr::XML_TAG_EPOCH_BOUNDARY);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_EPOCH_BOUNDARY,xmlstr::XML_TAG_NEWPOP);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_EPOCH_BOUNDARY,xmlstr::XML_TAG_ANCESTOR);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORCES,xmlstr::XML_TAG_RECOMBINATION);
+    AddAttribute(optional ,xmlstr::XML_TAG_RECOMBINATION,xmlstr::XML_ATTRTYPE_VALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_RECOMBINATION,xmlstr::XML_TAG_START_VALUES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_RECOMBINATION,xmlstr::XML_TAG_METHOD);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_RECOMBINATION,xmlstr::XML_TAG_TRUEVALUE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_RECOMBINATION,xmlstr::XML_TAG_MAX_EVENTS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_RECOMBINATION,xmlstr::XML_TAG_CONSTRAINTS);
+    AddSubtag(optional, many, xmlstr::XML_TAG_RECOMBINATION,xmlstr::XML_TAG_GROUP);
+    AddSubtag(optional, many, xmlstr::XML_TAG_RECOMBINATION,xmlstr::XML_TAG_PRIOR);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_RECOMBINATION,xmlstr::XML_TAG_PROFILES);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORCES,xmlstr::XML_TAG_REGION_GAMMA);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_REGION_GAMMA,xmlstr::XML_TAG_START_VALUES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_REGION_GAMMA,xmlstr::XML_TAG_PROFILES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_REGION_GAMMA,xmlstr::XML_TAG_CONSTRAINTS);
+
+    AddAttribute(optional, xmlstr::XML_TAG_GROUP,xmlstr::XML_ATTRTYPE_CONSTRAINT);
+
+    AddAttribute(required, xmlstr::XML_TAG_PRIOR,xmlstr::XML_ATTRTYPE_TYPE);
+    AddSubtag(required, onlyone, xmlstr::XML_TAG_PRIOR,xmlstr::XML_TAG_PRIORLOWERBOUND);
+    AddSubtag(required, onlyone, xmlstr::XML_TAG_PRIOR,xmlstr::XML_TAG_PRIORUPPERBOUND);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_PRIOR,xmlstr::XML_TAG_PARAMINDEX);
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_PRIOR,xmlstr::XML_TAG_RELATIVE_SAMPLE_RATE);
+#endif
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_CHAINS,xmlstr::XML_TAG_REPLICATES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_CHAINS,xmlstr::XML_TAG_HEATING);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_CHAINS,xmlstr::XML_TAG_STRATEGY);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_CHAINS,xmlstr::XML_TAG_INITIAL);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_CHAINS,xmlstr::XML_TAG_FINAL);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_CHAINS,xmlstr::XML_TAG_BAYESIAN_ANALYSIS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_HEATING,xmlstr::XML_TAG_HEATING_STRATEGY);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_HEATING,xmlstr::XML_TAG_TEMPERATURES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_HEATING,xmlstr::XML_TAG_SWAP_INTERVAL);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STRATEGY,xmlstr::XML_TAG_BAYESIAN);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STRATEGY,xmlstr::XML_TAG_RESIMULATING);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STRATEGY,xmlstr::XML_TAG_HAPLOTYPING);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STRATEGY,xmlstr::XML_TAG_LOCUSARRANGER);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STRATEGY,xmlstr::XML_TAG_TREESIZE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_STRATEGY,xmlstr::XML_TAG_EPOCHSIZEARRANGER);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_INITIAL,xmlstr::XML_TAG_NUMBER);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_INITIAL,xmlstr::XML_TAG_SAMPLES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_INITIAL,xmlstr::XML_TAG_DISCARD);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_INITIAL,xmlstr::XML_TAG_INTERVAL);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FINAL, xmlstr::XML_TAG_NUMBER);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FINAL, xmlstr::XML_TAG_SAMPLES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FINAL, xmlstr::XML_TAG_DISCARD);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FINAL, xmlstr::XML_TAG_INTERVAL);
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_PROGRESS_REPORTS);
+    DeprecatedSubtag(xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_ECHO);
+    DeprecatedSubtag(xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_PARAMETER_FILE);
+    DeprecatedSubtag(xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_OLD_SUMMARY_FILE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_VERBOSITY);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_PLOTTING);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_SEED);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_SEED_FROM_CLOCK);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_RESULTS_FILE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_IN_SUMMARY_FILE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_OUT_SUMMARY_FILE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_OUT_XML_FILE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_REPORT_XML_FILE);
+
+#ifdef LAMARC_QA_TREE_DUMP
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_ARGFILE_PREFIX);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_MANY_ARGFILES);
+#endif // LAMARC_QA_TREE_DUMP
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_CURVEFILE_PREFIX);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_NEWICKTREEFILE_PREFIX);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_PROFILE_PREFIX);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_RECLOCFILE_PREFIX);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_TRACEFILE_PREFIX);
+
+#ifdef LAMARC_QA_TREE_DUMP
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_USE_ARGFILES);
+#endif // LAMARC_QA_TREE_DUMP
+
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_USE_CURVEFILES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_USE_IN_SUMMARY);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_USE_NEWICKTREEFILE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_USE_OUT_SUMMARY);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_USE_RECLOCFILE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_USE_TRACEFILE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_FORMAT, xmlstr::XML_TAG_CONVERT_OUTPUT);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_PLOTTING, xmlstr::XML_TAG_PROFILE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_PLOTTING, xmlstr::XML_TAG_POSTERIOR);
+
+    AddSubtag(required, many, xmlstr::XML_TAG_DATA,xmlstr::XML_TAG_REGION);
+
+    AddAttribute(optional, xmlstr::XML_TAG_REGION,xmlstr::XML_ATTRTYPE_NAME);
+    AddSubtag(optional, many, xmlstr::XML_TAG_REGION,xmlstr::XML_TAG_MODEL);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_REGION,xmlstr::XML_TAG_SPACING);
+    AddSubtag(required, many, xmlstr::XML_TAG_REGION,xmlstr::XML_TAG_POPULATION);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_REGION,xmlstr::XML_TAG_EFFECTIVE_POPSIZE);
+
+    // Turn on Newick tree and ARG input.
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_REGION,xmlstr::XML_TAG_TREE);
+    AddAttribute(required, xmlstr::XML_TAG_TREE,xmlstr::XML_ATTRTYPE_TYPE);
+    AddSubtag(optional, many, xmlstr::XML_TAG_TREE,xmlstr::XML_TAG_KEY);
+    AddAttribute(required, xmlstr::XML_TAG_KEY,xmlstr::XML_ATTRTYPE_ID);
+    AddAttribute(required, xmlstr::XML_TAG_KEY,xmlstr::XML_ATTRTYPE_FOR);
+    AddAttribute(required, xmlstr::XML_TAG_KEY,xmlstr::XML_ATTRTYPE_ATTR_NAME);
+    AddAttribute(required, xmlstr::XML_TAG_KEY,xmlstr::XML_ATTRTYPE_ATTR_TYPE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_TREE,xmlstr::XML_TAG_GRAPH);
+    AddAttribute(required, xmlstr::XML_TAG_GRAPH,xmlstr::XML_ATTRTYPE_ID);
+    AddAttribute(required, xmlstr::XML_TAG_GRAPH,xmlstr::XML_ATTRTYPE_EDGEDEFAULT);
+    AddSubtag(optional, many, xmlstr::XML_TAG_GRAPH,xmlstr::XML_TAG_NODE);
+    AddAttribute(required, xmlstr::XML_TAG_NODE,xmlstr::XML_ATTRTYPE_ID);
+    AddSubtag(required, many, xmlstr::XML_TAG_NODE,xmlstr::XML_TAG_ARGDATA);
+    AddAttribute(required, xmlstr::XML_TAG_ARGDATA,xmlstr::XML_ATTRTYPE_KEY);
+    AddSubtag(optional, many, xmlstr::XML_TAG_GRAPH,xmlstr::XML_TAG_EDGE);
+    AddAttribute(required, xmlstr::XML_TAG_EDGE,xmlstr::XML_ATTRTYPE_SOURCE);
+    AddAttribute(required, xmlstr::XML_TAG_EDGE,xmlstr::XML_ATTRTYPE_TARGET);
+    AddSubtag(required, many, xmlstr::XML_TAG_EDGE,xmlstr::XML_TAG_ARGDATA);
+
+    //The 'traits' tag
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_REGION,xmlstr::XML_TAG_TRAITS);
+    AddSubtag(required, many, xmlstr::XML_TAG_TRAITS,xmlstr::XML_TAG_TRAIT);
+    AddSubtag(required, onlyone, xmlstr::XML_TAG_TRAIT,xmlstr::XML_TAG_NAME);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_TRAIT,xmlstr::XML_TAG_ANALYSIS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_TRAIT,xmlstr::XML_TAG_POSSIBLE_LOCATIONS);
+    AddSubtag(required, many, xmlstr::XML_TAG_POSSIBLE_LOCATIONS,xmlstr::XML_TAG_RANGE);
+    AddSubtag(required, onlyone, xmlstr::XML_TAG_RANGE,xmlstr::XML_TAG_START);
+    AddSubtag(required, onlyone, xmlstr::XML_TAG_RANGE,xmlstr::XML_TAG_END);
+
+    //DeprecatedSubtag(xmlstr::XML_TAG_TRAITS,xmlstr::XML_TAG_POSSIBLE_ALLELES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_TRAIT,xmlstr::XML_TAG_PHENOTYPES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_TRAIT,xmlstr::XML_TAG_MODEL);
+    AddSubtag(required, many, xmlstr::XML_TAG_PHENOTYPES,xmlstr::XML_TAG_GENOTYPE);
+    AddSubtag(required, onlyone, xmlstr::XML_TAG_GENOTYPE,xmlstr::XML_TAG_ALLELES);
+    AddSubtag(required, many, xmlstr::XML_TAG_GENOTYPE,xmlstr::XML_TAG_PHENOTYPE);
+    AddSubtag(required, many, xmlstr::XML_TAG_PHENOTYPE,xmlstr::XML_TAG_PHENOTYPE_NAME);
+    AddSubtag(required, many, xmlstr::XML_TAG_PHENOTYPE,xmlstr::XML_TAG_PENETRANCE);
+
+    AddAttribute(optional, xmlstr::XML_TAG_POPULATION,xmlstr::XML_ATTRTYPE_NAME);
+    AddSubtag(optional, many, xmlstr::XML_TAG_POPULATION,xmlstr::XML_TAG_INDIVIDUAL);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_POPULATION,xmlstr::XML_TAG_PANEL);
+
+    AddAttribute(optional, xmlstr::XML_TAG_INDIVIDUAL,xmlstr::XML_ATTRTYPE_NAME);
+    AddSubtag(required, many, xmlstr::XML_TAG_INDIVIDUAL,xmlstr::XML_TAG_SAMPLE);
+
+    AddAttribute(optional, xmlstr::XML_TAG_PANEL,xmlstr::XML_ATTRTYPE_NAME);
+    AddAttribute(optional, xmlstr::XML_TAG_PANEL,xmlstr::XML_ATTRTYPE_LOCUS_NAME);
+    AddSubtag(required, onlyone, xmlstr::XML_TAG_PANEL,xmlstr::XML_TAG_PANELSIZE);
+
+    // LS DEBUG -- fill in hierarchy for genotype resolutions
+    AddSubtag(optional, many, xmlstr::XML_TAG_INDIVIDUAL,xmlstr::XML_TAG_GENOTYPE_RESOLUTIONS);
+    AddSubtag(required, onlyone, xmlstr::XML_TAG_GENOTYPE_RESOLUTIONS, xmlstr::XML_TAG_TRAIT_NAME);
+    AddSubtag(required, many, xmlstr::XML_TAG_GENOTYPE_RESOLUTIONS, xmlstr::XML_TAG_HAPLOTYPES);
+    AddSubtag(required, onlyone, xmlstr::XML_TAG_HAPLOTYPES, xmlstr::XML_TAG_ALLELES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_HAPLOTYPES, xmlstr::XML_TAG_PENETRANCE);
+    AddSubtag(optional, many, xmlstr::XML_TAG_INDIVIDUAL,xmlstr::XML_TAG_PHASE);
+    AddAttribute(optional, xmlstr::XML_TAG_PHASE,xmlstr::XML_ATTRTYPE_TYPE);
+
+    AddAttribute(optional, xmlstr::XML_TAG_SAMPLE,xmlstr::XML_ATTRTYPE_NAME);
+    AddSubtag(required, many, xmlstr::XML_TAG_SAMPLE,xmlstr::XML_TAG_DATABLOCK);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_SAMPLE,xmlstr::XML_TAG_STATUS);
+    AddSubtag(optional, many, xmlstr::XML_TAG_STATUS,xmlstr::XML_TAG_DISEASESTATUS);
+
+    AddAttribute(required, xmlstr::XML_TAG_DATABLOCK,xmlstr::XML_ATTRTYPE_TYPE);
+    AddAttribute(optional, xmlstr::XML_TAG_DATABLOCK,xmlstr::XML_ATTRTYPE_SOURCE);
+    AddAttribute(optional, xmlstr::XML_TAG_DATABLOCK,xmlstr::XML_ATTRTYPE_LOCUS_NAME);
+
+    AddAttribute(required, xmlstr::XML_TAG_MODEL,xmlstr::XML_ATTRTYPE_NAME);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MODEL,xmlstr::XML_TAG_AUTOCORRELATION);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MODEL,xmlstr::XML_TAG_BASE_FREQS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MODEL,xmlstr::XML_TAG_GTRRATES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MODEL,xmlstr::XML_TAG_CATEGORIES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MODEL,xmlstr::XML_TAG_NORMALIZE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MODEL,xmlstr::XML_TAG_TTRATIO);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MODEL,xmlstr::XML_TAG_RELATIVE_MURATE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MODEL,xmlstr::XML_TAG_ALPHA);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MODEL,xmlstr::XML_TAG_ISOPT);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_MODEL,xmlstr::XML_TAG_PER_BASE_ERROR_RATE);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_CATEGORIES,xmlstr::XML_TAG_NUM_CATEGORIES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_CATEGORIES,xmlstr::XML_TAG_RATES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_CATEGORIES,xmlstr::XML_TAG_PROBABILITIES);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_CATEGORIES,xmlstr::XML_TAG_AUTOCORRELATION);
+
+    AddSubtag(required, many, xmlstr::XML_TAG_SPACING,xmlstr::XML_TAG_BLOCK);
+
+    AddAttribute(optional, xmlstr::XML_TAG_BLOCK,xmlstr::XML_ATTRTYPE_NAME);
+    DeprecatedSubtag(xmlstr::XML_TAG_BLOCK,xmlstr::XML_TAG_MARKER_WEIGHTS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_BLOCK,xmlstr::XML_TAG_MAP_POSITION);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_BLOCK,xmlstr::XML_TAG_LENGTH);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_BLOCK,xmlstr::XML_TAG_LOCATIONS);
+    AddSubtag(optional, onlyone, xmlstr::XML_TAG_BLOCK,xmlstr::XML_TAG_OFFSET);
+
+}
+
+LamarcSchema::~LamarcSchema()
+{
+}
+
+//____________________________________________________________________________________
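The constructor above relies on a simple convention: the first boolean passed to AddSubtag says whether the child tag is required, the second whether only one occurrence is allowed, and DeprecatedSubtag registers a tag that is still accepted but flagged for a warning. A minimal sketch of the same pattern with hypothetical tag names (the real lamarc tags come from xml_strings.h):

    #include "parsetreeschema.h"

    // Toy schema, for illustration only: <toy> with exactly one <title>,
    // any number of <note> children, an optional version attribute, and a
    // deprecated <old-note> child.  None of these tags exist in lamarc.
    class ToySchema : public ParseTreeSchema
    {
      public:
        ToySchema()
        {
            const bool required = true;
            const bool optional = false;
            const bool onlyone  = true;
            const bool many     = false;

            AddTag("toy");
            AddSubtag(required, onlyone, "toy", "title");
            AddSubtag(optional, many,    "toy", "note");
            AddAttribute(optional, "toy", "version");
            DeprecatedSubtag("toy", "old-note");
        }
    };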
diff --git a/src/xml/parsetreeschema.cpp b/src/xml/parsetreeschema.cpp
new file mode 100644
index 0000000..06dcf59
--- /dev/null
+++ b/src/xml/parsetreeschema.cpp
@@ -0,0 +1,429 @@
+// $Id: parsetreeschema.cpp,v 1.6 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <iostream>
+
+#include "errhandling.h"
+#include "parsetreeschema.h"
+#include "xml_strings.h"
+
+//------------------------------------------------------------------------------------
+
+ElementInfo::ElementInfo(const string child)
+    :
+    m_child(child),
+    m_required(false),
+    m_oneOnly(false),
+    m_deprecated(true)
+{
+}
+
+ElementInfo::ElementInfo(const string child, bool required, bool oneOnly)
+    :
+    m_child(child),
+    m_required(required),
+    m_oneOnly(oneOnly),
+    m_deprecated(false)
+{
+}
+
+ElementInfo::~ElementInfo()
+{
+}
+
+const string &
+ElementInfo::GetChild() const
+{
+    return m_child;
+}
+
+bool
+ElementInfo::GetRequired() const
+{
+    return m_required;
+}
+
+bool
+ElementInfo::GetOneOnly() const
+{
+    return m_oneOnly;
+}
+
+bool
+ElementInfo::GetDeprecated() const
+{
+    return m_deprecated;
+}
+
+string
+ElementInfo::DebugDump() const
+{
+    return "elem";
+}
+
+string
+AttributeInfo::DebugDump() const
+{
+    return "attr";
+}
+
+bool
+ElementInfoCompare::operator()(const ElementInfo e1, const ElementInfo e2) const
+{
+    string s1 = e1.GetChild();
+    string s2 = e2.GetChild();
+    int val = s1.compare(s2);
+    return (val < 0);
+}
+
+bool
+AttributeInfoCompare::operator()(const AttributeInfo a1, const AttributeInfo a2) const
+{
+    string s1 = a1.GetName();
+    string s2 = a2.GetName();
+    int val = s1.compare(s2);
+    return (val < 0);
+}
+
+AttributeInfo::AttributeInfo(const string name, bool required)
+    :
+    m_name(name),
+    m_required(required)
+{
+}
+
+AttributeInfo::~AttributeInfo()
+{
+}
+
+const string &
+AttributeInfo::GetName() const
+{
+    return m_name;
+}
+
+bool
+AttributeInfo::GetRequired() const
+{
+    return m_required;
+}
+
+VerificationInfo::VerificationInfo()
+{
+}
+
+VerificationInfo::~VerificationInfo()
+{
+}
+
+attrSet::const_iterator
+VerificationInfo::findAttr(const string attrName) const
+{
+    AttributeInfo a(attrName,false);
+    return m_attributeInfo.find(a);
+}
+
+elemSet::const_iterator
+VerificationInfo::findElem(const string elemName) const
+{
+    ElementInfo e(elemName,false,false);
+    return m_elementInfo.find(e);
+}
+
+std::set<string>
+VerificationInfo::RequiredAttributes() const
+{
+    std::set<string> requireds;
+
+    for(attrSet::const_iterator i = m_attributeInfo.begin();
+        i != m_attributeInfo.end();
+        i++)
+    {
+        const AttributeInfo & attrRef = *i;
+        if(attrRef.GetRequired())
+        {
+            requireds.insert(attrRef.GetName());
+        }
+    }
+    return requireds;
+}
+
+std::set<string>
+VerificationInfo::RequiredElements() const
+{
+    std::set<string> requireds;
+
+    for(elemSet::const_iterator i = m_elementInfo.begin();
+        i != m_elementInfo.end();
+        i++)
+    {
+        const ElementInfo & elemRef = *i;
+        if(elemRef.GetRequired())
+        {
+            requireds.insert(elemRef.GetChild());
+        }
+    }
+    return requireds;
+}
+
+bool
+VerificationInfo::AllowsAttribute(string attrName) const
+{
+    attrSet::const_iterator iter = findAttr(attrName);
+    return (iter != m_attributeInfo.end());
+}
+
+bool
+VerificationInfo::AllowsElement(string elemName) const
+{
+    elemSet::const_iterator iter = findElem(elemName);
+    return (iter != m_elementInfo.end());
+}
+
+bool
+VerificationInfo::AllowsAdditionalElements(string elemName) const
+{
+    elemSet::const_iterator iter = findElem(elemName);
+    if (iter != m_elementInfo.end())
+    {
+        const ElementInfo & elem = *iter;
+        return !(elem.GetOneOnly());
+    }
+    return false;
+
+}
+
+bool
+VerificationInfo::DeprecatedElement(string elemName) const
+{
+    elemSet::const_iterator iter = findElem(elemName);
+    if (iter != m_elementInfo.end())
+    {
+        const ElementInfo & elem = *iter;
+        return (elem.GetDeprecated());
+    }
+    return false;
+
+}
+
+void
+VerificationInfo::AddAttribute(bool required, string parentName, string name)
+{
+    attrSet::const_iterator i = findAttr(name);
+    if(i != m_attributeInfo.end())
+    {
+        throw implementation_error(xmlstr::XML_IERR_DUP_ATTR_0
+                                   + name
+                                   + xmlstr::XML_IERR_DUP_ATTR_1
+                                   + parentName
+                                   + xmlstr::XML_IERR_DUP_ATTR_2);
+    }
+    m_attributeInfo.insert(AttributeInfo(name,required));
+}
+
+void
+VerificationInfo::AddElement(bool required, bool onlyOne,
+                             string parentName, string name)
+{
+    elemSet::const_iterator i = findElem(name);
+    if(i != m_elementInfo.end())
+    {
+        throw implementation_error(xmlstr::XML_IERR_DUP_CHILD_0
+                                   + name
+                                   + xmlstr::XML_IERR_DUP_CHILD_1
+                                   + parentName
+                                   + xmlstr::XML_IERR_DUP_CHILD_2);
+    }
+    m_elementInfo.insert(ElementInfo(name,required,onlyOne));
+}
+
+void
+VerificationInfo::AddDeprecatedElement(string parentName, string name)
+{
+    elemSet::const_iterator i = findElem(name);
+    if(i != m_elementInfo.end())
+    {
+        throw implementation_error(xmlstr::XML_IERR_DUP_CHILD_0
+                                   + name
+                                   + xmlstr::XML_IERR_DUP_CHILD_1
+                                   + parentName
+                                   + xmlstr::XML_IERR_DUP_CHILD_2);
+    }
+    m_elementInfo.insert(ElementInfo(name));
+}
+
+string
+VerificationInfo::DebugDump() const
+{
+    string retVal = "";
+    elemSet::const_iterator i;
+    for(i=m_elementInfo.begin(); i != m_elementInfo.end(); i++)
+    {
+        const ElementInfo & elem = *i;
+        retVal += elem.GetChild();
+        retVal += " ";
+    }
+    return retVal;
+}
+
+void
+ParseTreeSchema::AddAttribute(bool required, string parent, string attr)
+{
+    std::map<string,VerificationInfo>::iterator i = m_schema.find(parent);
+    if(i == m_schema.end())
+    {
+        m_schema[parent] = VerificationInfo();
+        i = m_schema.find(parent);
+        assert(i != m_schema.end());
+    }
+    ((*i).second).AddAttribute(required,parent,attr);
+}
+
+void
+ParseTreeSchema::AddTag(string tagName)
+{
+    // find the entry for tagName
+    std::map<string,VerificationInfo>::iterator i = m_schema.find(tagName);
+    if(i != m_schema.end())
+        // it already exists
+    {
+        throw implementation_error(xmlstr::XML_IERR_DUP_TAG_0
+                                   + tagName
+                                   + xmlstr::XML_IERR_DUP_TAG_1);
+    }
+
+    // create new item
+    m_schema[tagName] = VerificationInfo();
+}
+
+void
+ParseTreeSchema::AddSubtag( bool required,
+                            bool onlyOne,
+                            string parent,
+                            string child)
+{
+    // find the schema entry for parent
+    std::map<string,VerificationInfo>::iterator i = m_schema.find(parent);
+    if(i == m_schema.end())
+    {
+        throw implementation_error(xmlstr::XML_IERR_NO_PARENT_TAG_0
+                                   + child
+                                   + xmlstr::XML_IERR_NO_PARENT_TAG_1
+                                   + parent
+                                   + xmlstr::XML_IERR_NO_PARENT_TAG_2);
+
+    }
+
+    // add new info to parent
+    ((*i).second).AddElement(required,onlyOne,parent,child);
+
+    // add blank tag for child if necessary
+    if(m_schema.find(child) == m_schema.end())
+    {
+        AddTag(child);
+    }
+}
+
+void
+ParseTreeSchema::DeprecatedSubtag(string parent, string child)
+{
+    // find the schema entry for parent
+    std::map<string,VerificationInfo>::iterator i = m_schema.find(parent);
+    if(i == m_schema.end())
+    {
+        throw implementation_error(xmlstr::XML_IERR_NO_PARENT_TAG_0
+                                   + child
+                                   + xmlstr::XML_IERR_NO_PARENT_TAG_1
+                                   + parent
+                                   + xmlstr::XML_IERR_NO_PARENT_TAG_2);
+
+    }
+
+    // add new info to parent
+    ((*i).second).AddDeprecatedElement(parent,child);
+
+    // add blank tag for child if necessary
+    if(m_schema.find(child) == m_schema.end())
+    {
+        AddTag(child);
+    }
+}
+
+void
+ParseTreeSchema::DebugDump(const string header) const
+{
+    std::cout << header << std::endl;
+    std::map<string,VerificationInfo>::const_iterator i;
+    for(i=m_schema.begin(); i != m_schema.end(); i++)
+    {
+        const string tagName = (*i).first;
+        const VerificationInfo & veri = (*i).second;
+
+        std::cout << "\t" << tagName << " " << veri.DebugDump() << std::endl;
+    }
+}
+
+ParseTreeSchema::ParseTreeSchema()
+{
+}
+
+ParseTreeSchema::~ParseTreeSchema()
+{
+}
+
+const VerificationInfo &
+ParseTreeSchema::getInfo(const string parent,int lineNo) const
+{
+    std::map<string,VerificationInfo>::const_iterator i = m_schema.find(parent);
+    if(i == m_schema.end())
+    {
+        throw unrecognized_tag_error(parent,lineNo);
+    }
+    return (*i).second;
+}
+
+bool
+ParseTreeSchema::AllowsAttribute(const string parent, const string attr, int lineNo) const
+{
+    return getInfo(parent,lineNo).AllowsAttribute(attr);
+}
+
+bool
+ParseTreeSchema::AllowsElement(const string parent, const string child, int lineNo) const
+{
+    return getInfo(parent,lineNo).AllowsElement(child);
+}
+
+bool
+ParseTreeSchema::AllowsAdditionalElements(const string parent, const string child, int lineNo) const
+{
+    return getInfo(parent,lineNo).AllowsAdditionalElements(child);
+}
+
+bool
+ParseTreeSchema::DeprecatedElement(const string parent, const string child, int lineNo) const
+{
+    return getInfo(parent,lineNo).DeprecatedElement(child);
+}
+
+std::set<string>
+ParseTreeSchema::RequiredAttributes(const string parent, int lineNo) const
+{
+    return getInfo(parent,lineNo).RequiredAttributes();
+}
+
+std::set<string>
+ParseTreeSchema::RequiredElements(const string parent, int lineNo) const
+{
+    return getInfo(parent,lineNo).RequiredElements();
+}
+
+//____________________________________________________________________________________
diff --git a/src/xml/parsetreeschema.h b/src/xml/parsetreeschema.h
new file mode 100644
index 0000000..3a4e319
--- /dev/null
+++ b/src/xml/parsetreeschema.h
@@ -0,0 +1,141 @@
+// $Id: parsetreeschema.h,v 1.5 2011/03/07 06:08:54 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef PARSETREESCHEMA_H
+#define PARSETREESCHEMA_H
+
+#include <map>
+#include <set>
+#include <string>
+
+using std::string;
+
+class ElementInfo
+{
+  private:
+    ElementInfo();      // undefined
+
+    string  m_child;
+    bool    m_required;     // if true, parent requires this child element
+    bool    m_oneOnly;      // if true, parent allows only one of this child element
+    bool    m_deprecated;   // if true, ignore this element and issue a warning
+  protected:
+
+  public:
+    ElementInfo(const string child);
+    ElementInfo(const string child, bool required, bool oneOnly);
+    virtual ~ElementInfo() ;
+
+    const string & GetChild()       const ;
+    bool     GetRequired()    const ;
+    bool     GetOneOnly()     const ;
+    bool     GetDeprecated()  const ;
+
+    string DebugDump() const;
+};
+
+class AttributeInfo
+{
+  private:
+    AttributeInfo();      // undefined
+
+    string  m_name;
+    bool    m_required;
+  protected:
+
+  public:
+    AttributeInfo(const string name, bool required);
+    virtual ~AttributeInfo() ;
+
+    const string & GetName()    const ;
+    bool     GetRequired() const;
+
+    string DebugDump() const;
+};
+
+struct ElementInfoCompare
+{
+    bool operator()(const ElementInfo, const ElementInfo) const;
+};
+
+struct AttributeInfoCompare
+{
+    bool operator()(const AttributeInfo, const AttributeInfo) const;
+};
+
+typedef std::set<AttributeInfo,AttributeInfoCompare>    attrSet;
+typedef std::set<ElementInfo,  ElementInfoCompare>      elemSet;
+
+class VerificationInfo
+{
+  private:
+    attrSet                     m_attributeInfo;
+    elemSet                     m_elementInfo;
+  protected:
+    attrSet::const_iterator findAttr(const string) const;
+    elemSet::const_iterator   findElem(const string) const;
+  public:
+    VerificationInfo() ;
+    virtual ~VerificationInfo() ;
+
+    std::set<string>  RequiredAttributes() const;
+    std::set<string>  RequiredElements() const;
+
+    bool            AllowsAttribute(const string name) const;
+    bool            AllowsElement(const string name) const;
+    bool            AllowsAdditionalElements(const string name) const;
+    bool            DeprecatedElement(const string name) const;
+
+    void    AddAttribute(bool required, string parentName, string name);
+    void    AddElement(bool required, bool onlyOne, string parentName, string name);
+    void    AddDeprecatedElement(string parentName, string name);
+
+    string DebugDump() const;
+};
+
+class ParseTreeSchema
+{
+  private:
+    std::map<string,VerificationInfo>   m_schema;
+
+  protected:
+    void                AddAttribute(bool required, string parent, string attr);
+    void                AddSubtag   (bool required, bool onlyOne,  string parent, string child);
+    void                DeprecatedSubtag(string parent, string child);
+    void                AddTag      (string tagName);
+    const VerificationInfo &  getInfo(const string parent, int lineNo) const;
+
+  public:
+    ParseTreeSchema();
+    virtual ~ParseTreeSchema();
+
+    std::set<string>  RequiredAttributes(const string parent, int lineNo) const;
+    std::set<string>  RequiredElements(const string parent, int lineNo) const;
+
+    bool            AllowsAttribute(const string parent, const string name, int lineNo) const;
+    bool            AllowsElement(const string parent, const string child, int lineNo) const;
+    bool            AllowsAdditionalElements(const string parent, const string child, int lineNo) const;
+    bool            DeprecatedElement(const string parent, const string child, int lineNo) const;
+
+    void DebugDump(const string) const;
+};
+
+class LamarcSchema  : public ParseTreeSchema
+{
+  private:
+  protected:
+  public:
+    LamarcSchema();
+    virtual ~LamarcSchema();
+};
+
+#endif // PARSETREESCHEMA_H
+
+//____________________________________________________________________________________
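On the consuming side, a parse-tree walker can query the schema one element at a time. A minimal sketch of such a check is below; the helper is hypothetical, and the tag names in the usage comment are assumptions about what the xmlstr constants resolve to. Note that getInfo() throws unrecognized_tag_error when the parent tag is unknown, so lineNo exists purely for error reporting:

    #include <iostream>
    #include "parsetreeschema.h"

    // Hypothetical validation step for one parent/child pair; not a
    // function from the lamarc sources.
    void CheckChild(const ParseTreeSchema& schema,
                    const string& parent, const string& child, int lineNo)
    {
        if (!schema.AllowsElement(parent, child, lineNo))
        {
            std::cerr << "line " << lineNo << ": <" << child
                      << "> is not allowed inside <" << parent << ">\n";
        }
        else if (schema.DeprecatedElement(parent, child, lineNo))
        {
            std::cerr << "line " << lineNo << ": <" << child
                      << "> is deprecated and will be ignored\n";
        }
    }

    // Possible use, assuming the tag constants are the literal strings shown:
    //   LamarcSchema schema;
    //   CheckChild(schema, "format", "echo", 42);   // <echo> is registered as deprecated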
diff --git a/src/xml/parsetreetodata.cpp b/src/xml/parsetreetodata.cpp
new file mode 100644
index 0000000..f03e4dc
--- /dev/null
+++ b/src/xml/parsetreetodata.cpp
@@ -0,0 +1,1780 @@
+// $Id: parsetreetodata.cpp,v 1.57 2013/11/08 21:46:22 mkkuhner Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+#include <boost/algorithm/string.hpp>
+
+#include "argtree.h"
+#include "datapack.h"
+#include "datatype.h"                   // to set the datatype of a region and to proofread data
+#include "errhandling.h"
+#include "newick.h"                     // for UserTree and subclasses thereof
+#include "parsetreetodata.h"
+#include "random.h"
+#include "region.h"
+#include "registry.h"
+#include "stringx.h"
+#include "timex.h"
+#include "tree.h"
+#include "types.h"                      // for access to DataType's auto_ptr type
+#include "ui_strings.h"
+#include "xml.h"
+#include "xml_strings.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+ParseTreeToData::ParseTreeToData(XmlParser& parser, DataPack& dp)
+    :
+    ParseTreeWalker(parser),
+    datapack(dp),
+    m_pCurrRegion(NULL),
+    m_currpopulation(FLAGLONG),
+    m_currindno(FLAGLONG),
+    m_randomNameSource(new Random()),
+    m_migrationalForce(force_NONE)
+{
+    m_randomNameSource->Seed(GetTime());
+}
+
+//------------------------------------------------------------------------------------
+
+ParseTreeToData::~ParseTreeToData()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+string
+ParseTreeToData::XmlRandomLongAsString()
+{
+    return ToString(m_randomNameSource->Long(XML_RANDOM_NAME_LENGTH));
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::ProcessFileData()
+{
+    // MCHECK
+    TiXmlElement* docElement = m_parser.GetRootElement();
+    const char * tagValue = docElement->Value();
+    string tagString(tagValue);
+    bool matches = CaselessStrCmp(xmlstr::XML_TAG_LAMARC,tagValue);
+    if(!matches)
+    {
+        string msg = m_parser.GetFileName() + ": " + xmlstr::XML_ERR_NOT_LAMARC;
+        m_parser.ThrowDataError(msg);
+    }
+
+    // check to see if Divergence is in play
+    DiagnoseTagForMigrationalForce(m_parser.GetRootElement());
+
+    // parse the actual data
+    TiXmlElement* format = singleOptionalChild(m_parser.GetRootElement(),xmlstr::XML_TAG_FORMAT);
+    if (format != NULL)
+    {
+        DoFormat(format);
+    }
+    DoData(singleRequiredChild(m_parser.GetRootElement(),xmlstr::XML_TAG_DATA));
+
+    // now do some post-analysis of data
+
+    // at the moment this checks if there are multiple pops, and if
+    // there aren't it removes population info from the stored tips
+    datapack.RemoveUneededPartitions();
+
+    // if divergence is in effect, pre-parse Divergence force and modify DataPack
+    // accordingly (increasing number of partitions and xpartitions)
+    // parse down to <divergence> block if it exists
+    TiXmlElement * forcesElement =
+        singleOptionalChild(docElement,xmlstr::XML_TAG_FORCES);
+    if(forcesElement != NULL)
+    {
+        TiXmlElement * forceElement = singleOptionalChild(forcesElement,xmlstr::XML_TAG_DIVERGENCE);
+        if (forceElement != NULL)
+        {
+            TiXmlElement * epochsElement = singleRequiredChild(forceElement,xmlstr::XML_TAG_POPTREE);
+            TiXmlNode * child = NULL;
+            while((child = epochsElement->IterateChildren(xmlstr::XML_TAG_EPOCH_BOUNDARY, child)))
+            {
+                TiXmlElement * boundaryElement = child->ToElement();
+                TiXmlElement * ancestorElement =
+                    singleOptionalChild(boundaryElement,xmlstr::XML_TAG_ANCESTOR);
+                string epochname = getNodeText(ancestorElement);
+                datapack.AddPartition(force_DIVMIG,epochname);
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoFormat(TiXmlElement * formatElement)
+{
+    TiXmlElement* convertElement = singleOptionalChild(formatElement, xmlstr::XML_TAG_CONVERT_OUTPUT);
+    if (convertElement != NULL)
+    {
+        string convert = getNodeText(convertElement);
+        registry.SetConvertOutputToEliminateZeroes(ProduceBoolOrBarf(convert));
+    }
+}
+
+void
+ParseTreeToData::DoData(TiXmlElement * dataElement)
+{
+
+    TiXmlNode * child = NULL;
+    long regionNumber = 0;
+
+    while((child = dataElement->IterateChildren(xmlstr::XML_TAG_REGION,child)))
+    {
+        TiXmlElement * regionElement = child->ToElement();
+        DoRegion(regionElement,regionNumber);
+        regionNumber++;
+    }
+
+} // DoData
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoRegion(TiXmlElement * regionElement, long regionId)
+{
+
+    // set up a region
+    string regname = getNodeAttributeValue(regionElement,xmlstr::XML_ATTRTYPE_NAME);
+    if (datapack.IsDuplicateRegionName(regname))
+    {
+        string problem(xmlstr::XML_ERR_DUPLICATE_REGIONNAME_0);
+        problem += regname + xmlstr::XML_ERR_DUPLICATE_REGIONNAME_1;
+        m_parser.ThrowDataError(problem);
+    }
+    m_pCurrRegion = new Region(regname);
+    datapack.SetRegion(m_pCurrRegion);
+    m_currindno = 0;
+    m_pCurrRegion->SetSnpPanel(false);
+
+    DoPopulations(regionElement);
+
+    DoSpacing(singleOptionalChild(regionElement,xmlstr::XML_TAG_SPACING));
+
+    const Region *cnst_Reg =  m_pCurrRegion;// jrmhack
+    const Individual& testind2 = cnst_Reg->GetIndividual(0); // jrmhack
+    DoPhases(regionElement);
+
+    const Individual& testind3 = cnst_Reg->GetIndividual(0); // jrmhack
+    DoEffectivePopSize(
+        singleOptionalChild(regionElement,xmlstr::XML_TAG_EFFECTIVE_POPSIZE)
+        );
+
+    TiXmlElement * treeElement
+        = singleOptionalChild(regionElement,xmlstr::XML_TAG_TREE);
+    if (treeElement != NULL)
+    {
+        string treetype = getNodeAttributeValue(treeElement,xmlstr::XML_ATTRTYPE_TYPE);
+
+        if (treetype == xmlstr::XML_STRING_NEWICK)
+        {
+            m_pCurrRegion->SetUserTree(DoTree(treeElement));
+        }
+        else if (treetype == xmlstr::XML_STRING_ARG)
+        {
+            registry.SetARGfound(true);
+        }
+    }
+
+    string errorString;
+    if (!m_pCurrRegion->IsValidRegion(errorString))
+    {
+        m_parser.ThrowDataError(xmlstr::XML_ERR_INCONSISTENT_REGION +
+                                m_pCurrRegion->GetRegionName()
+                                + xmlstr::XML_STRING_COLON + errorString
+                                , regionElement->Row());
+    }
+
+} // DoRegion
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoPopulations(TiXmlElement* regionElement)
+{
+
+    TiXmlNode * child = NULL;
+    while((child = regionElement->IterateChildren(xmlstr::XML_TAG_POPULATION,child)))
+    {
+        TiXmlElement * populationElement = child->ToElement();
+        DoPopulation(populationElement);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoPopulation(TiXmlElement * populationElement)
+{
+    string popname = getNodeAttributeValue(populationElement,xmlstr::XML_ATTRTYPE_NAME);
+    m_currpopulation = datapack.AddPartition(TagForMigrationalForce(),popname);
+
+    TiXmlNode * child = NULL;
+    while((child = populationElement->IterateChildren(xmlstr::XML_TAG_INDIVIDUAL,child)))
+    {
+        TiXmlElement * individualElement = child->ToElement();
+        DoIndividual(individualElement);
+    }
+
+    while((child = populationElement->IterateChildren(xmlstr::XML_TAG_PANEL,child)))
+    {
+        TiXmlElement * panelElement = child->ToElement();
+        DoPanel(panelElement, popname);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoIndividual(TiXmlElement * individualElement)
+{
+    Individual newind;
+    m_currIndividual = newind; //barring a .clear() function or the like.
+    m_currIndividual.SetId(m_currindno);
+
+    string name = getNodeAttributeValue(individualElement,xmlstr::XML_ATTRTYPE_NAME);
+    if (name.empty())
+    {
+        name = ToString(m_currindno);
+    }
+    m_currIndividual.SetName(name);
+
+    DoSamples(individualElement);
+
+    DoGenotypeResolutions(individualElement);
+    m_pCurrRegion->AddIndividual(m_currIndividual);
+
+    m_currindno++;
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoPanel(TiXmlElement * panelElement, string popname)
+{
+    // set up panel name
+    string regionname = m_pCurrRegion->GetRegionName();
+    string panelname = getNodeAttributeValue(panelElement,xmlstr::XML_ATTRTYPE_NAME);
+    if (panelname.empty())
+        panelname = "panel";
+    string rootname = regionname + ToString('_');
+    rootname += popname;
+    rootname += ToString('_');
+    rootname += panelname;
+
+    // get panel size
+    TiXmlNode * child = NULL;
+    int panelsize = 0;
+    while((child = panelElement->IterateChildren(xmlstr::XML_TAG_PANELSIZE,child)))
+    {
+        TiXmlElement * panelSizeElement = child->ToElement();
+        string contents = getNodeText(panelSizeElement);
+        panelsize = atoi(contents.c_str());
+    }
+
+    if (panelsize <= 0)
+    {
+        string problem = "Invalid panel count: ";
+        problem += ToString(panelsize);
+        problem += " in <Panel-size>";
+        m_parser.ThrowDataError(problem);
+
+    }
+
+    // turn on panel flag
+    m_pCurrRegion->SetSnpPanel(true);
+
+    long nloc = m_pCurrRegion->GetNloci();
+
+    Individual newind;
+
+    long whichhap = 0; // haplotype index for the synthetic panel tips; stays 0 here (the increment below is commented out)
+    for (int i=0; i<panelsize; i++)
+    {
+        string panelname = rootname + "_p";
+        panelname += ToString(i);
+        m_currIndividual = newind; //barring a .clear() function or the like.
+        m_currIndividual.SetId(m_currindno);
+        m_currIndividual.SetName(panelname);
+
+        for (long locus=0; locus<nloc; locus++)
+        {
+            // define tip
+            Locus loc =  m_pCurrRegion->GetLocus(locus);
+            long nsites = loc.GetNsites();
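+            // panel tips carry no observed data, so fill them with nsites unknown ('?') markers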
+            string tval;
+            tval.assign(nsites, '?');
+
+            // copied from DoSamples
+            m_tipdata.Clear();  // prepare m_tipdata for new data
+            m_tipdata.label = panelname;
+            m_tipdata.SetDataSource("panel");
+            m_tipdata.AddPartition(
+                std::make_pair(TagForMigrationalForce(),
+                               datapack.GetPartitionName(
+                                   TagForMigrationalForce(),m_currpopulation)
+                    ));
+            m_tipdata.m_popname = datapack.
+                GetPartitionName(TagForMigrationalForce(),
+                                 m_currpopulation);
+            // NB--we redundantly store this because
+            // the partitions of AddPartition are for branch use and don't
+            // treat migration specially--this one is for xml output use
+            // which currently still does treat migration specially from
+            // other partitions (<status> in xml terms).
+            m_tipdata.individual = m_currIndividual.GetId();
+            m_tipdata.m_hap = whichhap;
+            m_tipdata.m_locus = locus;
+
+            // get block datatype
+            string datatype = ToString(loc.GetDataTypePtr()->GetType());
+            DataType_ptr dt(CreateDataType(datatype));
+            string baddata;
+            bool good_data = dt->Proofread(tval, m_tipdata.data, baddata);
+            if (!good_data)
+            {
+                string problem = "Invalid genetic data in population "
+                    + ToString(m_currpopulation)
+                    + " sample "
+                    + m_tipdata.label
+                    + "\n";
+                problem += "Offending allele: " + baddata;
+                m_parser.ThrowDataError(problem);
+            }
+            m_pCurrRegion->SetTipData(locus, m_tipdata);
+            // ++whichhap; //???
+        }
+        m_pCurrRegion->AddIndividual(m_currIndividual);
+        m_currindno++;
+    }
+} // DoPanel
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoSamples(TiXmlElement * individualElement)
+{
+    long whichhap = 0;
+    TiXmlNode * child = NULL;
+    vector<TiXmlElement*> samples = getAllOptionalDescendantElements(individualElement, xmlstr::XML_TAG_SAMPLE);
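+    // record the number of <sample> children for this individual (its ploidy)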
+    m_pCurrRegion->AddPloidy(samples.size());
+    while((child = individualElement->IterateChildren(xmlstr::XML_TAG_SAMPLE,child)))
+    {
+        TiXmlElement * sampleElement = child->ToElement();
+        string name = getNodeAttributeValue(sampleElement,xmlstr::XML_ATTRTYPE_NAME);
+        if (name.empty())
+            name = m_currIndividual.GetName() + XmlRandomLongAsString();
+        if (m_pCurrRegion->IsDuplicateTipName(name))
+        {
+            string problem(xmlstr::XML_ERR_DUPLICATE_SAMPLENAME_0);
+            problem += name + xmlstr::XML_ERR_DUPLICATE_SAMPLENAME_1;
+            problem += m_pCurrRegion->GetRegionName();
+            m_parser.ThrowDataError(problem);
+        }
+        m_tipdata.Clear();  // prepare m_tipdata for new data
+        m_tipdata.label = name;
+        m_tipdata.AddPartition(
+            std::make_pair(TagForMigrationalForce(),
+                           datapack.GetPartitionName(TagForMigrationalForce(),m_currpopulation))
+            );
+        m_tipdata.m_popname = datapack.GetPartitionName(TagForMigrationalForce(),
+                                                        m_currpopulation);  
+        // NB--we redundantly store this because
+        // the partitions of AddPartition are for branch use and don't
+        // treat migration specially--this one is for xml output use
+        // which currently still does treat migration specially from
+        // other partitions (<status> in xml terms).
+        m_tipdata.individual = m_currIndividual.GetId();
+        m_tipdata.m_hap = whichhap;
+
+        DoStatus(singleOptionalChild(sampleElement,xmlstr::XML_TAG_STATUS));
+        DoDataBlocks(sampleElement);
+        ++whichhap;
+    }
+} // DoSamples
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoStatus(TiXmlElement * statusElement)
+{
+    if(statusElement != NULL)
+    {
+        TiXmlNode * child = NULL;
+        while((child = statusElement->IterateChildren(xmlstr::XML_TAG_DISEASESTATUS,child)))
+        {
+            TiXmlElement * diseaseElement = child->ToElement();
+            DoDisease(diseaseElement);
+        }
+    }
+} // DoStatus
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoDisease(TiXmlElement * diseaseElement)
+{
+    // remove leading whitespace from dstatus
+    string dstatus = getNodeText(diseaseElement);
+    string whitespace(" ");
+    long firstnonwhite = dstatus.find_first_not_of(whitespace);
+    dstatus.assign(dstatus,firstnonwhite,dstatus.length()-firstnonwhite);
+
+    datapack.AddPartition(force_DISEASE,dstatus);
+    m_tipdata.AddPartition(std::make_pair(force_DISEASE,dstatus));
+
+} // DoDisease
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoDataBlocks(TiXmlElement * sampleElement)
+{
+    long locus = 0;
+
+    TiXmlNode * child = NULL;
+    while((child = sampleElement->IterateChildren(xmlstr::XML_TAG_DATABLOCK,child)))
+    {
+        TiXmlElement * dataBlockElement = child->ToElement();
+
+        // if this is a new locus, add it to the Region
+        if (locus == m_pCurrRegion->GetNloci())
+        {
+            m_pCurrRegion->AddLocus();
+        }
+        assert(locus < m_pCurrRegion->GetNloci());
+
+        // get block datatype
+        string datatype = getNodeAttributeValue(dataBlockElement,xmlstr::XML_ATTRTYPE_TYPE);
+        DataType_ptr dt(CreateDataType(datatype));
+        string datasource = getNodeAttributeValue(dataBlockElement,xmlstr::XML_ATTRTYPE_SOURCE);
+
+        m_pCurrRegion->SetDataType(locus, dt);
+
+        // Read the data
+        m_tipdata.data.clear();
+        string contents = getNodeText(dataBlockElement);
+        string baddata;
+        bool good_data = dt->Proofread(contents, m_tipdata.data, baddata);
+        if (!good_data)
+        {
+            string problem = "Invalid genetic data in population " + ToString(m_currpopulation)
+                + " sample " + m_tipdata.label + "\n";
+            problem += "Offending allele: " + baddata;
+            m_parser.ThrowDataError(problem);
+        }
+        m_tipdata.SetDataSource(datasource);
+        if((CaselessStrCmp(datatype, lamarcstrings::SNP)) &&
+           (CaselessStrCmp(datasource, lamarcstrings::PANEL)))
+            m_pCurrRegion->SetSnpPanel(true);
+
+        long len = m_tipdata.data.size();
+
+        try
+        {
+            m_pCurrRegion->SetNmarkers(locus, len);
+        }
+        catch (const std::exception& e)
+        {
+            long expectedNmarkers = m_pCurrRegion->GetNmarkers(locus);
+            long localLineNumber = dataBlockElement->Row();
+            string tagName = xmlstr::XML_TAG_DATABLOCK;
+            string message = string("Data element \"" + tagName + "\" at input line ")
+                + ToString(localLineNumber) + string(" has ") + ToString(len);
+            message += " data elements, but a previous block has " + ToString(expectedNmarkers);
+            m_parser.ThrowDataError (message);
+        }
+        // save info on which locus this tipdata belongs to
+        m_tipdata.m_locus = locus;
+        // put this info into the storehouse
+        m_pCurrRegion->SetTipData(locus, m_tipdata);
+        ++locus;
+    }
+} // DoDataBlocks
+
+//------------------------------------------------------------------------------------
+
+void ParseTreeToData::DoGenotypeResolutions(TiXmlElement * individualElement)
+{
+    TiXmlNode * child = NULL;
+    while((child = individualElement->IterateChildren(xmlstr::XML_TAG_GENOTYPE_RESOLUTIONS,child)))
+    {
+        TiXmlElement * genotypeResElement = child->ToElement();
+        TiXmlElement * traitElement = singleRequiredChild(genotypeResElement, xmlstr::XML_TAG_TRAIT_NAME);
+        string tname = getNodeText(traitElement);
+
+        // if this is a new locus, add it to the Region
+        if (!(m_pCurrRegion->HasLocus(tname)))
+        {
+            m_pCurrRegion->AddLocus(true, tname); //true == movable (for later)
+            //Note:  passing 'true' to the locus constructor sets up a locus
+            // with a single marker and one site.
+        }
+
+        // Copy the vector of TipDatas from an older locus to the new one,
+        //  and clear out the data from it.  This is because we don't have
+        //  information about the samples here, only when we read in the sample
+        //  data (which we already did).
+        //
+        // We need to do this every time instead of just the first time because
+        //  this code gets run once per individual.  Ideally, we'd only run it the
+        //  *last* time, but running it every time works.
+        //
+        // This is, uh, kind of a hack.  Like a lot of this code.  Ideally, *all*
+        //  the TipData stuff would just be in the Individuals.
+        m_pCurrRegion->CopyTipDataForLocus(tname);
+        long locusnum = m_pCurrRegion->GetLocusIndex(tname);
+        assert(locusnum < m_pCurrRegion->GetNloci());
+
+        // The datatype must be K-allele (for now)
+        // Also, this would be a good thing to move to the constructor
+        // eventually.  Because as it is, we have to re-set this for
+        // every individual.  But whatever.
+        DataType_ptr dt(CreateDataType(lamarcstrings::KALLELE));
+        m_pCurrRegion->SetDataType(locusnum, dt);
+
+        // Read the haplotypes
+
+        TiXmlNode * genchild = NULL;
+
+        while ((genchild = genotypeResElement->IterateChildren(xmlstr::XML_TAG_HAPLOTYPES,genchild)))
+        {
+            TiXmlElement* haplotypesElement = genchild->ToElement();
+            TiXmlElement* penetranceElement = singleOptionalChild(haplotypesElement, xmlstr::XML_TAG_PENETRANCE);
+            TiXmlElement* allelesElement    = singleRequiredChild(haplotypesElement, xmlstr::XML_TAG_ALLELES);
+            double penetrance = 1.0;
+            if (penetranceElement != NULL)
+            {
+                penetrance = ProduceDoubleOrBarf(getNodeText(penetranceElement));
+            }
+            string baddata;
+            StringVec1d alleles;
+            dt->Proofread(getNodeText(allelesElement), alleles, baddata);
+            // Proofread always returns true for K-allele data, so the result is not checked here.
+            m_currIndividual.AddHaplotype(m_pCurrRegion->GetID(), tname, 0, alleles, penetrance);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+// The following routines are used to establish the map
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoSpacing(TiXmlElement * spacingElement)
+{
+    // NB: DoSpacing has to be called *after* DoPopulations
+    // because otherwise defaults can't be meaningfully set here
+    if (spacingElement == NULL)
+    {
+        long locus;
+        for (locus = 0; locus < m_pCurrRegion->GetNloci(); ++locus)
+        {
+            m_pCurrRegion->SetPositions(locus);  // set to default
+            m_pCurrRegion->SetNsites(locus, m_pCurrRegion->GetNmarkers(locus));
+        }
+        // all other spacing parameters are set by default in constructor
+    }
+    else
+    {
+        DoBlocks(spacingElement);
+    }
+
+} // DoSpacing
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoBlocks(TiXmlElement * spacingElement)
+{
+    long locus = 0;
+
+    TiXmlNode * child = NULL;
+    while((child = spacingElement->IterateChildren(xmlstr::XML_TAG_BLOCK,child)))
+    {
+        TiXmlElement * blockElement = child->ToElement();
+
+        string locusName = getNodeAttributeValue(blockElement,xmlstr::XML_ATTRTYPE_NAME);
+        if (!locusName.empty())
+        {
+            m_pCurrRegion->SetName(locus, locusName);
+        }
+
+        // Keep DoOffset before DoMapPosition, as it is the default for single locus map position
+        DoOffset(singleOptionalChild(blockElement,xmlstr::XML_TAG_OFFSET),locus);
+        DoMapPosition(singleOptionalChild(blockElement,xmlstr::XML_TAG_MAP_POSITION),locus);
+        DoLength(singleOptionalChild(blockElement,xmlstr::XML_TAG_LENGTH),locus);
+        DoLocations(singleOptionalChild(blockElement,xmlstr::XML_TAG_LOCATIONS),locus);
+
+        ++locus;
+    }
+    assert(locus > 0);  // should be assured by schema logic
+} // DoBlocks
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoMapPosition(TiXmlElement * mapPositionElement, long locus)
+{
+
+    long mapPosition = m_pCurrRegion->GetOffset(locus);     // default value if mapPositionElement is Null
+
+    if (mapPositionElement != NULL) // a map position was supplied; parse it
+    {
+        try
+        {
+            mapPosition = ProduceLongOrBarf(getNodeText(mapPositionElement));
+        }
+        catch (const data_error& e)
+        {
+            m_parser.ThrowDataError(string(e.what()),mapPositionElement->Row());
+        }
+    }
+    m_pCurrRegion->SetGlobalMapPosition(locus, mapPosition);
+
+} // DoMapPosition
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoLength(TiXmlElement * lengthElement, long locus)
+{
+    // default value if lengthElement is Null
+    long length = m_pCurrRegion->GetNmarkers(locus);
+
+    if (lengthElement != NULL) // a length was supplied; parse it
+    {
+        try
+        {
+            length = ProduceLongOrBarf(getNodeText(lengthElement));
+        }
+        catch (const data_error& e)
+        {
+            m_parser.ThrowDataError(string(e.what()),lengthElement->Row());
+        }
+    }
+    m_pCurrRegion->SetNsites(locus, length);
+
+} // DoLength
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoLocations(TiXmlElement * locationsElement, long locus)
+{
+    if(locationsElement == NULL)
+    {
+        m_pCurrRegion->SetPositions(locus);  // to default
+    }
+    else
+    {
+        string content = getNodeText(locationsElement);
+        vector<long> locations;
+        string token;
+        long offset = m_pCurrRegion->GetOffset(locus);
+        long length = m_pCurrRegion->GetLocusNsites(locus);
+        long nmarkers = m_pCurrRegion->GetNmarkers(locus);
+
+        try
+        {
+            StringVec1d values = getNodeTextSplitOnWhitespace(locationsElement);
+            if (static_cast<size_t>(nmarkers) != values.size())
+            {
+                throw data_error("Incorrect number of locations listed (" + ToString(values.size()) + "); you should have " + ToString(nmarkers) + ".");
+            }
+
+            StringVec1d::iterator siter;
+            for(siter = values.begin(); siter != values.end(); siter++)
+            {
+                // we subtract the offset here, making all positions
+                // relative to zero
+                long pos = ProduceLongOrBarf(*siter) - offset;
+                if (pos < 0)
+                {
+                    throw data_error("One or more location values are lower than " + ToString(offset) + ", the offset for locus " + ToString(locus+1) + ".");
+                }
+                if (pos >= length)
+                {
+                    throw data_error("One or more location values are higher than " + ToString(length + offset - 1) + ", the offset plus the length of locus " + ToString(locus+1) + ".");
+                }
+                locations.push_back(pos);
+            }
+            m_pCurrRegion->SetPositions(locus, locations);
+        }
+        catch (const data_error& e)
+        {
+            m_parser.ThrowDataError(string(e.what()),locationsElement->Row());
+        }
+    }
+} // DoLocations
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoOffset(TiXmlElement * offsetElement, long locus)
+{
+    long offset = 0L;     // default value if offsetElement is Null
+
+    if (offsetElement != NULL) // an offset was supplied; parse it
+    {
+        try
+        {
+            offset = ProduceLongOrBarf(getNodeText(offsetElement));
+        }
+        catch (const data_error& e)
+        {
+            m_parser.ThrowDataError(string(e.what()),offsetElement->Row());
+        }
+    }
+
+    m_pCurrRegion->SetOffset(locus, offset);
+
+} // DoOffset
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoPhases(TiXmlElement * regionElement)
+// read information about sites of known/unknown phase
+// attribute "unknown" means that this is a list of phase-unknown
+// sites (to be stored as-is) whereas attribute "known" means that
+// this is a list of phase-known sites (and we store the
+// inverse, the phase-unknown sites).
+{
+    // Because populations must be processed before spacing, and phases must be
+    // processed after spacing, we loop over exactly the same elements that
+    // 'DoPopulations' already visited in order to reach the individuals.
+    TiXmlNode * pop = NULL;
+    long indnum = 0;
+
+    // panel members don't have phases so create an empty vector
+    LongVec2d panelsites(m_pCurrRegion->GetNumFixedLoci());
+    int nind = m_pCurrRegion->GetNIndividuals();
+    const Region *cnst_Reg = m_pCurrRegion; // jrmhack around c++ inflexibility
+
+    while((pop = regionElement->IterateChildren(xmlstr::XML_TAG_POPULATION,pop)))
+    {
+        TiXmlElement * populationElement = pop->ToElement();
+        TiXmlNode * ind = NULL;
+        while((ind = populationElement->IterateChildren(xmlstr::XML_TAG_INDIVIDUAL,ind)))
+        {
+            TiXmlElement * individualElement = ind->ToElement();
+            string xmlindname = getNodeAttributeValue(individualElement,xmlstr::XML_ATTRTYPE_NAME);
+            string token;
+
+            LongVec2d phasesites;
+
+            vector<TiXmlElement *> phaseElements
+                = getAllOptionalDescendantElements (individualElement,xmlstr::XML_TAG_PHASE);
+            if(phaseElements.size() == 0)
+            {
+                // no phase info available; we set empty vectors
+                //EWFIX.P5 DIMENSION--needs to be 3d?
+                LongVec2d phases(m_pCurrRegion->GetNumFixedLoci());
+                phasesites = phases;
+            }
+            else
+            {
+                TiXmlNode * child = NULL;
+                long locus = 0;
+                while((child = individualElement->IterateChildren(xmlstr::XML_TAG_PHASE,child)))
+                {
+                    TiXmlElement * phaseElement = child->ToElement();
+
+                    string content = getNodeText(phaseElement);
+                    string phasetype = getNodeAttributeValue(phaseElement,xmlstr::XML_ATTRTYPE_TYPE);
+                    StripLeadingSpaces(phasetype);
+                    StripTrailingSpaces(phasetype);
+
+                    LongVec1d phases = ProduceLongVec1dOrBarf(getNodeText(phaseElement));
+                    long offset = m_pCurrRegion->GetOffset(locus);
+                    long length = m_pCurrRegion->GetLocusNsites(locus);
+                    for (LongVec1d::iterator phase=phases.begin(); phase != phases.end(); phase++)
+                    {
+                        *phase -= offset;
+                        if (*phase < 0)
+                        {
+                            m_parser.ThrowDataError("One or more phase values are lower than " + ToString(offset) + ", the offset for locus " + ToString(locus+1) + ".", phaseElement->Row());
+                        }
+                        if (*phase >= length)
+                        {
+                            m_parser.ThrowDataError("One or more phase values are higher than " + ToString(length + offset - 1) + ", the offset plus the length of locus " + ToString(locus+1) + ".", phaseElement->Row());
+                        }
+                    }
+                    std::sort(phases.begin(), phases.end());
+
+                    if (phasetype == xmlstr::XML_ATTRVALUE_KNOWN)
+                    {
+                        // "known" sites are inverted: we store their complement, the phase-unknown marker locations.
+                        LongVec1d all_locations = m_pCurrRegion->GetLocus(locus).GetMarkerLocations();
+                        for (LongVec1d::iterator knownphase = phases.begin();
+                             knownphase != phases.end() ; knownphase++)
+                        {
+                            LongVec1d::iterator known = FindValIn(*knownphase, all_locations);
+                            if (known == all_locations.end())
+                            {
+                                m_parser.ThrowDataError("Site " + ToString(*knownphase + offset) + " in locus " + ToString(locus+1) + " does not have a marker associated with it, and may therefore not be set 'phase known'.", phaseElement->Row());
+                            }
+                            all_locations.erase(known);
+                        }
+                        phases = all_locations;
+                    }
+                    else if (!(phasetype == xmlstr::XML_ATTRVALUE_UNKNOWN))
+                    {
+                        m_parser.ThrowDataError("Unknown type of phase information " + phasetype, phaseElement->Row());
+                    }
+                    phasesites.push_back(phases);
+                    ++locus;
+                }
+            }
+
+            // Error check for consistency
+            if (phasesites.size() != static_cast<unsigned long>(m_pCurrRegion->GetNumFixedLoci()))
+            {
+                m_parser.ThrowDataError("Number of phase entries inconsistent with number of segments",individualElement->Row());
+            }
+
+            // Note: the following is a hack to get panels working
+            // without a major refactoring of ParseTreeToData
+
+            // find individual and set phase markers
+            for (int i=0; i<nind;i++)
+            {
+                string localindname = (cnst_Reg->GetIndividual(i)).GetName();
+                //if(CompareWOCase(localindname,xmlindname))
+                if(xmlindname.compare(localindname) == 0)
+                {
+                    try
+                    {
+                        m_pCurrRegion->SetPhaseMarkers(i, phasesites);
+                    }
+                    catch (const data_error& e)
+                    {
+                        m_parser.ThrowDataError(e.what(), individualElement->Row());
+                    }
+                }
+            }
+            indnum++;
+        }
+
+        // now find all the panels (assuming they are everything
+        // that does not have phasesites set) and set their phase markers
+        for (int i=0; i<nind;i++)
+        {
+            string indname = (cnst_Reg->GetIndividual(i)).GetName();
+            //            vector<Branch_ptr> tipvec= (cnst_Reg->GetIndividual(i)).GetAllTips();
+            //            int numtips = tipvec.size();
+            // all tips should be of the same type so use the first one
+            //            if (tipvec[0]->m_isSample == 0) // jrmfix this has to go
+            if(cnst_Reg->GetIndividual(i).GetPhaseSites().empty())
+            {
+                m_pCurrRegion->SetPhaseMarkers(i, panelsites);
+            }
+        }
+    }
+} // DoPhases
+
+//------------------------------------------------------------------------------------
+
+UserTree*
+ParseTreeToData::DoTree(TiXmlElement * treeElement)
+{
+    // WARNING:  This should eventually be done with a TreeFactory.
+    return new NewickTree(getNodeText(treeElement));
+} // DoTree
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToData::DoEffectivePopSize(TiXmlElement * sizeElement)
+{
+    // default value if sizeElement is Null
+    double size = defaults::effpopsize;
+
+    if (sizeElement != NULL)
+    {
+        try
+        {
+            size = ProduceDoubleOrBarf(getNodeText(sizeElement));
+        }
+        catch (const data_error& e)
+        {
+            m_parser.ThrowDataError(string(e.what()),sizeElement->Row());
+        }
+    }
+    //LS NOTE:  This sets the effective population size in the actual data
+    // pack, but in the menu, there is an effective population size menu
+    // where the information is stored in ui_vars_datapackplus.  This means
+    // that when the datapackplus is created, it must set those values properly
+    // from what we read in here.  This all works for now, but is a bit arcane.
+    // If we move to having a front-end datapack as well as a back-end datapack,
+    // this would theoretically become a bit simpler.
+    if (size <= 0)
+    {
+        m_parser.ThrowDataError("All effective population sizes must be positive.");
+    }
+    m_pCurrRegion->SetEffectivePopSize(size);
+
+} // DoEffectivePopSize
+
+//____________________________________________________________________________________
+
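+// Linear search helper: returns an iterator to the first element of vec equal to val, or vec.end() if none matches.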
+LongVec1d::iterator FindValIn(long val, LongVec1d& vec)
+{
+    LongVec1d::iterator found = vec.begin();
+    for (; found != vec.end(); found++)
+    {
+        if (*found == val) return found;
+    }
+    return found;
+}
+
+//____________________________________________________________________________________
+
+void
+ParseTreeToData::DiagnoseTagForMigrationalForce(TiXmlElement * rootElement)
+{
+    m_migrationalForce = force_MIG;
+
+    TiXmlElement * forcesElement = singleOptionalChild(rootElement,xmlstr::XML_TAG_FORCES);
+    if (forcesElement == NULL) return;
+
+    TiXmlElement * forceElement = singleOptionalChild(forcesElement,xmlstr::XML_TAG_DIVERGENCE);
+    if (forceElement == NULL) return;
+
+    m_migrationalForce = force_DIVMIG;
+    return;
+}
+
+//____________________________________________________________________________________
+
+force_type
+ParseTreeToData::TagForMigrationalForce() const
+{
+    assert(m_migrationalForce != force_NONE);
+    return m_migrationalForce;
+}
+
+//____________________________________________________________________________________
+
+bool
+ParseTreeToData::CheckARGtree(vector<UIId> ids, bool batchmode)
+{
+    // This is an unspeakably long function, but breaking it into
+    // subroutines really doesn't gain anything.
+    // I tried to keep it very modular so it's obvious what is going on.
+    // That makes it a bit more verbose.
+    bool retval = true;
+    // makes sure that the ARG tree for a region is still valid
+    TiXmlElement* docElement    = m_parser.GetRootElement();
+    TiXmlElement* forcesElement = singleRequiredChild(docElement,xmlstr::XML_TAG_FORCES);
+    TiXmlElement* dataElement   = singleRequiredChild(docElement,xmlstr::XML_TAG_DATA);
+    TiXmlNode * child = NULL;
+    while((child = dataElement->IterateChildren(xmlstr::XML_TAG_REGION,child)))
+    {
+        string nodetype;
+        TiXmlElement * regionElement = child->ToElement();
+        TiXmlElement * treeElement = singleOptionalChild(regionElement,xmlstr::XML_TAG_TREE);
+        if (treeElement != NULL)
+        {
+            string treetype = getNodeAttributeValue(treeElement,xmlstr::XML_ATTRTYPE_TYPE);
+            if (treetype == xmlstr::XML_STRING_ARG)
+            {
+                // validate node data
+                TiXmlElement * graphElement = singleOptionalChild(treeElement,xmlstr::XML_TAG_GRAPH);
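+                // NB: assumes the <graph> child is present for an ARG tree; singleOptionalChild returns NULL if it is missing.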
+                TiXmlNode * node = NULL;
+                while((node = graphElement->IterateChildren(xmlstr::XML_TAG_NODE,node)))
+                {
+                    // pick up node element
+                    TiXmlElement * nodeElement = node->ToElement();
+                    string id = getNodeAttributeValue(nodeElement,xmlstr::XML_ATTRTYPE_ID);
+
+                    string label;
+                    bool typefound = false;
+                    bool labelfound = false;
+                    bool timefound = false;
+                    bool recfound = false;
+                    TiXmlNode * argdata = NULL;
+
+                    // check for argdata keys and pick up label data
+                    while((argdata = nodeElement->IterateChildren(xmlstr::XML_TAG_ARGDATA,argdata)))
+                    {
+                        TiXmlElement * argdataElement = argdata->ToElement();
+                        string key = getNodeAttributeValue(argdataElement,xmlstr::XML_ATTRTYPE_KEY);
+                        if (key == xmlstr::XML_ATTRVALUE_NODE_TYPE)
+                        {
+                            nodetype = getNodeText(argdataElement);
+                            typefound = true;
+                        }
+                        else if (key == xmlstr::XML_ATTRVALUE_NODE_LABEL)
+                        {
+                            label = getNodeText(argdataElement);
+                            labelfound = true;
+                        }
+                        else if (key == xmlstr::XML_ATTRVALUE_NODE_TIME)
+                        {
+                            timefound = true;
+                        }
+                        else if(key == xmlstr::XML_ATTRVALUE_REC_LOCATION)
+                        {
+                            recfound = true;
+                        }
+                        else
+                        {
+                            // unknown key
+                            string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: ARG tree node: ";
+                            error_msg += id;
+                            error_msg += " contains unknown key: ";
+                            error_msg += key;
+                            error_msg += ". ARG tree is not valid.\n\n";
+                            retval = false;
+                            printf("%s", error_msg.c_str());
+                        }
+                    }
+
+                    // data present checks
+                    if (!typefound)
+                    {
+                        string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: ARG tree node: ";
+                        error_msg += id;
+                        error_msg += " has no node_type";
+                        error_msg += ". ARG tree is not valid.\n\n";
+                        retval = false;
+                        printf("%s", error_msg.c_str());
+                    }
+
+                    if (!timefound)
+                    {
+                        string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: ARG tree node: ";
+                        error_msg += id;
+                        error_msg += " type: ";
+                        error_msg += nodetype;
+                        error_msg += " has no node_time";
+                        error_msg += ". ARG tree is not valid.\n\n";
+                        retval = false;
+                        printf("%s", error_msg.c_str());
+                    }
+
+                    if (nodetype == xmlstr::XML_BRANCHTYPE_TIP)
+                    {
+                        if (!labelfound)
+                        {
+                            string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: ARG tree node: ";
+                            error_msg += id;
+                            error_msg += " type: ";
+                            error_msg += nodetype;
+                            error_msg += " does not contain a node_label";
+                            error_msg += ". ARG tree is not valid.\n\n";
+                            retval = false;
+                            printf("%s", error_msg.c_str());
+                        }
+                    }
+                    else if (nodetype == xmlstr::XML_BRANCHTYPE_REC)
+                    {
+                        if (!recfound)
+                        {
+                            string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: ARG tree node: ";
+                            error_msg += id;
+                            error_msg += " type: ";
+                            error_msg += nodetype;
+                            error_msg += " does not contain a rec_location";
+                            error_msg += ". ARG tree is not valid.\n\n";
+                            retval = false;
+                            printf("%s", error_msg.c_str());
+                        }
+                    }
+
+                    // see if input tip exists in the population sample name set
+                    bool tipfound = false;
+                    if (nodetype == xmlstr::XML_BRANCHTYPE_TIP)
+                    {
+                        // check labels against tips defined in data set
+                        TiXmlNode * pop = NULL;
+                        while((pop = regionElement->IterateChildren(xmlstr::XML_TAG_POPULATION,pop)))
+                        {
+                            TiXmlElement * populationElement = pop->ToElement();
+                            TiXmlNode * indiv = NULL;
+                            while((indiv = populationElement->IterateChildren(xmlstr::XML_TAG_INDIVIDUAL,indiv)))
+                            {
+                                TiXmlElement * individualElement = indiv->ToElement();
+                                TiXmlNode * samp = NULL;
+                                while((samp = individualElement->IterateChildren(xmlstr::XML_TAG_SAMPLE,samp)))
+                                {
+                                    TiXmlElement * sampleElement = samp->ToElement();
+                                    string name = getNodeAttributeValue(sampleElement,xmlstr::XML_ATTRTYPE_NAME);
+                                    boost::trim(label);
+                                    boost::trim(name);
+                                    if (label == name)
+                                    {
+                                        tipfound = true;
+                                    }
+                                }
+                            }
+                        }
+
+                        if (!tipfound)
+                        {
+                            // write an error message
+                            string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: ARG tree node: ";
+                            error_msg += id;
+                            error_msg += " Tip: ";
+                            error_msg += label;
+                            error_msg += " not found in Population Sample names. ARG tree is not valid.\n\n";
+                            retval = false;
+                            printf("%s", error_msg.c_str());
+                        }
+                    }
+                    else
+                    {
+                        // This is the one place the batch and interactive versions diverge.
+                        //
+                        // In interactive mode the user can set forces which then need to be
+                        // checked against the forces in the ARG tree after they hit execute.
+                        // Those are in the ids array.
+                        //
+                        // In a batch run, the forces can't change but they
+                        // must be picked up from the XML because the ids array
+                        // doesn't get defined until parsetreetosettings is run,
+                        // which happens after parsetreetodata.
+
+                        // check type against allowed forces
+                        bool forcefound = true;
+                        if (nodetype == xmlstr::XML_BRANCHTYPE_COAL)
+                        {
+                            if (batchmode)
+                            {
+                                TiXmlElement * forceElement = singleOptionalChild(forcesElement,xmlstr::XML_TAG_COALESCENCE);
+                                if(forceElement == NULL)
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                            else
+                            {
+                                if(find(ids.begin(), ids.end(), force_COAL) == ids.end())
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                        }
+                        else if (nodetype == xmlstr::XML_BRANCHTYPE_DISEASE)
+                        {
+                            if (batchmode)
+                            {
+                                TiXmlElement * forceElement = singleOptionalChild(forcesElement,xmlstr::XML_TAG_DISEASE);
+                                if(forceElement == NULL)
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                            else
+                            {
+                                if(find(ids.begin(), ids.end(), force_DISEASE) == ids.end())
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                        }
+                        else if (nodetype == xmlstr::XML_BRANCHTYPE_DIVMIG)
+                        {
+                            if (batchmode)
+                            {
+                                TiXmlElement * forceElement = singleOptionalChild(forcesElement,xmlstr::XML_TAG_DIVMIG);
+                                if(forceElement == NULL)
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                            else
+                            {
+                                if(find(ids.begin(), ids.end(), force_DIVMIG) == ids.end())
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                        }
+                        else if (nodetype == xmlstr::XML_BRANCHTYPE_EPOCH)
+                        {
+                            if (batchmode)
+                            {
+                                TiXmlElement * forceElement = singleOptionalChild(forcesElement,xmlstr::XML_TAG_DIVERGENCE);
+                                if(forceElement == NULL)
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                            else
+                            {
+                                if(find(ids.begin(), ids.end(), force_DIVERGENCE) == ids.end())
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                        }
+                        else if (nodetype == xmlstr::XML_BRANCHTYPE_MIG)
+                        {
+                            if (batchmode)
+                            {
+                                TiXmlElement * forceElement = singleOptionalChild(forcesElement,xmlstr::XML_TAG_MIGRATION);
+                                if(forceElement == NULL)
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                            else
+                            {
+                                if(find(ids.begin(), ids.end(), force_MIG) == ids.end())
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                        }
+                        else if (nodetype == xmlstr::XML_BRANCHTYPE_REC)
+                        {
+                            if (batchmode)
+                            {
+                                TiXmlElement * forceElement = singleOptionalChild(forcesElement,xmlstr::XML_TAG_RECOMBINATION);
+                                if(forceElement == NULL)
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                            else
+                            {
+                                if(find(ids.begin(), ids.end(), force_REC) == ids.end())
+                                {
+                                    forcefound = false;
+                                }
+                            }
+                        }
+                        else
+                        {
+                            forcefound = false;
+                        }
+                        if (!forcefound)
+                        {
+                            // write an error message
+                            string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: force type: ";
+                            error_msg += nodetype;
+                            error_msg += " is not active. ARG tree is not valid.\n\n";
+                            retval = false;
+                            printf("%s", error_msg.c_str());
+                        }
+                    }
+                }
+
+                // check population samples against the ARG tips
+                TiXmlNode * pop = NULL;
+                while((pop = regionElement->IterateChildren(xmlstr::XML_TAG_POPULATION,pop)))
+                {
+                    TiXmlElement * populationElement = pop->ToElement();
+                    TiXmlNode * indiv = NULL;
+                    while((indiv = populationElement->IterateChildren(xmlstr::XML_TAG_INDIVIDUAL,indiv)))
+                    {
+                        TiXmlElement * individualElement = indiv->ToElement();
+                        TiXmlNode * samp = NULL;
+                        while((samp = individualElement->IterateChildren(xmlstr::XML_TAG_SAMPLE,samp)))
+                        {
+                            TiXmlElement * sampleElement = samp->ToElement();
+                            string name = getNodeAttributeValue(sampleElement,xmlstr::XML_ATTRTYPE_NAME);
+
+                            bool samplefound = false;
+                            TiXmlElement * graphElement = singleOptionalChild(treeElement,xmlstr::XML_TAG_GRAPH);
+                            TiXmlNode * node = NULL;
+                            while((node = graphElement->IterateChildren(xmlstr::XML_TAG_NODE,node)))
+                            {
+                                // pick up branch element
+                                TiXmlElement * nodeElement = node->ToElement();
+                                string eletype;
+                                string elelabel;
+                                TiXmlNode * argdata = NULL;
+                                while((argdata = nodeElement->IterateChildren(xmlstr::XML_TAG_ARGDATA,argdata)))
+                                {
+                                    TiXmlElement * argdataElement = argdata->ToElement();
+                                    string key = getNodeAttributeValue(argdataElement,xmlstr::XML_ATTRTYPE_KEY);
+                                    if (key == xmlstr::XML_ATTRVALUE_NODE_TYPE)
+                                    {
+                                        eletype = getNodeText(argdataElement);
+                                    }
+                                    if (key == xmlstr::XML_ATTRVALUE_NODE_LABEL)
+                                    {
+                                        elelabel = getNodeText(argdataElement);
+                                    }
+                                }
+                                if (eletype == xmlstr::XML_BRANCHTYPE_TIP)
+                                {
+                                    boost::trim(elelabel);
+                                    boost::trim(name);
+                                    if (elelabel == name)
+                                    {
+                                        samplefound = true;
+                                    }
+                                }
+                            }
+                            if (!samplefound)
+                            {
+                                // write an error message
+                                string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: Population Sample name: ";
+                                error_msg += name;
+                                error_msg += " not found in ARG tree Tips. ARG tree is not valid.\n\n";
+                                retval = false;
+                                printf("%s", error_msg.c_str());
+                            }
+                        }
+                    }
+                }
+
+                // check edge information
+                TiXmlNode * edge = NULL;
+                int nedge = 0;
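+                // nedge identifies the current edge in the error messages below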
+                while((edge = graphElement->IterateChildren(xmlstr::XML_TAG_EDGE,edge)))
+                {
+                    // pick up edge element
+                    TiXmlElement * edgeElement = edge->ToElement();
+
+                    // check source and target defined
+                    string source = getNodeAttributeValue(edgeElement,xmlstr::XML_ATTRTYPE_SOURCE);
+                    if (source.empty())
+                    {
+                        // write an error message
+                        string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: Edge: ";
+                        error_msg += ToString(nedge);
+                        error_msg += " does not have a source.";
+                        error_msg += " ARG tree is not valid.\n\n";
+                        retval = false;
+                        printf("%s", error_msg.c_str());
+                    }
+
+                    string target = getNodeAttributeValue(edgeElement,xmlstr::XML_ATTRTYPE_TARGET);
+                    if (target.empty())
+                    {
+                        // write an error message
+                        string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: Edge: ";
+                        error_msg += ToString(nedge);
+                        error_msg += " does not have a target. ";
+                        error_msg += "ARG tree is not valid.\n";
+                        retval = false;
+                        printf("%s", error_msg.c_str());
+                    }
+
+                    if (source == target)
+                    {
+                        // write an error message
+                        string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: Edge: ";
+                        error_msg += ToString(nedge);
+                        error_msg += " has identical source and target: ";
+                        error_msg += source;
+                        error_msg += ". ARG tree is not valid.\n";
+                        retval = false;
+                        printf("%s", error_msg.c_str());
+                    }
+
+                    // check argdata
+                    bool partfound = false;
+                    TiXmlNode * argdata = NULL;
+                    while((argdata = edgeElement->IterateChildren(xmlstr::XML_TAG_ARGDATA,argdata)))
+                    {
+                        TiXmlElement * argdataElement = argdata->ToElement();
+                        string key = getNodeAttributeValue(argdataElement,xmlstr::XML_ATTRTYPE_KEY);
+
+                        // check if forces the partitions apply to exist
+                        if (key == xmlstr::XML_ATTRVALUE_PARTITIONS)
+                        {
+                            string partitions = getNodeText(argdataElement);
+                            if (partitions.length() >0)
+                            {
+                                unsigned int idx = partitions.find(',');
+                                unsigned int stidx = 0;
+                                do
+                                {
+                                    // make sure forces are still defined
+                                    string psubstr = partitions.substr(stidx, idx-stidx);
+                                    string::size_type jdx = psubstr.find(':');
+                                    if (jdx == string::npos)
+                                    {
+                                        // write an error message
+                                        string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: Edge: ";
+                                        error_msg += ToString(nedge);
+                                        error_msg += " force: ";
+                                        error_msg += psubstr;
+                                        error_msg += " does not have a partition. ARG tree is not valid.\n";
+                                        retval = false;
+                                        printf("%s", error_msg.c_str());
+                                    }
+                                    else
+                                    {
+                                        // validate forces
+                                        string forcekind = psubstr.substr(0, jdx);
+                                        string forcevalue = psubstr.substr(jdx+1, psubstr.length());
+
+                                        bool forceactive = true;
+                                        bool popfound = true;
+                                        if (forcekind == uistr::disease)
+                                        {
+                                            if(find(ids.begin(), ids.end(), force_DISEASE) == ids.end())
+                                            {
+                                                forceactive = false;
+                                            }
+                                            string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: Edge: ";
+                                            error_msg += ToString(nedge);
+                                            error_msg += " force: ";
+                                            error_msg += forcekind;
+                                            error_msg += " not yet implemented.";
+                                            error_msg += " ARG tree is not valid.\n";
+                                            retval = false;
+                                            printf("%s", error_msg.c_str());
+                                        }
+                                        else if (forcekind == uistr::divmigration)
+                                        {
+                                            if(find(ids.begin(), ids.end(), force_DIVERGENCE) == ids.end())
+                                            {
+                                                forceactive = false;
+                                            }
+                                            else
+                                            {
+                                                popfound = false;
+                                                // check population names against the force value
+                                                TiXmlNode * pop = NULL;
+                                                while((pop = regionElement->IterateChildren(xmlstr::XML_TAG_POPULATION,pop)))
+                                                {
+                                                    TiXmlElement * populationElement = pop->ToElement();
+                                                    string source =
+                                                        getNodeAttributeValue(populationElement,xmlstr::XML_ATTRTYPE_NAME);
+                                                    if (source == forcevalue)
+                                                    {
+                                                        popfound = true;
+                                                    }
+                                                }
+
+                                                if (!popfound)
+                                                {
+                                                    // check ancestor names against the force value
+                                                    TiXmlElement * forcesElement =
+                                                        singleOptionalChild(docElement,xmlstr::XML_TAG_FORCES);
+                                                    if(forcesElement != NULL)
+                                                    {
+                                                        TiXmlElement * forceElement =
+                                                            singleOptionalChild(forcesElement,xmlstr::XML_TAG_DIVERGENCE);
+                                                        if (forceElement != NULL)
+                                                        {
+                                                            TiXmlElement * epochsElement =
+                                                                singleRequiredChild(forceElement,xmlstr::XML_TAG_POPTREE);
+                                                            TiXmlNode * epchild = NULL;
+                                                            while((epchild =
+                                                                   epochsElement->IterateChildren(xmlstr::XML_TAG_EPOCH_BOUNDARY, epchild)))
+                                                            {
+                                                                TiXmlElement * boundaryElement = epchild->ToElement();
+                                                                TiXmlElement * ancestorElement =
+                                                                    singleOptionalChild(boundaryElement,xmlstr::XML_TAG_ANCESTOR);
+                                                                string epochname = getNodeText(ancestorElement);
+                                                                if (epochname == forcevalue)
+                                                                {
+                                                                    popfound = true;
+                                                                }
+                                                            }
+                                                        }
+                                                    }
+                                                }
+                                            }
+                                        }
+                                        else if(forcekind == uistr::migration)
+                                        {
+                                            if(find(ids.begin(), ids.end(), force_MIG) == ids.end())
+                                            {
+                                                forceactive = false;
+                                            }
+                                            else
+                                            {
+                                                popfound = false;
+                                                // check population names against the force value
+                                                TiXmlNode * pop = NULL;
+                                                while((pop = regionElement->IterateChildren(xmlstr::XML_TAG_POPULATION,pop)))
+                                                {
+                                                    TiXmlElement * populationElement = pop->ToElement();
+                                                    string source = getNodeAttributeValue(populationElement,xmlstr::XML_ATTRTYPE_NAME);
+                                                    if (source == forcevalue)
+                                                    {
+                                                        popfound = true;
+                                                    }
+                                                }
+                                            }
+                                        }
+                                        else
+                                        {
+                                            // invalid divergence force
+                                            // write an error message
+                                            string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: Edge: ";
+                                            error_msg += nedge;
+                                            error_msg +=" force: ";
+                                            error_msg += forcekind;
+                                            error_msg += " is not a valid divergence force.";
+                                            error_msg += " ARG tree is not valid.\n";
+                                            retval = false;
+                                            printf("%s", error_msg.c_str());
+                                        }
+
+                                        if (!forceactive)
+                                        {
+                                            // write an error message
+                                            string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: Edge: ";
+                                            error_msg += nedge;
+                                            error_msg +=" force: ";
+                                            error_msg += forcekind;
+                                            error_msg += " is not active. ARG tree is not valid.\n";
+                                            retval = false;
+                                            printf("%s", error_msg.c_str());
+                                        }
+
+                                        if (!popfound)
+                                        {
+                                            // write an error message
+                                            string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: Edge: ";
+                                            error_msg += nedge;
+                                            error_msg +=" force: ";
+                                            error_msg += forcekind;
+                                            error_msg +=" applies to population: ";
+                                            error_msg += forcevalue;
+                                            error_msg += " which does not exist. ARG tree is not valid.\n";
+                                            retval = false;
+                                            printf("%s", error_msg.c_str());
+                                        }
+                                    }
+                                    stidx = idx + 1;
+                                    idx = partitions.find(',', stidx);
+                                }
+                                while (idx<partitions.length());
+                            }
+                            else
+                            {
+                                partfound = true;
+                            }
+                        }
+
+                        else if (key == xmlstr::XML_ATTRVALUE_TRANS_SITES)
+                        {
+                            string sites = getNodeText(argdataElement);
+                            if (sites.length() <=0)
+                            {
+                                // no sites
+                                string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: ARG tree Edge: ";
+                                error_msg += nedge;
+                                error_msg +="  no transmitted sites. ARG tree is not valid.\n";
+                                retval = false;
+                                printf("%s", error_msg.c_str());
+                            }
+                        }
+
+                        // something wacky happened
+                        else if (key != xmlstr::XML_ATTRVALUE_LIVE_SITES) // live_sites is in here for GraphML analysis
+                                                                          // it's not needed by LAMARC
+                        {
+                            // unknown key
+                            string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: ARG tree Edge: ";
+                            error_msg += nedge;
+                            error_msg +=" contains unknown key: ";
+                            error_msg += key;
+                            error_msg += ". ARG tree is not valid.\n";
+                            retval = false;
+                            printf("%s", error_msg.c_str());
+                        }
+                    }
+
+                    // make sure source and target exist in the ARG tree
+                    bool sourcefound = false;
+                    bool targetfound = false;
+                    TiXmlElement * graphElement = singleOptionalChild(treeElement,xmlstr::XML_TAG_GRAPH);
+                    TiXmlNode * node = NULL;
+                    while((node = graphElement->IterateChildren(xmlstr::XML_TAG_NODE,node)))
+                    {
+                        // pick up node element
+                        TiXmlElement * nodeElement = node->ToElement();
+                        string id = getNodeAttributeValue(nodeElement,xmlstr::XML_ATTRTYPE_ID);
+                        if (source == id)
+                        {
+                            sourcefound = true;
+                        }
+                        if (target == id)
+                        {
+                            targetfound = true;
+                        }
+                    }
+
+                    if ((!sourcefound) && (source != "1"))
+                    {
+                        string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: ARG tree edge source: ";
+                        error_msg += source;
+                        error_msg += " not a node. ARG tree is not valid.\n";
+                        retval = false;
+                        printf("%s", error_msg.c_str());
+                    }
+
+                    if (!targetfound)
+                    {
+                        string error_msg = "ERROR found in ParseTreeToData::CheckARGtree: ARG tree edge target: ";
+                        error_msg += target;
+                        error_msg += " not a node. ARG tree is not valid.\n";
+                        retval = false;
+                        printf("%s", error_msg.c_str());
+                    }
+
+                    nedge++;
+                }
+            }
+        }
+    }
+
+    return retval;
+} //CheckARGtree
+
+//____________________________________________________________________________________
+
+bool
+ParseTreeToData::DoARGtree()
+{
+    // This takes the XML input data for an ARG tree, which is node centric,
+    // and turns it into a local data structure that is edge centric for Lamarc.
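+    //
+    // Sketch of the mapping performed below (descriptive comment only; the
+    // literal tag and key spellings come from the xmlstr constants used here):
+    //   * each <edge> child of the region's <graph> becomes an ARGEdge holding
+    //     that edge's partitions, live-sites and transmitted-sites data;
+    //   * each <node> child is then matched by id against every edge whose
+    //     target is that node, and the node's type, label, time and
+    //     recombination location are copied onto those edges.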
+
+    bool retval = true;
+
+    // build the ARG tree for use in ARGTree::ToLamarcTree
+    TiXmlElement* docElement = m_parser.GetRootElement();
+    TiXmlElement* dataElement = singleRequiredChild(docElement,xmlstr::XML_TAG_DATA);
+    TiXmlNode * child = NULL;
+    while((child = dataElement->IterateChildren(xmlstr::XML_TAG_REGION,child)))
+    {
+        TiXmlElement * regionElement = child->ToElement();
+        string regionName = getNodeAttributeValue(regionElement,xmlstr::XML_ATTRTYPE_NAME);
+        Region* region = datapack.GetRegionByName(regionName);
+
+        TiXmlElement * treeElement = singleOptionalChild(regionElement,xmlstr::XML_TAG_TREE);
+        if (treeElement != NULL)
+        {
+            string treetype = getNodeAttributeValue(treeElement,xmlstr::XML_ATTRTYPE_TYPE);
+            if (treetype == xmlstr::XML_STRING_ARG)
+            {
+                // create edges
+                TiXmlElement * graphElement = singleOptionalChild(treeElement,xmlstr::XML_TAG_GRAPH);
+                TiXmlNode * edge = NULL;
+                while((edge = graphElement->IterateChildren(xmlstr::XML_TAG_EDGE,edge)))
+                {
+                    // get edge element data
+                    TiXmlElement * edgeElement = edge->ToElement();
+                    long source = atol(getNodeAttributeValue(edgeElement,xmlstr::XML_ATTRTYPE_SOURCE).c_str());
+                    long target = atol(getNodeAttributeValue(edgeElement,xmlstr::XML_ATTRTYPE_TARGET).c_str());
+                    ARGEdge argedge(target, source);
+
+                    string partitions = "";
+                    string livesites = "";
+                    string transmittedsites = "";
+                    TiXmlNode * argdata = NULL;
+                    while((argdata = edgeElement->IterateChildren(xmlstr::XML_TAG_ARGDATA,argdata)))
+                    {
+                        TiXmlElement * argdataElement = argdata->ToElement();
+                        string key = getNodeAttributeValue(argdataElement,xmlstr::XML_ATTRTYPE_KEY);
+                        if (key == xmlstr::XML_ATTRVALUE_PARTITIONS)
+                        {
+                            partitions = getNodeText(argdataElement);
+                        }
+                        else if (key == xmlstr::XML_ATTRVALUE_LIVE_SITES)
+                        {
+                            livesites = getGraphMLNodeText(argdataElement); // fix GraphML tweaks
+                        }
+                        else if (key == xmlstr::XML_ATTRVALUE_TRANS_SITES)
+                        {
+                            transmittedsites = getGraphMLNodeText(argdataElement); // fix GraphML tweaks
+                        }
+                    }
+                    argedge.SetPartitions(partitions);
+                    argedge.SetLiveSites(livesites);  
+                    argedge.SetTransmittedSites(transmittedsites); 
+                    region->m_argedges.push_back(argedge);
+                }
+
+                // read nodes and hook them to the edges
+                TiXmlNode * node = NULL;
+                while((node = graphElement->IterateChildren(xmlstr::XML_TAG_NODE,node)))
+                {
+                    // get node element data
+                    TiXmlElement * nodeElement = node->ToElement();
+                    long nodeid = atol(getNodeAttributeValue(nodeElement,xmlstr::XML_ATTRTYPE_ID).c_str());
+                    string type = "";
+                    string label = "";
+                    double time = FLAGDOUBLE;
+                    long recloc = FLAGLONG;
+                    TiXmlNode * data = NULL;
+                    while((data = nodeElement->IterateChildren(xmlstr::XML_TAG_ARGDATA,data)))
+                    {
+                        TiXmlElement * dataElement = data->ToElement();
+                        string key = getNodeAttributeValue(dataElement,xmlstr::XML_ATTRTYPE_KEY);
+                        if (key == xmlstr::XML_ATTRVALUE_NODE_TYPE)
+                        {
+                            type = getNodeText(dataElement);
+                        }
+                        else if (key == xmlstr::XML_ATTRVALUE_NODE_LABEL)
+                        {
+                            label = getNodeText(dataElement);
+                        }
+                        else if (key == xmlstr::XML_ATTRVALUE_NODE_TIME)
+                        {
+                            time = ProduceDoubleOrBarf(getNodeText(dataElement));
+                        }
+                        else if (key == xmlstr::XML_ATTRVALUE_REC_LOCATION)
+                        {
+                            recloc = atol(getNodeText(dataElement).c_str()) - 2; // undo GraphML tweak
+                        }
+                    }
+
+                    // find the edge that points at this node
+                    for(size_t edge=0; edge<region->m_argedges.size(); edge++)
+                    {
+                        if (region->m_argedges[edge].GetTarget() == nodeid)
+                        {
+                            region->m_argedges[edge].SetTargetId(nodeid);
+                            region->m_argedges[edge].SetType(type);
+                            region->m_argedges[edge].SetTime(time);
+                            region->m_argedges[edge].SetLabel(label);
+                            region->m_argedges[edge].SetRecLoc(recloc);
+                        }
+                    }
+                }
+
+#ifndef NDEBUG
+                // debug print of final edge list to see what happened
+                printf("\n****Final edge list at end of DoARGtree****\n");
+                for(size_t edge=0; edge<region->m_argedges.size(); edge++)
+                {
+                    printf("\nedge: %i  target: %i source: %i\n", (int)edge, (int)(region->m_argedges[edge].GetTarget()), (int)region->m_argedges[edge].GetSource());
+                    printf("Partitions: %s\n", region->m_argedges[edge].GetPartitions().c_str());
+                    printf("Live sites: %s\n",  region->m_argedges[edge].GetLiveSites().c_str());
+                    printf("Transmitted sites: %s\n",  region->m_argedges[edge].GetTransmittedSites().c_str());
+                    printf("Target Id: %i Label: %s\n", (int)region->m_argedges[edge].GetTargetId(), region->m_argedges[edge].GetLabel().c_str());
+                    printf("Type: %s  Time: %f\n", region->m_argedges[edge].GetType().c_str(), region->m_argedges[edge].GetTime());
+                    printf("Rec Loc: %i\n", (int)(region->m_argedges[edge].GetRecLoc()));
+                }
+#endif
+            }
+        }
+    }
+
+    return retval;
+} //DoARGtree
+
+//____________________________________________________________________________________
diff --git a/src/xml/parsetreetodata.h b/src/xml/parsetreetodata.h
new file mode 100644
index 0000000..aed444e
--- /dev/null
+++ b/src/xml/parsetreetodata.h
@@ -0,0 +1,102 @@
+// $Id: parsetreetodata.h,v 1.19 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef PARSETREETODATA_H
+#define PARSETREETODATA_H
+
+#include <string>
+#include "individual.h"     // for Individual
+#include "tinyxml.h"
+#include "locus.h"          // for TipData
+#include "parsetreewalker.h"
+
+/******************************************************************
+ Walks the TinyXML parse tree generated by XmlParser (xml.h)
+ and creates the DataPack.
+
+ Based on a class by Jim Sloan, rewritten by Mary Kuhner, and
+ then by Elizabeth Walkup
+*********************************************************************/
+
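+// Typical call sequence (an illustrative sketch based on the public interface
+// declared below; names are placeholders and the real driver code lives
+// elsewhere in LAMARC):
+//
+//     ParseTreeToData reader(parser, datapack);    // XmlParser& and DataPack&
+//     reader.ProcessFileData();                    // fill the DataPack from the parse tree
+//     if (reader.CheckARGtree(forceIds, batch))    // validate any input ARG trees
+//         reader.DoARGtree();                      // and attach them to their regions
+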
+class DataPack;
+class Random;
+class Region;
+class UserTree;
+
+class ParseTreeToData   : public ParseTreeWalker
+{
+  private:
+    DataPack&               datapack;
+    Region*                 m_pCurrRegion;
+    long                    m_currpopulation;
+    long                    m_currindno;
+    Individual              m_currIndividual;
+    TipData                 m_tipdata;
+    std::auto_ptr<Random>   m_randomNameSource;
+    force_type              m_migrationalForce;
+
+    // purposely not implemented, this class is meant to be a singleton
+    ParseTreeToData();
+    ParseTreeToData(const ParseTreeToData&);
+    ParseTreeToData& operator=(const ParseTreeToData&);
+
+  protected:
+
+    // for creating less-likely-to-crash names
+    std::string XmlRandomLongAsString();
+
+    // main line data reading routines
+    void        DoFormat(TiXmlElement*);
+    void        DoData(TiXmlElement*);
+    void        DoRegion(TiXmlElement*, long regionId);
+    void        DoPopulations(TiXmlElement*);
+    void        DoPopulation(TiXmlElement*);
+    void        DoPanel(TiXmlElement*, string popname);
+    void        DoIndividual(TiXmlElement*);
+    void        DoSamples(TiXmlElement *);
+    void        DoStatus(TiXmlElement *);
+    void        DoDisease(TiXmlElement *);
+    void        DoDataBlocks(TiXmlElement *);
+    void        DoGenotypeResolutions(TiXmlElement *);
+
+    // auxiliary map location information
+    void        DoSpacing(TiXmlElement* );
+    void        DoBlocks(TiXmlElement* );
+    void        DoMapPosition(TiXmlElement* , long locus);
+    void        DoLength(TiXmlElement* , long locus);
+    void        DoLocations(TiXmlElement* , long locus);
+    void        DoOffset(TiXmlElement* , long locus);
+
+    // auxiliary data reading routines
+    void        DoPhases(TiXmlElement* );  // DoSamples *must* be called first!
+    void        DoEffectivePopSize(TiXmlElement* );
+
+    // main line tree reading routines--currently called in DoRegions()
+    UserTree*   DoTree(TiXmlElement* xmlllist);
+
+    // diagnosis of migration versus divergence
+    void        DiagnoseTagForMigrationalForce(TiXmlElement* rootElement);
+    force_type  TagForMigrationalForce() const;
+
+  public:
+    ParseTreeToData(XmlParser&,DataPack&);
+    virtual ~ParseTreeToData();
+    void     ProcessFileData();  // processes datapack part of structures
+
+    bool     CheckARGtree(vector<UIId> ids, bool batchmode); // checks input ARG trees are valid
+    bool     DoARGtree();                                    // links up ARG trees to regions
+
+};
+
+LongVec1d::iterator FindValIn(long, LongVec1d&);
+
+#endif // PARSETREETODATA_H
+
+//____________________________________________________________________________________
diff --git a/src/xml/parsetreetosettings.cpp b/src/xml/parsetreetosettings.cpp
new file mode 100644
index 0000000..996b10d
--- /dev/null
+++ b/src/xml/parsetreetosettings.cpp
@@ -0,0 +1,1484 @@
+// $Id: parsetreetosettings.cpp,v 1.85 2013/06/03 17:23:13 jyamato Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "local_build.h"
+#include "constants.h"
+#include "errhandling.h"
+#include "mathx.h"
+#include "parsetreetosettings.h"
+#include "parsetreewalker.h"
+#include "stringx.h"
+#include "ui_constants.h"               // for uiconst::GLOBAL_ID
+#include "ui_interface.h"
+#include "ui_regid.h"
+#include "ui_strings.h"
+#include "ui_vars.h"
+#include "xml.h"
+#include "xml_strings.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+ParseTreeToSettings::ParseTreeToSettings(XmlParser& parser, UIInterface& ui)
+    :
+    ParseTreeWalker(parser),
+    uiInterface(ui)
+{
+}
+
+//------------------------------------------------------------------------------------
+
+ParseTreeToSettings::~ParseTreeToSettings()
+{
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::ProcessFileSettings()
+{
+    TiXmlElement * docElement = m_parser.GetRootElement();
+
+    const char * tagValue = docElement->Value();
+    string tagString(tagValue);
+    bool matches = CaselessStrCmp(xmlstr::XML_TAG_LAMARC,tagValue);
+    if(!matches)
+    {
+        string msg = m_parser.GetFileName() + ": " + xmlstr::XML_ERR_NOT_LAMARC;
+        m_parser.ThrowDataError(msg);
+    }
+
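+    // The remaining settings are read in a fixed order: data models, traits,
+    // chain parameters, output/format options, and finally the forces (which
+    // must come after the chain parameters; see the note above the forces
+    // block below).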
+    vector<TiXmlElement *> globalModelElements
+        = getAllOptionalDescendantElements(docElement,xmlstr::XML_TAG_MODEL);
+    for (unsigned long i=0; i<globalModelElements.size(); ++i)
+    {
+        DoDLModel(globalModelElements[i],uiconst::GLOBAL_ID);
+    }
+
+    DoDataModels(singleRequiredChild(docElement,xmlstr::XML_TAG_DATA));
+    DoTraits(singleRequiredChild(docElement,xmlstr::XML_TAG_DATA));
+
+    TiXmlElement * chainsElement
+        = singleOptionalChild(docElement,xmlstr::XML_TAG_CHAINS);
+    if(chainsElement != NULL)
+    {
+        DoChainParams(chainsElement);
+    }
+
+    TiXmlElement * formatElement
+        = singleOptionalChild(docElement,xmlstr::XML_TAG_FORMAT);
+    if(formatElement != NULL)
+    {
+        DoUserParams(formatElement);
+    }
+
+    // This *must* be after DoChainParams because DIVMIG requires
+    // Bayesian and that the Epoch Arranger be on.
+    TiXmlElement * forcesElement
+        = singleOptionalChild(docElement,xmlstr::XML_TAG_FORCES);
+    if(forcesElement != NULL)
+    {
+        DoForces(forcesElement);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoOptionalElement(TiXmlElement* ancestor, string tagName,string uiLabel, UIId id)
+{
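+    // Look for a single optional <tagName> child of 'ancestor'; if present,
+    // push its text into the UI setting 'uiLabel' for 'id', converting any
+    // data_error into a parser error tagged with the XML row.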
+    TiXmlElement * node = singleOptionalChild(ancestor,tagName);
+    if(node == NULL)
+    {
+        return;
+    }
+    try
+    {
+        uiInterface.doSet(uiLabel,getNodeText(node),id);
+    }
+    catch (data_error & d)
+    {
+        m_parser.ThrowDataError(d.what(), node->Row());
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoRequiredElement(TiXmlElement* ancestor, string tagName,string uiLabel, UIId id)
+{
+    TiXmlElement * node = singleRequiredChild(ancestor,tagName);
+    if(node == NULL)
+    {
+        m_parser.ThrowDataError(xmlstr::XML_ERR_MISSING_TAG_HIER_0
+                                + tagName
+                                + xmlstr::XML_ERR_MISSING_TAG_HIER_1
+                                + ancestor->Value()
+                                + xmlstr::XML_ERR_MISSING_TAG_HIER_2,
+                                ancestor->Row()); // node is NULL on this path; report the ancestor's row
+    }
+    uiInterface.doSet(uiLabel,getNodeText(node),id);
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoDataModels(TiXmlElement * dataElement)
+{
+    TiXmlNode * child = NULL;
+
+    long regionNumber = 0;
+
+    while((child = dataElement->IterateChildren(xmlstr::XML_TAG_REGION,child)))
+    {
+        TiXmlElement * regionElement = child->ToElement();
+        DoRegionDataModels(regionElement,regionNumber);
+        regionNumber++;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoRegionDataModels(TiXmlElement * regionElement, long regionId)
+{
+    TiXmlNode * child = NULL;
+
+    long maxLoci = uiInterface.GetCurrentVars().datapackplus.GetMaxLoci();
+    long locusId = 0;
+
+    while((child = regionElement->IterateChildren(xmlstr::XML_TAG_MODEL,child)))
+    {
+        TiXmlElement * modelElement = child->ToElement();
+        // EWFIX.P4 LOCUS.HACK.HACK.HACK
+        // probably making more than we need
+        // also need to handle per-locus models later
+        // probably does something awful for mismatched types
+        //
+        //JDEBUG--evil hack
+        // we now handle multiple loci, but...ick...sequential implied
+        // ordering
+        if (locusId >= maxLoci)  // too many loci?
+        {
+            std::string errstring("Warning:  More datamodels found");
+            errstring += " than segments; only the first ";
+            errstring += ToString(locusId);
+            errstring += " datamodels were used.\n";
+            uiInterface.AddWarning(errstring);
+            return;
+        }
+
+        DoDLModel(modelElement,regionId,locusId);
+        ++locusId;
+    }
+    // JDEBUG
+    // ideally we would now fill out the remaining loci (if any) with
+    // default datamodels for their datatype...but this may conflict
+    // with registry::InstallDataModel/registry::CreateDataModel and
+    // its use of the single "regional" datamodel
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoDLModel(TiXmlElement * modelElement, long regionId, long locusId)
+{
+    string modelName = getNodeAttributeValue(modelElement,xmlstr::XML_ATTRTYPE_NAME);
+    //Here is where we change the regionId to the correct flag if it had been
+    // set to GLOBAL_ID.  It must be recursive for the KAllele case (for now).
+    if (regionId == uiconst::GLOBAL_ID)
+    {
+        switch (ProduceModelTypeOrBarf(modelName))
+        {
+            case F84:
+            case GTR:
+                DoDLModel(modelElement, uiconst::GLOBAL_DATAMODEL_NUC_ID);
+                return;
+            case Brownian:
+            case Stepwise:
+            case MixedKS:
+                DoDLModel(modelElement, uiconst::GLOBAL_DATAMODEL_MSAT_ID);
+                return;
+            case KAllele:
+                //Here we have to set the global model for *both* the microsat *and*
+                // 'kallele' data.
+                DoDLModel(modelElement, uiconst::GLOBAL_DATAMODEL_MSAT_ID);
+                DoDLModel(modelElement, uiconst::GLOBAL_DATAMODEL_KALLELE_ID);
+                return;
+        }
+        assert(false); //Uncaught model type.
+        regionId = uiconst::GLOBAL_DATAMODEL_NUC_ID;
+    }
+    assert(regionId != uiconst::GLOBAL_ID);
+
+    UIId locusUIId(regionId,locusId);
+
+    uiInterface.doSet(uistr::dataModel,modelName,locusUIId);
+
+    // comments below show which models should allow the setting
+
+    // base frequencies -- F84, GTR (but don't allow calculated)
+    TiXmlElement * baseFrequenciesElement
+        = singleOptionalChild(modelElement,xmlstr::XML_TAG_BASE_FREQS);
+    if(baseFrequenciesElement != NULL)
+    {
+        DoBaseFrequencies(baseFrequenciesElement,locusUIId);
+    }
+    // ttratio -- F84
+    DoOptionalElement(modelElement,xmlstr::XML_TAG_TTRATIO,uistr::TTRatio,locusUIId);
+    // normalize -- F84 GTR Stepwise KAllele
+    DoOptionalElement(modelElement,xmlstr::XML_TAG_NORMALIZE,uistr::normalization,locusUIId);
+    // gtrrates -- GTR
+    TiXmlElement * gtrElement
+        = singleOptionalChild(modelElement,xmlstr::XML_TAG_GTRRATES);
+    if(gtrElement != NULL)
+    {
+        DoGTR(gtrElement,locusUIId);
+    }
+    // error rate -- GTR and F84
+    DoOptionalElement(modelElement,xmlstr::XML_TAG_PER_BASE_ERROR_RATE,uistr::perBaseErrorRate,locusUIId);
+    // alpha -- mixedKS
+    DoOptionalElement(modelElement,xmlstr::XML_TAG_ALPHA,uistr::alpha,locusUIId);
+    // optimize -- mixedKS
+    DoOptionalElement(modelElement,xmlstr::XML_TAG_ISOPT,uistr::optimizeAlpha,locusUIId);
+
+    // categories F84 GTR Stepwise Brownian KAllele
+    TiXmlElement * categoriesElement
+        = singleOptionalChild(modelElement,xmlstr::XML_TAG_CATEGORIES);
+    if(categoriesElement != NULL)
+    {
+        DoCategories(categoriesElement,locusUIId);
+    }
+
+    // relative mutation rate -- all
+    DoOptionalElement(modelElement,xmlstr::XML_TAG_RELATIVE_MURATE,uistr::relativeMuRate,locusUIId);
+} // DoDLModel
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoCategories(TiXmlElement * categoriesElement, UIId locusUIId)
+{
+    DoOptionalElement(categoriesElement,xmlstr::XML_TAG_NUM_CATEGORIES,uistr::categoryCount,locusUIId);
+    DoOptionalElement(categoriesElement,xmlstr::XML_TAG_AUTOCORRELATION,uistr::autoCorrelation,locusUIId);
+
+    TiXmlElement * catRates = singleOptionalChild(categoriesElement,xmlstr::XML_TAG_RATES);
+    TiXmlElement * catProbs = singleOptionalChild(categoriesElement,xmlstr::XML_TAG_PROBABILITIES);
+
+    if(catRates != NULL)
+    {
+        StringVec1d strings = getNodeTextSplitOnWhitespace(catRates);
+        long catCount = uiInterface.doGetLong(uistr::categoryCount,locusUIId);
+        long numToSet = catCount;
+        if((long)strings.size() > catCount)
+        {
+            uiInterface.AddWarning("Warning:  The number of supplied category rates is greater than the number of categories.  Discarding the extras.");
+        }
+        else
+        {
+            if((long)strings.size() < catCount)
+            {
+                uiInterface.AddWarning("Warning:  The number of supplied category rates is less than the number of categories.  Using defaults for the extras.");
+                numToSet = strings.size();
+            }
+        }
+
+        for(long index=0;index < numToSet; index++)
+        {
+            UIId innerId(locusUIId.GetIndex1(),locusUIId.GetIndex2(),index);
+            uiInterface.doSet(uistr::categoryRate,strings[index],innerId);
+        }
+    }
+    if(catProbs != NULL)
+    {
+        StringVec1d strings = getNodeTextSplitOnWhitespace(catProbs);
+        long catCount = uiInterface.doGetLong(uistr::categoryCount,locusUIId);
+        long numToSet = catCount;
+        if((long)strings.size() > catCount)
+        {
+            uiInterface.AddWarning("Warning:  The number of supplied category probabilities is greater than the number of categories.  Discarding the extras.");
+        }
+        else
+        {
+            if((long)strings.size() < catCount)
+            {
+                uiInterface.AddWarning("Warning:  The number of supplied category probabilities is less than the number of categories.  Using defaults for the extras.");
+                numToSet = strings.size();
+            }
+        }
+
+        for(long index=0;index < numToSet; index++)
+        {
+            UIId innerId(locusUIId.GetIndex1(),locusUIId.GetIndex2(),index);
+            uiInterface.doSet(uistr::categoryProbability,strings[index],innerId);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoBaseFrequencies(TiXmlElement * baseFreqsElem, UIId thisId)
+{
+
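+    // The element body is either the xmlstr::XML_TAG_CALCULATED keyword
+    // (matched case-insensitively below), meaning the base frequencies are
+    // taken from the data, or four whitespace-separated numbers in the
+    // order A C G T.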
+    // kinda grotty, but we want to ignore case differences
+    string frequencies = getNodeText(baseFreqsElem);
+    string tag = xmlstr::XML_TAG_CALCULATED;
+    LowerCase(frequencies);
+    LowerCase(tag);
+    string::size_type index = frequencies.find(tag);
+
+    if(index != std::string::npos)
+    {
+        uiInterface.doSet(uistr::freqsFromData,"true",thisId);
+    }
+    else
+    {
+        StringVec1d strings;
+        bool gotStrings = FromString(frequencies,strings);
+        if(gotStrings)
+        {
+            if(strings.size() == 4)
+            {
+                uiInterface.doSet(uistr::baseFrequencyA,strings[0],thisId);
+                uiInterface.doSet(uistr::baseFrequencyC,strings[1],thisId);
+                uiInterface.doSet(uistr::baseFrequencyG,strings[2],thisId);
+                uiInterface.doSet(uistr::baseFrequencyT,strings[3],thisId);
+            }
+            else if (strings.size() == 0)
+            {
+                uiInterface.AddWarning("Warning:  no supplied base frequencies; using a set of defaults.");
+
+            }
+            else
+            {
+                uiInterface.AddWarning("Warning:  wrong number of supplied base frequencies; using a set of defaults.");
+            }
+        }
+        else
+        {
+            uiInterface.AddWarning("Warning:  no supplied base frequencies; using a set of defaults.");
+        }
+    }
+} // DoBaseFrequencies
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoGTR(TiXmlElement * gtrElem, UIId thisId)
+{
+    StringVec1d strings = getNodeTextSplitOnWhitespace(gtrElem);
+    if(strings.size() == 6)
+    {
+        uiInterface.doSet(uistr::gtrRateAC,strings[0],thisId);
+        uiInterface.doSet(uistr::gtrRateAG,strings[1],thisId);
+        uiInterface.doSet(uistr::gtrRateAT,strings[2],thisId);
+        uiInterface.doSet(uistr::gtrRateCG,strings[3],thisId);
+        uiInterface.doSet(uistr::gtrRateCT,strings[4],thisId);
+        uiInterface.doSet(uistr::gtrRateGT,strings[5],thisId);
+    }
+    else if (strings.size() == 0)
+    {
+        uiInterface.AddWarning("Warning:  no GTR rates supplied:  using a set of defaults.");
+    }
+    else
+    {
+        uiInterface.AddWarning("Warning:  incorrect number of GTR rates supplied:  using a set of defaults.");
+    }
+} // DoGTR
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoChainParams(TiXmlElement * chainsElement)
+{
+    DoOptionalElement(chainsElement,xmlstr::XML_TAG_REPLICATES,uistr::replicates);
+
+    TiXmlElement * initialChainElement
+        = singleOptionalChild(chainsElement,xmlstr::XML_TAG_INITIAL);
+    if(initialChainElement != NULL)
+    {
+        DoOptionalElement(initialChainElement,xmlstr::XML_TAG_NUMBER,uistr::initialChains);
+        DoOptionalElement(initialChainElement,xmlstr::XML_TAG_SAMPLES,uistr::initialSamples);
+        DoOptionalElement(initialChainElement,xmlstr::XML_TAG_INTERVAL,uistr::initialInterval);
+        DoOptionalElement(initialChainElement,xmlstr::XML_TAG_DISCARD,uistr::initialDiscard);
+    }
+
+    TiXmlElement * finalChainElement
+        = singleOptionalChild(chainsElement,xmlstr::XML_TAG_FINAL);
+    if(finalChainElement != NULL)
+    {
+        DoOptionalElement(finalChainElement,xmlstr::XML_TAG_NUMBER,uistr::finalChains);
+        DoOptionalElement(finalChainElement,xmlstr::XML_TAG_SAMPLES,uistr::finalSamples);
+        DoOptionalElement(finalChainElement,xmlstr::XML_TAG_INTERVAL,uistr::finalInterval);
+        DoOptionalElement(finalChainElement,xmlstr::XML_TAG_DISCARD,uistr::finalDiscard);
+    }
+
+    TiXmlElement * heatingElement
+        = singleOptionalChild(chainsElement,xmlstr::XML_TAG_HEATING);
+    if(heatingElement != NULL)
+    {
+        TiXmlElement * temperatures
+            = singleOptionalChild(heatingElement,xmlstr::XML_TAG_TEMPERATURES);
+        if(temperatures != NULL) DoTemperatures(getNodeTextSplitOnWhitespace(temperatures));
+
+        TiXmlElement * intervals
+            = singleOptionalChild(heatingElement,xmlstr::XML_TAG_SWAP_INTERVAL);
+        if(intervals != NULL) DoSwapInterval(intervals);
+
+        DoOptionalElement(heatingElement,xmlstr::XML_TAG_HEATING_STRATEGY,uistr::tempAdapt);
+    }
+
+    // must do processing for XML_TAG_BAYESIAN_ANALYSIS before processing for
+    // the XML_TAG_BAYESIAN strategy element because if they are inconsistent
+    // we need to throw--the BAYESIAN_ANALYSIS tag takes precedence.
+    DoOptionalElement(chainsElement,xmlstr::XML_TAG_BAYESIAN_ANALYSIS,uistr::bayesian);
+    TiXmlElement * strategyElement
+        = singleOptionalChild(chainsElement,xmlstr::XML_TAG_STRATEGY);
+    if(strategyElement != NULL)
+    {
+        DoOptionalElement(strategyElement,xmlstr::XML_TAG_RESIMULATING,uistr::dropArranger);
+        uiInterface.GetCurrentVars().chains.RescaleDefaultSizeArranger();
+        DoOptionalElement(strategyElement,xmlstr::XML_TAG_TREESIZE,uistr::sizeArranger);
+        DoOptionalElement(strategyElement,xmlstr::XML_TAG_STAIRARRANGER,uistr::stairArranger);
+        DoOptionalElement(strategyElement,xmlstr::XML_TAG_HAPLOTYPING,uistr::hapArranger);
+        DoOptionalElement(strategyElement,xmlstr::XML_TAG_BAYESIAN,uistr::bayesArranger);
+        DoOptionalElement(strategyElement,xmlstr::XML_TAG_EPOCHSIZEARRANGER,uistr::epochSizeArranger);
+    }
+
+} // DoChainParams
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoTemperatures(StringVec1d temperatures)
+{
+    if(temperatures.empty())
+    {
+        uiInterface.AddWarning("Warning:  No supplied temperatures for heated chains:  using a set of defaults.");
+    }
+    else
+    {
+        uiInterface.doSet(uistr::heatedChainCount,ToString(temperatures.size()));
+        for(long index = 0; index < (long)temperatures.size(); index++)
+        {
+            uiInterface.doSet(uistr::heatedChain,temperatures[index],UIId(index));
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoSwapInterval(TiXmlElement * intervals)
+{
+    StringVec1d values = getNodeTextSplitOnWhitespace(intervals);
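+    // If several whitespace-separated values are supplied, only the last one
+    // is used as the swap interval.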
+    if(values.size() > 0)
+    {
+        uiInterface.doSet(uistr::tempInterval,values[values.size()-1]);
+    }
+    if(values.empty())
+    {
+        uiInterface.AddWarning("Warning:  empty <" + xmlstr::XML_TAG_SWAP_INTERVAL + "> tag found; using defaults.");
+    }
+
+} // DoSwapInterval
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoSeedParams(TiXmlElement * formatElement)
+{
+    TiXmlElement * node1 = singleOptionalChild(formatElement,xmlstr::XML_TAG_SEED);
+    TiXmlElement * node2 = singleOptionalChild(formatElement,xmlstr::XML_TAG_SEED_FROM_CLOCK);
+    if(node1 != NULL && node2 != NULL)
+    {
+        m_parser.ThrowDataError(xmlstr::XML_ERR_BOTH_SEED_TYPES_0
+                                + ToString(node1->Row())
+                                + xmlstr::XML_ERR_BOTH_SEED_TYPES_1
+                                + ToString(node2->Row())
+                                + xmlstr::XML_ERR_BOTH_SEED_TYPES_2
+                                + m_parser.GetFileName()
+                                + xmlstr::XML_ERR_BOTH_SEED_TYPES_3
+            );
+    }
+
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_SEED,uistr::randomSeed);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_SEED_FROM_CLOCK,uistr::setOldSeedFromClock);
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoUserParams(TiXmlElement * formatElement)
+{
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_VERBOSITY,uistr::verbosity);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_PROGRESS_REPORTS,uistr::progress);
+
+    TiXmlElement * plottingElement
+        = singleOptionalChild(formatElement,xmlstr::XML_TAG_PLOTTING);
+    if(plottingElement != NULL)
+    {
+        DoOptionalElement(plottingElement,xmlstr::XML_TAG_POSTERIOR,uistr::plotPost);
+    }
+
+    DoSeedParams(formatElement);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_RESULTS_FILE,uistr::resultsFileName);
+    //LS NOTE:  These are off by default, so we only warn that there's a
+    // problem if we turn them off, meaning we should set the filename first.
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_OLD_SUMMARY_FILE,uistr::treeSumInFileName);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_IN_SUMMARY_FILE,uistr::treeSumInFileName);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_OUT_SUMMARY_FILE,uistr::treeSumOutFileName);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_USE_IN_SUMMARY,uistr::treeSumInFileEnabled);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_USE_OUT_SUMMARY,uistr::treeSumOutFileEnabled);
+#ifdef LAMARC_QA_TREE_DUMP
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_ARGFILE_PREFIX,uistr::argFilePrefix);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_USE_ARGFILES,uistr::useArgFiles);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_MANY_ARGFILES,uistr::manyArgFiles);
+#endif // LAMARC_QA_TREE_DUMP
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_NEWICKTREEFILE_PREFIX,uistr::newickTreeFilePrefix);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_USE_NEWICKTREEFILE,uistr::useNewickTreeFiles);
+    //LS NOTE:  And these two are on by default, so we warn only if it's still
+    // on when we change the name.
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_USE_CURVEFILES,uistr::useCurveFiles);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_CURVEFILE_PREFIX,uistr::curveFilePrefix);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_PROFILE_PREFIX,uistr::profileprefix);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_USE_RECLOCFILE,uistr::useReclocFiles);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_RECLOCFILE_PREFIX,uistr::reclocFilePrefix);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_USE_TRACEFILE,uistr::useTraceFiles);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_TRACEFILE_PREFIX,uistr::traceFilePrefix);
+    //LS NOTE:  And this one we always write, so there are no warnings about
+    // the setting.
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_OUT_XML_FILE,uistr::xmlOutFileName);
+    DoOptionalElement(formatElement,xmlstr::XML_TAG_REPORT_XML_FILE,uistr::xmlReportFileName);
+
+} // DoUserParams
+
+//------------------------------------------------------------------------------------
+// Force parameter functions
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoForces(TiXmlElement * forcesElement)
+{
+    DoForceIfPresent(forcesElement,force_DIVERGENCE,xmlstr::XML_TAG_DIVERGENCE);
+    DoForceIfPresent(forcesElement,force_COAL,xmlstr::XML_TAG_COALESCENCE);
+    DoForceIfPresent(forcesElement,force_MIG,xmlstr::XML_TAG_MIGRATION);
+    DoForceIfPresent(forcesElement,force_REC,xmlstr::XML_TAG_RECOMBINATION);
+    DoForceIfPresent(forcesElement,force_GROW,xmlstr::XML_TAG_GROWTH);
+    DoForceIfPresent(forcesElement,force_LOGISTICSELECTION,
+                     xmlstr::XML_TAG_LOGISTICSELECTION);
+    DoForceIfPresent(forcesElement,force_LOGSELECTSTICK,
+                     xmlstr::XML_TAG_STOCHASTICSELECTION);
+    DoForceIfPresent(forcesElement,force_DISEASE,xmlstr::XML_TAG_DISEASE);
+    DoForceIfPresent(forcesElement,force_REGION_GAMMA,
+                     xmlstr::XML_TAG_REGION_GAMMA);
+    DoForceIfPresent(forcesElement,force_DIVMIG,xmlstr::XML_TAG_DIVMIG);
+
+} // DoForces
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoForceIfPresent(
+    TiXmlElement * forcesElement,
+    force_type forcetype,
+    const string& forcetag)
+{
+    TiXmlElement * forceElement = singleOptionalChild(forcesElement,forcetag);
+
+    if(forceElement != NULL)
+    {
+        DoForce(forceElement,forcetype);
+    }
+
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoForce(TiXmlElement* forceElement, force_type forcetype)
+{
+    UIId forceId(forcetype);
+
+    string forceOnOff = getNodeAttributeValue(forceElement,xmlstr::XML_ATTRTYPE_VALUE);
+    if(!forceOnOff.empty())
+    {
+        uiInterface.doSet(uistr::forceOnOff,forceOnOff,forceId);
+    }
+    else
+    {
+        uiInterface.doSet(uistr::forceOnOff,"on",forceId);
+    }
+
+    if(forcetype == force_GROW)
+    {
+        string growthType = getNodeAttributeValue(forceElement,xmlstr::XML_ATTRTYPE_TYPE);
+        if(!growthType.empty())
+        {
+            uiInterface.doSet(uistr::growthType,growthType);
+        }
+    }
+
+    if(forcetype == force_LOGSELECTSTICK)
+    {
+        string stype(lamarcstrings::shortSSelectionName);
+        uiInterface.doSet(uistr::selectType,stype);
+    }
+
+    if(forcetype == force_DIVERGENCE)
+    {
+        // Parse the population-tree hierarchy.
+        TiXmlElement* popElement =
+            singleRequiredChild(forceElement,xmlstr::XML_TAG_POPTREE);
+        TiXmlNode * child = NULL;
+
+        // Dreadful quick-n-dirty structures to make sure we have a legal tree.
+        std::map<std::string,std::string> popToAnc;
+        std::set<std::string> ancSet;
+        std::map<std::string,std::string>::iterator mit;
+        std::set<std::string>::iterator sit;
+
+        while((child = popElement->IterateChildren(xmlstr::XML_TAG_EPOCH_BOUNDARY,child)))
+        {
+            TiXmlElement * boundaryElement = child->ToElement();
+            TiXmlElement* newpopElement =
+                singleRequiredChild(boundaryElement,xmlstr::XML_TAG_NEWPOP);
+            StringVec1d newpops = getNodeTextSplitOnWhitespace(newpopElement);
+
+            if (newpops.size() != 2)
+            {
+                // Throw wrong number of populations error.
+                m_parser.ThrowDataError(xmlstr::XML_ERR_NEWPOP, newpopElement->Row());
+            }
+
+            // JNOTE: Not using the SetGet machinery because of obscure "unsolved problems".
+            // Fix this right! at some point.
+            uiInterface.GetCurrentVars().forces.AddNewPops(newpops);
+
+            TiXmlElement* ancestorElement = singleRequiredChild(boundaryElement,xmlstr::XML_TAG_ANCESTOR);
+            string ancname = getNodeText(ancestorElement);
+            if(ancname.empty())
+            {
+                m_parser.ThrowDataError(xmlstr::XML_ERR_EMPTY_ANCESTOR, ancestorElement->Row());
+            }
+            else
+            {
+                // JNOTE: Not using the SetGet machinery because of obscure "unsolved problems".
+                // Fix this right! at some point.
+                // uiInterface.doSet(uistr::divergenceEpochAncestor,ancname);
+                uiInterface.GetCurrentVars().forces.AddAncestor(ancname);
+            }
+
+            // Check newpops[0] not already a child.
+            mit = popToAnc.find(newpops[0]);
+            if(mit != popToAnc.end())
+            {
+                std::string oldAnc = (*mit).second;
+                m_parser.ThrowDataError(xmlstr::XML_ERR_MULTIPLE_ANCESTORS_0
+                                        + newpops[0]
+                                        + xmlstr::XML_ERR_MULTIPLE_ANCESTORS_1
+                                        + oldAnc
+                                        + xmlstr::XML_ERR_MULTIPLE_ANCESTORS_2
+                                        + ancname
+                                        + xmlstr::XML_ERR_MULTIPLE_ANCESTORS_3,
+                                        ancestorElement->Row()
+                    );
+            }
+
+            // Check newpops[1] not already a child.
+            mit = popToAnc.find(newpops[1]);
+            if(mit != popToAnc.end())
+            {
+                std::string oldAnc = (*mit).second;
+                m_parser.ThrowDataError(xmlstr::XML_ERR_MULTIPLE_ANCESTORS_0
+                                        + newpops[1]
+                                        + xmlstr::XML_ERR_MULTIPLE_ANCESTORS_1
+                                        + oldAnc
+                                        + xmlstr::XML_ERR_MULTIPLE_ANCESTORS_2
+                                        + ancname
+                                        + xmlstr::XML_ERR_MULTIPLE_ANCESTORS_3,
+                                        ancestorElement->Row()
+                    );
+            }
+
+            // Check ancname not already used.
+            sit = ancSet.find(ancname);
+            if(sit != ancSet.end())
+            {
+                m_parser.ThrowDataError(xmlstr::XML_ERR_DUPLICATE_ANCESTOR_0
+                                        + ancname
+                                        + xmlstr::XML_ERR_DUPLICATE_ANCESTOR_1,
+                                        ancestorElement->Row()
+                    );
+
+            }
+
+            // Everything OK! Let's add data -- edit only one of the three at your peril.
+            popToAnc[newpops[0]] = ancname;
+            popToAnc[newpops[1]] = ancname;
+            ancSet.insert(ancname);
+        }
+
+        // Now, check that we have the following:
+        // * the number of keys mapping pops to ancestors is (2*pops) - 2
+        //   (num pops + num internal nodes - one root)
+        // * exactly one ancestor is not itself a key in the map from pops to ancestors
+        // * every pop name is a key in the map from pops to ancestors
+        // * no pop name appears in the set of ancestors
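+        //
+        // Worked example with hypothetical population names: boundaries
+        // (A,B)->AB and (AB,C)->ABC give popToAnc keys {A, B, AB, C}
+        // (2*3 - 2 = 4 entries), ancSet {AB, ABC}, and exactly one ancestor
+        // (ABC, the root) that is not itself a key in popToAnc.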
+
+        size_t numNotRoot = popToAnc.size();
+        size_t numInternal = ancSet.size();
+
+        std::set<std::string> popNamesSpecifiedInData;
+        TiXmlElement * docElement = m_parser.GetRootElement();
+        TiXmlElement * dataElem = singleRequiredChild(docElement,xmlstr::XML_TAG_DATA);
+
+        TiXmlNode * regChild = NULL;
+        while((regChild = dataElem->IterateChildren(xmlstr::XML_TAG_REGION,regChild)))
+        {
+            TiXmlElement * regionElement = regChild->ToElement();
+            TiXmlNode * popChild = NULL;
+            while((popChild = regionElement->IterateChildren(xmlstr::XML_TAG_POPULATION,popChild)))
+            {
+                TiXmlElement * populationElement = popChild->ToElement();
+                std::string thisPopName = getNodeAttributeValue(populationElement,xmlstr::XML_ATTRTYPE_NAME);
+                popNamesSpecifiedInData.insert(thisPopName);
+            }
+        }
+
+        size_t popCount = popNamesSpecifiedInData.size();
+
+        for(std::set<std::string>::const_iterator si = popNamesSpecifiedInData.begin(); si != popNamesSpecifiedInData.end(); si++)
+        {
+            std::string popName = *si;
+            mit = popToAnc.find(popName);
+            if (mit == popToAnc.end())
+            {
+                m_parser.ThrowDataError(xmlstr::XML_ERR_MISSING_NEWPOP_0
+                                        + popName
+                                        + xmlstr::XML_ERR_MISSING_NEWPOP_1,
+                                        popElement->Row());
+            }
+
+            sit = ancSet.find(popName);
+            if(sit != ancSet.end())
+            {
+                m_parser.ThrowDataError(xmlstr::XML_ERR_ANCESTOR_TRUEPOP_0
+                                        + popName
+                                        + xmlstr::XML_ERR_ANCESTOR_TRUEPOP_1,
+                                        popElement->Row());
+            }
+        }
+
+        if(numNotRoot != (popNamesSpecifiedInData.size() * 2) - 2 )
+        {
+            m_parser.ThrowDataError(xmlstr::XML_ERR_BAD_ANCESTOR_TREE,popElement->Row());
+        }
+
+        size_t nonRootAncestors = 0;
+        for(sit = ancSet.begin(); sit != ancSet.end(); sit++)
+        {
+            mit = popToAnc.find(*sit);
+            if (mit != popToAnc.end())
+            {
+                nonRootAncestors++;
+            }
+        }
+
+        if(nonRootAncestors != ancSet.size() - 1)
+        {
+            m_parser.ThrowDataError(xmlstr::XML_ERR_BAD_ANCESTOR_TREE,popElement->Row());
+        }
+
+        // UGH -- this is awful -- we must change to bayesian and force
+        // epoch arranger to be non-zero
+        if(!uiInterface.doGetBool(uistr::bayesian))
+        {
+            // this should do a decent enough job of choosing a value for the arranger frequency
+            uiInterface.doSet(uistr::bayesian,"true");
+        }
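+        // If the epoch-size arranger is currently disabled (frequency <= 0),
+        // give it the smaller of the bayesian and resimulation arranger
+        // frequencies.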
+        double epochFreq = uiInterface.doGetDouble(uistr::epochSizeArranger);
+        if (epochFreq <= 0.0)
+        {
+            double bayesFreq = uiInterface.doGetDouble(uistr::bayesArranger);
+            double resimFreq = uiInterface.doGetDouble(uistr::dropArranger);
+            epochFreq = (bayesFreq > resimFreq) ? resimFreq : bayesFreq;
+            uiInterface.doSet(uistr::epochSizeArranger,ToString(epochFreq));
+        }
+
+
+
+#if 0
+        TiXmlElement* boundaryElement =
+            singleRequiredChild(popElement,xmlstr::XML_TAG_EPOCH_BOUNDARY);
+        do
+        {
+            TiXmlElement* newpopElement =
+                singleRequiredChild(boundaryElement,xmlstr::XML_TAG_NEWPOP);
+            StringVec1d newpops = getNodeTextSplitOnWhitespace(newpopElement);
+            // JNOTE: not using the SetGet machinery because of obscure "unsolved problems"
+            // fix this right! at some point
+            uiInterface.GetCurrentVars().forces.AddNewPops(newpops);
+
+            TiXmlElement* ancestorElement =
+                singleRequiredChild(boundaryElement,xmlstr::XML_TAG_ANCESTOR);
+            string ancname = getNodeText(ancestorElement);
+            // JNOTE: not using the SetGet machinery because of obscure "unsolved problems"
+            // fix this right! at some point
+            uiInterface.GetCurrentVars().forces.AddAncestor(ancname);
+
+            boundaryElement = singleOptionalChild(popElement,xmlstr::XML_TAG_EPOCH_BOUNDARY);
+        } while (boundaryElement != NULL);
+#endif
+    }
+
+    if(uiInterface.doGetBool(uistr::forceLegal,forceId))
+    {
+        DoOptionalElement(forceElement,xmlstr::XML_TAG_MAX_EVENTS,uistr::maxEvents,forceId);
+        DoOptionalElement(forceElement,xmlstr::XML_TAG_DISEASELOCATION,uistr::diseaseLocation,forceId);
+
+        //LS NOTE:  Order matters in this next section. First, we do profiles,
+        // because if they're done after the groups, the 'none' entries
+        // override any (correct) 'on' entries for grouped parameters.  Next
+        // we do constraints, then groups, because the groups tags need
+        // to override the constraints tags.
+        //
+        // Then we do the priors, because they will be affected by the groups
+        // and constraints.
+        //
+        // Then we do start values, because they will be affected by the
+        // constraints, groups, priors, and methods.  Sadly, we have to do a bit
+        // of munging in DoStartValuesAndMethods because the Methods are, in
+        // turn, affected by the process of setting the start values.
+        DoProfiles(forceElement,forceId);
+        DoConstraints(forceElement,forceId);
+        DoGroups(forceElement, forceId);
+        DoPriors(forceElement, forceId);
+        DoStartValuesAndMethods(forceElement,forceId);
+        DoTrueValues(forceElement,forceId);
+
+        // LS DEBUG -- we can at least check the zeroes for validity.
+        //  Eventually the method to do this will change, so don't copy this
+        //  technique elsewhere.
+        if (!uiInterface.GetCurrentVars().forces.GetForceZeroesValidity(forcetype))
+        {
+            string err = "Invalid settings for force ";
+            err += ToString(forcetype) + ".  Too many parameters are set invalid or have a start value of 0.0.";
+            throw data_error(err);
+        }
+        uiInterface.GetCurrentVars().forces.FixGroups(forcetype);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoStartValuesAndMethods(TiXmlElement* forceElement, UIId forceId)
+{
+    TiXmlElement * methodsElement =
+        singleOptionalChild(forceElement,xmlstr::XML_TAG_METHOD);
+    TiXmlElement * startValuesElement =
+        singleOptionalChild(forceElement,xmlstr::XML_TAG_START_VALUES);
+
+    if(methodsElement == NULL && startValuesElement == NULL)
+        // simply return and take the default values
+    {
+        return;
+    }
+
+    force_type thisForce = forceId.GetForceType();
+    long expectedNumParameters =
+        uiInterface.GetCurrentVars().forces.GetNumParameters(thisForce);
+    StringVec1d values;
+    StringVec1d methods;
+
+    if(startValuesElement != NULL)
+    {
+        values = getNodeTextSplitOnWhitespace(startValuesElement);
+        if(static_cast<long>(values.size()) != expectedNumParameters)
+        {
+            m_parser.ThrowDataError(xmlstr::XML_ERR_START_VALUE_COUNT_0
+                                    + ToString(expectedNumParameters)
+                                    + xmlstr::XML_ERR_START_VALUE_COUNT_1
+                                    + ToString(thisForce)
+                                    + xmlstr::XML_ERR_START_VALUE_COUNT_2
+                                    + ToString(values.size())
+                                    + xmlstr::XML_ERR_START_VALUE_COUNT_3,
+                                    startValuesElement->Row());
+        }
+    }
+
+    if(methodsElement != NULL)
+    {
+        methods = getNodeTextSplitOnWhitespace(methodsElement);
+        if(static_cast<long>(methods.size()) != expectedNumParameters)
+        {
+            m_parser.ThrowDataError(xmlstr::XML_ERR_METHOD_TYPE_COUNT_0
+                                    + ToString(expectedNumParameters)
+                                    + xmlstr::XML_ERR_METHOD_TYPE_COUNT_1
+                                    + ToString(thisForce)
+                                    + xmlstr::XML_ERR_METHOD_TYPE_COUNT_2
+                                    + ToString(methods.size())
+                                    + xmlstr::XML_ERR_METHOD_TYPE_COUNT_2,
+                                    methodsElement->Row());
+        }
+    }
+
+    // at this point, we either have a full set of method types or none
+    // and we have either a full set of start values or none
+
+    if(methodsElement == NULL)
+        // an easy case -- all methods become type USER
+    {
+        for(long index=0; index < (long)values.size(); index++)
+        {
+            UIId id(forceId.GetForceType(),index);
+            uiInterface.doSet(uistr::startValue,values[index],id);
+            uiInterface.doSet(uistr::startValueMethod,ToString(method_USER),id);
+        }
+        return;
+    }
+
+    if(startValuesElement == NULL)
+        // OK as long as method isn't USER
+    {
+        for(long index=0; index < (long)methods.size(); index++)
+        {
+            UIId id(forceId.GetForceType(),index);
+            uiInterface.doSet(uistr::startValueMethod,methods[index],id);
+
+            if(StringMatchesMethodType(methods[index],method_USER))
+            {
+                double defaultStartVal
+                    = uiInterface.GetCurrentVars().forces.GetStartValue(forceId.GetForceType(),index);
+                uiInterface.AddWarning(xmlstr::XML_ERR_METHOD_USER_WITHOUT_VALUE_0
+                                       +ToString(forceId.GetForceType())
+                                       +xmlstr::XML_ERR_METHOD_USER_WITHOUT_VALUE_1
+                                       +ToString(defaultStartVal)
+                                       +xmlstr::XML_ERR_METHOD_USER_WITHOUT_VALUE_2);
+            }
+        }
+        return;
+    }
+
+    // OK. Now we should have a full set of method types and start values
+    assert((long)methods.size() == expectedNumParameters);
+    assert((long)values.size() == expectedNumParameters);
+
+    for(long index=0; index < (long)values.size(); index++)
+    {
+        UIId id(forceId.GetForceType(),index);
+        uiInterface.doSet(uistr::startValue,values[index],id);
+    }
+    DoubleVec1d usersStartValues = uiInterface.GetCurrentVars().forces.GetStartValues(forceId.GetForceType());
+
+    for(long index=0; index < (long)values.size(); index++)
+    {
+        UIId id(forceId.GetForceType(),index);
+        uiInterface.doSet(uistr::startValueMethod,methods[index],id);
+    }
+    DoubleVec1d overriddenStartValues = uiInterface.GetCurrentVars().forces.GetStartValues(forceId.GetForceType());
+
+    assert(usersStartValues.size() == overriddenStartValues.size());
+    assert(usersStartValues.size() == methods.size());
+
+    for(long index=0; index < (long)methods.size(); index++)
+    {
+        double userVal = usersStartValues[index];
+        double overriddenValue = overriddenStartValues[index];
+
+        if (fabs(userVal - overriddenValue) > 0.00001)
+        {
+            method_type culprit
+                = uiInterface.doGetMethodType(uistr::startValueMethod,UIId(thisForce,index));
+            uiInterface.AddWarning("Warning:  setting the start method for a "
+                                   + ToString(forceId.GetForceType())
+                                   + " parameter to "
+                                   + ToString(culprit, true)
+                                   + " will override the value set in the <"
+                                   + xmlstr::XML_TAG_START_VALUES
+                                   + "> tag.");
+        }
+    }
+
+#if 0
+    bool someStartValues = false;
+    // set start values first since these calls set methods
+    if(startValuesElement != NULL)
+    {
+        someStartValues = true;
+        StringVec1d values = getNodeTextSplitOnWhitespace(startValuesElement);
+        for(long index=0; index < (long)values.size(); index++)
+        {
+            UIId id(forceId.GetForceType(),index);
+            uiInterface.doSet(uistr::startValue,values[index],id);
+        }
+        if(values.empty())
+        {
+            someStartValues = false;
+            uiInterface.AddWarning("Warning:  empty <" + xmlstr::XML_TAG_START_VALUES + "> tag found; using defaults.");
+        }
+
+    }
+    else
+    {
+        uiInterface.AddWarning("Warning:  missing <" + xmlstr::XML_TAG_START_VALUES + "> tag ; using defaults.");
+    }
+
+    DoubleVec1d originalStartValues = uiInterface.GetCurrentVars().forces.GetStartValues(forceId.GetForceType());
+    if(methodsElement != NULL)
+    {
+        StringVec1d methods = getNodeTextSplitOnWhitespace(methodsElement);
+        for(long index=0; index < (long)methods.size(); index++)
+        {
+            UIId id(forceId.GetForceType(),index);
+            uiInterface.doSet(uistr::startValueMethod,methods[index],id);
+        }
+        if(methods.empty())
+        {
+            uiInterface.AddWarning("Warning:  empty <" + xmlstr::XML_TAG_METHOD + "> tag found; using defaults");
+        }
+        else
+        {
+            if (someStartValues)
+            {
+                DoubleVec1d newStartValues = uiInterface.GetCurrentVars().forces.GetStartValues(forceId.GetForceType());
+                for (unsigned long i=0; i<originalStartValues.size(); i++)
+                {
+                    if (fabs(originalStartValues[i] - newStartValues[i]) > 0.00001)
+                    {
+                        method_type culprit = uiInterface.doGetMethodType(uistr::startValueMethod,UIId(forceId.GetForceType(), i));
+                        uiInterface.AddWarning("Warning:  setting the start method for a "
+                                               + ToString(forceId.GetForceType())
+                                               + " parameter to "
+                                               + ToString(culprit, true)
+                                               + " will override the value set in the <"
+                                               + xmlstr::XML_TAG_START_VALUES + "> tag.");
+                    }
+                }
+            }
+        }
+    }
+#endif
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoTrueValues(TiXmlElement* forceElement, UIId forceId)
+{
+    TiXmlElement * trueValuesElement =
+        singleOptionalChild(forceElement,xmlstr::XML_TAG_TRUEVALUE);
+    if(trueValuesElement != NULL)
+    {
+        StringVec1d values = getNodeTextSplitOnWhitespace(trueValuesElement);
+        for(unsigned long index=0; index < values.size(); index++)
+        {
+            UIId id(forceId.GetForceType(),index);
+            uiInterface.doSet(uistr::trueValue,values[index],id);
+        }
+        if(values.empty())
+        {
+            uiInterface.AddWarning("Warning:  empty <" + xmlstr::XML_TAG_TRUEVALUE + "> tag found; using defaults");
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoProfiles(TiXmlElement * forceElement, UIId forceId)
+{
+    TiXmlElement * profilesElement =
+        singleOptionalChild(forceElement,xmlstr::XML_TAG_PROFILES);
+
+    if(profilesElement != NULL)
+    {
+        string profilesString = getNodeText(profilesElement);
+        ProftypeVec1d profiles = ProduceProftypeVec1dOrBarf(profilesString);
+        size_t index;
+        bool hasFix = false;
+        bool hasPerc= false;
+        for(index=0; index < profiles.size(); index++)
+        {
+            UIId localId(forceId.GetForceType(),index);
+            switch(profiles[index])
+            {
+                case profile_PERCENTILE:
+                    hasPerc = true;
+                    uiInterface.doSet(uistr::profileByID,"true",localId);
+                    break;
+                case profile_FIX:
+                    hasFix = true;
+                    uiInterface.doSet(uistr::profileByID,"true",localId);
+                    break;
+                case profile_NONE:
+                    uiInterface.doSet(uistr::profileByID,"false",localId);
+                    break;
+            }
+        }
+        if(hasFix && !hasPerc)
+        {
+            uiInterface.doSet(uistr::profileByForce,"fixed",forceId);
+        }
+        else
+        {
+            uiInterface.doSet(uistr::profileByForce,"percentile",forceId);
+            if(hasFix)
+            {
+                // add warning
+                std::string errstring("Warning:  <profiles> element near line ");
+                errstring += ToString(profilesElement->Row());
+                errstring += " contains both fixed and percentile profiling.";
+                errstring += " We only allow one type per force and are";
+                errstring += " changing the type to percentile profiling.";
+                uiInterface.AddWarning(errstring);
+            }
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoConstraints(TiXmlElement * forceElement, UIId forceId)
+{
+    TiXmlElement * constraintsElement =
+        singleOptionalChild(forceElement,xmlstr::XML_TAG_CONSTRAINTS);
+
+    if(constraintsElement != NULL)
+    {
+        string constraintsString = getNodeText(constraintsElement);
+        vector < ParamStatus > constraints
+            = ProduceParamstatusVec1dOrBarf(constraintsString);
+        size_t index;
+        for(index=0; index < constraints.size(); index++)
+        {
+            UIId localID(forceId.GetForceType(),index);
+            uiInterface.doSet(uistr::constraintType,ToString(constraints[index].Status()) ,localID);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoGroups(TiXmlElement * forceElement, UIId forceId)
+{
+    TiXmlNode * child = NULL;
+    size_t index = 0;
+    while((child = forceElement->IterateChildren(xmlstr::XML_TAG_GROUP,child)))
+    {
+        TiXmlElement * groupElement = child->ToElement();
+        DoGroup(groupElement, forceId, index);
+        index++;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoGroup(TiXmlElement * groupElement, UIId forceId,
+                             size_t index)
+{
+    string constraintType
+        = getNodeAttributeValue(groupElement,xmlstr::XML_ATTRTYPE_CONSTRAINT);
+    string indicesString = getNodeText(groupElement);
+
+    UIId localID(forceId.GetForceType(),index);
+    uiInterface.doSet(uistr::groupParamList, indicesString, localID);
+    uiInterface.doSet(uistr::groupConstraintType, constraintType, localID);
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoPriors(TiXmlElement * forceElement, UIId forceId)
+{
+    TiXmlNode * child = NULL;
+    while((child = forceElement->IterateChildren(xmlstr::XML_TAG_PRIOR,child)))
+    {
+        TiXmlElement * priorElement = child->ToElement();
+        DoPrior(priorElement, forceId);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void
+ParseTreeToSettings::DoPrior(TiXmlElement * priorElement, UIId forceId)
+{
+    TiXmlElement * parameterElement =
+        singleOptionalChild(priorElement,xmlstr::XML_TAG_PARAMINDEX);
+
+    long paramID = uiconst::GLOBAL_ID;
+
+    if (parameterElement)
+    {
+        string paramName = getNodeText(parameterElement);
+        try
+        {
+            paramID = ProduceLongOrBarf(paramName);
+            paramID--;   // user-supplied parameter indices are 1-based; internal IDs are 0-based
+        }
+        catch (const data_error& e)
+        {
+            if ((!CaselessStrCmp(paramName, uistr::defaultStr)) && (!CaselessStrCmp(paramName, uistr::allStr)))
+            {
+                m_parser.ThrowDataError("Parameter index must be an integer or 'all'", priorElement->Row());
+            }
+            paramID = uiconst::GLOBAL_ID;
+        }
+    }
+    UIId localId(forceId.GetForceType(), paramID);
+
+    string priorTypeString
+        = getNodeAttributeValue(priorElement,xmlstr::XML_ATTRTYPE_TYPE);
+    uiInterface.doSet(uistr::priorType, priorTypeString, localId);
+
+    try
+    {
+        TiXmlElement * lowerBoundElement =
+            singleRequiredChild(priorElement,xmlstr::XML_TAG_PRIORLOWERBOUND);
+        string lowerString = getNodeText(lowerBoundElement);
+        uiInterface.doSet(uistr::priorLowerBound, lowerString, localId);
+
+        TiXmlElement * upperBoundElement =
+            singleRequiredChild(priorElement,xmlstr::XML_TAG_PRIORUPPERBOUND);
+        string upperString = getNodeText(upperBoundElement);
+        uiInterface.doSet(uistr::priorUpperBound, upperString, localId);
+    }
+    catch (const data_error&)
+    {
+        //Presumably, the lower bound was higher than the *default* upper
+        // bound--try them in the reverse order instead.
+        TiXmlElement * upperBoundElement =
+            singleRequiredChild(priorElement,xmlstr::XML_TAG_PRIORUPPERBOUND);
+        string upperString = getNodeText(upperBoundElement);
+        uiInterface.doSet(uistr::priorUpperBound, upperString, localId);
+
+        TiXmlElement * lowerBoundElement =
+            singleRequiredChild(priorElement,xmlstr::XML_TAG_PRIORLOWERBOUND);
+        string lowerString = getNodeText(lowerBoundElement);
+        uiInterface.doSet(uistr::priorLowerBound, lowerString, localId);
+
+    }
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    TiXmlElement * sampleRateElement =
+        singleOptionalChild(priorElement,xmlstr::XML_TAG_RELATIVE_SAMPLE_RATE);
+    if (sampleRateElement)
+    {
+        string rateString = getNodeText(sampleRateElement);
+        uiInterface.doSet(uistr::relativeSampleRate,rateString,localId);
+    }
+#endif
+}
+
+//------------------------------------------------------------------------------------
+
+void ParseTreeToSettings::DoTraits(TiXmlElement * dataElement)
+{
+    TiXmlNode * child = NULL;
+
+    long regionNumber = 0;
+
+    while((child = dataElement->IterateChildren(xmlstr::XML_TAG_REGION,child)))
+    {
+        TiXmlElement * traitsElement = singleOptionalChild(child->ToElement(), xmlstr::XML_TAG_TRAITS);
+
+        DoRegionTraits(traitsElement,regionNumber);
+        regionNumber++;
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ParseTreeToSettings::DoRegionTraits(TiXmlElement * traitsElement, long regionNumber)
+{
+    if (traitsElement != NULL)
+    {
+        TiXmlNode * traitNode = NULL;
+        while((traitNode = traitsElement->IterateChildren(xmlstr::XML_TAG_TRAIT,traitNode)))
+        {
+            TiXmlElement * traitElement = traitNode->ToElement();
+            DoRegionTrait(traitElement, regionNumber);
+        }
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ParseTreeToSettings::DoRegionTrait(TiXmlElement * traitElement, long regionNumber)
+{
+    TiXmlElement * nameElement = singleRequiredChild(traitElement, xmlstr::XML_TAG_NAME);
+    string tname = getNodeText(nameElement);
+    if (!uiInterface.GetCurrentVars().datapackplus.HasLocus(regionNumber, tname))
+    {
+        throw data_error("Error:  trait settings found for '"
+                         + tname
+                         + "', but no data was found for this trait.  Check spelling, or delete these settings.");
+    }
+    long locusNumber = uiInterface.GetCurrentVars().datapackplus.GetLocusIndex(regionNumber, tname);
+
+    TiXmlElement * modelElement = singleOptionalChild(traitElement, xmlstr::XML_TAG_MODEL);
+    if (modelElement != NULL)
+    {
+        DoDLModel(modelElement,regionNumber,locusNumber);
+    }
+
+    TiXmlElement * locationsElement = singleOptionalChild(traitElement, xmlstr::XML_TAG_POSSIBLE_LOCATIONS);
+    if (locationsElement != NULL)
+    {
+        DoLocations(locationsElement, regionNumber, locusNumber);
+    }
+
+    // Do this after setting the range (locations) so we can warn/throw appropriately
+    TiXmlElement * analysisElement = singleOptionalChild(traitElement, xmlstr::XML_TAG_ANALYSIS);
+    if (analysisElement != NULL)
+    {
+        DoAnalysis(analysisElement, regionNumber, locusNumber);
+    }
+
+    TiXmlElement * phenotypesElement = singleOptionalChild(traitElement, xmlstr::XML_TAG_PHENOTYPES);
+    if (phenotypesElement != NULL)
+    {
+        DoPhenotypes(phenotypesElement, regionNumber, locusNumber);
+    }
+}
+
+//------------------------------------------------------------------------------------
+
+void ParseTreeToSettings::DoLocations(TiXmlElement* locationsElement, long regionNumber, long locusNumber)
+{
+    UIRegId locId(regionNumber,locusNumber, uiInterface.GetCurrentVars());
+    rangeset rset;
+
+    //Collect the whole range, then set it afterwards.
+    TiXmlNode* rangeNode = NULL;
+    while ((rangeNode = locationsElement->IterateChildren(xmlstr::XML_TAG_RANGE, rangeNode)))
+    {
+        TiXmlElement * rangeElement = rangeNode->ToElement();
+        TiXmlElement * startElement = singleRequiredChild(rangeElement, xmlstr::XML_TAG_START);
+        TiXmlElement * endElement   = singleRequiredChild(rangeElement, xmlstr::XML_TAG_END);
+        long start = ProduceLongOrBarf(getNodeText(startElement));
+        long end   = ProduceLongOrBarf(getNodeText(endElement));
+        rangepair range = std::make_pair(start, end);
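+        // the <start>/<end> pair in the XML is inclusive; the internal
+        // rangepair appears to use a half-open end, hence the increment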
+        range.second++;
+        rset = AddPairToRange(range, rset);
+    }
+    uiInterface.GetCurrentVars().traitmodels.SetRange(locId, rset);
+}
+
+//------------------------------------------------------------------------------------
+
+void ParseTreeToSettings::DoAnalysis(TiXmlElement* analysisElement, long regionNumber, long locusNumber)
+{
+    UIRegId locId(regionNumber,locusNumber, uiInterface.GetCurrentVars());
+    string analysisString = getNodeText(analysisElement);
+    mloc_type analysis = ProduceMlocTypeOrBarf(analysisString);
+    uiInterface.GetCurrentVars().traitmodels.SetAnalysisType(locId, analysis);
+}
+
+//------------------------------------------------------------------------------------
+
+void ParseTreeToSettings::DoPhenotypes(TiXmlElement* phenotypesElement, long regionNumber, long locusNumber)
+{
+    StringVec2d alleles = uiInterface.GetCurrentVars().datapackplus.GetUniqueAlleles(regionNumber, locusNumber);
+    size_t nalleles = alleles[0].size(); //Assumes only one marker
+
+    std::set<long> ploidies = uiInterface.GetCurrentVars().datapackplus.GetPloidies(regionNumber);
+    //Now calculate how many genotypes we need.  For a given number
+    // of alleles A, and ploidy P, we need:
+    //
+    //  (A + P - 1)!
+    //  ------------
+    //   P!(A - 1)!
+    //
+    // which is the formula for 'combination with repetition'.  See, for example
+    // http://en.wikipedia.org/wiki/Combinatorics
+    //
+    // That's gotta be some sort of milestone--a URL in a comment!  Wow.
+    //
+    // At any rate, we need to calculate this formula for each possible ploidy,
+    // and add them up.  Normally, there'd be only one, but there are exceptions
+    // like if our region is on the X chromosome.
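+    //
+    // For example, with A = 3 alleles and diploid data (P = 2) this gives
+    // (3 + 2 - 1)! / (2! * (3 - 1)!) = 24 / 4 = 6 unordered genotypes.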
+
+    size_t targetCombs = 0;
+    for (std::set<long>::iterator ploid = ploidies.begin(); ploid != ploidies.end(); ploid++)
+    {
+        targetCombs += static_cast<long>(factorial(nalleles + (*ploid) - 1)) /
+            (static_cast<long>(factorial(*ploid)) * static_cast<long>(factorial(nalleles - 1)));
+    }
+
+    vector<TiXmlElement *> genotypeElements = getAllOptionalDescendantElements(phenotypesElement, xmlstr::XML_TAG_GENOTYPE);
+    if (genotypeElements.size() < targetCombs)
+    {
+        string msg = "Error:  not enough genotypes listed in your phenotype list.  You must provide a set of phenotypes"
+            " for every possible combination of alleles (for which you have "
+            + ToString(nalleles) + ")";
+        if (ploidies.size() > 1)
+        {
+            msg += ".  Also note that you have "
+                + ToString(ploidies.size())
+                + " different ploidies in your list of individuals, meaning that you must provide a complete"
+                " list of combinations of alleles for all different possible numbers of genes, too.";
+        }
+        else
+        {
+            msg += ", for the ploidy of your individuals (which are all " + ToString(*ploidies.begin()) + ".)";
+        }
+        msg += "  All told, you should have "
+            + ToString(targetCombs)
+            + " genotypes, while you currently have "
+            + ToString(genotypeElements.size()) + ".";
+
+        throw data_error(msg);
+    }
+    if (genotypeElements.size() > targetCombs)
+    {
+        string msg = "Error:  You have provided too many genotypes for the number of alleles for your trait ("
+            + ToString(nalleles)
+            + ") for the ploidies of your sampled individuals.  You should have "
+            + ToString(targetCombs)
+            + ", but currently have "
+            + ToString(genotypeElements.size())
+            + ".  Remember that order doesn't matter for genotypes--being heterozygous should give you the same phenotype regardless of which allele is which.";
+
+        throw data_error(msg);
+    }
+
+    for (vector<TiXmlElement* >::iterator genotypeElement = genotypeElements.begin(); genotypeElement != genotypeElements.end(); genotypeElement++)
+    {
+        //Get the set of alleles.
+        TiXmlElement* alleleElement = singleRequiredChild(*genotypeElement, xmlstr::XML_TAG_ALLELES);
+        StringVec1d alleles = getNodeTextSplitOnWhitespace(alleleElement);
+
+        //Get the (potentially many) associated phenotypes.
+        TiXmlNode * phenotypeNode = NULL;
+        while ((phenotypeNode = (*genotypeElement)->IterateChildren(xmlstr::XML_TAG_PHENOTYPE, phenotypeNode)))
+        {
+            TiXmlElement* phenotypeElement = phenotypeNode->ToElement();
+            TiXmlElement* phenNameElement = singleRequiredChild(phenotypeElement, xmlstr::XML_TAG_PHENOTYPE_NAME);
+            TiXmlElement* penetranceElement = singleRequiredChild(phenotypeElement, xmlstr::XML_TAG_PENETRANCE);
+
+            string phenName = getNodeText(phenNameElement);
+            double penetrance = ProduceDoubleOrBarf(getNodeText(penetranceElement));
+            UIRegId locId(regionNumber,locusNumber, uiInterface.GetCurrentVars());
+            uiInterface.GetCurrentVars().traitmodels.AddPhenotype(locId, alleles, phenName, penetrance);
+        }
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/xml/parsetreetosettings.h b/src/xml/parsetreetosettings.h
new file mode 100644
index 0000000..fb213f2
--- /dev/null
+++ b/src/xml/parsetreetosettings.h
@@ -0,0 +1,93 @@
+// $Id: parsetreetosettings.h,v 1.17 2011/03/07 06:08:54 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef PARSETREETOSETTINGS_H
+#define PARSETREETOSETTINGS_H
+
+#include <string>
+#include "defaults.h"       // for force_type
+#include "parsetreewalker.h"
+#include "vectorx.h"
+
+class TiXmlElement;
+class UIInterface;
+class UIId;
+
+/******************************************************************
+ Walks the TinyXML parse tree generated by XmlParser (xml.h)
+ and fills the UIVars variable store via UIInterface
+
+ Based on a class by Jim Sloan, rewritten by Mary Kuhner, and
+ then by Elizabeth Walkup
+*********************************************************************/
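+
+// A minimal usage sketch (illustrative names, not from this file; assumes an
+// already-constructed XmlParser 'parser' and UIInterface 'ui'):
+//
+//     ParseTreeToSettings settings(parser, ui);
+//     settings.ProcessFileSettings();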
+
+class ParseTreeToSettings   : public ParseTreeWalker
+{
+  private:
+    UIInterface&            uiInterface;
+
+    // purposely not implemented, this class is meant to be a singleton
+    ParseTreeToSettings();
+    ParseTreeToSettings(const ParseTreeToSettings&);
+    ParseTreeToSettings& operator=(const ParseTreeToSettings&);
+
+  protected:
+
+    // for stuffing data into UIInterface
+    void DoOptionalElement(TiXmlElement* ancestor, std::string xmlTag, std::string uiIdentifier, UIId id = NO_ID());
+    void DoRequiredElement(TiXmlElement* ancestor, std::string xmlTag, std::string uiIdentifier, UIId id = NO_ID());
+
+    // main line data-model reading routines
+    void        DoDataModels(TiXmlElement*);
+    void        DoRegionDataModels(TiXmlElement*,long regionId);
+    void        DoDLModel(TiXmlElement*, long regionId ,long locusId=0);
+    void        DoBaseFrequencies(TiXmlElement*, UIId id);
+    void        DoGTR(TiXmlElement*, UIId id);
+    void        DoCategories(TiXmlElement*,UIId id);
+
+    // main line chain parameter reading routines
+    void        DoChainParams(TiXmlElement *);
+    void        DoTemperatures(StringVec1d);
+    void        DoSwapInterval(TiXmlElement *);
+
+    // main line user parameter reading routines
+    void        DoUserParams(TiXmlElement *);
+    void        DoSeedParams(TiXmlElement *); // subset of UserParams
+
+    // main line force parameter reading routines
+    void        DoForces(TiXmlElement *);
+    void        DoForceIfPresent(TiXmlElement *, force_type forcetype, const string& forcetag);
+    void        DoForce(TiXmlElement *, force_type);
+    void        DoStartValuesAndMethods(TiXmlElement *, UIId);
+    void        DoTrueValues(TiXmlElement *, UIId);
+    void        DoProfiles(TiXmlElement *, UIId);
+    void        DoConstraints(TiXmlElement *, UIId);
+    void        DoGroups(TiXmlElement *, UIId);
+    void        DoGroup(TiXmlElement *, UIId, size_t index);
+    void        DoPriors(TiXmlElement *, UIId);
+    void        DoPrior(TiXmlElement *, UIId);
+
+    void DoTraits(TiXmlElement * dataElement);
+    void DoRegionTraits(TiXmlElement * traitsElement, long);
+    void DoRegionTrait(TiXmlElement * traitElement, long);
+    void DoLocations(TiXmlElement* locationsElement, long, long);
+    void DoAnalysis(TiXmlElement* analysisElement, long, long);
+    void DoPhenotypes(TiXmlElement* phenotypesElement, long, long);
+
+  public:
+    ParseTreeToSettings(XmlParser&, UIInterface &);
+    virtual ~ParseTreeToSettings();
+    void        ProcessFileSettings();    // processes remaining structures
+
+};
+
+#endif // PARSETREETOSETTINGS_H
+
+//____________________________________________________________________________________
diff --git a/src/xml/parsetreewalker.cpp b/src/xml/parsetreewalker.cpp
new file mode 100644
index 0000000..63909ea
--- /dev/null
+++ b/src/xml/parsetreewalker.cpp
@@ -0,0 +1,155 @@
+// $Id: parsetreewalker.cpp,v 1.11 2013/08/22 18:03:12 jmcgill Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "menu_strings.h"   // for menustr::emptyString
+#include "parsetreewalker.h"
+#include "stringx.h"        // for StripLeadingSpaces and similar
+#include "tinyxml.h"
+#include "vectorx.h"        // for StringVec1d
+#include "xml.h"
+#include "xml_strings.h"
+
+using std::string;
+
+ParseTreeWalker::ParseTreeWalker(XmlParser & parser)
+    :
+    m_parser(parser)
+{
+}
+
+ParseTreeWalker::~ParseTreeWalker()
+{
+}
+
+TiXmlElement *
+ParseTreeWalker::getSingleDescendantElement(TiXmlElement* ancestor,
+                                            string nodeName,bool required)
+{
+    TiXmlNode * firstChild = ancestor->IterateChildren(nodeName,NULL);
+    if(firstChild == NULL)
+    {
+        if(required)
+        {
+            m_parser.ThrowInternalXMLError(xmlstr::XML_ERR_MISSING_TAG_0+nodeName+xmlstr::XML_ERR_MISSING_TAG_1);
+        }
+        return NULL;
+    }
+    TiXmlNode * secondChild = ancestor->IterateChildren(nodeName,firstChild);
+    if (secondChild != NULL)
+    {
+        m_parser.ThrowInternalXMLError(
+            xmlstr::XML_ERR_EXTRA_TAG_0
+            +nodeName
+            +xmlstr::XML_ERR_EXTRA_TAG_1
+            +ancestor->Value()
+            +xmlstr::XML_ERR_EXTRA_TAG_2
+            +ToString(ancestor->Row()));
+        return NULL;
+    }
+    return firstChild->ToElement();
+}
+
+vector<TiXmlElement *>
+ParseTreeWalker::getAllOptionalDescendantElements(TiXmlElement* ancestor,
+                                                  string nodeName)
+{
+    vector<TiXmlElement *> returnVec;
+    TiXmlNode * child = NULL;
+    while((child = ancestor->IterateChildren(nodeName, child)))
+    {
+        returnVec.push_back(child->ToElement());
+    }
+    return returnVec;
+}
+
+TiXmlElement *
+ParseTreeWalker::singleOptionalChild(TiXmlElement* ancestor, string nodeName)
+{
+    return getSingleDescendantElement(ancestor,nodeName,false);
+}
+
+TiXmlElement *
+ParseTreeWalker::singleRequiredChild(TiXmlElement* ancestor, string nodeName)
+{
+    return getSingleDescendantElement(ancestor,nodeName,true);
+}
+
+string
+ParseTreeWalker::getNodeText(TiXmlElement * node)
+{
+    string outstring = "";
+    TiXmlNode * child = NULL;
+    while((child = node->IterateChildren(child)))
+    {
+        TiXmlHandle handle(child);
+        if(handle.Text())
+        {
+            outstring += child->Value();
+        }
+    }
+    StripLeadingSpaces(outstring);
+    StripTrailingSpaces(outstring);
+    return outstring;
+}
+
+string
+ParseTreeWalker::getGraphMLNodeText(TiXmlElement * node)
+{
+    string instring = "";
+    string outstring = "";
+    TiXmlNode * child = NULL;
+    while((child = node->IterateChildren(child)))
+    {
+        TiXmlHandle handle(child);
+        if(handle.Text())
+        {
+            instring += child->Value();
+        }
+    }
+    StripLeadingSpaces(instring);
+    StripTrailingSpaces(instring);
+
+    // fix GraphML tweaks: shift each range endpoint down by one, since the
+    // GraphML positions appear to be 1-based while internal ranges are 0-based
+    rangeset rset = ToRangeSet(instring);
+    rangeset::iterator rpair = rset.begin();
+    for ( ; rpair != rset.end(); ++rpair)
+    {
+        const_cast<long&>(rpair->first) =  rpair->first - 1;   // JRMhack to get around const
+        const_cast<long&>(rpair->second) = rpair->second - 1;  // JRMhack to get around const
+    }
+    outstring = ToString(rset);
+    return outstring;
+}
+
+StringVec1d
+ParseTreeWalker::getNodeTextSplitOnWhitespace(TiXmlElement * node)
+{
+    string allText = getNodeText(node);
+    StringVec1d strings;
+    FromString(allText,strings);
+    return strings;
+}
+
+string
+ParseTreeWalker::getNodeAttributeValue(TiXmlElement * node, string attrName)
+{
+    const std::string * attrValue = node->Attribute(attrName);
+    if(attrValue)
+    {
+        return *attrValue;
+    }
+    else
+    {
+        return menustr::emptyString;
+    }
+
+}
+
+//____________________________________________________________________________________
diff --git a/src/xml/parsetreewalker.h b/src/xml/parsetreewalker.h
new file mode 100644
index 0000000..eddeadc
--- /dev/null
+++ b/src/xml/parsetreewalker.h
@@ -0,0 +1,56 @@
+// $Id: parsetreewalker.h,v 1.10 2013/08/22 18:03:12 jmcgill Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef PARSETREEWALKER_H
+#define PARSETREEWALKER_H
+
+#include <string>
+#include "vectorx.h"    // for StringVec1d
+#include "xml.h"
+#include "rangex.h"
+
+class TiXmlElement;
+class ParseTreeSchema;
+
+using std::string;
+
+// class implements methods shared by
+//      ParseTreeToData     -- which parses everything under <data> tag
+//      ParseTreeToSettings -- which parses everything else
+// methods are utility methods for finding a child tag of a specific
+// name, and getting attribute text or text between tags, etc.
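+//
+// Typical use (adapted from ParseTreeToSettings; NULL checks omitted here):
+//     TiXmlElement* sv = singleOptionalChild(forceElement, xmlstr::XML_TAG_START_VALUES);
+//     StringVec1d vals = getNodeTextSplitOnWhitespace(sv);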
+class ParseTreeWalker
+{
+  private:
+    ParseTreeWalker();      // undefined
+
+  protected:
+    XmlParser &     m_parser;
+
+    TiXmlElement *  getSingleDescendantElement(TiXmlElement* ancestor, string nodeName,bool required);
+    vector<TiXmlElement *> getAllOptionalDescendantElements(TiXmlElement* ancestor,
+                                                            string nodeName);
+    TiXmlElement *  singleOptionalChild(TiXmlElement* ancestor, string nodeName);
+    TiXmlElement *  singleRequiredChild(TiXmlElement* ancestor, string nodeName);
+    string          getNodeText(TiXmlElement *);
+    string          getGraphMLNodeText(TiXmlElement *);
+    StringVec1d     getNodeTextSplitOnWhitespace(TiXmlElement *);
+    string          getNodeAttributeValue(TiXmlElement*,string attributeName);
+
+    void            checkSchema(TiXmlElement * topElem, ParseTreeSchema&);
+
+  public:
+    ParseTreeWalker(XmlParser & parser);
+    virtual ~ParseTreeWalker();
+};
+
+#endif // PARSETREEWALKER_H
+
+//____________________________________________________________________________________
diff --git a/src/xml/tixml_base.cpp b/src/xml/tixml_base.cpp
new file mode 100644
index 0000000..593111e
--- /dev/null
+++ b/src/xml/tixml_base.cpp
@@ -0,0 +1,166 @@
+// $Id: tixml_base.cpp,v 1.7 2011/03/08 19:22:01 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include "errhandling.h"
+#include "stringx.h"
+#include "tinyxml.h"
+#include "tixml_base.h"
+
+const std::string tibasestr::EXTRA_TAG_0     =   "incorrect xml: extra tag \"";
+const std::string tibasestr::EXTRA_TAG_1     =   "\".";
+const std::string tibasestr::MISSING_TAG_0   =   "incorrect xml: missing tag \"";
+const std::string tibasestr::MISSING_TAG_1   =   "\".";
+const std::string tibasestr::NOT_DOUBLE_0    =   "incorrect xml: tag \"";
+const std::string tibasestr::NOT_DOUBLE_1    =   "\" is not a double.";
+const std::string tibasestr::NOT_LONG_0      =   "incorrect xml: tag \"";
+const std::string tibasestr::NOT_LONG_1      =   "\" is not an integer.";
+const std::string tibasestr::NOT_SIZE_T_0    =   "incorrect xml: tag \"";
+const std::string tibasestr::NOT_SIZE_T_1    =   "\" is not a non-negative integer.";
+
+TiXmlElement *
+ti_singleElement(TiXmlElement* ancestor, std::string nodeName,bool required)
+{
+    TiXmlNode * firstChild = ancestor->IterateChildren(nodeName,NULL);
+    if(firstChild == NULL)
+    {
+        if(required)
+        {
+            incorrect_xml_missing_tag e(tibasestr::MISSING_TAG_0+nodeName+tibasestr::MISSING_TAG_1,nodeName);
+            throw e;
+        }
+        return NULL;
+    }
+    TiXmlNode * secondChild = ancestor->IterateChildren(nodeName,firstChild);
+    if (secondChild != NULL)
+    {
+        incorrect_xml_extra_tag e(tibasestr::EXTRA_TAG_0+nodeName+tibasestr::EXTRA_TAG_1,nodeName);
+        throw e;
+        return NULL;
+    }
+    return firstChild->ToElement();
+}
+
+TiXmlElement *
+ti_optionalChild(TiXmlElement* ancestor, std::string nodeName)
+{
+    return ti_singleElement(ancestor,nodeName,false);
+}
+
+TiXmlElement *
+ti_requiredChild(TiXmlElement* ancestor, std::string nodeName)
+{
+    return ti_singleElement(ancestor,nodeName,true);
+}
+
+std::string
+ti_nodeText(TiXmlElement * node)
+{
+    std::string outstring;
+    TiXmlNode * child = NULL;
+    while((child = node->IterateChildren(child)))
+    {
+        TiXmlHandle handle(child);
+        if(handle.Text())
+        {
+            outstring += child->Value();
+        }
+    }
+    StripLeadingSpaces(outstring);
+    StripTrailingSpaces(outstring);
+    return outstring;
+}
+
+bool
+ti_hasAttribute(TiXmlElement * node, std::string attrName)
+{
+    const std::string * attrValue = node->Attribute(attrName);
+    return (attrValue != NULL);
+}
+
+std::string
+ti_attributeValue(TiXmlElement * node, std::string attrName)
+{
+    const std::string * attrValue = node->Attribute(attrName);
+    if(attrValue == NULL) return std::string("");
+    return (*attrValue);
+}
+
+double
+ti_double_from_text(TiXmlElement * node)
+{
+    std::string nodeText = ti_nodeText(node);
+    double value = DBL_BIG;
+    try
+    {
+        value = ProduceDoubleOrBarf(nodeText);
+    }
+    catch(const data_error&)
+    {
+        incorrect_xml_not_double e(tibasestr::NOT_DOUBLE_0+nodeText+tibasestr::NOT_DOUBLE_1,nodeText);
+        throw e;
+    }
+    return value;
+}
+
+long
+ti_long_from_text(TiXmlElement * node) throw(incorrect_xml)
+{
+    std::string nodeText = ti_nodeText(node);
+    long value = LONG_MAX;
+    try
+    {
+        value = ProduceLongOrBarf(nodeText);
+    }
+    catch(const data_error&)
+    {
+        incorrect_xml_not_long e(tibasestr::NOT_LONG_0+nodeText+tibasestr::NOT_LONG_1,nodeText);
+        throw e;
+    }
+    return value;
+}
+
+size_t
+ti_size_t_from_text(TiXmlElement * node) throw(incorrect_xml)
+{
+    long value = ti_long_from_text(node);
+    if(value < 0)
+    {
+        std::string nodeText = ti_nodeText(node);
+        incorrect_xml_not_size_t e(tibasestr::NOT_SIZE_T_0+nodeText+tibasestr::NOT_SIZE_T_1,nodeText);
+        throw e;
+    }
+    return (size_t)value;
+}
+
+std::vector<TiXmlElement *>
+ti_optionalChildren(TiXmlElement* ancestor, std::string nodeName)
+{
+    std::vector<TiXmlElement *> returnVec;
+    TiXmlNode * child = NULL;
+    while((child = ancestor->IterateChildren(nodeName, child)))
+    {
+        returnVec.push_back(child->ToElement());
+    }
+    return returnVec;
+}
+
+std::vector<TiXmlElement *>
+ti_requiredChildren(TiXmlElement* ancestor, std::string nodeName)
+{
+    std::vector<TiXmlElement *> returnVec = ti_optionalChildren(ancestor,nodeName);
+    if(returnVec.empty())
+    {
+        incorrect_xml_missing_tag e(tibasestr::MISSING_TAG_0+nodeName+tibasestr::MISSING_TAG_1,nodeName);
+        throw e;
+    }
+    return returnVec;
+}
+
+//____________________________________________________________________________________
diff --git a/src/xml/tixml_base.h b/src/xml/tixml_base.h
new file mode 100644
index 0000000..428b9de
--- /dev/null
+++ b/src/xml/tixml_base.h
@@ -0,0 +1,52 @@
+// $Id: tixml_base.h,v 1.5 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef TIXML_BASE_H
+#define TIXML_BASE_H
+
+#include <string>
+#include <vector>
+
+#include "errhandling.h"
+
+class TiXmlElement;
+
+class tibasestr
+{
+  public:
+    static const std::string EXTRA_TAG_0 ;
+    static const std::string EXTRA_TAG_1 ;
+    static const std::string MISSING_TAG_0 ;
+    static const std::string MISSING_TAG_1 ;
+    static const std::string NOT_DOUBLE_0 ;
+    static const std::string NOT_DOUBLE_1 ;
+    static const std::string NOT_LONG_0 ;
+    static const std::string NOT_LONG_1 ;
+    static const std::string NOT_SIZE_T_0 ;
+    static const std::string NOT_SIZE_T_1 ;
+};
+
+TiXmlElement *          ti_singleElement(TiXmlElement* ancestor, std::string nodeName,bool required);
+TiXmlElement *          ti_optionalChild(TiXmlElement* ancestor, std::string nodeName);
+TiXmlElement *          ti_requiredChild(TiXmlElement* ancestor, std::string nodeName);
+std::string             ti_nodeText(TiXmlElement *);
+std::string             ti_attributeValue(TiXmlElement*,std::string attributeName);
+bool                    ti_hasAttribute(TiXmlElement*,std::string attributeName);
+
+double                  ti_double_from_text(TiXmlElement *);
+long                    ti_long_from_text(TiXmlElement *) throw (incorrect_xml);
+size_t                  ti_size_t_from_text(TiXmlElement *) throw (incorrect_xml);
+
+std::vector<TiXmlElement *>  ti_optionalChildren(TiXmlElement* ancestor, std::string nodeName);
+std::vector<TiXmlElement *>  ti_requiredChildren(TiXmlElement* ancestor, std::string nodeName);
+
+#endif  // TIXML_BASE_H
+
+//____________________________________________________________________________________
diff --git a/src/xml/toxml.cpp b/src/xml/toxml.cpp
new file mode 100644
index 0000000..7482314
--- /dev/null
+++ b/src/xml/toxml.cpp
@@ -0,0 +1,332 @@
+// $Id: toxml.cpp,v 1.25 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "toxml.h"
+#include "stringx.h"      // for MakeTag/MakeIndent/etc in foo::ToXML()
+#include "xml_strings.h"  // for xmlstr::foo in foo::ToXML()
+#include "registry.h"     // for GetForceSummary in SampleXML::ToXML()
+#include "forcesummary.h" // for GetForceByTag in SampleXML::ToXML()
+#include "force.h"        // for PartitionForce in SampleXML::ToXML()
+#include "region.h"
+
+#ifdef DMALLOC_FUNC_CHECK
+#include "/usr/local/include/dmalloc.h"
+#endif
+
+using namespace std;
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+PopulationXML::PopulationXML(const Region& region, const string& name)
+    : m_name(name)
+{
+    // break out population-specific vector<tipdata>
+    vector<TipData> popdata;
+    long loc;
+    for(loc = 0; loc < region.GetNloci(); ++loc)
+    {
+        vector<TipData> locpopdata(region.GetLocus(loc).
+                                   GetPopulationTipData(m_name));
+        popdata.insert(popdata.end(),locpopdata.begin(),locpopdata.end());
+    }
+
+    // classify and process the tips
+    long ind;
+    m_panelcount = 0;
+    m_panelname = "";
+    const TipData* tip;
+    for(ind = 0; ind < region.GetNIndividuals(); ++ind)
+    {
+        tip = IndividualIsIn(popdata, ind);
+        if (tip != NULL)
+        {
+            if (tip->GetDataSource() == dsource_panel)
+            {
+                // panel members
+                // cout << "panel: " << tip->label << endl;
+                m_panelcount += 1;
+                if (m_panelcount == 1)
+                {
+                    // extract the panel name
+                    // format is "region_population_panelname_p<number>"
+                    string fullname = tip->label;
+                    int lunder = fullname.find("_");           // end of region name
+                    int nunder = fullname.find("_", lunder+1); // end of population name
+                    int runder = fullname.rfind("_");          // before p# counter
+                    m_panelname = fullname.substr(nunder+1, runder-nunder-1);
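+                    // e.g. "chrom1_popA_panelX_p2" gives lunder = 6,
+                    // nunder = 11, runder = 18, so m_panelname = "panelX"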
+                }
+            }
+            else
+            {
+                // samples
+                // cout << "sample: " << tip->label << endl;
+                // pass Individuals to Individual ctor
+                m_individuals.push_back(IndividualXML(region,popdata,ind));
+            }
+        }
+    }
+} // PopulationXML::ctor
+
+//------------------------------------------------------------------------------------
+
+const TipData* PopulationXML::IndividualIsIn(const vector<TipData>& popdata, long whichind) const
+{
+    vector<TipData>::const_iterator tip;
+    for(tip = popdata.begin(); tip != popdata.end(); ++tip)
+    {
+        if (tip->BelongsTo(whichind))
+        {
+            return &(*tip);  // essentially a cast
+        }
+    }
+
+    return NULL;
+} // PopulationXML::IndividualIsIn
+
+//------------------------------------------------------------------------------------
+
+StringVec1d PopulationXML::ToXML(unsigned long nspaces) const
+{
+    assert(!m_name.empty());
+    StringVec1d xmllines;
+
+    string line = MakeIndent(MakeTagWithName(xmlstr::XML_TAG_POPULATION,m_name),nspaces);
+    xmllines.push_back(line);
+
+    // panel size name and counter
+    if (m_panelcount > 0)
+    {
+        nspaces += INDENT_DEPTH;
+        line = MakeIndent(MakeTagWithName(xmlstr::XML_TAG_PANEL,m_panelname),nspaces);
+        xmllines.push_back(line);
+
+        nspaces += INDENT_DEPTH;
+        line = MakeIndent(MakeTag(xmlstr::XML_TAG_PANELSIZE),nspaces);
+        xmllines.push_back(line);
+
+        nspaces += INDENT_DEPTH;
+        line = MakeIndent(ToString(m_panelcount),nspaces);
+        xmllines.push_back(line);
+        nspaces -= INDENT_DEPTH;
+
+        line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_PANELSIZE),nspaces);
+        xmllines.push_back(line);
+        nspaces -= INDENT_DEPTH;
+
+        line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_PANEL),nspaces);
+        xmllines.push_back(line);
+        nspaces -= INDENT_DEPTH;
+    }
+
+    // individual samples
+    nspaces += INDENT_DEPTH;
+    vector<IndividualXML>::const_iterator ind;
+    for(ind = m_individuals.begin(); ind != m_individuals.end(); ++ind)
+    {
+        StringVec1d indxml(ind->ToXML(nspaces));
+        xmllines.insert(xmllines.end(),indxml.begin(),indxml.end());
+    }
+    nspaces -= INDENT_DEPTH;
+
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_POPULATION),nspaces);
+    xmllines.push_back(line);
+
+    return xmllines;
+} // PopulationXML::ToXML
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+IndividualXML::IndividualXML(const Region& region,
+                             const vector<TipData>& popdata, long whichind)
+    : m_name(region.GetIndividual(whichind).GetName()),
+      m_phases(region.GetIndividual(whichind).GetPhaseSites()),
+      m_individual(region.GetIndividual(whichind))
+{
+    Individual testind = region.GetIndividual(whichind); // jrmhack
+
+    // break out individual-specific vector<tipdata>
+    vector<TipData> inddata;
+    vector<TipData>::const_iterator tip;
+    for(tip = popdata.begin(); tip != popdata.end(); ++tip)
+    {
+        if (tip->BelongsTo(whichind)) inddata.push_back(*tip);
+    }
+
+    // parse into haplotypes
+    long biggest(FLAGLONG);
+    vector<TipData>::iterator hap;
+    //Find out how many haplotypes there are
+    for(hap = inddata.begin(); hap != inddata.end(); ++hap)
+    {
+        if (hap->m_hap > biggest)
+        {
+            biggest = hap->m_hap;
+        }
+    }
+    vector<vector<TipData> > haps(biggest+1);
+    vector<TipData>          traits;
+    for(hap = inddata.begin(); hap != inddata.end(); ++hap)
+    {
+        if (!hap->m_nodata)
+        {
+            //We don't have Samples for m_nodata tips.
+            haps[hap->m_hap].push_back(*hap);
+        }
+    }
+
+    // pass the haplotypes to Sample ctor
+    vector<vector<TipData> >::iterator hapit;
+    for(hapit = haps.begin(); hapit != haps.end(); ++hapit)
+    {
+        m_samples.push_back(SampleXML(region,*hapit));
+    }
+
+} // IndividualXML::ctor
+
+//------------------------------------------------------------------------------------
+
+StringVec1d IndividualXML::ToXML(unsigned long nspaces) const
+{
+    assert(!m_name.empty() && !m_phases.empty());
+    StringVec1d xmllines;
+
+    string line = MakeIndent(MakeTagWithName(xmlstr::XML_TAG_INDIVIDUAL,m_name),nspaces);
+    xmllines.push_back(line);
+
+    nspaces += INDENT_DEPTH;
+
+    if (!m_phases.empty())
+    {
+        bool hasAnyPhaseData = false;
+        LongVec2d::const_iterator phaseIter;
+        for(phaseIter=m_phases.begin(); phaseIter != m_phases.end(); phaseIter++)
+        {
+            if((*phaseIter).size() > 0)
+            {
+                hasAnyPhaseData = true;
+                break;
+            }
+        }
+        if(hasAnyPhaseData)
+        {
+            LongVec2d::const_iterator locus;
+            for(locus = m_phases.begin(); locus != m_phases.end(); ++locus)
+            {
+                line = MakeIndent(MakeTagWithType(xmlstr::XML_TAG_PHASE,
+                                                  xmlstr::XML_ATTRVALUE_UNKNOWN),nspaces);
+                xmllines.push_back(line);
+                nspaces += INDENT_DEPTH;
+                line = MakeIndent(ToString(*locus),nspaces);
+                xmllines.push_back(line);
+                nspaces -= INDENT_DEPTH;
+                line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_PHASE),nspaces);
+                xmllines.push_back(line);
+            }
+        }
+    }
+
+    StringVec1d traitxml = m_individual.GetTraitXML(nspaces);
+    xmllines.insert(xmllines.end(), traitxml.begin(), traitxml.end());
+
+    vector<SampleXML>::const_iterator sample;
+    for(sample = m_samples.begin(); sample != m_samples.end(); ++sample)
+    {
+        StringVec1d samplexml(sample->ToXML(nspaces));
+        xmllines.insert(xmllines.end(),samplexml.begin(),samplexml.end());
+    }
+    nspaces -= INDENT_DEPTH;
+
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_INDIVIDUAL),nspaces);
+    xmllines.push_back(line);
+
+    return xmllines;
+} // IndividualXML::ToXML
+
+//------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------
+
+SampleXML::SampleXML(const Region& region, const vector<TipData>& loci) :
+    m_stati(loci[0].partitions)
+{
+    vector<TipData>::const_iterator loc;
+    for(loc = loci.begin(); loc != loci.end(); ++loc)
+    {
+        assert(m_stati == loc->partitions);
+        DataType_ptr mydatatype(region.GetLocus(loc->m_locus).GetDataTypePtr());
+        m_geneticdatatype.push_back(mydatatype->GetXMLTag());
+        m_geneticdata.push_back(loc->GetFormattedData(mydatatype->GetDelimiter()));
+        m_geneticdatasource.push_back(loc->GetDataSource());
+    }
+    // JDEBUG--is this an appropriate name?
+    m_name = loci[0].label;
+
+} // SampleXML::ctor
+
+//------------------------------------------------------------------------------------
+
+StringVec1d SampleXML::ToXML(unsigned long nspaces) const
+{
+    assert(!m_name.empty() && !m_geneticdata.empty());
+    StringVec1d xmllines;
+
+    string line = MakeIndent(MakeTagWithName(xmlstr::XML_TAG_SAMPLE,m_name),nspaces);
+    xmllines.push_back(line);
+    nspaces += INDENT_DEPTH;
+    StringVec1d::const_iterator gdata, gtype;
+    DataSourceVec1d::const_iterator gsource;
+    assert(m_geneticdata.size() == m_geneticdatatype.size());
+    assert(m_geneticdata.size() == m_geneticdatasource.size());
+    for(gdata = m_geneticdata.begin(), gtype = m_geneticdatatype.begin(), gsource = m_geneticdatasource.begin();
+        gdata != m_geneticdata.end(); ++gdata, ++gtype, ++gsource)
+    {
+        line = MakeIndent(MakeTagWithType(xmlstr::XML_TAG_DATABLOCK,*gtype),nspaces);
+        xmllines.push_back(line);
+        nspaces += INDENT_DEPTH;
+        xmllines.push_back(MakeIndent(*gdata,nspaces));
+        nspaces -= INDENT_DEPTH;
+        line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_DATABLOCK),nspaces);
+        xmllines.push_back(line);
+    }
+    // JDEBUG -- evil kludgy if-code to handle the fact that migration partitions are
+    // handled differently from all other partitions in the xml
+    if (m_stati.size() > 1 ||
+        (m_stati.size() == 1 &&
+         m_stati.find(force_MIG) == m_stati.end() &&
+         m_stati.find(force_DIVMIG) == m_stati.end()))
+    {
+        line = MakeIndent(MakeTag(xmlstr::XML_TAG_STATUS),nspaces);
+        xmllines.push_back(line);
+        nspaces += INDENT_DEPTH;
+        map<force_type,string>::const_iterator status;
+        for(status = m_stati.begin(); status != m_stati.end(); ++status)
+        {
+            line = dynamic_cast<PartitionForce*>
+                ( *(registry.GetForceSummary().GetForceByTag(status->first)) )->
+                MakeStatusXML(status->second);
+
+            if (!line.empty())
+                xmllines.push_back(MakeIndent(line,nspaces));
+        }
+        nspaces -= INDENT_DEPTH;
+        line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_STATUS),nspaces);
+        xmllines.push_back(line);
+    }
+    nspaces -= INDENT_DEPTH;
+    line = MakeIndent(MakeCloseTag(xmlstr::XML_TAG_SAMPLE),nspaces);
+    xmllines.push_back(line);
+
+    return xmllines;
+} // SampleXML::ToXML
+
+//____________________________________________________________________________________
diff --git a/src/xml/toxml.h b/src/xml/toxml.h
new file mode 100644
index 0000000..18f1c71
--- /dev/null
+++ b/src/xml/toxml.h
@@ -0,0 +1,106 @@
+// $Id: toxml.h,v 1.14 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+// PopulationXML is a helper class used by Region::MakePopXML() to create an
+// XML infile for lamarc identical (in execution) to the one used to begin
+// the program run.  PopulationXML's chief job is to manage a container of
+// IndividualXMLs and provide other population level parameters.
+//
+// IndividualXML is a helper class to PopulationXML used to manage the individual
+// level parameters and a container of SampleXML to be used for creation of a
+// lamarc xml input file.  IndividualXMLs are expected to be made by
+// Individual::ToXML().
+//
+// SampleXML is a helper class to IndividualXML providing expertise on the sample
+// level parameters including the actual genetic data.  SampleXMLs are expected
+// to be made by TipData::ToXML().
+//
+// WARNING:  These classes all provide a default (no arguments) constructor to
+// meet the contract for being placed within STL containers, but attempts to call
+// any member functions on a default constructed object will cause an assert!
+//
+// Written by Jon Yamato
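+//
+// A minimal usage sketch (the Region `region`, the population name string,
+// and the output stream `out` below are placeholders, not part of this file):
+//
+//     PopulationXML popxml(region, "population-name");
+//     StringVec1d lines = popxml.ToXML(INDENT_DEPTH);
+//     for (StringVec1d::const_iterator it = lines.begin(); it != lines.end(); ++it)
+//         out << *it << '\n';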
+
+#ifndef TOXML_H
+#define TOXML_H
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "constants.h"
+#include "defaults.h"
+#include "individual.h"
+#include "types.h"
+#include "vectorx.h"
+
+//------------------------------------------------------------------------------------
+
+class TipData;
+class Region;
+class Locus;
+
+//------------------------------------------------------------------------------------
+
+class SampleXML
+{
+  private:
+    std::string m_name;
+    DataSourceVec1d m_geneticdatasource;
+    StringVec1d m_geneticdatatype;
+    StringVec1d m_geneticdata;
+    std::map<force_type,std::string> m_stati;
+
+  public:
+    // we accept the default ctor, copy ctor and operator=
+    SampleXML(const Region& region, const std::vector<TipData>& loci);
+
+    StringVec1d ToXML(unsigned long nspaces) const;
+};
+
+//------------------------------------------------------------------------------------
+
+class IndividualXML
+{
+  private:
+    std::string m_name;
+    LongVec2d m_phases;    // dim: loci x markers
+    std::vector<SampleXML> m_samples;
+    Individual m_individual; //oddly, only useful for trait data
+
+  public:
+    // we accept the default ctor, copy ctor and operator=
+    IndividualXML(const Region& region, const std::vector<TipData>& popdata, long whichind);
+
+    StringVec1d ToXML(unsigned long nspaces) const;
+};
+
+//------------------------------------------------------------------------------------
+
+class PopulationXML
+{
+  private:
+    std::string m_name;
+    std::vector<IndividualXML> m_individuals;
+    std::string m_panelname;
+    long m_panelcount;
+
+    const TipData* IndividualIsIn(const std::vector<TipData>& pdata, long whichind) const;
+
+  public:
+    // we accept the default ctor, copy ctor and operator=
+    PopulationXML(const Region& region, const std::string& name);
+
+    StringVec1d ToXML(unsigned long nspaces) const;
+};
+
+#endif // TOXML_H
+
+//____________________________________________________________________________________
diff --git a/src/xml/xml.cpp b/src/xml/xml.cpp
new file mode 100644
index 0000000..7b01a35
--- /dev/null
+++ b/src/xml/xml.cpp
@@ -0,0 +1,295 @@
+// $Id: xml.cpp,v 1.124 2010/09/27 03:19:46 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <cassert>
+
+#include "errhandling.h"
+#include "front_end_warnings.h"
+#include "stringx.h"
+#include "tinyxml.h"
+#include "xml.h"
+#include "xml_strings.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+
+XmlErrorSupport::XmlErrorSupport(string fileName)
+    :
+    m_fileName(fileName),
+    m_errorMessage("")
+{
+}
+
+XmlErrorSupport::~XmlErrorSupport()
+{
+}
+
+string
+XmlErrorSupport::GetFileName()
+{
+    return m_fileName;
+}
+
+void
+XmlErrorSupport::ThrowDataError(const string& reason)
+{
+    m_errorMessage.append(xmlstr::XML_ERR_DATA_ERR_0 + reason);
+    incorrect_data e(m_errorMessage);
+    throw e;
+}
+
+void
+XmlErrorSupport::ThrowDataError(const string& reason, long lineno)
+{
+    m_errorMessage.append( xmlstr::XML_ERR_DATA_ERR_0
+                           + xmlstr::XML_ERR_DATA_ERR_1
+                           + m_fileName
+                           + xmlstr::XML_ERR_DATA_ERR_2
+                           + ToString(lineno)
+                           + xmlstr::XML_ERR_DATA_ERR_3
+                           + reason);
+    incorrect_data e(m_errorMessage);
+    throw e;
+}
+
+void
+XmlErrorSupport::ThrowFileError(const string& reason)
+{
+    m_errorMessage.append(xmlstr::XML_ERR_FILE_ERR + reason);
+    file_error e(m_errorMessage);
+    throw e;
+}
+
+void
+XmlErrorSupport::ThrowXMLError(const string& reason)
+{
+    m_errorMessage.append(xmlstr::XML_ERR_0 + reason);
+    incorrect_xml e(m_errorMessage);
+    throw e;
+}
+
+void
+XmlErrorSupport::ThrowXMLError(const string& reason, long lineno)
+{
+    m_errorMessage.append(xmlstr::XML_ERR_0
+                          + xmlstr::XML_ERR_1
+                          + m_fileName
+                          + xmlstr::XML_ERR_2
+                          + ToString(lineno)
+                          + xmlstr::XML_ERR_3
+                          + reason);
+    incorrect_xml e(m_errorMessage);
+    throw e;
+}
+
+void
+XmlErrorSupport::ThrowInternalXMLError(const string& reason)
+{
+    m_errorMessage.append(xmlstr::XML_ERR_INTERNAL + reason);
+    incorrect_xml e(m_errorMessage);
+    throw e;
+}
+
+//------------------------------------------------------------------------------------
+
+XmlParser::XmlParser(ParseTreeSchema & schema, FrontEndWarnings & warnings)
+    :
+    XmlErrorSupport(defaults::datafilename),
+    m_schema(schema),
+    m_frontEndWarnings(warnings)
+{
+}
+
+XmlParser::~XmlParser()
+{
+}
+
+void
+XmlParser::ParseFileData(string fileName)
+{
+    m_fileName = fileName;
+    m_document.LoadFile(GetFileName());
+    if(m_document.Error())
+    {
+        throw tixml_error(m_document.ErrorDesc());  // EWFIX -- row ??
+    }
+    else
+        // XML parser is happy, but now we need to make
+        // sure that we've got data
+    {
+        TiXmlElement * root = GetRootElement();
+        if(root == NULL)
+        {
+            string msg = GetFileName() + ":"
+                + xmlstr::XML_ERR_NO_XML_DATA;
+            throw tixml_error(msg);
+        }
+    }
+
+    checkSchema();
+}
+
+TiXmlElement *
+XmlParser::GetRootElement()
+{
+    return m_document.RootElement();
+}
+
+void
+XmlParser::checkSchema()
+{
+    TiXmlElement * rootElement = GetRootElement();
+    TiXmlNode * extraNode = rootElement->NextSibling();
+    while(extraNode != NULL)
+    {
+        if(extraNode->Type() != TiXmlNode::TINYXML_COMMENT)
+        {
+            ThrowXMLError( xmlstr::XML_ERR_EXTRA_TAG_TOP_0
+                           + extraNode->Value()
+                           + xmlstr::XML_ERR_EXTRA_TAG_TOP_1,
+                           extraNode->Row());
+        }
+        extraNode = extraNode->NextSibling();
+    }
+    checkSchema(rootElement);
+}
+
+void
+XmlParser::checkAttrPresent(TiXmlElement * elem, string attrName)
+{
+    const string * attrValue = elem->Attribute(attrName);
+    if(!attrValue)
+    {
+        ThrowXMLError( xmlstr::XML_ERR_ATTR_MISSING_0
+                       +attrName
+                       +xmlstr::XML_ERR_ATTR_MISSING_1
+                       +elem->Value()
+                       +xmlstr::XML_ERR_ATTR_MISSING_2,
+                       elem->Row());
+
+    }
+}
+
+void
+XmlParser::checkElemPresent(TiXmlElement * elem, string elemName)
+{
+    TiXmlNode * firstChild = elem->IterateChildren(elemName,NULL);
+    if(firstChild == NULL)
+    {
+        ThrowXMLError( xmlstr::XML_ERR_ELEM_MISSING_0
+                       +elemName
+                       +xmlstr::XML_ERR_ELEM_MISSING_1
+                       +elem->Value()
+                       +xmlstr::XML_ERR_ELEM_MISSING_2,
+                       elem->Row());
+    }
+}
+
+void
+XmlParser::checkAttrAllowed(TiXmlElement * parent, string attrName)
+{
+    if(!m_schema.AllowsAttribute(string(parent->Value()),attrName,parent->Row()))
+    {
+        ThrowXMLError(xmlstr::XML_ERR_UNEXPECTED_ATTR_0
+                      + attrName
+                      + xmlstr::XML_ERR_UNEXPECTED_ATTR_1
+                      + parent->Value()
+                      + xmlstr::XML_ERR_UNEXPECTED_ATTR_2,
+                      parent->Row());
+    }
+}
+
+void
+XmlParser::checkElemCount(TiXmlElement * parent, string childName)
+{
+    TiXmlNode * child = parent->IterateChildren(childName,NULL);
+    assert(child != NULL);  // because we found it as child of parent
+
+    if(!m_schema.AllowsElement(string(parent->Value()),childName,child->Row()))
+    {
+        ThrowXMLError(xmlstr::XML_ERR_UNEXPECTED_TAG_0
+                      + childName
+                      + xmlstr::XML_ERR_UNEXPECTED_TAG_1
+                      + parent->Value()
+                      + xmlstr::XML_ERR_UNEXPECTED_TAG_2,
+                      child->Row());
+    }
+
+    // now check if there is more than one
+    child = parent->IterateChildren(childName,child);
+    if(child != NULL)
+    {
+        if(!m_schema.AllowsAdditionalElements(string(parent->Value()),childName,child->Row()))
+        {
+            ThrowXMLError(xmlstr::XML_ERR_EXTRA_TAG_0
+                          + childName
+                          + xmlstr::XML_ERR_EXTRA_TAG_1
+                          + parent->Value()
+                          + xmlstr::XML_ERR_EXTRA_TAG_2,
+                          child->Row());
+        }
+    }
+}
+
+void
+XmlParser::checkSchema(TiXmlElement * parentElem)
+{
+
+    string parentString(parentElem->Value());
+
+    // check all required attributes are present
+    std::set<string> attrs = m_schema.RequiredAttributes(parentString,parentElem->Row());
+    for(std::set<string>::const_iterator i= attrs.begin(); i != attrs.end(); i++)
+    {
+        checkAttrPresent(parentElem,*i);
+    }
+
+    // check all required elements are present
+    std::set<string> elems = m_schema.RequiredElements(parentString,parentElem->Row());
+    for(std::set<string>::const_iterator i= elems.begin(); i != elems.end(); i++)
+    {
+        checkElemPresent(parentElem,*i);
+    }
+
+    // check all attributes present are allowed
+    for(TiXmlAttribute * attr = parentElem->FirstAttribute(); attr != NULL; attr = attr->Next())
+    {
+        string attrName(attr->Name());
+        checkAttrAllowed(parentElem,attrName);
+    }
+
+    // check all elements present are allowed and in right number
+    TiXmlNode * child = NULL;
+    while ((child = parentElem->IterateChildren(child)))
+    {
+        if(child->Type() == TiXmlNode::TINYXML_ELEMENT)
+        {
+            TiXmlElement * newElem = child->ToElement();
+            string childString(newElem->Value());
+            if(m_schema.DeprecatedElement(parentString,childString,newElem->Row()))
+            {
+                m_frontEndWarnings.AddWarning(xmlstr::XML_WARN_DEPRECATED_TAG_0
+                                              + childString
+                                              + xmlstr::XML_WARN_DEPRECATED_TAG_1
+                                              + parentString
+                                              + xmlstr::XML_WARN_DEPRECATED_TAG_2
+                                              + ToString(newElem->Row()));
+            }
+            else
+            {
+                checkElemCount(parentElem,string(childString));
+                checkSchema(newElem); // recursive check
+            }
+        }
+    }
+}
+
+//____________________________________________________________________________________
diff --git a/src/xml/xml.h b/src/xml/xml.h
new file mode 100644
index 0000000..25089c7
--- /dev/null
+++ b/src/xml/xml.h
@@ -0,0 +1,78 @@
+// $Id: xml.h,v 1.50 2011/03/07 06:08:54 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef XML_H
+#define XML_H
+
+#include <string>
+#include "parsetreeschema.h"
+#include "tinyxml.h"
+
+using std::string;
+
+/******************************************************************
+ This class reads the data file, including both molecular data and
+ option settings, and produces a Tiny XML parse tree of the contents.
+ It does not process those contents. Instead they are read by
+ classes ParseTreeToData and ParseTreeToSettings, which walk the
+ TinyXML tree this class generates.
+
+*********************************************************************/
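+
+// A minimal calling sketch (the ParseTreeSchema `schema`, FrontEndWarnings
+// `warnings`, and file name below are placeholders, not part of this file):
+//
+//     XmlParser parser(schema, warnings);
+//     parser.ParseFileData("infile.xml");           // throws on file or XML errors
+//     TiXmlElement * root = parser.GetRootElement();
+//     // `root` is then walked by ParseTreeToData / ParseTreeToSettings.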
+
+class XmlErrorSupport
+{
+  protected:
+    string m_fileName;
+    string m_errorMessage;
+
+    XmlErrorSupport();          // undefined
+
+  public:
+    XmlErrorSupport(string fileName);
+    virtual ~XmlErrorSupport();
+
+    string      GetFileName();
+    void        ThrowDataError(const string& reason);
+    void        ThrowDataError(const string& reason, long lineno);
+    void        ThrowFileError(const string& reason);
+    void        ThrowInternalXMLError(const string& reason);
+    void        ThrowXMLError(const string& reason);
+    void        ThrowXMLError(const string& reason, long lineno);
+};
+
+class FrontEndWarnings;
+
+class XmlParser : public XmlErrorSupport
+{
+  private:
+    XmlParser();        // undefined
+
+    TiXmlDocument       m_document;
+    ParseTreeSchema &   m_schema;
+    FrontEndWarnings &  m_frontEndWarnings;
+
+  protected:
+    void checkAttrAllowed(TiXmlElement * elem, string attrName);
+    void checkAttrPresent(TiXmlElement * elem, string attrName);
+    void checkElemCount(TiXmlElement * elem, string childName);
+    void checkElemPresent(TiXmlElement * elem, string elemName);
+    void checkSchema(TiXmlElement *);
+    void checkSchema();
+
+  public:
+    XmlParser(ParseTreeSchema & schema, FrontEndWarnings & warnings);
+    virtual ~XmlParser();
+    void ParseFileData(string fileName);   // builds TiXML structures
+    TiXmlElement * GetRootElement();
+};
+
+#endif // XML_H
+
+//____________________________________________________________________________________
diff --git a/src/xml/xml_strings.cpp b/src/xml/xml_strings.cpp
new file mode 100644
index 0000000..4b74257
--- /dev/null
+++ b/src/xml/xml_strings.cpp
@@ -0,0 +1,357 @@
+// $Id: xml_strings.cpp,v 1.74 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Mary Kuhner, Jon Yamato, and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#include <string>
+
+#include "local_build.h"                // for definition of LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+#include "xml_strings.h"
+
+using std::string;
+
+//------------------------------------------------------------------------------------
+// xml tags for lamarc input file
+
+const string xmlstr::XML_COMMENT_SEED_FROM_CLOCK_0 =
+    "<!-- The tag below documents the seed used for this run. -->";
+const string xmlstr::XML_COMMENT_SEED_FROM_CLOCK_1 =
+    "<!-- It is ignored if you use this file as lamarc input -->";
+
+const string xmlstr::XML_STRING_UNKNOWN_FILE    = "<unknown>";
+const string xmlstr::XML_STRING_COLON           = ": ";
+const string xmlstr::XML_STRING_DASH            = "-";
+const string xmlstr::XML_STRING_USER            = "USER";
+const string xmlstr::XML_STRING_NEWICK          = "newick";
+const string xmlstr::XML_STRING_ARG             = "arg";
+
+const string xmlstr::XML_ERR_0                  = "XML Error: ";
+const string xmlstr::XML_ERR_1                  = "in file \"";
+const string xmlstr::XML_ERR_2                  = "\"  near line ";
+const string xmlstr::XML_ERR_3                  = ": ";
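+// (XML_ERR_0 through XML_ERR_3 are concatenated by XmlErrorSupport::ThrowXMLError()
+// around a file name and line number, producing, for example:
+//   XML Error: in file "infile.xml"  near line 42: <reason>
+// where "infile.xml", 42, and <reason> are placeholder values.)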
+const string xmlstr::XML_ERR_ANCESTOR_TRUEPOP_0 = "Population \"";
+const string xmlstr::XML_ERR_ANCESTOR_TRUEPOP_1 = "\" appears as internal node in divergence tree.";
+const string xmlstr::XML_ERR_ATTR_MISSING_0     = "Couldn't find required attribute \"";
+const string xmlstr::XML_ERR_ATTR_MISSING_1     = "\" for element <";
+const string xmlstr::XML_ERR_ATTR_MISSING_2     = ">";
+const string xmlstr::XML_ERR_BAD_ANCESTOR_TREE      = "Badly specified divergence tree.";
+const string xmlstr::XML_ERR_BOTH_SEED_TYPES_0  = "Only one of the following tags can appear in a lamarc file: <seed> ( near line ";
+const string xmlstr::XML_ERR_BOTH_SEED_TYPES_1  = ") and <seed-from-system-clock> ( near line ";
+const string xmlstr::XML_ERR_BOTH_SEED_TYPES_2  = ") of file \"";
+const string xmlstr::XML_ERR_BOTH_SEED_TYPES_3  = "\".";
+const string xmlstr::XML_ERR_DATA_ERR_0         = "Data Error: ";
+const string xmlstr::XML_ERR_DATA_ERR_1         = "in file \"";
+const string xmlstr::XML_ERR_DATA_ERR_2         = "\"  near line ";
+const string xmlstr::XML_ERR_DATA_ERR_3         = ": ";
+const string xmlstr::XML_ERR_DUPLICATE_ANCESTOR_0 = "Population name \"";
+const string xmlstr::XML_ERR_DUPLICATE_ANCESTOR_1 = "\" appears in more than one <ancestor> tag.";
+const string xmlstr::XML_ERR_DUPLICATE_REGIONNAME_0 = "There is more than one region named '";
+const string xmlstr::XML_ERR_DUPLICATE_REGIONNAME_1 = "' in the input file.";
+const string xmlstr::XML_ERR_DUPLICATE_SAMPLENAME_0 = "There is more than one sample named '";
+const string xmlstr::XML_ERR_DUPLICATE_SAMPLENAME_1 = "' in region ";
+const string xmlstr::XML_ERR_ELEM_MISSING_0     = "Couldn't find required subtag <";
+const string xmlstr::XML_ERR_ELEM_MISSING_1     = "> for element <";
+const string xmlstr::XML_ERR_ELEM_MISSING_2     = ">";
+const string xmlstr::XML_ERR_EMPTY_ANCESTOR     = "Tag <ancestor> appears to be empty";
+const string xmlstr::XML_ERR_EXTRA_TAG_0        = "Found more <";
+const string xmlstr::XML_ERR_EXTRA_TAG_1        = "> tags under tag <";
+const string xmlstr::XML_ERR_EXTRA_TAG_2        = "> than expected";
+const string xmlstr::XML_ERR_EXTRA_TAG_TOP_0    = "Extra tag \"<";
+const string xmlstr::XML_ERR_EXTRA_TAG_TOP_1    = ">\" found in file";
+const string xmlstr::XML_ERR_FILE_ERR           = "File Error: ";
+const string xmlstr::XML_ERR_FILE_NOT_FOUND_0   = "File \"";
+const string xmlstr::XML_ERR_FILE_NOT_FOUND_1   = "\" not found";
+const string xmlstr::XML_ERR_INCONSISTENT_REGION = "Inconsistent specifications in region ";
+const string xmlstr::XML_ERR_INTERNAL           = "Internal XML error. Is schema out of date?: ";
+const string xmlstr::XML_ERR_INVALID_METHOD_0   = "Invalid method ";
+const string xmlstr::XML_ERR_INVALID_METHOD_1   = " for force type ";
+const string xmlstr::XML_ERR_METHOD_TYPE_COUNT_0= "Tag <method> should have had ";
+const string xmlstr::XML_ERR_METHOD_TYPE_COUNT_1= " parameters for force ";
+const string xmlstr::XML_ERR_METHOD_TYPE_COUNT_2= ", but found ";
+const string xmlstr::XML_ERR_METHOD_TYPE_COUNT_3= " instead.";
+const string xmlstr::XML_ERR_METHOD_USER_WITHOUT_VALUE_0="Tag <method> for force \"";
+const string xmlstr::XML_ERR_METHOD_USER_WITHOUT_VALUE_1="\" has a parameter of type \"USER\" without a supplied value. Taking the program default value of ";
+const string xmlstr::XML_ERR_METHOD_USER_WITHOUT_VALUE_2=" .";
+const string xmlstr::XML_ERR_MISSING_CONTENT_0  = "Missing content between <";
+const string xmlstr::XML_ERR_MISSING_CONTENT_1  = "> tags";
+const string xmlstr::XML_ERR_MISSING_NEWPOP_0 = "Population \"";
+const string xmlstr::XML_ERR_MISSING_NEWPOP_1 = "\" never appears in a <new-population> tag in divergence tree.";
+const string xmlstr::XML_ERR_MISSING_TAG_0      = "Could not find a <";
+const string xmlstr::XML_ERR_MISSING_TAG_1      = "> tag. Expected at least 1.";
+const string xmlstr::XML_ERR_MISSING_TAG_HIER_0 = "Expected a <";
+const string xmlstr::XML_ERR_MISSING_TAG_HIER_1 = "> tag within the <";
+const string xmlstr::XML_ERR_MISSING_TAG_HIER_2 = "> tag.";
+const string xmlstr::XML_ERR_MULTIPLE_ANCESTORS_0 = "Population name \"";
+const string xmlstr::XML_ERR_MULTIPLE_ANCESTORS_1 = "\" appears in multiple <new-populations> tags with <ancestor> ";
+const string xmlstr::XML_ERR_MULTIPLE_ANCESTORS_2 = " and ";
+const string xmlstr::XML_ERR_MULTIPLE_ANCESTORS_3 = " or more.";
+const string xmlstr::XML_ERR_NEST_0             = "Nested <";
+const string xmlstr::XML_ERR_NEST_1             = "> tags encountered";
+const string xmlstr::XML_ERR_NEWPOP             = "Tag <new-populations> appears to contain other than two populations";
+const string xmlstr::XML_ERR_NO_SUBTAG_0        = "No <";
+const string xmlstr::XML_ERR_NO_SUBTAG_1        = "> tag found in <";
+const string xmlstr::XML_ERR_NO_SUBTAG_2        = "> element";
+const string xmlstr::XML_ERR_NO_TAG_0           = "tag <";
+const string xmlstr::XML_ERR_NO_TAG_1           = "> not found";
+const string xmlstr::XML_ERR_NOT_LAMARC         = "Input does not appear to be a Lamarc XML file";
+const string xmlstr::XML_ERR_NOT_MAPFILE        = "Input does not appear to be a Lamarc map file";
+const string xmlstr::XML_ERR_NOT_XML            = "Input does not appear to be an XML file";
+const string xmlstr::XML_ERR_NO_XML_DATA        = "Input does not appear to have any XML tags";
+const string xmlstr::XML_ERR_START_VALUE_COUNT_0= "Tag <start-values> should have had ";
+const string xmlstr::XML_ERR_START_VALUE_COUNT_1= " parameters for force ";
+const string xmlstr::XML_ERR_START_VALUE_COUNT_2= ", but we found ";
+const string xmlstr::XML_ERR_START_VALUE_COUNT_3= " instead.";
+const string xmlstr::XML_ERR_UNEXPECTED_ATTR_0  = "Attribute \"";
+const string xmlstr::XML_ERR_UNEXPECTED_ATTR_1  = "\" undefined or not legal to assign for tag <";
+const string xmlstr::XML_ERR_UNEXPECTED_ATTR_2  = ">";
+const string xmlstr::XML_ERR_UNEXPECTED_TAG_0   = "Tag <";
+const string xmlstr::XML_ERR_UNEXPECTED_TAG_1   = "> undefined or not legal to appear under tag <";
+const string xmlstr::XML_ERR_UNEXPECTED_TAG_2   = ">";
+
+const string xmlstr::XML_IERR_DUP_ATTR_0        = "tried to add attribute \"";
+const string xmlstr::XML_IERR_DUP_ATTR_1        = "\" to element \"";
+const string xmlstr::XML_IERR_DUP_ATTR_2        = "\" twice. Fix schema.";
+const string xmlstr::XML_IERR_DUP_CHILD_0       = "tried to add child element <";
+const string xmlstr::XML_IERR_DUP_CHILD_1       = "> to parent element <";
+const string xmlstr::XML_IERR_DUP_CHILD_2       = "> twice. Fix schema.";
+const string xmlstr::XML_IERR_DUP_TAG_0         = "So sorry. You tried to add tag <";
+const string xmlstr::XML_IERR_DUP_TAG_1         = "> twice. Simple schema doesn't support this.";
+const string xmlstr::XML_IERR_NO_PARENT_TAG_0   = "tried to add element <";
+const string xmlstr::XML_IERR_NO_PARENT_TAG_1   = "> before parent <";
+const string xmlstr::XML_IERR_NO_PARENT_TAG_2   = "> was added. Fix schema.";
+const string xmlstr::XML_IERR_NO_TAG_0          = "Did not recognize tag <";
+const string xmlstr::XML_IERR_NO_TAG_1          = "> near line ";
+
+#ifdef NDEBUG
+const string xmlstr::XML_IERR_NO_TAG_2          = ".";
+#else
+const string xmlstr::XML_IERR_NO_TAG_2          = ". Do you need to update the schema?";
+#endif
+
+const string xmlstr::XML_ATTRTYPE_ATTR_NAME     = "attr.name";
+const string xmlstr::XML_ATTRTYPE_ATTR_TYPE     = "attr.type";
+const string xmlstr::XML_ATTRTYPE_CONSTRAINT    = "constraint";
+const string xmlstr::XML_ATTRTYPE_FOR           = "for";
+const string xmlstr::XML_ATTRTYPE_EDGEDEFAULT   = "edgedefault";
+const string xmlstr::XML_ATTRTYPE_ID            = "id";
+const string xmlstr::XML_ATTRTYPE_KEY           = "key";
+const string xmlstr::XML_ATTRTYPE_LOCUS_NAME    = "locus";
+const string xmlstr::XML_ATTRTYPE_NAME          = "name";
+const string xmlstr::XML_ATTRTYPE_SOURCE        = "source";
+const string xmlstr::XML_ATTRTYPE_TARGET        = "target";
+const string xmlstr::XML_ATTRTYPE_TYPE          = "type";
+const string xmlstr::XML_ATTRTYPE_VALUE         = "value";
+const string xmlstr::XML_ATTRTYPE_VERSION       = "version";
+
+const string xmlstr::XML_TAG_ALLELES            = "alleles";
+const string xmlstr::XML_TAG_ALPHA              = "alpha";
+const string xmlstr::XML_TAG_ANALYSIS           = "analysis";
+const string xmlstr::XML_TAG_ANCESTOR           = "ancestor";
+const string xmlstr::XML_TAG_ARGDATA            = "argdata";
+
+#ifdef LAMARC_QA_TREE_DUMP
+const string xmlstr::XML_TAG_ARGFILE_PREFIX     = "argfile-prefix";
+#endif // LAMARC_QA_TREE_DUMP
+
+const string xmlstr::XML_TAG_AUTOCORRELATION    = "autocorrelation";
+const string xmlstr::XML_TAG_BASE_FREQS         = "base-freqs";
+const string xmlstr::XML_TAG_BAYESIAN           = "bayesian";
+const string xmlstr::XML_TAG_BAYESIAN_ANALYSIS  = "bayesian-analysis";
+const string xmlstr::XML_TAG_BLOCK              = "block";
+const string xmlstr::XML_TAG_CALCULATED         = "calculated";
+const string xmlstr::XML_TAG_CATEGORIES         = "categories";
+const string xmlstr::XML_TAG_CHAINS             = "chains";
+const string xmlstr::XML_TAG_COALESCENCE        = "coalescence";
+const string xmlstr::XML_TAG_CONSTRAINTS        = "constraints";
+const string xmlstr::XML_TAG_CONVERT_OUTPUT     = "convert-output-to-eliminate-zero";
+const string xmlstr::XML_TAG_CREATING           = "creating";
+const string xmlstr::XML_TAG_CURVEFILE_PREFIX   = "curvefile-prefix";
+const string xmlstr::XML_TAG_DATA               = "data";
+const string xmlstr::XML_TAG_DATABLOCK          = "datablock";
+const string xmlstr::XML_TAG_DISCARD            = "discard";
+const string xmlstr::XML_TAG_DISEASE            = "disease";
+const string xmlstr::XML_TAG_DISEASELOCATION    = "location";
+const string xmlstr::XML_TAG_DISEASESTATUS      = "disease-status";
+const string xmlstr::XML_TAG_DIVERGENCE         = "divergence";
+const string xmlstr::XML_TAG_DIVMIG             = "divergence-migration";
+const string xmlstr::XML_TAG_ECHO               = "echo";
+const string xmlstr::XML_TAG_EDGE               = "edge";
+const string xmlstr::XML_TAG_EDGEDEFAULT        = "edgedefault";
+const string xmlstr::XML_TAG_EFFECTIVE_POPSIZE  = "effective-popsize";
+const string xmlstr::XML_TAG_END                = "end";
+const string xmlstr::XML_TAG_EPOCH_BOUNDARY     = "epoch-boundary";
+const string xmlstr::XML_TAG_EPOCHSIZEARRANGER  = "epoch-size";
+const string xmlstr::XML_TAG_FINAL              = "final";
+const string xmlstr::XML_TAG_FOR                = "for";
+const string xmlstr::XML_TAG_FORCES             = "forces";
+const string xmlstr::XML_TAG_FORMAT             = "format";
+const string xmlstr::XML_TAG_GENOTYPE           = "genotype";
+const string xmlstr::XML_TAG_GENOTYPE_RESOLUTIONS = "genotype-resolutions";
+const string xmlstr::XML_TAG_GRAPH              = "graph";
+const string xmlstr::XML_TAG_GROUP              = "group";
+const string xmlstr::XML_TAG_GROWTH             = "growth";
+const string xmlstr::XML_TAG_GTRRATES           = "gtr-rates";
+const string xmlstr::XML_TAG_HAPLOTYPES         = "haplotypes";
+const string xmlstr::XML_TAG_HAPLOTYPING        = "haplotyping";
+const string xmlstr::XML_TAG_HAP_COUNT          = "samples-per-individual";
+const string xmlstr::XML_TAG_HEATING            = "heating";
+const string xmlstr::XML_TAG_HEATING_STRATEGY   = "adaptive";
+const string xmlstr::XML_TAG_INDIVIDUAL         = "individual";
+const string xmlstr::XML_TAG_INITIAL            = "initial";
+const string xmlstr::XML_TAG_INTERVAL           = "interval";
+const string xmlstr::XML_TAG_IN_SUMMARY_FILE    = "in-summary-file";
+const string xmlstr::XML_TAG_ISOPT              = "optimize";
+const string xmlstr::XML_TAG_KEY                = "key";
+const string xmlstr::XML_TAG_LAMARC             = "lamarc";
+const string xmlstr::XML_TAG_LENGTH             = "length";
+const string xmlstr::XML_TAG_LOCATIONS          = "locations";
+const string xmlstr::XML_TAG_LOCUSARRANGER      = "trait-arranger";
+const string xmlstr::XML_TAG_LOGISTICSELECTION  = "logistic-selection-coefficient";
+const string xmlstr::XML_TAG_STOCHASTICSELECTION = "stochastic-selection";
+const string xmlstr::XML_TAG_MANY_ARGFILES      = "many-arg-files";
+const string xmlstr::XML_TAG_MAPFILE            = "lamarc-map-file";
+const string xmlstr::XML_TAG_MAP_POSITION       = "map-position";
+const string xmlstr::XML_TAG_MARKER_WEIGHTS     = "marker-weights";
+const string xmlstr::XML_TAG_MATRIX_MEMBERS     = "matrix-members";
+const string xmlstr::XML_TAG_MAX_EVENTS         = "max-events";
+const string xmlstr::XML_TAG_MERGING            = "merging";
+const string xmlstr::XML_TAG_METHOD             = "method";
+const string xmlstr::XML_TAG_MIGRATION          = "migration";
+const string xmlstr::XML_TAG_MODEL              = "model";
+const string xmlstr::XML_TAG_MU                 = "mu";
+const string xmlstr::XML_TAG_NAME               = "name";
+const string xmlstr::XML_TAG_NEWICKTREEFILE_PREFIX = "newicktreefile-prefix";
+const string xmlstr::XML_TAG_NEWPOP             = "new-populations";
+const string xmlstr::XML_TAG_NODE               = "node";
+const string xmlstr::XML_TAG_NORMALIZE          = "normalize";
+const string xmlstr::XML_TAG_NU                 = "nu";
+const string xmlstr::XML_TAG_NUMBER             = "number";
+const string xmlstr::XML_TAG_NUM_CATEGORIES     = "num-categories";
+const string xmlstr::XML_TAG_NUM_SITES          = "sites";
+const string xmlstr::XML_TAG_OFFSET             = "offset";
+const string xmlstr::XML_TAG_OLD_SUMMARY_FILE   = "summary-file";
+const string xmlstr::XML_TAG_OUT_SUMMARY_FILE   = "out-summary-file";
+const string xmlstr::XML_TAG_OUT_XML_FILE       = "out-xml-file";
+const string xmlstr::XML_TAG_PANEL              = "panel";
+const string xmlstr::XML_TAG_PANELSIZE          = "panel-size";
+const string xmlstr::XML_TAG_PARAMETER_FILE     = "parameter-file";
+const string xmlstr::XML_TAG_PARAMINDEX         = "paramindex";
+const string xmlstr::XML_TAG_PENETRANCE         = "penetrance";
+const string xmlstr::XML_TAG_PER_BASE_ERROR_RATE= "per-base-error-rate";
+const string xmlstr::XML_TAG_PHASE              = "phase";
+const string xmlstr::XML_TAG_PHENOTYPE          = "phenotype";
+const string xmlstr::XML_TAG_PHENOTYPES         = "phenotypes";
+const string xmlstr::XML_TAG_PHENOTYPE_NAME     = "phenotype-name";
+const string xmlstr::XML_TAG_PLOTTING           = "plotting";
+const string xmlstr::XML_TAG_POPULATION         = "population";
+const string xmlstr::XML_TAG_POPTREE            = "population-tree";
+const string xmlstr::XML_TAG_POSSIBLE_LOCATIONS = "possible-locations";
+const string xmlstr::XML_TAG_POSTERIOR          = "posterior";
+const string xmlstr::XML_TAG_PRIOR              = "prior";
+const string xmlstr::XML_TAG_PRIORLOWERBOUND    = "lower";
+const string xmlstr::XML_TAG_PRIORUPPERBOUND    = "upper";
+const string xmlstr::XML_TAG_PROBABILITIES      = "probabilities";
+const string xmlstr::XML_TAG_PROFILE            = "profile";
+const string xmlstr::XML_TAG_PROFILES           = "profiles";
+const string xmlstr::XML_TAG_PROFILE_PREFIX     = "profile-prefix";
+const string xmlstr::XML_TAG_PROGRESS_REPORTS   = "progress-reports";
+const string xmlstr::XML_TAG_RANGE              = "range";
+const string xmlstr::XML_TAG_RATES              = "rates";
+const string xmlstr::XML_TAG_RECLOCFILE_PREFIX  = "reclocfile-prefix";
+const string xmlstr::XML_TAG_RECOMBINATION      = "recombination";
+const string xmlstr::XML_TAG_REGION             = "region";
+const string xmlstr::XML_TAG_REGION_GAMMA       = "gamma-over-regions";
+const string xmlstr::XML_TAG_RELATIVE_MURATE    = "relative-murate";
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+const string xmlstr::XML_TAG_RELATIVE_SAMPLE_RATE    = "relative-sample-rate";
+#endif
+
+const string xmlstr::XML_TAG_REPLICATES         = "replicates";
+const string xmlstr::XML_TAG_REPORT_XML_FILE    = "xml-report-file";
+const string xmlstr::XML_TAG_RESIMULATING       = "resimulating";
+const string xmlstr::XML_TAG_RESULTS_FILE       = "results-file";
+const string xmlstr::XML_TAG_SAMPLE             = "sample";
+const string xmlstr::XML_TAG_SAMPLES            = "samples";
+const string xmlstr::XML_TAG_SEED               = "seed";
+const string xmlstr::XML_TAG_SEED_FROM_CLOCK    = "seed-from-system-clock";
+const string xmlstr::XML_TAG_SPACING            = "spacing";
+const string xmlstr::XML_TAG_START              = "start";
+const string xmlstr::XML_TAG_START_VALUES       = "start-values";
+const string xmlstr::XML_TAG_STATUS             = "status";
+const string xmlstr::XML_TAG_STAIRARRANGER      = "stick-arranger";
+const string xmlstr::XML_TAG_STRATEGY           = "strategy";
+const string xmlstr::XML_TAG_SWAP_INTERVAL      = "swap-interval";
+const string xmlstr::XML_TAG_TEMPERATURES       = "temperatures";
+const string xmlstr::XML_TAG_TRACEFILE_PREFIX   = "tracefile-prefix";
+const string xmlstr::XML_TAG_TRAIT              = "trait";
+const string xmlstr::XML_TAG_TRAITS             = "traits";
+const string xmlstr::XML_TAG_TRAIT_NAME         = "trait-name";
+const string xmlstr::XML_TAG_TREE               = "tree";
+const string xmlstr::XML_TAG_TREESIZE           = "tree-size";
+const string xmlstr::XML_TAG_TRUEVALUE          = "true-value";
+const string xmlstr::XML_TAG_TTRATIO            = "ttratio";
+
+#ifdef LAMARC_QA_TREE_DUMP
+const string xmlstr::XML_TAG_USE_ARGFILES       = "use-argfiles";
+#endif // LAMARC_QA_TREE_DUMP
+
+const string xmlstr::XML_TAG_USE_CURVEFILES     = "use-curvefiles";
+const string xmlstr::XML_TAG_USE_IN_SUMMARY     = "use-in-summary";
+const string xmlstr::XML_TAG_USE_NEWICKTREEFILE = "use-newicktreefile";
+const string xmlstr::XML_TAG_USE_OUT_SUMMARY    = "use-out-summary";
+const string xmlstr::XML_TAG_USE_RECLOCFILE     = "use-reclocfile";
+const string xmlstr::XML_TAG_USE_TRACEFILE      = "use-tracefile";
+const string xmlstr::XML_TAG_VERBOSITY          = "verbosity";
+
+const string xmlstr::XML_ATTRVALUE_ASITES        = "asites";
+const string xmlstr::XML_ATTRVALUE_CURVE         = "curve";
+const string xmlstr::XML_ATTRVALUE_DOUBLE        = "double";
+const string xmlstr::XML_ATTRVALUE_KNOWN         = "known";
+const string xmlstr::XML_ATTRVALUE_LIVE_SITES    = "live_sites";
+const string xmlstr::XML_ATTRVALUE_LONG          = "long";
+const string xmlstr::XML_ATTRVALUE_NLABEL        = "nlabel";
+const string xmlstr::XML_ATTRVALUE_NODE_LABEL    = "node_label";
+const string xmlstr::XML_ATTRVALUE_NODE_TIME     = "node_time";
+const string xmlstr::XML_ATTRVALUE_NODE_TYPE     = "node_type";
+const string xmlstr::XML_ATTRVALUE_NTIME         = "ntime";
+const string xmlstr::XML_ATTRVALUE_NTYPE         = "ntype";
+const string xmlstr::XML_ATTRVALUE_OFF           = "off";
+const string xmlstr::XML_ATTRVALUE_PARTITIONS    = "partitions";
+const string xmlstr::XML_ATTRVALUE_PTYPE         = "ptype";
+const string xmlstr::XML_ATTRVALUE_REC_LOCATION  = "rec_location";
+const string xmlstr::XML_ATTRVALUE_RLOC          = "rloc";
+const string xmlstr::XML_ATTRVALUE_STRING        = "string";
+const string xmlstr::XML_ATTRVALUE_TRANS_SITES   = "transmitted_sites";
+const string xmlstr::XML_ATTRVALUE_UNKNOWN       = "unknown";
+
+const string xmlstr::XML_BRANCHTYPE_COAL      = "Coal";
+const string xmlstr::XML_BRANCHTYPE_DISEASE   = "Disease";
+const string xmlstr::XML_BRANCHTYPE_DIVMIG    = "DivMig";
+const string xmlstr::XML_BRANCHTYPE_EPOCH     = "Epoch";
+const string xmlstr::XML_BRANCHTYPE_MIG       = "Mig";
+const string xmlstr::XML_BRANCHTYPE_REC       = "Rec";
+const string xmlstr::XML_BRANCHTYPE_TIP       = "Tip";
+
+// Ignoring case, xmlstr::XML_ATTRVALUE_STICKEXP below must be the same as
+// lamarcstrings::shortStickExpName, for use in
+// stringx.cpp::StringMatchesGrowthType(), which is used by
+// stringx.cpp::ProduceGrowthTypeOrBarf().
+
+const string xmlstr::XML_ATTRVALUE_STICKEXP = "stick-exp";
+const string xmlstr::XML_ATTRVALUE_STICK    = "stick";
+
+const string xmlstr::XML_WARN_DEPRECATED_TAG_0  = "WARNING: ignoring deprecated tag <";
+const string xmlstr::XML_WARN_DEPRECATED_TAG_1  = "> appearing under tag <";
+const string xmlstr::XML_WARN_DEPRECATED_TAG_2  = "> near line ";
+
+const string xmlstr::DATA_MODEL_F84 = "F84";
+
+// const string xmlstr::WHAT_DOES_VALGRIND_THINK = "as;dlfjas;ldfkj;sldkjf";
+
+//____________________________________________________________________________________
diff --git a/src/xml/xml_strings.h b/src/xml/xml_strings.h
new file mode 100644
index 0000000..a500165
--- /dev/null
+++ b/src/xml/xml_strings.h
@@ -0,0 +1,341 @@
+// $Id: xml_strings.h,v 1.80 2012/06/30 01:32:43 bobgian Exp $
+
+/*
+  Copyright 2002  Peter Beerli, Mary Kuhner, Jon Yamato and Joseph Felsenstein
+
+  This software is distributed free of charge for non-commercial use
+  and is copyrighted.  Of course, we do not guarantee that the software
+  works, and are not responsible for any damage you may cause or have.
+*/
+
+#ifndef XMLSTRINGS_H
+#define XMLSTRINGS_H
+
+#include <string>
+#include "local_build.h"
+
+class xmlstr
+{
+  public:
+
+    static const std::string XML_COMMENT_SEED_FROM_CLOCK_0  ;
+    static const std::string XML_COMMENT_SEED_FROM_CLOCK_1  ;
+
+    static const std::string XML_STRING_UNKNOWN_FILE   ;
+    static const std::string XML_STRING_COLON          ;
+    static const std::string XML_STRING_DASH           ;
+    static const std::string XML_STRING_USER           ;
+    static const std::string XML_STRING_NEWICK         ;
+    static const std::string XML_STRING_ARG            ;
+
+    static const std::string XML_ERR_0                 ;
+    static const std::string XML_ERR_1                 ;
+    static const std::string XML_ERR_2                 ;
+    static const std::string XML_ERR_3                 ;
+    static const std::string XML_ERR_ANCESTOR_TRUEPOP_0    ;
+    static const std::string XML_ERR_ANCESTOR_TRUEPOP_1    ;
+    static const std::string XML_ERR_ATTR_MISSING_0    ;
+    static const std::string XML_ERR_ATTR_MISSING_1    ;
+    static const std::string XML_ERR_ATTR_MISSING_2    ;
+    static const std::string XML_ERR_ELEM_MISSING_0    ;
+    static const std::string XML_ERR_ELEM_MISSING_1    ;
+    static const std::string XML_ERR_ELEM_MISSING_2    ;
+    static const std::string XML_ERR_BAD_ANCESTOR_TREE     ;
+    static const std::string XML_ERR_BOTH_SEED_TYPES_0 ;
+    static const std::string XML_ERR_BOTH_SEED_TYPES_1 ;
+    static const std::string XML_ERR_BOTH_SEED_TYPES_2 ;
+    static const std::string XML_ERR_BOTH_SEED_TYPES_3 ;
+    static const std::string XML_ERR_DATA_ERR_0        ;
+    static const std::string XML_ERR_DATA_ERR_1        ;
+    static const std::string XML_ERR_DATA_ERR_2        ;
+    static const std::string XML_ERR_DATA_ERR_3        ;
+    static const std::string XML_ERR_DUPLICATE_ANCESTOR_0 ;
+    static const std::string XML_ERR_DUPLICATE_ANCESTOR_1 ;
+    static const std::string XML_ERR_DUPLICATE_REGIONNAME_0;
+    static const std::string XML_ERR_DUPLICATE_REGIONNAME_1;
+    static const std::string XML_ERR_DUPLICATE_SAMPLENAME_0;
+    static const std::string XML_ERR_DUPLICATE_SAMPLENAME_1;
+    static const std::string XML_ERR_EMPTY_ANCESTOR    ;
+    static const std::string XML_ERR_EXTRA_TAG_0       ;
+    static const std::string XML_ERR_EXTRA_TAG_1       ;
+    static const std::string XML_ERR_EXTRA_TAG_2       ;
+    static const std::string XML_ERR_EXTRA_TAG_TOP_0   ;
+    static const std::string XML_ERR_EXTRA_TAG_TOP_1   ;
+    static const std::string XML_ERR_FILE_ERR          ;
+    static const std::string XML_ERR_FILE_NOT_FOUND_0  ;
+    static const std::string XML_ERR_FILE_NOT_FOUND_1  ;
+    static const std::string XML_ERR_INCONSISTENT_REGION;
+    static const std::string XML_ERR_INTERNAL          ;
+    static const std::string XML_ERR_INVALID_METHOD_0  ;
+    static const std::string XML_ERR_INVALID_METHOD_1  ;
+    static const std::string XML_ERR_METHOD_TYPE_COUNT_0;
+    static const std::string XML_ERR_METHOD_TYPE_COUNT_1;
+    static const std::string XML_ERR_METHOD_TYPE_COUNT_2;
+    static const std::string XML_ERR_METHOD_TYPE_COUNT_3;
+    static const std::string XML_ERR_METHOD_USER_WITHOUT_VALUE_0;
+    static const std::string XML_ERR_METHOD_USER_WITHOUT_VALUE_1;
+    static const std::string XML_ERR_METHOD_USER_WITHOUT_VALUE_2;
+    static const std::string XML_ERR_MISSING_CONTENT_0 ;
+    static const std::string XML_ERR_MISSING_CONTENT_1 ;
+    static const std::string XML_ERR_MISSING_NEWPOP_0 ;
+    static const std::string XML_ERR_MISSING_NEWPOP_1 ;
+    static const std::string XML_ERR_MISSING_TAG_0     ;
+    static const std::string XML_ERR_MISSING_TAG_1     ;
+    static const std::string XML_ERR_MISSING_TAG_HIER_0;
+    static const std::string XML_ERR_MISSING_TAG_HIER_1;
+    static const std::string XML_ERR_MISSING_TAG_HIER_2;
+    static const std::string XML_ERR_MULTIPLE_ANCESTORS_0 ;
+    static const std::string XML_ERR_MULTIPLE_ANCESTORS_1 ;
+    static const std::string XML_ERR_MULTIPLE_ANCESTORS_2 ;
+    static const std::string XML_ERR_MULTIPLE_ANCESTORS_3 ;
+    static const std::string XML_ERR_NEST_0            ;
+    static const std::string XML_ERR_NEST_1            ;
+    static const std::string XML_ERR_NEWPOP            ;
+    static const std::string XML_ERR_NO_SUBTAG_0       ;
+    static const std::string XML_ERR_NO_SUBTAG_1       ;
+    static const std::string XML_ERR_NO_SUBTAG_2       ;
+    static const std::string XML_ERR_NO_TAG_0          ;
+    static const std::string XML_ERR_NO_TAG_1          ;
+    static const std::string XML_ERR_NOT_LAMARC        ;
+    static const std::string XML_ERR_NOT_MAPFILE       ;
+    static const std::string XML_ERR_NOT_XML           ;
+    static const std::string XML_ERR_NO_XML_DATA       ;
+    static const std::string XML_ERR_START_VALUE_COUNT_0;
+    static const std::string XML_ERR_START_VALUE_COUNT_1;
+    static const std::string XML_ERR_START_VALUE_COUNT_2;
+    static const std::string XML_ERR_START_VALUE_COUNT_3;
+    static const std::string XML_ERR_UNEXPECTED_ATTR_0 ;
+    static const std::string XML_ERR_UNEXPECTED_ATTR_1 ;
+    static const std::string XML_ERR_UNEXPECTED_ATTR_2 ;
+    static const std::string XML_ERR_UNEXPECTED_TAG_0  ;
+    static const std::string XML_ERR_UNEXPECTED_TAG_1  ;
+    static const std::string XML_ERR_UNEXPECTED_TAG_2  ;
+
+    static const std::string XML_IERR_DUP_ATTR_0       ;
+    static const std::string XML_IERR_DUP_ATTR_1       ;
+    static const std::string XML_IERR_DUP_ATTR_2       ;
+    static const std::string XML_IERR_DUP_CHILD_0      ;
+    static const std::string XML_IERR_DUP_CHILD_1      ;
+    static const std::string XML_IERR_DUP_CHILD_2      ;
+    static const std::string XML_IERR_DUP_TAG_0        ;
+    static const std::string XML_IERR_DUP_TAG_1        ;
+    static const std::string XML_IERR_NO_PARENT_TAG_0  ;
+    static const std::string XML_IERR_NO_PARENT_TAG_1  ;
+    static const std::string XML_IERR_NO_PARENT_TAG_2  ;
+    static const std::string XML_IERR_NO_TAG_0         ;
+    static const std::string XML_IERR_NO_TAG_1         ;
+    static const std::string XML_IERR_NO_TAG_2         ;
+
+    static const std::string XML_ATTRTYPE_ATTR_NAME    ;
+    static const std::string XML_ATTRTYPE_ATTR_TYPE    ;
+    static const std::string XML_ATTRTYPE_CONSTRAINT   ;
+    static const std::string XML_ATTRTYPE_FOR          ;
+    static const std::string XML_ATTRTYPE_EDGEDEFAULT  ;
+    static const std::string XML_ATTRTYPE_ID           ;
+    static const std::string XML_ATTRTYPE_KEY          ;
+    static const std::string XML_ATTRTYPE_LOCUS_NAME   ;
+    static const std::string XML_ATTRTYPE_NAME         ;
+    static const std::string XML_ATTRTYPE_SOURCE       ;
+    static const std::string XML_ATTRTYPE_TYPE         ;
+    static const std::string XML_ATTRTYPE_TARGET       ;
+    static const std::string XML_ATTRTYPE_VALUE        ;
+    static const std::string XML_ATTRTYPE_VERSION      ;
+
+    static const std::string XML_TAG_ALLELES           ;
+    static const std::string XML_TAG_ALPHA             ;
+    static const std::string XML_TAG_ANALYSIS          ;
+    static const std::string XML_TAG_ANCESTOR          ;
+    static const std::string XML_TAG_ARGDATA           ;
+    static const std::string XML_TAG_ARGFILE_PREFIX    ;
+    static const std::string XML_TAG_AUTOCORRELATION   ;
+    static const std::string XML_TAG_BASE_FREQS        ;
+    static const std::string XML_TAG_BAYESIAN          ;
+    static const std::string XML_TAG_BAYESIAN_ANALYSIS ;
+    static const std::string XML_TAG_BLOCK             ;
+    static const std::string XML_TAG_CALCULATED        ;
+    static const std::string XML_TAG_CATEGORIES        ;
+    static const std::string XML_TAG_CHAINS            ;
+    static const std::string XML_TAG_COALESCENCE       ;
+    static const std::string XML_TAG_CONSTRAINTS       ;
+    static const std::string XML_TAG_CONVERT_OUTPUT    ;
+    static const std::string XML_TAG_CREATING          ;
+    static const std::string XML_TAG_CURVEFILE_PREFIX  ;
+    static const std::string XML_TAG_DATA              ;
+    static const std::string XML_TAG_DATABLOCK         ;
+    static const std::string XML_TAG_DISCARD           ;
+    static const std::string XML_TAG_DISEASE           ;
+    static const std::string XML_TAG_DISEASELOCATION   ;
+    static const std::string XML_TAG_DISEASESTATUS     ;
+    static const std::string XML_TAG_DIVERGENCE        ;
+    static const std::string XML_TAG_DIVMIG            ;
+    static const std::string XML_TAG_ECHO              ;
+    static const std::string XML_TAG_EDGE              ;
+    static const std::string XML_TAG_EDGEDEFAULT       ;
+    static const std::string XML_TAG_EFFECTIVE_POPSIZE ;
+    static const std::string XML_TAG_END               ;
+    static const std::string XML_TAG_EPOCH_BOUNDARY    ;
+    static const std::string XML_TAG_EPOCHSIZEARRANGER ;
+    static const std::string XML_TAG_FINAL             ;
+    static const std::string XML_TAG_FOR               ;
+    static const std::string XML_TAG_FORCES            ;
+    static const std::string XML_TAG_FORMAT            ;
+    static const std::string XML_TAG_GENOTYPE          ;
+    static const std::string XML_TAG_GENOTYPE_RESOLUTIONS;
+    static const std::string XML_TAG_GRAPH             ;
+    static const std::string XML_TAG_GROUP             ;
+    static const std::string XML_TAG_GROWTH            ;
+    static const std::string XML_TAG_GTRRATES          ;
+    static const std::string XML_TAG_HAPLOTYPES        ;
+    static const std::string XML_TAG_HAPLOTYPING       ;
+    static const std::string XML_TAG_HAP_COUNT         ;
+    static const std::string XML_TAG_HEATING           ;
+    static const std::string XML_TAG_HEATING_STRATEGY  ;
+    static const std::string XML_TAG_INDIVIDUAL        ;
+    static const std::string XML_TAG_INITIAL           ;
+    static const std::string XML_TAG_INTERVAL          ;
+    static const std::string XML_TAG_IN_SUMMARY_FILE   ;
+    static const std::string XML_TAG_ISOPT             ;
+    static const std::string XML_TAG_KEY               ;
+    static const std::string XML_TAG_LAMARC            ;
+    static const std::string XML_TAG_LENGTH            ;
+    static const std::string XML_TAG_LOCATIONS         ;
+    static const std::string XML_TAG_LOCUSARRANGER     ;
+    static const std::string XML_TAG_LOGISTICSELECTION ;
+    static const std::string XML_TAG_STOCHASTICSELECTION;
+    static const std::string XML_TAG_MANY_ARGFILES     ;
+    static const std::string XML_TAG_MAPFILE           ;
+    static const std::string XML_TAG_MAP_POSITION      ;
+    static const std::string XML_TAG_MARKER_WEIGHTS    ;
+    static const std::string XML_TAG_MATRIX_MEMBERS    ;
+    static const std::string XML_TAG_MAX_EVENTS        ;
+    static const std::string XML_TAG_MERGING           ;
+    static const std::string XML_TAG_METHOD            ;
+    static const std::string XML_TAG_MIGRATION         ;
+    static const std::string XML_TAG_MODEL             ;
+    static const std::string XML_TAG_MU                ;
+    static const std::string XML_TAG_NAME              ;
+    static const std::string XML_TAG_NEWICKTREEFILE_PREFIX;
+    static const std::string XML_TAG_NEWPOP            ;
+    static const std::string XML_TAG_NODE              ;
+    static const std::string XML_TAG_NORMALIZE         ;
+    static const std::string XML_TAG_NU                ;
+    static const std::string XML_TAG_NUMBER            ;
+    static const std::string XML_TAG_NUM_CATEGORIES    ;
+    static const std::string XML_TAG_NUM_SITES         ;
+    static const std::string XML_TAG_OFFSET            ;
+    static const std::string XML_TAG_OLD_SUMMARY_FILE  ;
+    static const std::string XML_TAG_OUT_SUMMARY_FILE  ;
+    static const std::string XML_TAG_OUT_XML_FILE      ;
+    static const std::string XML_TAG_PANEL             ;
+    static const std::string XML_TAG_PANELSIZE         ;
+    static const std::string XML_TAG_PARAMETER_FILE    ;
+    static const std::string XML_TAG_PARAMINDEX        ;
+    static const std::string XML_TAG_PENETRANCE        ;
+    static const std::string XML_TAG_PER_BASE_ERROR_RATE;
+    static const std::string XML_TAG_PHASE             ;
+    static const std::string XML_TAG_PHENOTYPE         ;
+    static const std::string XML_TAG_PHENOTYPES        ;
+    static const std::string XML_TAG_PHENOTYPE_NAME    ;
+    static const std::string XML_TAG_PLOTTING          ;
+    static const std::string XML_TAG_POPULATION        ;
+    static const std::string XML_TAG_POPTREE           ;
+    static const std::string XML_TAG_POSSIBLE_LOCATIONS;
+    static const std::string XML_TAG_POSTERIOR         ;
+    static const std::string XML_TAG_PRIOR             ;
+    static const std::string XML_TAG_PRIORLOWERBOUND   ;
+    static const std::string XML_TAG_PRIORUPPERBOUND   ;
+    static const std::string XML_TAG_PROBABILITIES     ;
+    static const std::string XML_TAG_PROFILE           ;
+    static const std::string XML_TAG_PROFILE_PREFIX    ;
+    static const std::string XML_TAG_PROFILES          ;
+    static const std::string XML_TAG_PROGRESS_REPORTS  ;
+    static const std::string XML_TAG_RANGE             ;
+    static const std::string XML_TAG_RATES             ;
+    static const std::string XML_TAG_RECLOCFILE_PREFIX ;
+    static const std::string XML_TAG_RECOMBINATION     ;
+    static const std::string XML_TAG_REGION            ;
+    static const std::string XML_TAG_REGION_GAMMA      ;
+    static const std::string XML_TAG_RELATIVE_MURATE   ;
+
+#ifdef LAMARC_NEW_FEATURE_RELATIVE_SAMPLING
+    static const std::string XML_TAG_RELATIVE_SAMPLE_RATE   ;
+#endif
+
+    static const std::string XML_TAG_REPLICATES        ;
+    static const std::string XML_TAG_REPORT_XML_FILE   ;
+    static const std::string XML_TAG_RESIMULATING      ;
+    static const std::string XML_TAG_RESULTS_FILE      ;
+    static const std::string XML_TAG_SAMPLE            ;
+    static const std::string XML_TAG_SAMPLES           ;
+    static const std::string XML_TAG_SEED              ;
+    static const std::string XML_TAG_SEED_FROM_CLOCK   ;
+    static const std::string XML_TAG_SPACING           ;
+    static const std::string XML_TAG_START             ;
+    static const std::string XML_TAG_START_VALUES      ;
+    static const std::string XML_TAG_STATUS            ;
+    static const std::string XML_TAG_STAIRARRANGER     ;
+    static const std::string XML_TAG_STRATEGY          ;
+    static const std::string XML_TAG_SWAP_INTERVAL     ;
+    static const std::string XML_TAG_TEMPERATURES      ;
+    static const std::string XML_TAG_TRACEFILE_PREFIX  ;
+    static const std::string XML_TAG_TRAIT             ;
+    static const std::string XML_TAG_TRAITS            ;
+    static const std::string XML_TAG_TRAIT_NAME        ;
+    static const std::string XML_TAG_TREE              ;
+    static const std::string XML_TAG_TREESIZE          ;
+    static const std::string XML_TAG_TRUEVALUE         ;
+    static const std::string XML_TAG_TTRATIO           ;
+    static const std::string XML_TAG_USE_ARGFILES      ;
+    static const std::string XML_TAG_USE_CURVEFILES    ;
+    static const std::string XML_TAG_USE_IN_SUMMARY    ;
+    static const std::string XML_TAG_USE_NEWICKTREEFILE;
+    static const std::string XML_TAG_USE_OUT_SUMMARY   ;
+    static const std::string XML_TAG_USE_RECLOCFILE    ;
+    static const std::string XML_TAG_USE_TRACEFILE     ;
+    static const std::string XML_TAG_VERBOSITY         ;
+
+    static const std::string XML_ATTRVALUE_ASITES      ;
+    static const std::string XML_ATTRVALUE_CURVE       ;
+    static const std::string XML_ATTRVALUE_DOUBLE      ;
+    static const std::string XML_ATTRVALUE_KNOWN       ;
+    static const std::string XML_ATTRVALUE_LIVE_SITES  ;
+    static const std::string XML_ATTRVALUE_LONG        ;
+    static const std::string XML_ATTRVALUE_NLABEL      ;
+    static const std::string XML_ATTRVALUE_NODE_LABEL  ;
+    static const std::string XML_ATTRVALUE_NODE_TIME   ;
+    static const std::string XML_ATTRVALUE_NODE_TYPE   ;
+    static const std::string XML_ATTRVALUE_NTIME       ;
+    static const std::string XML_ATTRVALUE_NTYPE       ;
+    static const std::string XML_ATTRVALUE_OFF         ;
+    static const std::string XML_ATTRVALUE_PARTITIONS  ;
+    static const std::string XML_ATTRVALUE_PTYPE       ;
+    static const std::string XML_ATTRVALUE_REC_LOCATION;
+    static const std::string XML_ATTRVALUE_RLOC        ;
+    static const std::string XML_ATTRVALUE_STRING      ;
+    static const std::string XML_ATTRVALUE_TRANS_SITES ;
+    static const std::string XML_ATTRVALUE_UNKNOWN     ;
+
+    static const std::string XML_ATTRVALUE_STICK       ;
+    static const std::string XML_ATTRVALUE_STICKEXP    ;
+
+    static const std::string XML_BRANCHTYPE_COAL   ;
+    static const std::string XML_BRANCHTYPE_DISEASE;
+    static const std::string XML_BRANCHTYPE_DIVMIG ;
+    static const std::string XML_BRANCHTYPE_EPOCH  ;
+    static const std::string XML_BRANCHTYPE_MIG    ;
+    static const std::string XML_BRANCHTYPE_REC    ;
+    static const std::string XML_BRANCHTYPE_TIP    ;
+
+    static const std::string XML_WARN_DEPRECATED_TAG_0;
+    static const std::string XML_WARN_DEPRECATED_TAG_1;
+    static const std::string XML_WARN_DEPRECATED_TAG_2;
+
+    static const std::string DATA_MODEL_F84;
+
+    // static const std::string WHAT_DOES_VALGRIND_THINK;
+
+};
+
+#endif // XMLSTRINGS_H
+
+//____________________________________________________________________________________

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/lamarc.git


